gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.dxmio.games.breakout;
import java.awt.*;
import java.awt.event.*;
import java.util.Hashtable;
import javax.swing.*;
/**
* @author byte
*
* The canvas.
*/
@SuppressWarnings("serial")
public class BasicCanvas extends JPanel implements MouseListener, MouseMotionListener, ActionListener
{
protected JLabel _statusBar;
private PlayField _playField;
private BasicFrame _parentFrame;
private Timer _ballTimer;
private double _currentMovementX;
private double _currentMovementY;
/**
* Instantiates a new canvas.
*
* @param theFrame The frame to use.
*/
public BasicCanvas(BasicFrame theFrame)
{
_parentFrame = theFrame;
_statusBar = theFrame.getStatusBar();
_playField = new PlayField(this, getGameOptions().get("brickCount"));
setBackground(Color.WHITE);
setLayout(new BorderLayout());
setPreferredSize(new Dimension(getGameOptions().get("fieldPixelWidth"), getGameOptions().get("fieldPixelHeight")));
addMouseListener(this);
addMouseMotionListener(this);
_currentMovementX = Constants.ballMovement.initialXmovement;
_currentMovementY = Constants.ballMovement.initialYmovement;
_ballTimer = new Timer(Constants.ballMovement.ballSpeed, this);
//_ballTimer.setActionCommand("Timer"); //1.6 only
_ballTimer.start();
repaint();
}
protected void newPlayField()
{
_playField = new PlayField(this, getGameOptions().get("brickCount"));
}
protected BasicFrame getParentFrame()
{
return _parentFrame;
}
protected Hashtable<String, Integer> getGameOptions()
{
return getParentFrame().getGameOptions();
}
protected Paddle getPaddle()
{
return _playField.getPaddle();
}
protected Ball getBall()
{
return _playField.getBall();
}
protected PlayField getPlayField()
{
return _playField;
}
protected void togglePause()
{
_parentFrame.togglePause();
}
public void mousePressed(MouseEvent e)
{
}
public void mouseDragged(MouseEvent e)
{
}
public void mouseReleased(MouseEvent e)
{
}
public void mouseExited(MouseEvent e)
{
}
public void mouseEntered(MouseEvent e)
{
}
public void mouseClicked(MouseEvent e)
{
togglePause();
}
public void mouseMoved(MouseEvent e)
{
Point paddlePoint = e.getPoint();
Graphics2D myGraphics = (Graphics2D)getGraphics();
getPaddle().erase(myGraphics);
getPaddle().updatePosition(paddlePoint.x);
getPaddle().draw(myGraphics);
}
protected void paintComponent(Graphics myGraphics)
{
super.paintComponent(myGraphics);
Graphics2D g = (Graphics2D)myGraphics;
setPreferredSize(new Dimension(getGameOptions().get("fieldPixelWidth"), getGameOptions().get("fieldPixelHeight")));
drawField(g); //redraw all the bricks
}
public void actionPerformed(ActionEvent e)
{
String action = e.getActionCommand();
if((action == null) && (getParentFrame().getAppState() == Constants.appStates.Initialized))
{
drawBall();
}
}
protected void drawField(Graphics2D g)
{
//Graphics2D g = (Graphics2D)getGraphics();
getPlayField().drawAllBricks(g);
getPlayField().drawPaddle(g);
getPlayField().drawBall(g);
}
protected void drawBall()
{
Ball theBall = getBall();
Paddle thePaddle = getPaddle();
Graphics2D myGraphics = (Graphics2D)getGraphics();
//_playField.determineLevelEnded();
if(theBall.getCollisionRectangle().intersects(thePaddle.getCollisionRectangle()))
{
getPaddle().paddleContacted();
getFrame().collision();
}
else if(theBall.getCollisionRectangle().intersectsLine(getPlayField().getCeilingCollision()))
{
flipMovementY();
getFrame().collision();
}
else if(theBall.getCollisionRectangle().intersectsLine(getPlayField().getRightWallCollision()))
{
flipMovementX();
getFrame().collision();
}
else if(theBall.getCollisionRectangle().intersectsLine(getPlayField().getLeftWallCollision()))
{
flipMovementX();
getFrame().collision();
}
else if(theBall.getCollisionRectangle().intersectsLine(getPlayField().getFloorCollision()))
{
theBall.erase(myGraphics);
getFrame().levelFailed(myGraphics);
}
_playField.findBricksToKnock(myGraphics);
theBall.erase(myGraphics);
theBall.movePosition(_currentMovementX, _currentMovementY);
theBall.draw(myGraphics);
getFrame().setStatus();
if(getPlayField().getBrickCount() == 0)
getFrame().levelWon();
}
protected BasicFrame getFrame()
{
return _parentFrame;
}
protected void flipMovementY()
{
_currentMovementY = 0 - _currentMovementY;
}
protected void flipMovementX()
{
_currentMovementX = 0 - _currentMovementX;
}
protected void setCurrentMovementX(double x)
{
_currentMovementX = x;
}
protected void setCurrentMovementY(double y)
{
_currentMovementY = y;
}
protected double getCurrentMovementX()
{
return _currentMovementX;
}
protected double getCurrentMovementY()
{
return _currentMovementY;
}
}
| |
/* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.myfaces.portlet.faces.context;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.Principal;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import javax.faces.FacesException;
import javax.faces.application.ViewHandler;
import javax.faces.context.ExternalContext;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.PortletConfig;
import javax.portlet.PortletContext;
import javax.portlet.PortletException;
import javax.portlet.PortletMode;
import javax.portlet.PortletRequest;
import javax.portlet.PortletRequestDispatcher;
import javax.portlet.PortletResponse;
import javax.portlet.PortletURL;
import javax.portlet.RenderRequest;
import javax.portlet.RenderResponse;
import javax.portlet.WindowState;
import javax.portlet.faces.Bridge;
import javax.portlet.faces.BridgeDefaultViewNotSpecifiedException;
import javax.portlet.faces.BridgeUtil;
import org.apache.myfaces.portlet.faces.util.QueryString;
import org.apache.myfaces.portlet.faces.util.URLUtils;
import org.apache.myfaces.portlet.faces.util.map.EnumerationIterator;
import org.apache.myfaces.portlet.faces.util.map.PortletApplicationMap;
import org.apache.myfaces.portlet.faces.util.map.PortletInitParameterMap;
import org.apache.myfaces.portlet.faces.util.map.PortletRequestHeaderMap;
import org.apache.myfaces.portlet.faces.util.map.PortletRequestHeaderValuesMap;
import org.apache.myfaces.portlet.faces.util.map.PortletRequestHeaders;
import org.apache.myfaces.portlet.faces.util.map.PortletRequestMap;
import org.apache.myfaces.portlet.faces.util.map.PortletRequestParameterMap;
import org.apache.myfaces.portlet.faces.util.map.PortletRequestParameterValuesMap;
import org.apache.myfaces.portlet.faces.util.map.PortletSessionMap;
/**
* This implementation of {@link ExternalContext} is specific to the portlet implementation.
*
* Methods of interests are: - encodeActionURL - redirect
*/
public class PortletExternalContextImpl extends ExternalContext
{
// Request attribute under which the bridge publishes the configured Faces servlet mappings.
public static final String FACES_MAPPING_ATTRIBUTE = "org.apache.myfaces.portlet.faces.context.facesMapping";
// Attribute name for the bridge render policy.
public static final String RENDER_POLICY_ATTRIBUTE = Bridge.BRIDGE_PACKAGE_PREFIX
+ "." + Bridge.RENDER_POLICY;
// Query parameter to store the original viewId in the query string
public static final String VIEW_ID_QUERY_PARAMETER = "_VIEW_ID";
// Interaction/navigational-state parameter carrying the target viewId across action/render.
public static final String ACTION_ID_PARAMETER_NAME = "_ACTION_ID";
public static final String RESOURCE_METHOD_QUERY_PARAMETER = "_xResourceMethod";
public static final String RESOURCE_URL_QUERY_PARAMETER = "_xResourceUrl";
public static final String FACES_RESOURCE_QUERY_PARAMETER = "_xFacesResource";
public static final String PROCESS_AS_RENDER_QUERY_PARAMETER = "_xProcessAsRender";
public static final String REQUIRES_REWRITE_PARAMETER = "_xRequiresRewrite";
// Portlet environment backing this ExternalContext.
private PortletContext mPortletContext;
private PortletConfig mPortletConfig;
private PortletRequest mPortletRequest;
private PortletResponse mPortletResponse;
// Needed for dispatch() which requires the actual PortletRequest/Response
// objects, not wrapped ones (since wrapping isn't official in JSR 168).
private PortletRequest mOrigPortletRequest = null;
private PortletResponse mOrigPortletResponse = null;
// External context maps -- all built lazily and cached.
private Map<String, Object> mApplicationMap = null;
private Map<String, Object> mSessionMap = null;
private Map<String, Object> mRequestMap = null;
private Map<String, String> mRequestParameterMap = null;
private Map<String, String[]> mRequestParameterValuesMap = null;
private Map<String, String> mRequestHeaderMap = null;
private Map<String, String[]> mRequestHeaderValuesMap = null;
private Map<String, String> mInitParameterMap = null;
// Maps for internal parameters (e.g. those specified in the query string of
// any defaultViewId); replaced with populated maps in getViewId() when present.
private Map<String, String> mInternalRequestParameterMap = Collections.emptyMap();
private Map<String, String[]> mInternalRequestParameterValuesMap = Collections.emptyMap();
// Shared parsed-header source backing both header maps.
private PortletRequestHeaders mPortletRequestHeaders = null;
// Requested Faces view (context-relative path), resolved in the constructor.
private String mViewId = null;
// Servlet paths reverse-engineered from the Faces mappings (see mapPathsFromViewId).
private List<String> mFacesMappings = null;
private String mServletPath = null;
private String mPathInfo = null;
// Current portlet lifecycle phase (action vs render).
private Bridge.PortletPhase mPhase = null;
@SuppressWarnings("unchecked")
public PortletExternalContextImpl(PortletConfig portletConfig, PortletRequest portletRequest,
PortletResponse portletResponse) throws FacesException
{
mPortletConfig = portletConfig;
mPortletContext = mPortletConfig.getPortletContext();
mPortletRequest = mOrigPortletRequest = portletRequest;
mPortletResponse = mOrigPortletResponse = portletResponse;
mPhase = (Bridge.PortletPhase) mPortletRequest.getAttribute(Bridge.PORTLET_LIFECYCLE_PHASE);
// viewId is the actual context relative path to the resource
mViewId = getViewId();
// Now reverse engineer the servlet paths from the mappings
// So Faces thinks was a client request
mFacesMappings = (List<String>) mPortletRequest.getAttribute(FACES_MAPPING_ATTRIBUTE);
mapPathsFromViewId(mViewId, mFacesMappings);
// JSF RI relies on a request attribute setting to properly handle
// suffix mapping -- but because their suffix mapping code is servlet dependent
// we need to set it for them
setFacesMapping();
}
/**
 * Drops every reference held by this context so it can be garbage collected
 * once the request completes.
 */
public void release()
{
  // Portlet environment objects.
  mPortletConfig = null;
  mPortletContext = null;
  mPortletRequest = null;
  mPortletResponse = null;
  mOrigPortletRequest = null;
  mOrigPortletResponse = null;
  // Lazily-built scope and parameter maps.
  mApplicationMap = null;
  mSessionMap = null;
  mRequestMap = null;
  mRequestParameterMap = null;
  mRequestParameterValuesMap = null;
  mRequestHeaderMap = null;
  mRequestHeaderValuesMap = null;
  mInitParameterMap = null;
  // Resolved view state.
  mViewId = null;
}
/**
 * Gatekeeper for managing the viewId across action/render and subsequent renders.
 *
 * During render, the target viewId is written into the interaction state of a
 * portlet action URL (createActionURL). During action (including the non-redirect
 * case driven through redirect()), the viewId is written into navigational state
 * so the subsequent render request can recover it.
 * e.g. /adf-faces-demo/componentDemos.jspx?_VIEW_ID=/componentDemos.jspx
 *
 * Fragment references ("#..."), external URLs, and direct links (including the
 * goLink/goButton direct-call case, bug 5259313) are returned unchanged.
 *
 * @param url the URL to encode; must not be null
 * @return the encoded action URL
 * @throws FacesException if no viewId can be derived from the URL
 */
@Override
public String encodeActionURL(String url)
{
  String viewId = null, path = null;
  QueryString queryStr = null;
  int queryStart = -1;
  // Pass-through cases: fragment-only, external, or explicitly direct links.
  if (url.startsWith("#") || isExternalURL(url) || isDirectLink(url))
  {
    return url;
  }
  // url might contain DirectLink=false parameter -- spec says remove it if present.
  url = removeDirectLink(url);
  // Split URL into path and query string; the query string is re-encoded below.
  queryStart = url.indexOf('?');
  if (queryStart != -1)
  {
    queryStr = new QueryString(url.substring(queryStart + 1), "UTF8");
    path = url.substring(0, queryStart);
  }
  else
  {
    path = url;
  }
  // Determine the viewId by inspecting the URL.
  if (!isRelativePath(path))
  {
    viewId = getViewIdFromPath(path);
  }
  else
  {
    viewId = getViewIdFromRelativePath(path);
  }
  if (viewId == null)
  {
    throw new FacesException("encodeActionURL: unable to recognize viewId");
  }
  if (mPhase == Bridge.PortletPhase.RENDER_PHASE)
  {
    // Render: write the viewId into the response (interaction state).
    RenderResponse renderResponse = (RenderResponse) getResponse();
    PortletURL actionURL = renderResponse.createActionURL();
    actionURL.setParameter(ACTION_ID_PARAMETER_NAME, viewId);
    // Add the extra query parameters so they don't get lost.
    if (queryStr != null)
    {
      Enumeration<String> list = queryStr.getParameterNames();
      while (list.hasMoreElements())
      {
        String param = list.nextElement();
        if (param.equals(Bridge.PORTLET_MODE_PARAMETER))
        {
          try
          {
            actionURL.setPortletMode(new PortletMode(queryStr.getParameter(param)));
          }
          catch (Exception e)
          {
            ; // mode not supported/allowed -- ignore and keep the rest of the URL
          }
        }
        else if (param.equals(Bridge.PORTLET_WINDOWSTATE_PARAMETER))
        {
          try
          {
            actionURL.setWindowState(new WindowState(queryStr.getParameter(param)));
          }
          catch (Exception e)
          {
            ; // window state not supported/allowed -- ignore
          }
        }
        else if (param.equals(Bridge.PORTLET_SECURE_PARAMETER))
        {
          try
          {
            // BUGFIX: was Boolean.getBoolean(), which reads a *system property*
            // named by its argument; Boolean.parseBoolean() parses the actual
            // parameter value, which is what is intended here.
            actionURL.setSecure(Boolean.parseBoolean(queryStr.getParameter(param)));
          }
          catch (Exception e)
          {
            ; // security setting not supported -- ignore
          }
        }
        else
        {
          actionURL.setParameter(param, queryStr.getParameter(param));
        }
      }
    }
    // TODO hack to workaround double encoding problem
    String actionURLStr = actionURL.toString();
    actionURLStr = actionURLStr.replaceAll("\\&\\;", "&");
    return actionURLStr;
  }
  else
  {
    // Action: write the viewId to navigational state.
    ActionResponse actionResponse = (ActionResponse) getResponse();
    actionResponse.setRenderParameter(ACTION_ID_PARAMETER_NAME, viewId);
    // Set other request params (if any) into navigational state as well.
    if (queryStr != null)
    {
      Enumeration<String> list = queryStr.getParameterNames();
      while (list.hasMoreElements())
      {
        String param = list.nextElement();
        if (param.equals(Bridge.PORTLET_MODE_PARAMETER))
        {
          try
          {
            actionResponse.setPortletMode(new PortletMode(queryStr.getParameter(param)));
          }
          catch (Exception e)
          {
            // Thrown when the mode is not allowed or sendRedirect has already
            // been called; deliberately ignored so the remaining parameters
            // still get applied.
            // TODO: log an informational message instead of dropping silently.
            ;
          }
        }
        else if (param.equals(Bridge.PORTLET_WINDOWSTATE_PARAMETER))
        {
          try
          {
            actionResponse.setWindowState(new WindowState(queryStr.getParameter(param)));
          }
          catch (Exception e)
          {
            ; // window state not allowed -- ignore
          }
        }
        else if (param.equals(Bridge.PORTLET_SECURE_PARAMETER))
        {
          ; // ignore -- can't encode security setting into an ActionResponse
        }
        else
        {
          actionResponse.setRenderParameter(param, queryStr.getParameter(param));
        }
      }
    }
    return url;
  }
}
/**
 * Redirects external links during the action phase. Redirects within this app
 * are handled elsewhere as navigations, so they are deliberately ignored here.
 */
@Override
public void redirect(String url) throws IOException
{
  if (mPhase != Bridge.PortletPhase.ACTION_PHASE)
  {
    // TODO: Should we recognize a redirect during a render to an internal
    // link and treat it as a navigation?
    return;
  }
  boolean external = url.startsWith("#") || isExternalURL(url) || isDirectLink(url);
  if (external)
  {
    ((ActionResponse) getResponse()).sendRedirect(url);
  }
}
// Encodes a resource URL: non-external relative paths are made contextPath-relative,
// the context path is prepended, and the result is run through the portlet
// response's encodeURL().
@Override
public String encodeResourceURL(String s)
{
if (!isExternalURL(s))
{
if (!s.startsWith("/"))
{
// must be a relative path -- convert it to contextPath relative
// construct our cwd (servletPath + pathInfo);
String pi = null;
String path = getRequestServletPath();
if (path == null)
{
path = getRequestPathInfo();
}
else
{
pi = getRequestPathInfo();
}
if (pi != null)
{
path = path.concat(pi);
}
// remove target (final path segment), leaving the containing directory
// NOTE(review): if both servletPath and pathInfo are null this NPEs --
// presumably mapPathsFromViewId always sets at least one; verify.
path = path.substring(0, path.lastIndexOf("/"));
s = URLUtils.convertFromRelative(path, s);
}
// prepend the context path since portletResponse.encodeURL() requires a full path URI.
// Don't need to check return from getRequestContextPath because there must
// always be a value, even if an empty string.
String ctxPath = getRequestContextPath();
if (ctxPath.length() > 0 && !s.startsWith(ctxPath))
{
s = ctxPath + s;
}
}
String resourceURLStr = mPortletResponse.encodeURL(s);
// Avoid double encoding ("&;" -> "&"), same workaround as encodeActionURL.
resourceURLStr = resourceURLStr.replaceAll("\\&\\;", "&");
return resourceURLStr;
}
/**
 * Includes the named resource via a portlet request dispatcher, using the
 * original (unwrapped) request/response objects as JSR 168 requires.
 *
 * @throws NullPointerException if requestURI is null
 * @throws IllegalStateException if invoked during the action phase
 * @throws IllegalArgumentException if no dispatcher exists for the path
 * @throws FacesException wrapping any PortletException from the include
 */
@Override
public void dispatch(String requestURI) throws IOException, FacesException
{
  if (requestURI == null)
  {
    throw new NullPointerException();
  }
  if (mPhase == Bridge.PortletPhase.ACTION_PHASE)
  {
    throw new IllegalStateException("Request cannot be an ActionRequest");
  }
  final PortletRequestDispatcher dispatcher = mPortletContext.getRequestDispatcher(requestURI);
  if (dispatcher == null)
  {
    throw new IllegalArgumentException(
        "No request dispatcher can be created for the specified path: "
        + requestURI);
  }
  try
  {
    dispatcher.include((RenderRequest) mOrigPortletRequest, (RenderResponse) mOrigPortletResponse);
  }
  catch (PortletException e)
  {
    // Preserve the cause either way; keep the message when there is one.
    String msg = e.getMessage();
    throw (msg != null) ? new FacesException(msg, e) : new FacesException(e);
  }
}
/** Returns the portlet session, optionally creating it when absent. */
@Override
public Object getSession(boolean create)
{
  return mPortletRequest.getPortletSession(create);
}
/** Returns the underlying PortletContext. */
@Override
public Object getContext()
{
  return mPortletContext;
}
/** Returns the current (possibly wrapped) PortletRequest. */
@Override
public Object getRequest()
{
  return mPortletRequest;
}
/** Returns the current (possibly wrapped) PortletResponse. */
@Override
public Object getResponse()
{
  return mPortletResponse;
}
/** Lazily builds and caches the application-scope attribute map. */
@Override
public Map<String, Object> getApplicationMap()
{
  if (mApplicationMap != null)
  {
    return mApplicationMap;
  }
  mApplicationMap = new PortletApplicationMap(mPortletContext);
  return mApplicationMap;
}
/** Lazily builds and caches the session-scope attribute map. */
@Override
public Map<String, Object> getSessionMap()
{
  if (mSessionMap != null)
  {
    return mSessionMap;
  }
  mSessionMap = new PortletSessionMap(mPortletRequest);
  return mSessionMap;
}
/** Lazily builds and caches the request-scope attribute map. */
@Override
public Map<String, Object> getRequestMap()
{
  if (mRequestMap != null)
  {
    return mRequestMap;
  }
  mRequestMap = new PortletRequestMap(mPortletRequest);
  return mRequestMap;
}
/**
 * Lazily builds and caches an unmodifiable view of the request parameters,
 * merged with the internal parameters parsed from the viewId's query string.
 */
@Override
public Map<String, String> getRequestParameterMap()
{
  if (mRequestParameterMap != null)
  {
    return mRequestParameterMap;
  }
  Map<String, String> params =
      new PortletRequestParameterMap(mPortletRequest, mInternalRequestParameterMap);
  mRequestParameterMap = Collections.unmodifiableMap(params);
  return mRequestParameterMap;
}
/**
 * Lazily builds and caches an unmodifiable multi-value view of the request
 * parameters, merged with the internal parameters from the viewId query string.
 */
@Override // was missing -- every sibling override in this class carries it
public Map<String, String[]> getRequestParameterValuesMap()
{
  if (mRequestParameterValuesMap == null)
  {
    mRequestParameterValuesMap = Collections
        .unmodifiableMap(new PortletRequestParameterValuesMap(
            mPortletRequest,
            mInternalRequestParameterValuesMap));
  }
  return mRequestParameterValuesMap;
}
/**
 * Iterator over all request parameter names. Backed by the unmodifiable
 * parameter map, so the iterator is read-only as well.
 */
@Override // was missing -- every sibling override in this class carries it
public Iterator<String> getRequestParameterNames()
{
  return getRequestParameterMap().keySet().iterator();
}
/**
 * Lazily builds and caches the single-value request header map. The parsed
 * headers are shared with getRequestHeaderValuesMap(); note the headers come
 * from the original (unwrapped) portlet request.
 */
@Override // was missing -- every sibling override in this class carries it
public Map<String, String> getRequestHeaderMap()
{
  if (mRequestHeaderMap == null)
  {
    if (mPortletRequestHeaders == null)
    {
      mPortletRequestHeaders = new PortletRequestHeaders(mOrigPortletRequest);
    }
    mRequestHeaderMap = new PortletRequestHeaderMap(mPortletRequestHeaders);
  }
  return mRequestHeaderMap;
}
/**
 * Lazily builds and caches the multi-value request header map, sharing the
 * parsed headers with getRequestHeaderMap().
 */
@Override
public Map<String, String[]> getRequestHeaderValuesMap()
{
  if (mRequestHeaderValuesMap != null)
  {
    return mRequestHeaderValuesMap;
  }
  if (mPortletRequestHeaders == null)
  {
    mPortletRequestHeaders = new PortletRequestHeaders(mOrigPortletRequest);
  }
  mRequestHeaderValuesMap = new PortletRequestHeaderValuesMap(mPortletRequestHeaders);
  return mRequestHeaderValuesMap;
}
/** JSR 168 exposes no cookies, so always return an immutable empty map. */
@Override
public Map<String, Object> getRequestCookieMap()
{
  return Collections.emptyMap();
}
/** Returns the preferred locale of the portlet request. */
@Override
public Locale getRequestLocale()
{
  return mPortletRequest.getLocale();
}
/** Returns the path info reverse-engineered from the viewId (see mapPathsFromViewId). */
@Override
public String getRequestPathInfo()
{
  return mPathInfo;
}
/** Returns the portlet application's context path. */
@Override
public String getRequestContextPath()
{
  return mPortletRequest.getContextPath();
}
/** Looks up a context-wide init parameter by name. */
@Override
public String getInitParameter(String s)
{
  return mPortletContext.getInitParameter(s);
}
/** Lazily builds and caches the init-parameter map. */
@Override
public Map<String, String> getInitParameterMap()
{
  if (mInitParameterMap != null)
  {
    return mInitParameterMap;
  }
  mInitParameterMap = new PortletInitParameterMap(mPortletContext);
  return mInitParameterMap;
}
@SuppressWarnings("unchecked")
public Set<String> getResourcePaths(String s)
{
return mPortletContext.getResourcePaths(s);
}
/** Delegates resource lookup to the portlet context; may return null. */
@Override // was missing -- every sibling override in this class carries it
public InputStream getResourceAsStream(String s)
{
  return mPortletContext.getResourceAsStream(s);
}
/**
 * Prefixes the given name with the render response's namespace. Only legal
 * during the render phase, since only RenderResponse exposes a namespace.
 *
 * @throws IllegalStateException if not in the render phase
 */
@Override // was missing -- every sibling override in this class carries it
public String encodeNamespace(String s)
{
  if (BridgeUtil.getPortletRequestPhase() != Bridge.PortletPhase.RENDER_PHASE)
  {
    throw new IllegalStateException("Only RenderResponse can be used to encode a namespace");
  }
  return ((RenderResponse) mPortletResponse).getNamespace() + s;
}
/** Returns the servlet path reverse-engineered from the viewId (see mapPathsFromViewId). */
@Override
public String getRequestServletPath()
{
  return mServletPath;
}
/** Returns the authentication scheme of the portlet request, if any. */
@Override
public String getAuthType()
{
  return mPortletRequest.getAuthType();
}
/** Returns the login name of the requesting user, or null when unauthenticated. */
@Override
public String getRemoteUser()
{
  return mPortletRequest.getRemoteUser();
}
/** Delegates role membership checks to the portlet request. */
@Override
public boolean isUserInRole(String role)
{
  return mPortletRequest.isUserInRole(role);
}
/** Returns the authenticated user principal, or null when unauthenticated. */
@Override
public Principal getUserPrincipal()
{
  return mPortletRequest.getUserPrincipal();
}
/** Writes a message to the portlet container's log. */
@Override
public void log(String message)
{
  mPortletContext.log(message);
}
/** Writes a message plus stack trace to the portlet container's log. */
@Override
public void log(String message, Throwable t)
{
  mPortletContext.log(message, t);
}
@SuppressWarnings("unchecked")
@Override
public Iterator<Locale> getRequestLocales()
{
//TODO: Cache this value...
return new EnumerationIterator<Locale>(mPortletRequest.getLocales());
}
/** Resolves a context-relative resource to a URL; may return null. */
@Override
public URL getResource(String s) throws MalformedURLException
{
  return mPortletContext.getResource(s);
}
// Start of JSF 1.2 API
/**
 * <p>
 * Set the environment-specific request to be returned by subsequent calls to
 * {@link #getRequest}. This may be used to install a wrapper for the request.
 * </p>
 *
 * @since 1.2
 */
@Override
public void setRequest(Object request)
{
  mPortletRequest = (PortletRequest) request;
  // Invalidate every request-derived cached map so each is rebuilt lazily
  // against the newly-installed request.
  mRequestMap = null;
  mRequestParameterMap = null;
  mRequestParameterValuesMap = null;
  mRequestHeaderMap = null;
  mRequestHeaderValuesMap = null;
}
/**
 * <p>
 * Overrides the name of the character encoding used in the body of this request.
 * </p>
 *
 * <p>
 * <em>Portlet:</em> This must call through to the <code>javax.portlet.ActionRequest</code>
 * method <code>setCharacterEncoding()</code>.
 * </p>
 *
 * @throws java.io.UnsupportedEncodingException
 *           if this is not a valid encoding
 *
 * @since 1.2
 */
@Override
public void setRequestCharacterEncoding(String encoding) throws UnsupportedEncodingException,
  IllegalStateException
{
  /* TODO: Temporary workaround for JIRA PORTLETBRIDGE-14 until EG
   * decides on best course of action.
   *
  if (mPhase != Bridge.PortletPhase.ACTION_PHASE)
  {
    throw new IllegalStateException(
        "PortletExternalContextImpl.setRequestCharacterEncoding(): Request must be an ActionRequest");
  }
  */
  // Part of the temporary workaround: silently no-op outside the action phase
  // instead of throwing (only ActionRequest supports setCharacterEncoding).
  boolean inActionPhase = (mPhase == Bridge.PortletPhase.ACTION_PHASE);
  if (inActionPhase)
  {
    ((ActionRequest) mPortletRequest).setCharacterEncoding(encoding);
  }
}
/**
 * <p>
 * Return the character encoding currently being used to interpret this request.
 * </p>
 *
 * <p>
 * <em>Portlet:</em> This must return the value returned by the
 * <code>javax.portlet.ActionRequest</code> method <code>getCharacterEncoding()</code>;
 * during render it returns <code>null</code> as per spec.
 * </p>
 *
 * @since 1.2
 */
@Override
public String getRequestCharacterEncoding()
{
  // Only an ActionRequest carries a character encoding.
  return (mPhase == Bridge.PortletPhase.ACTION_PHASE)
      ? ((ActionRequest) mPortletRequest).getCharacterEncoding()
      : null;
}
/**
 * <p>
 * Return the MIME Content-Type for this request. If not available, return
 * <code>null</code>.
 * </p>
 *
 * <p>
 * NOTE: We deviate from the ExternalContext javadoc ("Portlet: must return null")
 * on recommendation from the JSR 301 expert group: during the action phase the
 * ActionRequest's content type is returned; during render, <code>null</code>.
 * </p>
 *
 * @since 1.2
 */
@Override
public String getRequestContentType()
{
  // Only an ActionRequest has a request body with a content type.
  return (mPhase == Bridge.PortletPhase.ACTION_PHASE)
      ? ((ActionRequest) mPortletRequest).getContentType()
      : null;
}
/**
 * <p>
 * Returns the name of the character encoding (MIME charset) used for the body
 * sent in this response.
 * </p>
 *
 * <p>
 * NOTE: We deviate from the ExternalContext javadoc ("Portlet: must return null")
 * on recommendation from the JSR 301 expert group: the RenderResponse's
 * character encoding is returned.
 * </p>
 *
 * @throws IllegalStateException during the action phase, where the response is
 *           an ActionResponse and has no character encoding
 * @since 1.2
 */
@Override
public String getResponseCharacterEncoding()
{
  if (mPhase == Bridge.PortletPhase.ACTION_PHASE)
  {
    // BUGFIX: message previously said "RenderRequest"; the *response* must be
    // a RenderResponse for this call to be legal.
    throw new IllegalStateException(
        "PortletExternalContextImpl.getResponseCharacterEncoding(): Response must be a RenderResponse");
  }
  return ((RenderResponse) mPortletResponse).getCharacterEncoding();
}
/**
 * <p>
 * Return the MIME Content-Type for this response. If not available, return
 * <code>null</code>.
 * </p>
 *
 * <p>
 * NOTE: We deviate from the ExternalContext javadoc ("Portlet: must return null")
 * on recommendation from the JSR 301 expert group: the RenderResponse's content
 * type is returned.
 * </p>
 *
 * @throws IllegalStateException during the action phase, where the response is
 *           an ActionResponse and has no content type
 * @since 1.2
 */
@Override
public String getResponseContentType()
{
  if (mPhase == Bridge.PortletPhase.ACTION_PHASE)
  {
    // BUGFIX: message previously said "RenderRequest"; the *response* must be
    // a RenderResponse for this call to be legal.
    throw new IllegalStateException(
        "PortletExternalContextImpl.getResponseContentType(): Response must be a RenderResponse");
  }
  return ((RenderResponse) mPortletResponse).getContentType();
}
/**
 * <p>
 * Set the environment-specific response to be returned by subsequent calls to
 * {@link #getResponse}. This may be used to install a wrapper for the response.
 * </p>
 *
 * @since 1.2
 */
@Override
public void setResponse(Object response)
{
  mPortletResponse = (PortletResponse) response;
}
/**
 *
 * <p>
 * Sets the character encoding (MIME charset) of the response being sent to the client, for
 * example, to UTF-8.
 * </p>
 *
 * <p>
 * <em>Servlet:</em> This must call through to the <code>javax.servlet.ServletResponse</code>
 * method <code>setCharacterEncoding()</code>.
 * </p>
 *
 * <p>
 * <em>Portlet:</em> This method must take no action.
 * </p>
 *
 * @since 1.2
 *
 */
@Override
public void setResponseCharacterEncoding(String encoding)
{
// Deliberate no-op: JSR 168 has no corresponding API.
}
// End of JSF 1.2 API
/**
 * Gets the view identifier we should use for this request.
 *
 * Resolution order: the _ACTION_ID request parameter (written by encodeActionURL
 * or redirect on a prior request), then the bridge's configured default viewId
 * attribute. Any query string embedded in the viewId is split off into the
 * internal parameter maps so it surfaces through the request parameter maps.
 *
 * @throws BridgeDefaultViewNotSpecifiedException when no viewId can be determined
 */
private String getViewId() throws BridgeDefaultViewNotSpecifiedException
{
String viewId = mPortletRequest.getParameter(ACTION_ID_PARAMETER_NAME);
log("PortletExternalContextImpl.getViewId: found action_id = " + viewId);
// If there's no parameter, fall back to the configured default view.
if (viewId == null)
{
viewId = (String) mPortletRequest.getAttribute(Bridge.DEFAULT_VIEWID);
if (viewId == null)
{
throw new BridgeDefaultViewNotSpecifiedException();
}
log("PortletExternalContextImpl.getViewId: action_id not found, defaulting to: " + viewId);
}
// Some viewId may have query string, so handle that here
// (e.g., TaskFlow has the following viewId:
// /adf.task-flow?_document=/WEB-INF/task-flow.xml&_id=task1
int queryStart = viewId.indexOf('?');
QueryString queryStr = null;
if (queryStart != -1)
{
// parse the query string and add the parameters to internal maps;
// the public ParameterMap/ParameterValuesMap wrappers are created lazily
// when the client asks for them
queryStr = new QueryString(viewId.substring(queryStart + 1), "UTF8");
// TODO: Constants
mInternalRequestParameterMap = new HashMap<String, String>(5);
mInternalRequestParameterValuesMap = new HashMap<String, String[]>(5);
Enumeration<String> list = queryStr.getParameterNames();
while (list.hasMoreElements())
{
String param = list.nextElement();
mInternalRequestParameterMap.put(param, queryStr.getParameter(param));
mInternalRequestParameterValuesMap.put(param, new String[]{queryStr.getParameter(param)});
}
// Strip the query string from the viewId itself.
viewId = viewId.substring(0, queryStart);
log("PortletExternalContextImpl.getViewId: special viewId: " + viewId);
}
return viewId;
}
// Derives mServletPath/mPathInfo from the target viewId and the configured Faces
// servlet mappings, so Faces sees paths shaped like a real servlet request.
private void mapPathsFromViewId(String viewId, List<String> mappings)
{
if (viewId == null || mappings == null)
{
// Fail safe -- even if we didn't find a servlet mapping set path
// info
// as if we did as this value is all anything generally depends on
mPathInfo = viewId;
return;
}
// The only thing that matters is we use a configured mapping
// So just use the first one
String mapping = mappings.get(0);
if (mapping.startsWith("*"))
{
// Suffix (extension) mapping: swap the viewId's extension for the mapped one,
// e.g. /page.jspx with *.faces -> servletPath /page.faces, pathInfo null.
viewId = viewId.substring(0, viewId.lastIndexOf('.'))
+ mapping.substring(mapping.indexOf('.'));
// we are extension mapped
mServletPath = viewId;
mPathInfo = null;
// Workaround Faces RI that has Servlet dependencies if this isn't set
mPortletRequest.setAttribute("javax.servlet.include.servlet_path", mServletPath);
}
else
{
// Prefix mapping, e.g. /faces/* -> servletPath /faces, pathInfo = viewId.
int j = mapping.lastIndexOf("/*");
if (j != -1)
{
mServletPath = mapping.substring(0, j);
}
else
{
// is it valid to omit the trailing /*????
mServletPath = mapping;
}
// Fail safe -- even if we didn't find a servlet mapping set path info
// as if we did as this value is all anything generally depends on
mPathInfo = viewId;
}
}
/**
 * Extracts a "*.ext" style mapping from the viewId's file extension.
 *
 * @param viewId the viewId to inspect (a query string, if any, is ignored)
 * @return "*" followed by the extension (e.g. "*.jsp"), or null when the
 *         last path segment carries no extension
 */
private String extensionMappingFromViewId(String viewId)
{
    // strip any query string before inspecting the path
    int query = viewId.indexOf('?');
    String path = (query == -1) ? viewId : viewId.substring(0, query);
    int dot = path.lastIndexOf('.');
    // the dot only counts as an extension separator when it appears in
    // the final path segment (i.e. after the last '/')
    if (dot == -1 || dot <= path.lastIndexOf('/'))
    {
        return null;
    }
    return "*" + path.substring(dot);
}
/**
 * Derives the Faces viewId encoded in the given url by stripping the query
 * string and the webapp context path, then undoing whatever Faces servlet
 * mapping (prefix or suffix) the url was built with.
 *
 * @param url a context-relative or fully qualified url targeting this app
 * @return the viewId portion of the url; when the url matches none of the
 *         configured Faces mappings, the raw remainder after the context
 *         path is returned as-is
 */
private String getViewIdFromPath(String url)
{
    // Get a string that holds the path after the Context-Path through the
    // target
    // First remove the query string
    int i = url.indexOf("?");
    if (i != -1)
    {
        url = url.substring(0, i);
    }
    // Now remove up through the ContextPath
    String ctxPath = getRequestContextPath();
    i = url.indexOf(ctxPath);
    if (i != -1)
    {
        url = url.substring(i + ctxPath.length());
    }
    String viewId = null;
    // Okay now figure out whether this is prefix or suffixed mapped
    if (isSuffixedMapped(url, mFacesMappings))
    {
        // suffix mapped: swap the url's extension for the default suffix
        viewId = viewIdFromSuffixMapping(
            url,
            mFacesMappings,
            mPortletContext
                .getInitParameter(ViewHandler.DEFAULT_SUFFIX_PARAM_NAME));
    }
    else if (isPrefixedMapped(url, mFacesMappings))
    {
        // prefix mapped: strip the servlet-path prefix
        viewId = viewIdFromPrefixMapping(url, mFacesMappings);
    }
    else
    {
        // Set to what follows the URL
        viewId = url;
    }
    return viewId;
}
/**
 * Returns true when the url terminates in an extension that is one of the
 * configured Faces servlet mappings (i.e. it is suffix/extension mapped).
 */
private boolean isSuffixedMapped(String url, List<String> mappings)
{
    // non-null means the url ends with "*.XXX"-style extension XXX
    String extension = extensionMappingFromViewId(url);
    if (extension == null)
    {
        return false;
    }
    return mappings.contains(extension);
}
/**
 * Converts a suffix (extension) mapped url into a viewId by replacing the
 * url's extension with the application's default suffix.
 *
 * @param url        the suffix-mapped url
 * @param mappings   configured Faces mappings (unused here; kept for
 *                   signature parity with the prefix-mapping variant)
 * @param ctxDefault the application's configured default suffix, or null
 *                   to fall back to the spec default
 * @return the url with its extension replaced by the default suffix; the
 *         url unchanged when it contains no '.'
 */
private String viewIdFromSuffixMapping(String url, List<String> mappings, String ctxDefault)
{
    // fall back to the spec-defined default suffix when the application
    // doesn't configure javax.faces.DEFAULT_SUFFIX
    if (ctxDefault == null)
    {
        ctxDefault = ViewHandler.DEFAULT_SUFFIX;
    }
    // ctxDefault is guaranteed non-null past this point, so the original
    // redundant null re-check has been dropped
    int i = url.lastIndexOf(".");
    if (i != -1)
    {
        if (ctxDefault.startsWith("."))
        {
            url = url.substring(0, i) + ctxDefault;
        }
        else
        {
            // shouldn't happen -- tolerate a suffix missing its leading '.'
            url = url.substring(0, i) + "." + ctxDefault;
        }
    }
    return url;
}
/**
 * Returns true when the url starts with the path portion of any configured
 * "/path/*" style (prefix) Faces servlet mapping.
 */
private boolean isPrefixedMapped(String url, List<String> mappings)
{
    for (String mapping : mappings)
    {
        // only "/path/*" entries are prefix mappings
        if (!mapping.startsWith("/"))
        {
            continue;
        }
        int starAt = mapping.lastIndexOf("/*");
        if (starAt != -1 && url.startsWith(mapping.substring(0, starAt)))
        {
            return true;
        }
    }
    return false;
}
/**
 * Strips the servlet-path portion of the first matching "/path/*" mapping
 * from the url; what remains is the viewId.
 *
 * @return the viewId, or null when no prefix mapping matches
 */
private String viewIdFromPrefixMapping(String url, List<String> mappings)
{
    for (String mapping : mappings)
    {
        // only "/path/*" entries are prefix mappings
        if (!mapping.startsWith("/"))
        {
            continue;
        }
        int starAt = mapping.lastIndexOf("/*");
        if (starAt == -1)
        {
            continue;
        }
        String prefix = mapping.substring(0, starAt);
        if (url.startsWith(prefix))
        {
            return url.substring(prefix.length());
        }
    }
    return null;
}
/**
 * Records the servlet mapping used to invoke Faces in the request map under
 * the key the Sun Faces RI inspects ("com.sun.faces.INVOCATION_PATH").
 */
private void setFacesMapping()
{
    String mapping = null;
    String servletPath = this.getRequestServletPath();
    // if PathInfo == null we are suffixed mapped
    if (this.getRequestPathInfo() == null)
    {
        // suffix mapped: the mapping is the extension (e.g. ".jsf")
        // NOTE(review): assumes servletPath contains a '.'; a dotless path
        // would make lastIndexOf return -1 and substring(-1) throw
        mapping = servletPath.substring(servletPath.lastIndexOf('.'));
    }
    else
    {
        // prefix mapped: the mapping is the servlet path itself
        mapping = servletPath;
    }
    this.getRequestMap().put("com.sun.faces.INVOCATION_PATH", mapping);
}
/**
 * Returns true when the url is absolute, i.e. begins with a scheme such as
 * "http:".  A ':' appearing after a URI reserved character (';', '/', '#',
 * '?', ' ') does not count as a scheme separator.
 */
private boolean isAbsoluteURL(String url)
{
    // fast path for the overwhelmingly common schemes
    if (url.startsWith("http:") || url.startsWith("https:"))
    {
        return true;
    }
    // an absolute URL must contain a scheme separator
    int colon = url.indexOf(':');
    if (colon == -1)
    {
        return false;
    }
    // the candidate scheme (text before the first ':') must not contain
    // URI reserved characters; if it does, the ':' belongs to the path or
    // query string rather than a scheme
    String candidateScheme = url.substring(0, colon);
    for (char reserved : new char[] {';', '/', '#', '?', ' '})
    {
        if (candidateScheme.indexOf(reserved) != -1)
        {
            return false;
        }
    }
    return true;
}
/**
 * Returns true when the url points outside this portlet application.  A url
 * is considered internal when it is relative, or when it contains this
 * application's context path in its path portion (before any query string).
 */
private boolean isExternalURL(String url)
{
    if (!isAbsoluteURL(url))
    {
        return false;
    }
    // otherwise see if the URL contains the ContextPath
    // Simple test is that the url doesn't contain
    // the CONTEXT_PATH -- though ultimately may want to test
    // if we are on the same server
    String ctxPath = getRequestContextPath();
    int i = url.indexOf(ctxPath);
    int j = url.indexOf("?");
    // the context path must occur before the '?' to count as a path match
    if (i != -1 && (j == -1 || i < j))
    {
        return false;
    }
    return true;
}
/**
 * Returns true when the url's query string carries the Bridge.DIRECT_LINK
 * parameter set to "true"; false for any other value or when the url has
 * no query string.
 */
private boolean isDirectLink(String url)
{
    int queryAt = url.indexOf('?');
    if (queryAt == -1)
    {
        // no query string, so no direct-link parameter
        return false;
    }
    QueryString params = new QueryString(url.substring(queryAt + 1), "UTF8");
    return Boolean.parseBoolean(params.getParameter(Bridge.DIRECT_LINK));
}
/**
 * Removes the Bridge.DIRECT_LINK parameter from the url's query string when
 * its value is not "true" (a true direct link is deliberately preserved).
 *
 * Fix: previously, when DIRECT_LINK was the only query parameter, the
 * rebuilt query string was empty and the ORIGINAL url -- parameter still
 * included -- was returned unchanged.  Now the dangling "?" and parameter
 * are dropped in that case.
 *
 * @param url the url to scrub; returned unchanged when it has no query
 *            string or when DIRECT_LINK=true
 */
private String removeDirectLink(String url)
{
    int queryStart = url.indexOf('?');
    if (queryStart != -1)
    {
        QueryString queryStr = new QueryString(url.substring(queryStart + 1), "UTF8");
        String directLink = queryStr.getParameter(Bridge.DIRECT_LINK);
        if (!Boolean.parseBoolean(directLink))
        {
            queryStr.removeParameter(Bridge.DIRECT_LINK);
            String query = queryStr.toString();
            if (query != null && query.length() != 0)
            {
                // keep the '?' and the remaining parameters
                url = url.substring(0, queryStart + 1) + query;
            }
            else
            {
                // DIRECT_LINK was the only parameter: drop the now-empty
                // query string entirely
                url = url.substring(0, queryStart);
            }
        }
    }
    return url;
}
/**
 * Resolves a relative url against the directory of the current viewId and
 * returns the resulting viewId, re-applying suffix mapping when the result
 * is extension mapped.
 *
 * @param url a relative path (may begin with "./" or one or more "../")
 * @return the resolved viewId, or null when the current viewId's directory
 *         portion is malformed
 */
private String getViewIdFromRelativePath(String url)
{
    String currentViewId = getViewId();
    // drop any query string from the current viewId
    int i = currentViewId.indexOf('?');
    if (i != -1)
    {
        currentViewId = currentViewId.substring(0, i);
    }
    // directory portion of the current viewId (everything before the last '/')
    String prefixURL = currentViewId.substring(0, currentViewId.lastIndexOf('/'));
    if (prefixURL.length() != 0 && !prefixURL.startsWith("/"))
    {
        return null; // this shouldn't happen, if so just return
    }
    // "./" refers to the current directory; just strip it
    if (url.startsWith("./"))
    {
        url = url.substring(2);
    }
    // each "../" pops one directory segment off the prefix
    while (url.startsWith("../") && prefixURL.length() != 0)
    {
        url = url.substring(3);
        prefixURL = prefixURL.substring(0, prefixURL.lastIndexOf('/'));
    }
    url = prefixURL + "/" + url;
    // Now check to see if suffix mapped because we need to do the extension
    // mapping
    if (isSuffixedMapped(url, mFacesMappings))
    {
        url = viewIdFromSuffixMapping(
            url,
            mFacesMappings,
            mPortletContext
                .getInitParameter(ViewHandler.DEFAULT_SUFFIX_PARAM_NAME));
    }
    return url;
}
/**
 * Returns true when the url is a relative path: it neither starts with '/'
 * nor carries a "scheme://" prefix (a "://" inside the query string does
 * not make the url absolute).
 */
private boolean isRelativePath(String url)
{
    // absolute paths start with '/'
    if (url.startsWith("/"))
    {
        return false;
    }
    // explicit relative forms: "./x" or "../x"
    if (url.startsWith("."))
    {
        return true;
    }
    // without "://" there is no scheme, hence the url is relative
    int schemeAt = url.indexOf("://");
    if (schemeAt == -1)
    {
        return true;
    }
    // a "://" appearing after the '?' lives in the query string, not in a
    // scheme, so the url is still relative
    int queryAt = url.indexOf('?');
    return queryAt != -1 && queryAt < schemeAt;
}
}
| |
/**
* Licensed to Apereo under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright ownership. Apereo
* licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use
* this file except in compliance with the License. You may obtain a copy of the License at the
* following location:
*
* <p>http://www.apache.org/licenses/LICENSE-2.0
*
* <p>Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apereo.portal.concurrency.caching;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.reset;
import static org.mockito.Mockito.when;
import org.apereo.portal.url.IPortalRequestUtils;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.mock.web.MockHttpServletRequest;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
/**
 * Integration tests for the request-scoped caching aspect: verifies that
 * un-annotated methods are never cached, that annotated methods are memoized
 * per portal request, and that caching of null returns and thrown exceptions
 * follows each method's cache configuration.  The {@code cacheTestInterface}
 * stub counts real invocations; the boolean arguments select (returnNull,
 * throwException) behavior -- presumably, based on usage; confirm against the
 * CacheTestInterface definition.
 *
 * @author Eric Dalquist
 */
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration(locations = "classpath:requestCacheAspectTestContext.xml")
public class RequestCacheAspectTest {
    @Autowired private IPortalRequestUtils portalRequestUtils;
    @Autowired private CacheTestInterface cacheTestInterface;
    /** Resets the mocked request source and the invocation counters before each test. */
    @Before
    public void setup() {
        reset(portalRequestUtils);
        cacheTestInterface.reset();
    }
    /** Without a cache annotation, every call -- value, null or throwing -- reaches the implementation. */
    @Test
    public void testNoRequestCache() {
        assertEquals(0, cacheTestInterface.testMethodNoCacheCount());
        String result = cacheTestInterface.testMethodNoCache("1", false, false);
        assertEquals("testMethodNoCache(1)", result);
        assertEquals(1, cacheTestInterface.testMethodNoCacheCount());
        // null returns are re-invoked each time
        result = cacheTestInterface.testMethodNoCache("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodNoCacheCount());
        result = cacheTestInterface.testMethodNoCache("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodNoCacheCount());
        // throwing calls are re-invoked each time
        try {
            result = cacheTestInterface.testMethodNoCache("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodNoCacheCount());
        try {
            result = cacheTestInterface.testMethodNoCache("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(5, cacheTestInterface.testMethodNoCacheCount());
        // even a repeated value call is not cached
        result = cacheTestInterface.testMethodNoCache("1", false, false);
        assertEquals("testMethodNoCache(1)", result);
        assertEquals(6, cacheTestInterface.testMethodNoCacheCount());
        result = cacheTestInterface.testMethodNoCache("2", false, false);
        assertEquals("testMethodNoCache(2)", result);
        assertEquals(7, cacheTestInterface.testMethodNoCacheCount());
    }
    /** A no-arg cached method is invoked once; the second call is served from the cache. */
    @Test
    public void testMethodCacheDefaultNoArgs() {
        when(this.portalRequestUtils.getCurrentPortalRequest())
                .thenReturn(new MockHttpServletRequest());
        assertEquals(0, cacheTestInterface.testMethodNoCacheCountNoArgsCount());
        String result = cacheTestInterface.testMethodCacheDefaultNoArgs();
        assertEquals("testMethodCacheDefaultNoArgs()", result);
        assertEquals(1, cacheTestInterface.testMethodNoCacheCountNoArgsCount());
        // second call: count unchanged, result came from the cache
        result = cacheTestInterface.testMethodCacheDefaultNoArgs();
        assertEquals("testMethodCacheDefaultNoArgs()", result);
        assertEquals(1, cacheTestInterface.testMethodNoCacheCountNoArgsCount());
    }
    /** Default cache settings: values are cached; null returns and exceptions are not. */
    @Test
    public void testMethodCacheDefault() {
        when(this.portalRequestUtils.getCurrentPortalRequest())
                .thenReturn(new MockHttpServletRequest());
        assertEquals(0, cacheTestInterface.testMethodCacheDefaultCount());
        String result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheDefaultCount());
        // null returns are NOT cached by default: count increments both times
        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheDefaultCount());
        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodCacheDefaultCount());
        // exceptions are NOT cached by default: count increments both times
        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheDefaultCount());
        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(5, cacheTestInterface.testMethodCacheDefaultCount());
        // the value cached by the very first call is still served: count unchanged
        result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(5, cacheTestInterface.testMethodCacheDefaultCount());
        // a different argument is a cache miss
        result = cacheTestInterface.testMethodCacheDefault("2", false, false);
        assertEquals("testMethodCacheDefault(2)", result);
        assertEquals(6, cacheTestInterface.testMethodCacheDefaultCount());
    }
    /** When no portal request is available, the aspect degrades to no caching at all. */
    @Test
    public void testMethodCacheDefaultNoRequest() {
        when(this.portalRequestUtils.getCurrentPortalRequest())
                .thenThrow(new IllegalStateException());
        assertEquals(0, cacheTestInterface.testMethodCacheDefaultCount());
        String result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheDefaultCount());
        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheDefaultCount());
        result = cacheTestInterface.testMethodCacheDefault("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodCacheDefaultCount());
        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheDefaultCount());
        try {
            result = cacheTestInterface.testMethodCacheDefault("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(5, cacheTestInterface.testMethodCacheDefaultCount());
        // unlike testMethodCacheDefault, the repeated value call is re-invoked
        result = cacheTestInterface.testMethodCacheDefault("1", false, false);
        assertEquals("testMethodCacheDefault(1)", result);
        assertEquals(6, cacheTestInterface.testMethodCacheDefaultCount());
        result = cacheTestInterface.testMethodCacheDefault("2", false, false);
        assertEquals("testMethodCacheDefault(2)", result);
        assertEquals(7, cacheTestInterface.testMethodCacheDefaultCount());
    }
    /** Null-caching enabled: null returns are cached; exceptions still are not. */
    @Test
    public void testMethodCacheNull() {
        when(this.portalRequestUtils.getCurrentPortalRequest())
                .thenReturn(new MockHttpServletRequest());
        assertEquals(0, cacheTestInterface.testMethodCacheNullCount());
        String result = cacheTestInterface.testMethodCacheNull("1", false, false);
        assertEquals("testMethodCacheNull(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheNullCount());
        // first null return is computed...
        result = cacheTestInterface.testMethodCacheNull("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheNullCount());
        // ...and the second one is served from the cache: count unchanged
        result = cacheTestInterface.testMethodCacheNull("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheNullCount());
        // exceptions are not cached: count increments both times
        try {
            result = cacheTestInterface.testMethodCacheNull("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(3, cacheTestInterface.testMethodCacheNullCount());
        try {
            result = cacheTestInterface.testMethodCacheNull("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheNullCount());
        // the cached value from the first call is still served
        result = cacheTestInterface.testMethodCacheNull("1", false, false);
        assertEquals("testMethodCacheNull(1)", result);
        assertEquals(4, cacheTestInterface.testMethodCacheNullCount());
        result = cacheTestInterface.testMethodCacheNull("2", false, false);
        assertEquals("testMethodCacheNull(2)", result);
        assertEquals(5, cacheTestInterface.testMethodCacheNullCount());
    }
    /** Exception-caching enabled: thrown exceptions are cached; null returns are not. */
    @Test
    public void testMethodCacheThrows() {
        when(this.portalRequestUtils.getCurrentPortalRequest())
                .thenReturn(new MockHttpServletRequest());
        assertEquals(0, cacheTestInterface.testMethodCacheThrowsCount());
        String result = cacheTestInterface.testMethodCacheThrows("1", false, false);
        assertEquals("testMethodCacheThrows(1)", result);
        assertEquals(1, cacheTestInterface.testMethodCacheThrowsCount());
        // null returns are not cached: count increments both times
        result = cacheTestInterface.testMethodCacheThrows("1", true, false);
        assertNull(result);
        assertEquals(2, cacheTestInterface.testMethodCacheThrowsCount());
        result = cacheTestInterface.testMethodCacheThrows("1", true, false);
        assertNull(result);
        assertEquals(3, cacheTestInterface.testMethodCacheThrowsCount());
        // first exception is computed...
        try {
            result = cacheTestInterface.testMethodCacheThrows("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheThrowsCount());
        // ...and the second one is replayed from the cache: count unchanged
        try {
            result = cacheTestInterface.testMethodCacheThrows("1", false, true);
            fail();
        } catch (Throwable t) {
            //expected
        }
        assertEquals(4, cacheTestInterface.testMethodCacheThrowsCount());
        // the value cached by the first call is still served
        result = cacheTestInterface.testMethodCacheThrows("1", false, false);
        assertEquals("testMethodCacheThrows(1)", result);
        assertEquals(4, cacheTestInterface.testMethodCacheThrowsCount());
        result = cacheTestInterface.testMethodCacheThrows("2", false, false);
        assertEquals("testMethodCacheThrows(2)", result);
        assertEquals(5, cacheTestInterface.testMethodCacheThrowsCount());
    }
}
| |
package com.tapit.adview;
import android.content.Context;
import android.view.Gravity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.FrameLayout;
import android.widget.RelativeLayout;
import com.tapit.advertising.internal.*;
import com.tapit.advertising.internal.TapItAdActivity;
import com.tapit.core.TapItLog;
/**
 * Base class for interstitial (full-screen) ad views.  Loads the ad via the
 * inherited {@code AdView} machinery, presents it inside a
 * {@code TapItAdActivity}, and relays download/lifecycle events to an
 * optional {@code OnInterstitialAdDownload} listener.
 */
public abstract class AdInterstitialBaseView extends AdView implements AdViewCore.OnAdDownload {
    /** Legacy fixed ad sizes; retained only so old callers still compile. */
    @Deprecated
    public enum FullscreenAdSize {
        AUTOSIZE_AD ( -1, -1),
        MEDIUM_RECTANGLE(300, 250);
        public final int width;   // NOTE(review): units (px vs dp) not evident here -- confirm
        public final int height;  // NOTE(review): units (px vs dp) not evident here -- confirm
        FullscreenAdSize(int width, int height) {
            this.width = width;
            this.height = height;
        }
    }
    // context the view was created with; also used to launch the ad activity
    protected final Context context;
//    protected Context callingActivityContext;
    protected RelativeLayout interstitialLayout = null;
    // true once an ad download has completed (see end())
    protected boolean isLoaded = false;
    // optional listener notified of load/show/close/click events
    protected OnInterstitialAdDownload interstitialListener = null;
    /**
     * Creates the interstitial view for the given ad zone and registers
     * itself as the download listener.  Ad cycling is disabled because an
     * interstitial is shown exactly once.
     */
    public AdInterstitialBaseView(Context ctx, String zone) {
        super(ctx, zone);
        context = ctx;
        setOnAdDownload(this);
//        setOnAdClickListener(this);
        super.setUpdateTime(0); // disable add cycling
    }
    /** No-op: interstitials size themselves; kept for source compatibility. */
    @Deprecated
    public final void setAdSize(FullscreenAdSize adSize) {}
    /** Returns the view to present full screen for this interstitial. */
    public abstract View getInterstitialView(Context ctx);
    /** Detaches this view from its parent layout, if it has one. */
    protected void removeViews() {
        RelativeLayout parent = (RelativeLayout)this.getParent();
        if(parent != null) {
            (parent).removeAllViews();
        }
    }
//    public void closeInterstitial() {
//        final AdInterstitialBaseView adView = this;
//        handler.post(new Runnable() {
//            @Override
//            public void run() {
//                if(callingActivityContext == null) {
//                    // interstitial was never displayed; nothing to do here...
//                    return;
//                }
//                ((Activity)callingActivityContext).finish();
//                if(interstitialListener != null) {
//                    interstitialListener.didClose(adView);
//                }
//
//                removeViews();
//            }
//        });
//    }
    /** @return true once the ad content has finished downloading */
    public boolean isLoaded() {
        return isLoaded;
    }
    /** Starts (or restarts) downloading the ad content. */
    public void load() {
        update(true);
    }
    /**
     * Presents the loaded ad full screen in a {@code TapItAdActivity},
     * notifying the listener before opening and after closing.
     */
    public void showInterstitial() {
        if(interstitialListener != null) {
            interstitialListener.willOpen(this);
        }
        AdActivityContentWrapper wrapper = new AdActivityContentWrapper() {
            @Override
            public View getContentView(TapItAdActivity activity) {
                // NOTE(review): setMraidExpandedActivity/mraid/syncMraidState
                // are inherited from AdView and not visible here -- behavior
                // documented from call sites only
                setMraidExpandedActivity(activity);
                return AdInterstitialBaseView.this;
            }
            @Override
            public ViewGroup.LayoutParams getContentLayoutParams() {
                if (mraid) {
                    // MRAID ads fill the whole screen
                    return new FrameLayout.LayoutParams(
                                        ViewGroup.LayoutParams.MATCH_PARENT,
                                        ViewGroup.LayoutParams.MATCH_PARENT,
                                        Gravity.CENTER);
                }
                else {
                    return super.getContentLayoutParams();
                }
            }
            @Override
            public void done() {
                //To change body of implemented methods use File | Settings | File Templates.
                willDismissFullScreen();
                if(interstitialListener != null) {
                    interstitialListener.didClose(AdInterstitialBaseView.this);
                }
            }
        };
        if (mraid) {
            syncMraidState();
            fireMraidEvent(Mraid.MraidEvent.VIEWABLECHANGE, "true");
        }
        TapItAdActivity.startActivity(context, wrapper);
    }
    /**
     * This event is fired before banner download begins.
     */
    public void begin(AdViewCore adView) {
        isLoaded = false;
        if(interstitialListener != null) {
            interstitialListener.willLoad(adView);
        }
    }
    /**
     * This event is fired after banner content fully downloaded.
     */
    public void end(AdViewCore adView) {
        isLoaded = true;
        if(interstitialListener != null) {
            interstitialListener.ready(adView);
        }
    }
    /**
     * This event is fired after a user taps the ad.
     * @param adView
     */
    public void clicked(AdViewCore adView) {
        if(interstitialListener != null) {
            interstitialListener.clicked(adView);
        }
    }
    /**
     * This event is fired just before the app will be sent to the background.
     * @param adView
     */
    public void willLeaveApplication(AdViewCore adView) {
        if(interstitialListener != null) {
            interstitialListener.willLeaveApplication(adView);
        }
    }
    /**
     * This event is fired after fail to download content.
     */
    @Override
    public void error(AdViewCore adView, String error) {
        if(interstitialListener != null) {
            interstitialListener.error(adView, error);
        }
    }
//    @Override
//    public void click(String url) {
//        if (!url.toLowerCase().startsWith("http://") && !url.toLowerCase().startsWith("https://")){
//            if(interstitialListener != null) {
//                interstitialListener.willLeaveApplication(this);
//            }
//            Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(url));
//            Activity thisActivity = ((Activity)callingActivityContext);
//            thisActivity.startActivityForResult(intent,2);
//        }
//        else {
//            loadUrl(url);
//        }
//    }
    @Override
    public void willPresentFullScreen(AdViewCore adView) {
        // noop
    }
    @Override
    public void didPresentFullScreen(AdViewCore adView) {
        // noop
    }
    @Override
    public void willDismissFullScreen(AdViewCore adView) {
        // noop
//        if(interstitialListener != null) {
//            interstitialListener.didClose(adView);
//        }
    }
    /** @return the currently registered interstitial listener, or null */
    public OnInterstitialAdDownload getOnInterstitialAdDownload() {
        return interstitialListener;
    }
    /** Registers the listener notified of interstitial lifecycle events. */
    public void setOnInterstitialAdDownload(OnInterstitialAdDownload listener) {
        interstitialListener = listener;
    }
    /**
     * setUpdateTime(Integer) is not supported in AdFullscreenView
     */
    @Override
    public final void setUpdateTime(int updateTime) {
        // not supported for interstitials
    }
    /**
     * called once the interstitial action is full-screened
     */
    public void interstitialShowing() {
        // no-op
    }
    /**
     * called once the interstitial action is closed
     */
    public void interstitialClosing() {
        // no-op
    }
//    @Override
//    public boolean onKeyDown(int keyCode, KeyEvent event) {
//        TapItLog.d(TAG, "AdInterstitialBaseView.onKeyDown");
//        // Close interstitial properly on back button press
//        if (keyCode == KeyEvent.KEYCODE_BACK && event.getRepeatCount() == 0) {
//            closeInterstitial();
//            return true;
//        }
//        return super.onKeyDown(keyCode, event);
//    }
    /**
     * Allows lookup of resource id's from jars at runtime
     * http://stackoverflow.com/questions/1995004/packaging-android-resource-files-within-a-distributable-jar-file/2825174#7117422
     * @param packageName the package name of your app. e.g. context.getPackageName()
     * @param className e.g. layout, string, drawable
     * @param name the name of the resource you're looking for
     * @return the id of the resource
     */
    public static int getResourceIdByName(String packageName, String className, String name) {
        Class<?> r = null;
        int id = 0;
        try {
            // reflectively load the app's generated R class
            r = Class.forName(packageName + ".R");
            Class<?>[] classes = r.getClasses();
            Class<?> desiredClass = null;
            // find the nested class (R.layout, R.string, ...) matching className
            for (int i = 0; i < classes.length; i++) {
                if(classes[i].getName().split("\\$")[1].equals(className)) {
                    desiredClass = classes[i];
                    break;
                }
            }
            if(desiredClass != null)
                id = desiredClass.getField(name).getInt(desiredClass);
        } catch (ClassNotFoundException e) {
            TapItLog.e(TAG, "An error occurred!", e);
        } catch (IllegalArgumentException e) {
            TapItLog.e(TAG, "An error occurred", e);
        } catch (SecurityException e) {
            TapItLog.e(TAG, "An error occurred", e);
        } catch (IllegalAccessException e) {
            TapItLog.e(TAG, "An error occurred", e);
        } catch (NoSuchFieldException e) {
            TapItLog.e(TAG, "An error occurred", e);
        }
        // 0 signals "not found" (0 is never a valid generated resource id)
        return id;
    }
}
| |
// This file was generated by Mendix Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package system.proxies;
/**
 * Generated Mendix proxy for the {@code System.Error} entity.  Wraps an
 * {@code IMendixObject} and exposes typed accessors for its members
 * (ErrorType, Message, Stacktrace).  Per the file header this class is
 * regenerated by the Mendix Modeler; do not hand-edit.
 */
public class Error
{
    // the wrapped Mendix object backing every getter/setter on this proxy
    private final com.mendix.systemwideinterfaces.core.IMendixObject errorMendixObject;
    // context used for security checks by the context-less accessor overloads
    private final com.mendix.systemwideinterfaces.core.IContext context;
    /**
     * Internal name of this entity
     */
    public static final java.lang.String entityName = "System.Error";
    /**
     * Enum describing members of this entity
     */
    public enum MemberNames
    {
        ErrorType("ErrorType"),
        Message("Message"),
        Stacktrace("Stacktrace");
        private java.lang.String metaName;
        MemberNames(java.lang.String s)
        {
            metaName = s;
        }
        @Override
        public java.lang.String toString()
        {
            return metaName;
        }
    }
    /** Creates a proxy around a freshly instantiated System.Error object. */
    public Error(com.mendix.systemwideinterfaces.core.IContext context)
    {
        this(context, com.mendix.core.Core.instantiate(context, "System.Error"));
    }
    /**
     * Wraps an existing Mendix object.
     * @throws java.lang.IllegalArgumentException when the object is null or
     *         is not a System.Error (or subclass thereof)
     */
    protected Error(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject errorMendixObject)
    {
        if (errorMendixObject == null)
            throw new java.lang.IllegalArgumentException("The given object cannot be null.");
        if (!com.mendix.core.Core.isSubClassOf("System.Error", errorMendixObject.getType()))
            throw new java.lang.IllegalArgumentException("The given object is not a System.Error");
        this.errorMendixObject = errorMendixObject;
        this.context = context;
    }
    /**
     * @deprecated Use 'Error.load(IContext, IMendixIdentifier)' instead.
     */
    @Deprecated
    public static system.proxies.Error initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
    {
        return system.proxies.Error.load(context, mendixIdentifier);
    }
    /**
     * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
     * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.getSudoContext() can be used to obtain sudo access).
     */
    public static system.proxies.Error initialize(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixObject mendixObject)
    {
        // dispatch to the most specific proxy type for subclasses
        if (com.mendix.core.Core.isSubClassOf("System.SoapFault", mendixObject.getType()))
            return system.proxies.SoapFault.initialize(context, mendixObject);
        return new system.proxies.Error(context, mendixObject);
    }
    /** Retrieves the object by identifier and wraps it in a proxy. */
    public static system.proxies.Error load(com.mendix.systemwideinterfaces.core.IContext context, com.mendix.systemwideinterfaces.core.IMendixIdentifier mendixIdentifier) throws com.mendix.core.CoreException
    {
        com.mendix.systemwideinterfaces.core.IMendixObject mendixObject = com.mendix.core.Core.retrieveId(context, mendixIdentifier);
        return system.proxies.Error.initialize(context, mendixObject);
    }
    /**
     * Commit the changes made on this proxy object.
     */
    public final void commit() throws com.mendix.core.CoreException
    {
        com.mendix.core.Core.commit(context, getMendixObject());
    }
    /**
     * Commit the changes made on this proxy object using the specified context.
     */
    public final void commit(com.mendix.systemwideinterfaces.core.IContext context) throws com.mendix.core.CoreException
    {
        com.mendix.core.Core.commit(context, getMendixObject());
    }
    /**
     * Delete the object.
     */
    public final void delete()
    {
        com.mendix.core.Core.delete(context, getMendixObject());
    }
    /**
     * Delete the object using the specified context.
     */
    public final void delete(com.mendix.systemwideinterfaces.core.IContext context)
    {
        com.mendix.core.Core.delete(context, getMendixObject());
    }
    /**
     * @return value of ErrorType
     */
    public final java.lang.String getErrorType()
    {
        return getErrorType(getContext());
    }
    /**
     * @param context
     * @return value of ErrorType
     */
    public final java.lang.String getErrorType(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.ErrorType.toString());
    }
    /**
     * Set value of ErrorType
     * @param errortype
     */
    public final void setErrorType(java.lang.String errortype)
    {
        setErrorType(getContext(), errortype);
    }
    /**
     * Set value of ErrorType
     * @param context
     * @param errortype
     */
    public final void setErrorType(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String errortype)
    {
        getMendixObject().setValue(context, MemberNames.ErrorType.toString(), errortype);
    }
    /**
     * @return value of Message
     */
    public final java.lang.String getMessage()
    {
        return getMessage(getContext());
    }
    /**
     * @param context
     * @return value of Message
     */
    public final java.lang.String getMessage(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Message.toString());
    }
    /**
     * Set value of Message
     * @param message
     */
    public final void setMessage(java.lang.String message)
    {
        setMessage(getContext(), message);
    }
    /**
     * Set value of Message
     * @param context
     * @param message
     */
    public final void setMessage(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String message)
    {
        getMendixObject().setValue(context, MemberNames.Message.toString(), message);
    }
    /**
     * @return value of Stacktrace
     */
    public final java.lang.String getStacktrace()
    {
        return getStacktrace(getContext());
    }
    /**
     * @param context
     * @return value of Stacktrace
     */
    public final java.lang.String getStacktrace(com.mendix.systemwideinterfaces.core.IContext context)
    {
        return (java.lang.String) getMendixObject().getValue(context, MemberNames.Stacktrace.toString());
    }
    /**
     * Set value of Stacktrace
     * @param stacktrace
     */
    public final void setStacktrace(java.lang.String stacktrace)
    {
        setStacktrace(getContext(), stacktrace);
    }
    /**
     * Set value of Stacktrace
     * @param context
     * @param stacktrace
     */
    public final void setStacktrace(com.mendix.systemwideinterfaces.core.IContext context, java.lang.String stacktrace)
    {
        getMendixObject().setValue(context, MemberNames.Stacktrace.toString(), stacktrace);
    }
    /**
     * @return the IMendixObject instance of this proxy for use in the Core interface.
     */
    public final com.mendix.systemwideinterfaces.core.IMendixObject getMendixObject()
    {
        return errorMendixObject;
    }
    /**
     * @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
     */
    public final com.mendix.systemwideinterfaces.core.IContext getContext()
    {
        return context;
    }
    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
            return true;
        // equal only for the exact same proxy class wrapping an equal object
        if (obj != null && getClass().equals(obj.getClass()))
        {
            final system.proxies.Error that = (system.proxies.Error) obj;
            return getMendixObject().equals(that.getMendixObject());
        }
        return false;
    }
    @Override
    public int hashCode()
    {
        return getMendixObject().hashCode();
    }
    /**
     * @return String name of this class
     */
    public static java.lang.String getType()
    {
        return "System.Error";
    }
    /**
     * @return String GUID from this object, format: ID_0000000000
     * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
     */
    @Deprecated
    public java.lang.String getGUID()
    {
        return "ID_" + getMendixObject().getId().toLong();
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.percolator;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.DelegatingAnalyzerWrapper;
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.ReaderUtil;
import org.apache.lucene.index.memory.MemoryIndex;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreMode;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.search.Weight;
import org.apache.lucene.search.join.BitSetProducer;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.Directory;
import org.apache.lucene.util.BitDocIdSet;
import org.apache.lucene.util.BitSet;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.SetOnce;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.ParseField;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.core.RestApiVersion;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.LuceneDocument;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;
import static org.elasticsearch.core.RestApiVersion.equalTo;
import static org.elasticsearch.search.SearchService.ALLOW_EXPENSIVE_QUERIES;
public class PercolateQueryBuilder extends AbstractQueryBuilder<PercolateQueryBuilder> {
    private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ParseField.class);
    static final String DOCUMENT_TYPE_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [percolate] queries. " +
        "The [document_type] should no longer be specified.";
    static final String TYPE_DEPRECATION_MESSAGE = "[types removal] Types are deprecated in [percolate] queries. " +
        "The [type] of the indexed document should no longer be specified.";
    public static final String NAME = "percolate";
    static final ParseField DOCUMENT_FIELD = new ParseField("document");
    static final ParseField DOCUMENTS_FIELD = new ParseField("documents");
    private static final ParseField NAME_FIELD = new ParseField("name");
    private static final ParseField QUERY_FIELD = new ParseField("field");
    private static final ParseField DOCUMENT_TYPE_FIELD = new ParseField("document_type");
    private static final ParseField INDEXED_DOCUMENT_FIELD_TYPE = new ParseField("type");
    private static final ParseField INDEXED_DOCUMENT_FIELD_INDEX = new ParseField("index");
    private static final ParseField INDEXED_DOCUMENT_FIELD_ID = new ParseField("id");
    private static final ParseField INDEXED_DOCUMENT_FIELD_ROUTING = new ParseField("routing");
    private static final ParseField INDEXED_DOCUMENT_FIELD_PREFERENCE = new ParseField("preference");
    private static final ParseField INDEXED_DOCUMENT_FIELD_VERSION = new ParseField("version");
    private final String field;
    private String name;
    // Inline documents to percolate; empty when an indexed document reference or a supplier is used.
    private final List<BytesReference> documents;
    private final XContentType documentXContentType;
    // Coordinates of a stored document to fetch and percolate; null for the inline-document variants.
    private final String indexedDocumentIndex;
    private final String indexedDocumentId;
    private final String indexedDocumentRouting;
    private final String indexedDocumentPreference;
    private final Long indexedDocumentVersion;
    // Set only on the intermediate builder produced by doRewrite() while the async GET is in flight.
    private final Supplier<BytesReference> documentSupplier;
    /**
     * Creates a percolator query builder instance for percolating a provided document.
     *
     * @param field The field that contains the percolator query
     * @param document The binary blob containing document to percolate
     * @param documentXContentType The content type of the binary blob containing the document to percolate
     */
    public PercolateQueryBuilder(String field, BytesReference document, XContentType documentXContentType) {
        this(field, Collections.singletonList(document), documentXContentType);
    }
    /**
     * Creates a percolator query builder instance for percolating a provided document.
     *
     * @param field The field that contains the percolator query
     * @param documents The binary blob containing document to percolate
     * @param documentXContentType The content type of the binary blob containing the document to percolate
     */
    public PercolateQueryBuilder(String field, List<BytesReference> documents, XContentType documentXContentType) {
        if (field == null) {
            throw new IllegalArgumentException("[field] is a required argument");
        }
        if (documents == null) {
            throw new IllegalArgumentException("[document] is a required argument");
        }
        this.field = field;
        this.documents = documents;
        this.documentXContentType = Objects.requireNonNull(documentXContentType);
        indexedDocumentIndex = null;
        indexedDocumentId = null;
        indexedDocumentRouting = null;
        indexedDocumentPreference = null;
        indexedDocumentVersion = null;
        this.documentSupplier = null;
    }
    /**
     * Creates a percolator query builder instance for percolating a document in a remote index.
     *
     * @param field The field that contains the percolator query
     * @param indexedDocumentIndex The index containing the document to percolate
     * @param indexedDocumentId The id of the document to percolate
     * @param indexedDocumentRouting The routing value for the document to percolate
     * @param indexedDocumentPreference The preference to use when fetching the document to percolate
     * @param indexedDocumentVersion The expected version of the document to percolate
     */
    public PercolateQueryBuilder(String field, String indexedDocumentIndex,
                                 String indexedDocumentId, String indexedDocumentRouting,
                                 String indexedDocumentPreference, Long indexedDocumentVersion) {
        if (field == null) {
            throw new IllegalArgumentException("[field] is a required argument");
        }
        if (indexedDocumentIndex == null) {
            throw new IllegalArgumentException("[index] is a required argument");
        }
        if (indexedDocumentId == null) {
            throw new IllegalArgumentException("[id] is a required argument");
        }
        this.field = field;
        this.indexedDocumentIndex = indexedDocumentIndex;
        this.indexedDocumentId = indexedDocumentId;
        this.indexedDocumentRouting = indexedDocumentRouting;
        this.indexedDocumentPreference = indexedDocumentPreference;
        this.indexedDocumentVersion = indexedDocumentVersion;
        this.documents = Collections.emptyList();
        this.documentXContentType = null;
        this.documentSupplier = null;
    }
    /**
     * Internal constructor used by {@link #doRewrite} while the indexed document is being
     * fetched asynchronously; the supplier yields {@code null} until the fetch completes.
     *
     * @param field The field that contains the percolator query
     * @param documentSupplier Supplier of the fetched document source
     */
    protected PercolateQueryBuilder(String field, Supplier<BytesReference> documentSupplier) {
        if (field == null) {
            throw new IllegalArgumentException("[field] is a required argument");
        }
        this.field = field;
        this.documents = Collections.emptyList();
        this.documentXContentType = null;
        this.documentSupplier = documentSupplier;
        indexedDocumentIndex = null;
        indexedDocumentId = null;
        indexedDocumentRouting = null;
        indexedDocumentPreference = null;
        indexedDocumentVersion = null;
    }
    /**
     * Read from a stream.
     */
    PercolateQueryBuilder(StreamInput in) throws IOException {
        super(in);
        field = in.readString();
        name = in.readOptionalString();
        if (in.getVersion().before(Version.V_8_0_0)) {
            // Pre-8.0 streams carried a (typeless, hence null) document type.
            String documentType = in.readOptionalString();
            assert documentType == null;
        }
        indexedDocumentIndex = in.readOptionalString();
        if (in.getVersion().before(Version.V_8_0_0)) {
            // Pre-8.0 streams carried a (typeless, hence null) indexed document type.
            String indexedDocumentType = in.readOptionalString();
            assert indexedDocumentType == null;
        }
        indexedDocumentId = in.readOptionalString();
        indexedDocumentRouting = in.readOptionalString();
        indexedDocumentPreference = in.readOptionalString();
        if (in.readBoolean()) {
            indexedDocumentVersion = in.readVLong();
        } else {
            indexedDocumentVersion = null;
        }
        documents = in.readList(StreamInput::readBytesReference);
        if (documents.isEmpty() == false) {
            documentXContentType = in.readEnum(XContentType.class);
        } else {
            documentXContentType = null;
        }
        documentSupplier = null;
    }
    /**
     * Sets the name used for identification purposes in <code>_percolator_document_slot</code> response field
     * when multiple percolate queries have been specified in the main query.
     */
    public PercolateQueryBuilder setName(String name) {
        this.name = name;
        return this;
    }
    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        if (documentSupplier != null) {
            throw new IllegalStateException("supplier must be null, can't serialize suppliers, missing a rewriteAndFetch?");
        }
        out.writeString(field);
        out.writeOptionalString(name);
        if (out.getVersion().before(Version.V_8_0_0)) {
            // In 7x, typeless percolate queries are represented by null documentType values
            out.writeOptionalString(null);
        }
        out.writeOptionalString(indexedDocumentIndex);
        if (out.getVersion().before(Version.V_8_0_0)) {
            // In 7x, typeless percolate queries are represented by null indexedDocumentType values
            out.writeOptionalString(null);
        }
        out.writeOptionalString(indexedDocumentId);
        out.writeOptionalString(indexedDocumentRouting);
        out.writeOptionalString(indexedDocumentPreference);
        if (indexedDocumentVersion != null) {
            out.writeBoolean(true);
            out.writeVLong(indexedDocumentVersion);
        } else {
            out.writeBoolean(false);
        }
        out.writeVInt(documents.size());
        for (BytesReference document : documents) {
            out.writeBytesReference(document);
        }
        if (documents.isEmpty() == false) {
            // The content type is only meaningful (and only serialized) when documents are present.
            XContentHelper.writeTo(out, documentXContentType);
        }
    }
    @Override
    protected void doXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject(NAME);
        builder.field(QUERY_FIELD.getPreferredName(), field);
        if (name != null) {
            builder.field(NAME_FIELD.getPreferredName(), name);
        }
        if (documents.isEmpty() == false) {
            builder.startArray(DOCUMENTS_FIELD.getPreferredName());
            for (BytesReference document : documents) {
                // Re-parse each stored document so it is emitted as structured content
                // rather than an opaque binary blob.
                try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
                    LoggingDeprecationHandler.INSTANCE, document)) {
                    parser.nextToken();
                    builder.generator().copyCurrentStructure(parser);
                }
            }
            builder.endArray();
        }
        if (indexedDocumentIndex != null || indexedDocumentId != null) {
            if (indexedDocumentIndex != null) {
                builder.field(INDEXED_DOCUMENT_FIELD_INDEX.getPreferredName(), indexedDocumentIndex);
            }
            if (builder.getRestApiVersion() == RestApiVersion.V_7) {
                // Version-7 REST compatibility still expects an explicit type field.
                builder.field(INDEXED_DOCUMENT_FIELD_TYPE.getPreferredName(), MapperService.SINGLE_MAPPING_NAME);
            }
            if (indexedDocumentId != null) {
                builder.field(INDEXED_DOCUMENT_FIELD_ID.getPreferredName(), indexedDocumentId);
            }
            if (indexedDocumentRouting != null) {
                builder.field(INDEXED_DOCUMENT_FIELD_ROUTING.getPreferredName(), indexedDocumentRouting);
            }
            if (indexedDocumentPreference != null) {
                builder.field(INDEXED_DOCUMENT_FIELD_PREFERENCE.getPreferredName(), indexedDocumentPreference);
            }
            if (indexedDocumentVersion != null) {
                builder.field(INDEXED_DOCUMENT_FIELD_VERSION.getPreferredName(), indexedDocumentVersion);
            }
        }
        printBoostAndQueryName(builder);
        builder.endObject();
    }
    private static final ConstructingObjectParser<PercolateQueryBuilder, Void> PARSER = new ConstructingObjectParser<>(NAME, args -> {
        String field = (String) args[0];
        BytesReference document = (BytesReference) args[1];
        @SuppressWarnings("unchecked")
        List<BytesReference> documents = (List<BytesReference>) args[2];
        String indexedDocId = (String) args[3];
        String indexedDocIndex = (String) args[4];
        String indexDocRouting = (String) args[5];
        String indexDocPreference = (String) args[6];
        Long indexedDocVersion = (Long) args[7];
        if (indexedDocId != null) {
            return new PercolateQueryBuilder(field, indexedDocIndex, indexedDocId, indexDocRouting, indexDocPreference, indexedDocVersion);
        } else if (document != null) {
            return new PercolateQueryBuilder(field, List.of(document), XContentType.JSON);
        } else {
            return new PercolateQueryBuilder(field, documents, XContentType.JSON);
        }
    });
    static {
        PARSER.declareString(constructorArg(), QUERY_FIELD);
        PARSER.declareObject(optionalConstructorArg(), (p, c) -> parseDocument(p), DOCUMENT_FIELD);
        PARSER.declareObjectArray(optionalConstructorArg(), (p, c) -> parseDocument(p), DOCUMENTS_FIELD);
        PARSER.declareString(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_ID);
        PARSER.declareString(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_INDEX);
        PARSER.declareString(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_ROUTING);
        PARSER.declareString(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_PREFERENCE);
        PARSER.declareLong(optionalConstructorArg(), INDEXED_DOCUMENT_FIELD_VERSION);
        PARSER.declareString(PercolateQueryBuilder::setName, NAME_FIELD);
        PARSER.declareString(PercolateQueryBuilder::queryName, AbstractQueryBuilder.NAME_FIELD);
        PARSER.declareFloat(PercolateQueryBuilder::boost, BOOST_FIELD);
        // Exactly one of document / documents / indexed-document id must be supplied.
        PARSER.declareRequiredFieldSet(DOCUMENT_FIELD.getPreferredName(),
            DOCUMENTS_FIELD.getPreferredName(), INDEXED_DOCUMENT_FIELD_ID.getPreferredName());
        PARSER.declareExclusiveFieldSet(DOCUMENT_FIELD.getPreferredName(),
            DOCUMENTS_FIELD.getPreferredName(), INDEXED_DOCUMENT_FIELD_ID.getPreferredName());
        PARSER.declareString(deprecateAndIgnoreType("percolate_with_type", TYPE_DEPRECATION_MESSAGE),
            INDEXED_DOCUMENT_FIELD_TYPE.forRestApiVersion(equalTo(RestApiVersion.V_7)));
        PARSER.declareString(deprecateAndIgnoreType("percolate_with_document_type", DOCUMENT_TYPE_DEPRECATION_MESSAGE),
            DOCUMENT_TYPE_FIELD.forRestApiVersion(equalTo(RestApiVersion.V_7)));
    }
    /**
     * Returns a consumer that emits a REST-compatibility deprecation warning and
     * otherwise discards the parsed (legacy) type value.
     */
    private static BiConsumer<PercolateQueryBuilder, String> deprecateAndIgnoreType(String key, String message) {
        return (target, type) -> deprecationLogger.compatibleApiWarning(key, message);
    }
    /**
     * Copies the document at the parser's current position into a standalone JSON blob.
     */
    private static BytesReference parseDocument(XContentParser parser) throws IOException {
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            builder.copyCurrentStructure(parser);
            builder.flush();
            return BytesReference.bytes(builder);
        }
    }
    public static PercolateQueryBuilder fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
    @Override
    protected boolean doEquals(PercolateQueryBuilder other) {
        return Objects.equals(field, other.field)
            && Objects.equals(documents, other.documents)
            && Objects.equals(indexedDocumentIndex, other.indexedDocumentIndex)
            && Objects.equals(documentSupplier, other.documentSupplier)
            && Objects.equals(indexedDocumentId, other.indexedDocumentId);
    }
    @Override
    protected int doHashCode() {
        return Objects.hash(field, documents, indexedDocumentIndex, indexedDocumentId, documentSupplier);
    }
    @Override
    public String getWriteableName() {
        return NAME;
    }
    @Override
    protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) {
        if (documents.isEmpty() == false) {
            // Inline documents need no rewriting.
            return this;
        } else if (documentSupplier != null) {
            final BytesReference source = documentSupplier.get();
            if (source == null) {
                return this; // not executed yet
            } else {
                PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field,
                    Collections.singletonList(source), XContentHelper.xContentType(source));
                if (name != null) {
                    rewritten.setName(name);
                }
                return rewritten;
            }
        }
        // Indexed-document variant: fetch the document asynchronously and hand the
        // result to a supplier-backed builder that is rewritten again once it arrives.
        GetRequest getRequest = new GetRequest(indexedDocumentIndex, indexedDocumentId);
        getRequest.preference("_local");
        getRequest.routing(indexedDocumentRouting);
        getRequest.preference(indexedDocumentPreference);
        if (indexedDocumentVersion != null) {
            getRequest.version(indexedDocumentVersion);
        }
        SetOnce<BytesReference> documentSupplier = new SetOnce<>();
        queryRewriteContext.registerAsyncAction((client, listener) -> {
            client.get(getRequest, ActionListener.wrap(getResponse -> {
                if (getResponse.isExists() == false) {
                    throw new ResourceNotFoundException(
                        "indexed document [{}/{}] couldn't be found", indexedDocumentIndex, indexedDocumentId
                    );
                }
                if (getResponse.isSourceEmpty()) {
                    throw new IllegalArgumentException(
                        "indexed document [" + indexedDocumentIndex + "/" + indexedDocumentId + "] source disabled"
                    );
                }
                documentSupplier.set(getResponse.getSourceAsBytesRef());
                listener.onResponse(null);
            }, listener::onFailure));
        });
        PercolateQueryBuilder rewritten = new PercolateQueryBuilder(field, documentSupplier::get);
        if (name != null) {
            rewritten.setName(name);
        }
        return rewritten;
    }
    @Override
    protected Query doToQuery(SearchExecutionContext context) throws IOException {
        if (context.allowExpensiveQueries() == false) {
            throw new ElasticsearchException("[percolate] queries cannot be executed when '" +
                ALLOW_EXPENSIVE_QUERIES.getKey() + "' is set to false.");
        }
        // Call nowInMillis() so that this query becomes un-cacheable since we
        // can't be sure that it doesn't use now or scripts
        context.nowInMillis();
        if (indexedDocumentIndex != null || indexedDocumentId != null || documentSupplier != null) {
            throw new IllegalStateException("query builder must be rewritten first");
        }
        if (documents.isEmpty()) {
            throw new IllegalStateException("no document to percolate");
        }
        MappedFieldType fieldType = context.getFieldType(field);
        if (fieldType == null) {
            throw new QueryShardException(context, "field [" + field + "] does not exist");
        }
        if ((fieldType instanceof PercolatorFieldMapper.PercolatorFieldType) == false) {
            throw new QueryShardException(context, "expected field [" + field +
                "] to be of type [percolator], but is of type [" + fieldType.typeName() + "]");
        }
        final List<ParsedDocument> docs = new ArrayList<>();
        for (BytesReference document : documents) {
            docs.add(context.parseDocument(new SourceToParse(context.index().getName(), "_temp_id", document, documentXContentType)));
        }
        // We need this custom analyzer because the default index analyzer is strict and the percolator sometimes isn't when
        // 'index.percolator.map_unmapped_fields_as_string' is enabled:
        Analyzer analyzer = new DelegatingAnalyzerWrapper(Analyzer.PER_FIELD_REUSE_STRATEGY) {
            @Override
            protected Analyzer getWrappedAnalyzer(String fieldName) {
                return context.getIndexAnalyzer(f -> context.getIndexAnalyzers().getDefaultIndexAnalyzer());
            }
        };
        final IndexSearcher docSearcher;
        final boolean excludeNestedDocuments;
        if (docs.size() > 1 || docs.get(0).docs().size() > 1) {
            assert docs.size() != 1 || context.hasNested();
            docSearcher = createMultiDocumentSearcher(analyzer, docs);
            excludeNestedDocuments = context.hasNested() && docs.stream()
                .map(ParsedDocument::docs)
                .mapToInt(List::size)
                .anyMatch(size -> size > 1);
        } else {
            // Single flat document: an in-memory index avoids the cost of a real Directory.
            MemoryIndex memoryIndex = MemoryIndex.fromDocument(docs.get(0).rootDoc(), analyzer, true, false);
            docSearcher = memoryIndex.createSearcher();
            docSearcher.setQueryCache(null);
            excludeNestedDocuments = false;
        }
        PercolatorFieldMapper.PercolatorFieldType pft = (PercolatorFieldMapper.PercolatorFieldType) fieldType;
        String name = this.name != null ? this.name : pft.name();
        SearchExecutionContext percolateShardContext = wrap(context);
        PercolatorFieldMapper.configureContext(percolateShardContext, pft.mapUnmappedFieldsAsText);
        PercolateQuery.QueryStore queryStore = createStore(pft.queryBuilderField,
            percolateShardContext);
        return pft.percolateQuery(name, queryStore, documents, docSearcher, excludeNestedDocuments, context.indexVersionCreated());
    }
    public String getField() {
        return field;
    }
    public List<BytesReference> getDocuments() {
        return documents;
    }
    //pkg-private for testing
    XContentType getXContentType() {
        return documentXContentType;
    }
    public String getQueryName() {
        return name;
    }
    /**
     * Indexes all (possibly nested) documents into a transient in-memory directory and
     * returns a searcher over it. Used when percolating multiple or nested documents.
     */
    static IndexSearcher createMultiDocumentSearcher(Analyzer analyzer, Collection<ParsedDocument> docs) {
        Directory directory = new ByteBuffersDirectory();
        try (IndexWriter indexWriter = new IndexWriter(directory, new IndexWriterConfig(analyzer))) {
            // Indexing in order here, so that the user provided order matches with the docid sequencing:
            Iterable<LuceneDocument> iterable = () -> docs.stream()
                .map(ParsedDocument::docs)
                .flatMap(Collection::stream)
                .iterator();
            indexWriter.addDocuments(iterable);
            DirectoryReader directoryReader = DirectoryReader.open(indexWriter);
            assert directoryReader.leaves().size() == 1 : "Expected single leaf, but got [" + directoryReader.leaves().size() + "]";
            final IndexSearcher slowSearcher = new IndexSearcher(directoryReader);
            slowSearcher.setQueryCache(null);
            return slowSearcher;
        } catch (IOException e) {
            throw new ElasticsearchException("Failed to create index for percolator with nested document ", e);
        }
    }
    /**
     * Builds a {@link PercolateQuery.QueryStore} that deserializes the stored query builder
     * from the percolator field's binary doc values and compiles it to a Lucene query.
     */
    static PercolateQuery.QueryStore createStore(MappedFieldType queryBuilderFieldType,
                                                 SearchExecutionContext context) {
        Version indexVersion = context.indexVersionCreated();
        NamedWriteableRegistry registry = context.getWriteableRegistry();
        return ctx -> {
            LeafReader leafReader = ctx.reader();
            BinaryDocValues binaryDocValues = leafReader.getBinaryDocValues(queryBuilderFieldType.name());
            if (binaryDocValues == null) {
                return docId -> null;
            }
            return docId -> {
                if (binaryDocValues.advanceExact(docId)) {
                    BytesRef qbSource = binaryDocValues.binaryValue();
                    try (InputStream in = new ByteArrayInputStream(qbSource.bytes, qbSource.offset, qbSource.length)) {
                        try (StreamInput input = new NamedWriteableAwareStreamInput(
                            new InputStreamStreamInput(in, qbSource.length), registry)) {
                            input.setVersion(indexVersion);
                            // Query builder's content is stored via BinaryFieldMapper, which has a custom encoding
                            // to encode multiple binary values into a single binary doc values field.
                            // This is the reason we need to first need to read the number of values and
                            // then the length of the field value in bytes.
                            int numValues = input.readVInt();
                            assert numValues == 1;
                            int valueLength = input.readVInt();
                            assert valueLength > 0;
                            QueryBuilder queryBuilder = input.readNamedWriteable(QueryBuilder.class);
                            assert in.read() == -1;
                            queryBuilder = Rewriteable.rewrite(queryBuilder, context);
                            return queryBuilder.toQuery(context);
                        }
                    }
                } else {
                    return null;
                }
            };
        };
    }
    /**
     * Wraps the shard's execution context so that reads during percolation target the
     * in-memory document index instead of the shard's own reader/caches.
     */
    static SearchExecutionContext wrap(SearchExecutionContext delegate) {
        return new SearchExecutionContext(delegate) {
            @Override
            public IndexReader getIndexReader() {
                // The reader that matters in this context is not the reader of the shard but
                // the reader of the MemoryIndex. We just use `null` for simplicity.
                return null;
            }
            @Override
            public BitSetProducer bitsetFilter(Query query) {
                return context -> {
                    final IndexReaderContext topLevelContext = ReaderUtil.getTopLevelContext(context);
                    final IndexSearcher searcher = new IndexSearcher(topLevelContext);
                    searcher.setQueryCache(null);
                    final Weight weight = searcher.createWeight(searcher.rewrite(query), ScoreMode.COMPLETE_NO_SCORES, 1f);
                    final Scorer s = weight.scorer(context);
                    if (s != null) {
                        return new BitDocIdSet(BitSet.of(s.iterator(), context.reader().maxDoc())).bits();
                    } else {
                        return null;
                    }
                };
            }
            @Override
            @SuppressWarnings("unchecked")
            public <IFD extends IndexFieldData<?>> IFD getForField(MappedFieldType fieldType) {
                // Field data is built uncached and without a real circuit breaker because the
                // wrapped context only ever searches the tiny in-memory document index.
                IndexFieldData.Builder builder = fieldType.fielddataBuilder(delegate.getFullyQualifiedIndex().getName(),
                    delegate::lookup);
                IndexFieldDataCache cache = new IndexFieldDataCache.None();
                CircuitBreakerService circuitBreaker = new NoneCircuitBreakerService();
                return (IFD) builder.build(cache, circuitBreaker);
            }
        };
    }
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.ec2.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Describes a set of permissions for a security group rule.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/ec2-2016-11-15/IpPermission" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class IpPermission implements Serializable, Cloneable {
/**
* <p>
* The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type number. A value of <code>-1</code>
* indicates all ICMP/ICMPv6 types. If you specify all ICMP/ICMPv6 types, you must specify all codes.
* </p>
*/
private Integer fromPort;
/**
* <p>
* The IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>, <code>icmpv6</code>) or number (see
* <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol Numbers</a>).
* </p>
* <p>
* [VPC only] Use <code>-1</code> to specify all protocols. When authorizing security group rules, specifying
* <code>-1</code> or a protocol number other than <code>tcp</code>, <code>udp</code>, <code>icmp</code>, or
* <code>icmpv6</code> allows traffic on all ports, regardless of any port range you specify. For <code>tcp</code>,
* <code>udp</code>, and <code>icmp</code>, you must specify a port range. For <code>icmpv6</code>, the port range
* is optional; if you omit the port range, traffic for all types and codes is allowed.
* </p>
*/
private String ipProtocol;
/**
* <p>
* [VPC only] The IPv6 ranges.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<Ipv6Range> ipv6Ranges;
/**
* <p>
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access through
* a VPC endpoint from instances associated with the security group.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<PrefixListId> prefixListIds;
/**
* <p>
* The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code> indicates
* all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
* </p>
*/
private Integer toPort;
/**
* <p>
* The security group and AWS account ID pairs.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<UserIdGroupPair> userIdGroupPairs;
/**
* <p>
* The IPv4 ranges.
* </p>
*/
private com.amazonaws.internal.SdkInternalList<IpRange> ipv4Ranges;
/**
 * Sets the start of the port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type number.
 * A value of <code>-1</code> indicates all ICMP/ICMPv6 types; when all ICMP/ICMPv6 types are
 * specified, all codes must be specified as well.
 *
 * @param fromPort
 *        The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type number. A value of
 *        <code>-1</code> indicates all ICMP/ICMPv6 types. If you specify all ICMP/ICMPv6 types, you must specify
 *        all codes.
 */
public void setFromPort(Integer fromPort) {
    this.fromPort = fromPort;
}
/**
 * Returns the start of the port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type number.
 *
 * @return The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type number. A value of
 *         <code>-1</code> indicates all ICMP/ICMPv6 types. If you specify all ICMP/ICMPv6 types, you must specify
 *         all codes.
 */
public Integer getFromPort() {
    return fromPort;
}
/**
 * Fluent variant of {@link #setFromPort(Integer)}: assigns the start of the port range (or
 * ICMP/ICMPv6 type number, where <code>-1</code> means all types) and returns this instance.
 *
 * @param fromPort
 *        The start of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 type number. A value of
 *        <code>-1</code> indicates all ICMP/ICMPv6 types. If you specify all ICMP/ICMPv6 types, you must specify
 *        all codes.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public IpPermission withFromPort(Integer fromPort) {
    this.setFromPort(fromPort);
    return this;
}
/**
 * Sets the IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>,
 * <code>icmpv6</code>) or protocol number (see
 * <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol Numbers</a>).
 * <p>
 * [VPC only] Use <code>-1</code> to specify all protocols. When authorizing security group rules, specifying
 * <code>-1</code> or a protocol number other than <code>tcp</code>, <code>udp</code>, <code>icmp</code>, or
 * <code>icmpv6</code> allows traffic on all ports, regardless of any port range you specify. For
 * <code>tcp</code>, <code>udp</code>, and <code>icmp</code>, you must specify a port range. For
 * <code>icmpv6</code>, the port range is optional; if you omit the port range, traffic for all types and
 * codes is allowed.
 *
 * @param ipProtocol
 *        The IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>, <code>icmpv6</code>) or
 *        number (see <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol
 *        Numbers</a>).
 */
public void setIpProtocol(String ipProtocol) {
    this.ipProtocol = ipProtocol;
}
/**
 * Returns the IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>,
 * <code>icmpv6</code>) or protocol number (see
 * <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol Numbers</a>).
 * <p>
 * [VPC only] A value of <code>-1</code> means all protocols. When authorizing security group rules,
 * specifying <code>-1</code> or a protocol number other than <code>tcp</code>, <code>udp</code>,
 * <code>icmp</code>, or <code>icmpv6</code> allows traffic on all ports, regardless of any port range you
 * specify. For <code>tcp</code>, <code>udp</code>, and <code>icmp</code>, you must specify a port range.
 * For <code>icmpv6</code>, the port range is optional; if you omit the port range, traffic for all types
 * and codes is allowed.
 *
 * @return The IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>, <code>icmpv6</code>) or
 *         number (see <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol
 *         Numbers</a>).
 */
public String getIpProtocol() {
    return ipProtocol;
}
/**
* <p>
* The IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>, <code>icmpv6</code>) or number (see
* <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol Numbers</a>).
* </p>
* <p>
* [VPC only] Use <code>-1</code> to specify all protocols. When authorizing security group rules, specifying
* <code>-1</code> or a protocol number other than <code>tcp</code>, <code>udp</code>, <code>icmp</code>, or
* <code>icmpv6</code> allows traffic on all ports, regardless of any port range you specify. For <code>tcp</code>,
* <code>udp</code>, and <code>icmp</code>, you must specify a port range. For <code>icmpv6</code>, the port range
* is optional; if you omit the port range, traffic for all types and codes is allowed.
* </p>
*
* @param ipProtocol
* The IP protocol name (<code>tcp</code>, <code>udp</code>, <code>icmp</code>, <code>icmpv6</code>) or
* number (see <a href="http://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml">Protocol
* Numbers</a>).</p>
* <p>
* [VPC only] Use <code>-1</code> to specify all protocols. When authorizing security group rules, specifying
* <code>-1</code> or a protocol number other than <code>tcp</code>, <code>udp</code>, <code>icmp</code>, or
* <code>icmpv6</code> allows traffic on all ports, regardless of any port range you specify. For
* <code>tcp</code>, <code>udp</code>, and <code>icmp</code>, you must specify a port range. For
* <code>icmpv6</code>, the port range is optional; if you omit the port range, traffic for all types and
* codes is allowed.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withIpProtocol(String ipProtocol) {
setIpProtocol(ipProtocol);
return this;
}
/**
* <p>
* [VPC only] The IPv6 ranges.
* </p>
*
* @return [VPC only] The IPv6 ranges.
*/
public java.util.List<Ipv6Range> getIpv6Ranges() {
if (ipv6Ranges == null) {
ipv6Ranges = new com.amazonaws.internal.SdkInternalList<Ipv6Range>();
}
return ipv6Ranges;
}
/**
* <p>
* [VPC only] The IPv6 ranges.
* </p>
*
* @param ipv6Ranges
* [VPC only] The IPv6 ranges.
*/
public void setIpv6Ranges(java.util.Collection<Ipv6Range> ipv6Ranges) {
if (ipv6Ranges == null) {
this.ipv6Ranges = null;
return;
}
this.ipv6Ranges = new com.amazonaws.internal.SdkInternalList<Ipv6Range>(ipv6Ranges);
}
/**
* <p>
* [VPC only] The IPv6 ranges.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setIpv6Ranges(java.util.Collection)} or {@link #withIpv6Ranges(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param ipv6Ranges
* [VPC only] The IPv6 ranges.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withIpv6Ranges(Ipv6Range... ipv6Ranges) {
if (this.ipv6Ranges == null) {
setIpv6Ranges(new com.amazonaws.internal.SdkInternalList<Ipv6Range>(ipv6Ranges.length));
}
for (Ipv6Range ele : ipv6Ranges) {
this.ipv6Ranges.add(ele);
}
return this;
}
/**
* <p>
* [VPC only] The IPv6 ranges.
* </p>
*
* @param ipv6Ranges
* [VPC only] The IPv6 ranges.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withIpv6Ranges(java.util.Collection<Ipv6Range> ipv6Ranges) {
setIpv6Ranges(ipv6Ranges);
return this;
}
/**
* <p>
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access through
* a VPC endpoint from instances associated with the security group.
* </p>
*
* @return [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access
* through a VPC endpoint from instances associated with the security group.
*/
public java.util.List<PrefixListId> getPrefixListIds() {
if (prefixListIds == null) {
prefixListIds = new com.amazonaws.internal.SdkInternalList<PrefixListId>();
}
return prefixListIds;
}
/**
* <p>
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access through
* a VPC endpoint from instances associated with the security group.
* </p>
*
* @param prefixListIds
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access
* through a VPC endpoint from instances associated with the security group.
*/
public void setPrefixListIds(java.util.Collection<PrefixListId> prefixListIds) {
if (prefixListIds == null) {
this.prefixListIds = null;
return;
}
this.prefixListIds = new com.amazonaws.internal.SdkInternalList<PrefixListId>(prefixListIds);
}
/**
* <p>
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access through
* a VPC endpoint from instances associated with the security group.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setPrefixListIds(java.util.Collection)} or {@link #withPrefixListIds(java.util.Collection)} if you want
* to override the existing values.
* </p>
*
* @param prefixListIds
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access
* through a VPC endpoint from instances associated with the security group.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withPrefixListIds(PrefixListId... prefixListIds) {
if (this.prefixListIds == null) {
setPrefixListIds(new com.amazonaws.internal.SdkInternalList<PrefixListId>(prefixListIds.length));
}
for (PrefixListId ele : prefixListIds) {
this.prefixListIds.add(ele);
}
return this;
}
/**
* <p>
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access through
* a VPC endpoint from instances associated with the security group.
* </p>
*
* @param prefixListIds
* [VPC only] The prefix list IDs for an AWS service. With outbound rules, this is the AWS service to access
* through a VPC endpoint from instances associated with the security group.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withPrefixListIds(java.util.Collection<PrefixListId> prefixListIds) {
setPrefixListIds(prefixListIds);
return this;
}
/**
* <p>
* The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code> indicates
* all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
* </p>
*
* @param toPort
* The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code>
* indicates all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
*/
public void setToPort(Integer toPort) {
this.toPort = toPort;
}
/**
* <p>
* The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code> indicates
* all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
* </p>
*
* @return The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code>
* indicates all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
*/
public Integer getToPort() {
return this.toPort;
}
/**
* <p>
* The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code> indicates
* all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
* </p>
*
* @param toPort
* The end of port range for the TCP and UDP protocols, or an ICMP/ICMPv6 code. A value of <code>-1</code>
* indicates all ICMP/ICMPv6 codes. If you specify all ICMP/ICMPv6 types, you must specify all codes.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withToPort(Integer toPort) {
setToPort(toPort);
return this;
}
/**
* <p>
* The security group and AWS account ID pairs.
* </p>
*
* @return The security group and AWS account ID pairs.
*/
public java.util.List<UserIdGroupPair> getUserIdGroupPairs() {
if (userIdGroupPairs == null) {
userIdGroupPairs = new com.amazonaws.internal.SdkInternalList<UserIdGroupPair>();
}
return userIdGroupPairs;
}
/**
* <p>
* The security group and AWS account ID pairs.
* </p>
*
* @param userIdGroupPairs
* The security group and AWS account ID pairs.
*/
public void setUserIdGroupPairs(java.util.Collection<UserIdGroupPair> userIdGroupPairs) {
if (userIdGroupPairs == null) {
this.userIdGroupPairs = null;
return;
}
this.userIdGroupPairs = new com.amazonaws.internal.SdkInternalList<UserIdGroupPair>(userIdGroupPairs);
}
/**
* <p>
* The security group and AWS account ID pairs.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setUserIdGroupPairs(java.util.Collection)} or {@link #withUserIdGroupPairs(java.util.Collection)} if you
* want to override the existing values.
* </p>
*
* @param userIdGroupPairs
* The security group and AWS account ID pairs.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withUserIdGroupPairs(UserIdGroupPair... userIdGroupPairs) {
if (this.userIdGroupPairs == null) {
setUserIdGroupPairs(new com.amazonaws.internal.SdkInternalList<UserIdGroupPair>(userIdGroupPairs.length));
}
for (UserIdGroupPair ele : userIdGroupPairs) {
this.userIdGroupPairs.add(ele);
}
return this;
}
/**
* <p>
* The security group and AWS account ID pairs.
* </p>
*
* @param userIdGroupPairs
* The security group and AWS account ID pairs.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withUserIdGroupPairs(java.util.Collection<UserIdGroupPair> userIdGroupPairs) {
setUserIdGroupPairs(userIdGroupPairs);
return this;
}
/**
* <p>
* The IPv4 ranges.
* </p>
*
* @return The IPv4 ranges.
*/
public java.util.List<IpRange> getIpv4Ranges() {
if (ipv4Ranges == null) {
ipv4Ranges = new com.amazonaws.internal.SdkInternalList<IpRange>();
}
return ipv4Ranges;
}
/**
* <p>
* The IPv4 ranges.
* </p>
*
* @param ipv4Ranges
* The IPv4 ranges.
*/
public void setIpv4Ranges(java.util.Collection<IpRange> ipv4Ranges) {
if (ipv4Ranges == null) {
this.ipv4Ranges = null;
return;
}
this.ipv4Ranges = new com.amazonaws.internal.SdkInternalList<IpRange>(ipv4Ranges);
}
/**
* <p>
* The IPv4 ranges.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setIpv4Ranges(java.util.Collection)} or {@link #withIpv4Ranges(java.util.Collection)} if you want to
* override the existing values.
* </p>
*
* @param ipv4Ranges
* The IPv4 ranges.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withIpv4Ranges(IpRange... ipv4Ranges) {
if (this.ipv4Ranges == null) {
setIpv4Ranges(new com.amazonaws.internal.SdkInternalList<IpRange>(ipv4Ranges.length));
}
for (IpRange ele : ipv4Ranges) {
this.ipv4Ranges.add(ele);
}
return this;
}
/**
* <p>
* The IPv4 ranges.
* </p>
*
* @param ipv4Ranges
* The IPv4 ranges.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public IpPermission withIpv4Ranges(java.util.Collection<IpRange> ipv4Ranges) {
setIpv4Ranges(ipv4Ranges);
return this;
}
/**
* <p>
* One or more IP ranges.
* </p>
*
* @return One or more IP ranges.
* @deprecated Use {@link #getIpv4Ranges()}.
*/
@Deprecated
public java.util.List<String> getIpRanges() {
if (ipv4Ranges == null) {
ipv4Ranges = new com.amazonaws.internal.SdkInternalList<IpRange>();
}
return newLegacyIpRangeList(ipv4Ranges);
}
/**
* <p>
* One or more IP ranges.
* </p>
*
* @param ipRanges
* One or more IP ranges.
* @deprecated Use {@link #setIpv4Ranges(java.util.Collection)}
*/
@Deprecated
public void setIpRanges(java.util.Collection<String> ipRanges) {
if (ipRanges == null) {
this.ipv4Ranges = null;
return;
}
this.ipv4Ranges = newIpRangeList(ipRanges);
}
/**
* <p>
* One or more IP ranges.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setIpRanges(java.util.Collection)} or {@link #withIpRanges(java.util.Collection)} if you want to override
* the existing values.
* </p>
*
* @param ipRanges
* One or more IP ranges.
* @return Returns a reference to this object so that method calls can be chained together.
* @deprecated Use {@link #withIpv4Ranges(IpRange...)}
*/
@Deprecated
public IpPermission withIpRanges(String... ipRanges) {
if (this.ipv4Ranges == null) {
setIpRanges(new com.amazonaws.internal.SdkInternalList<String>(ipRanges.length));
}
for (String ele : ipRanges) {
this.ipv4Ranges.add(newIpRange(ele));
}
return this;
}
/**
* <p>
* One or more IP ranges.
* </p>
*
* @param ipRanges
* One or more IP ranges.
* @return Returns a reference to this object so that method calls can be chained together.
* @deprecated Use {@link #withIpv4Ranges(java.util.Collection)}
*/
@Deprecated
public IpPermission withIpRanges(java.util.Collection<String> ipRanges) {
setIpRanges(ipRanges);
return this;
}
    /** Wraps a single CIDR string in an {@link IpRange} for the deprecated string-based API. */
    private IpRange newIpRange(String ipRange) {
        return new IpRange().withCidrIp(ipRange);
    }
private com.amazonaws.internal.SdkInternalList<IpRange> newIpRangeList(java.util.Collection<String> ipRanges) {
com.amazonaws.internal.SdkInternalList<IpRange> ipRangeList = new com.amazonaws.internal.SdkInternalList<IpRange>(ipRanges.size());
for (String ipRange : ipRanges) {
ipRangeList.add(newIpRange(ipRange));
}
return ipRangeList;
}
private java.util.List<String> newLegacyIpRangeList(java.util.List<IpRange> ipRanges) {
java.util.List<String> ipRangeList = new java.util.ArrayList<String>();
for (IpRange ipRange : ipRanges) {
ipRangeList.add(ipRange.getCidrIp());
}
return ipRangeList;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getFromPort() != null)
sb.append("FromPort: ").append(getFromPort()).append(",");
if (getIpProtocol() != null)
sb.append("IpProtocol: ").append(getIpProtocol()).append(",");
if (getIpv6Ranges() != null)
sb.append("Ipv6Ranges: ").append(getIpv6Ranges()).append(",");
if (getPrefixListIds() != null)
sb.append("PrefixListIds: ").append(getPrefixListIds()).append(",");
if (getToPort() != null)
sb.append("ToPort: ").append(getToPort()).append(",");
if (getUserIdGroupPairs() != null)
sb.append("UserIdGroupPairs: ").append(getUserIdGroupPairs()).append(",");
if (getIpv4Ranges() != null)
sb.append("Ipv4Ranges: ").append(getIpv4Ranges());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof IpPermission == false)
return false;
IpPermission other = (IpPermission) obj;
if (other.getFromPort() == null ^ this.getFromPort() == null)
return false;
if (other.getFromPort() != null && other.getFromPort().equals(this.getFromPort()) == false)
return false;
if (other.getIpProtocol() == null ^ this.getIpProtocol() == null)
return false;
if (other.getIpProtocol() != null && other.getIpProtocol().equals(this.getIpProtocol()) == false)
return false;
if (other.getIpv6Ranges() == null ^ this.getIpv6Ranges() == null)
return false;
if (other.getIpv6Ranges() != null && other.getIpv6Ranges().equals(this.getIpv6Ranges()) == false)
return false;
if (other.getPrefixListIds() == null ^ this.getPrefixListIds() == null)
return false;
if (other.getPrefixListIds() != null && other.getPrefixListIds().equals(this.getPrefixListIds()) == false)
return false;
if (other.getToPort() == null ^ this.getToPort() == null)
return false;
if (other.getToPort() != null && other.getToPort().equals(this.getToPort()) == false)
return false;
if (other.getUserIdGroupPairs() == null ^ this.getUserIdGroupPairs() == null)
return false;
if (other.getUserIdGroupPairs() != null && other.getUserIdGroupPairs().equals(this.getUserIdGroupPairs()) == false)
return false;
if (other.getIpv4Ranges() == null ^ this.getIpv4Ranges() == null)
return false;
if (other.getIpv4Ranges() != null && other.getIpv4Ranges().equals(this.getIpv4Ranges()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getFromPort() == null) ? 0 : getFromPort().hashCode());
hashCode = prime * hashCode + ((getIpProtocol() == null) ? 0 : getIpProtocol().hashCode());
hashCode = prime * hashCode + ((getIpv6Ranges() == null) ? 0 : getIpv6Ranges().hashCode());
hashCode = prime * hashCode + ((getPrefixListIds() == null) ? 0 : getPrefixListIds().hashCode());
hashCode = prime * hashCode + ((getToPort() == null) ? 0 : getToPort().hashCode());
hashCode = prime * hashCode + ((getUserIdGroupPairs() == null) ? 0 : getUserIdGroupPairs().hashCode());
hashCode = prime * hashCode + ((getIpv4Ranges() == null) ? 0 : getIpv4Ranges().hashCode());
return hashCode;
}
@Override
public IpPermission clone() {
try {
return (IpPermission) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
package com.supermeetup.supermeetup.fragment;
import android.content.Intent;
import android.databinding.DataBindingUtil;
import android.location.Location;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v7.widget.SearchView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import com.supermeetup.supermeetup.MeetupApp;
import com.supermeetup.supermeetup.R;
import com.supermeetup.supermeetup.activities.HomeActivity;
import com.supermeetup.supermeetup.adapter.CategoryAndEventAdapter;
import com.supermeetup.supermeetup.common.Util;
import com.supermeetup.supermeetup.databinding.FragmentNearbyBinding;
import com.supermeetup.supermeetup.dialog.LoadingDialog;
import com.supermeetup.supermeetup.model.Category;
import com.supermeetup.supermeetup.model.Event;
import com.supermeetup.supermeetup.network.MeetupClient;
import java.util.ArrayList;
import retrofit2.Call;
import retrofit2.Callback;
import retrofit2.Response;
/**
* Created by Irene on 10/17/17.
*/
public class NearbyFragment extends Fragment implements BaseModelListFragment.DataLoadListener<Event> {

    // NOTE(review): holding a Fragment in a static field leaks the instance (and the
    // Activity context captured inside it) across configuration changes. Kept as-is
    // because existing callers depend on getInstance(Location); consider migrating to
    // a ViewModel or newInstance() + arguments.
    private static NearbyFragment mFragment;

    private Location mLocation;                 // may be null until a non-null location is supplied
    private String mQuery = "";                 // query text restored into the SearchView on view creation
    private BaseModelListFragment mBaseModelListFragment;
    private CategoryAndEventAdapter categoryAndEventAdapter;

    private FragmentNearbyBinding mNearbyBinding;
    private LoadingDialog mLoadingDialog;
    private MeetupClient meetupClient;

    public NearbyFragment() {
        // Required empty public constructor for framework re-instantiation.
    }

    /**
     * Returns the shared fragment instance, creating it on first use.
     *
     * @param location the current device location; a {@code null} value leaves any previously set location untouched
     */
    public static NearbyFragment getInstance(Location location) {
        if (mFragment == null) {
            mFragment = new NearbyFragment();
        }
        mFragment.setLocation(location);
        return mFragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater,
                             ViewGroup container,
                             Bundle savedInstanceState) {
        super.onCreateView(inflater, container, savedInstanceState);
        mNearbyBinding = DataBindingUtil.inflate(inflater, R.layout.fragment_nearby, container, false);
        View view = mNearbyBinding.getRoot();

        // Embed the reusable list fragment and register for its refresh / load-more callbacks.
        mBaseModelListFragment = BaseModelListFragment.getInstance();
        mBaseModelListFragment.placeModelListFragment(getFragmentManager(), R.id.nearby_listview);
        mBaseModelListFragment.setDataListener(this);
        mLoadingDialog = new LoadingDialog(getActivity());

        // Restore the previous query after the SearchView has finished layout.
        mNearbyBinding.nearbySearchlayout.searchview.post(new Runnable() {
            @Override
            public void run() {
                mNearbyBinding.nearbySearchlayout.searchview.setQuery(mQuery, false);
            }
        });
        mNearbyBinding.nearbySearchlayout.searchview.clearFocus();
        mNearbyBinding.nearbySearchlayout.searchview.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                // Fix: use the submitted query delivered by the callback instead of
                // re-reading it from the view (same value, less indirection).
                Intent i = new Intent(getActivity(), HomeActivity.class);
                i.putExtra(Util.EXTRA_QUERY, query);
                getActivity().startActivity(i);
                return true;
            }

            @Override
            public boolean onQueryTextChange(String newText) {
                return false;
            }
        });

        meetupClient = MeetupApp.getRestClient(getActivity());
        loadCategories();
        return view;
    }

    /** Stores the location used for event recommendations; {@code null} values are ignored. */
    public void setLocation(Location location) {
        if (location != null) {
            mLocation = location;
        }
    }

    /** Sets the query text shown in the search bar when the view is (re)created. */
    public void setQuery(String query) {
        mQuery = query;
    }

    /** Fetches the topic categories, then chains into loading the recommended events. */
    private void loadCategories() {
        mLoadingDialog.setMessage(Util.getString(getActivity(), R.string.load_category));
        mLoadingDialog.show();
        meetupClient.findTopicCategories(new Callback<ArrayList<Category>>() {
            @Override
            public void onResponse(Call<ArrayList<Category>> call, Response<ArrayList<Category>> response) {
                if (!isAdded()) {
                    return; // fragment detached while the request was in flight
                }
                if (response.isSuccessful()) {
                    ArrayList<Category> categories = response.body();
                    if (categories != null) {
                        setCategoryList(categories);
                    }
                    loadRecommendEvents(false);
                } else {
                    // Fix: previously an unsuccessful response left the loading dialog showing forever.
                    mLoadingDialog.dismiss();
                }
            }

            @Override
            public void onFailure(Call<ArrayList<Category>> call, Throwable t) {
                // Fix: previously the loading dialog was never dismissed on failure.
                mLoadingDialog.dismiss();
                Log.e("nearbyerror", "Find topic categories request error: " + t.toString());
            }
        }, null, null, null, null);
    }

    /**
     * Fetches events recommended around {@code mLocation}.
     *
     * @param isRefresh whether this load was triggered by a pull-to-refresh
     */
    private void loadRecommendEvents(final boolean isRefresh) {
        if (mLocation == null) {
            // Fix: the previous code dereferenced mLocation unconditionally and crashed
            // with an NPE when no location had ever been supplied (e.g. getInstance(null)).
            Log.e("nearbyerror", "Recommended events skipped: no location available");
            mLoadingDialog.dismiss();
            if (isRefresh) {
                mBaseModelListFragment.onRefreshingComplete();
            }
            return;
        }
        mLoadingDialog.setMessage(Util.getString(getActivity(), R.string.load_event));
        if (isRefresh) {
            mLoadingDialog.show();
        }
        meetupClient.recommendedEvents(new Callback<ArrayList<Event>>() {
            @Override
            public void onResponse(Call<ArrayList<Event>> call, Response<ArrayList<Event>> response) {
                if (response.isSuccessful()) {
                    // Remember the pagination URL for getMoreData().
                    meetupClient.saveNextUrlForRecommendedEvents(response);
                    ArrayList<Event> events = response.body();
                    if (events != null) {
                        setEventList(events);
                    }
                }
                mLoadingDialog.dismiss();
                if (isRefresh) {
                    mBaseModelListFragment.onRefreshingComplete();
                }
            }

            @Override
            public void onFailure(Call<ArrayList<Event>> call, Throwable t) {
                mLoadingDialog.dismiss();
                if (isRefresh) {
                    mBaseModelListFragment.onRefreshingComplete();
                }
                Log.e("nearbyerror", "Recommended event request error: " + t.toString());
            }
        }, Util.DEFAULT_FIELDS, mLocation.getLatitude(), mLocation.getLongitude(), null, null, null);
    }

    /** Lazily creates the combined category/event adapter and attaches it to the list fragment. */
    private CategoryAndEventAdapter ensureAdapter() {
        if (categoryAndEventAdapter == null) {
            categoryAndEventAdapter = new CategoryAndEventAdapter(getActivity());
        }
        mBaseModelListFragment.setAdapter(categoryAndEventAdapter);
        return (CategoryAndEventAdapter) mBaseModelListFragment.getAdapter();
    }

    /** Hands the loaded categories to the adapter. */
    private void setCategoryList(ArrayList<Category> categories) {
        ensureAdapter().setCategories(categories);
    }

    /** Hands the loaded events to the adapter, replacing the current event section. */
    private void setEventList(ArrayList<Event> events) {
        ensureAdapter().setEvents(events, true);
    }

    /** Pull-to-refresh: clears the list and reloads the recommended events. */
    @Override
    public void getNewData() {
        mBaseModelListFragment.reset();
        loadRecommendEvents(true);
    }

    /** Load-more: fetches the next page using the URL saved from the previous response. */
    @Override
    public void getMoreData(int offset) {
        mLoadingDialog.setMessage(Util.getString(getActivity(), R.string.load_data));
        mLoadingDialog.show();
        meetupClient.getNextUrlForListEvents(new Callback<ArrayList<Event>>() {
            @Override
            public void onResponse(Call<ArrayList<Event>> call, Response<ArrayList<Event>> response) {
                if (response.isSuccessful()) {
                    ArrayList<Event> events = response.body();
                    if (events != null) {
                        mBaseModelListFragment.addModels(events);
                    }
                    meetupClient.saveNextUrlForRecommendedEvents(response);
                }
                mLoadingDialog.dismiss();
            }

            @Override
            public void onFailure(Call<ArrayList<Event>> call, Throwable t) {
                mLoadingDialog.dismiss();
                Log.e("nearerror", "Recommended event request error: " + t.toString());
            }
        });
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.delegate;
import static com.google.common.base.Preconditions.checkNotNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_READ_COUNTER;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_READ_DURATION;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_WRITE_COUNTER;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_WRITE_DURATION;
import static org.apache.jackrabbit.oak.commons.PathUtils.denotesRoot;
import java.io.IOException;
import java.util.Iterator;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jcr.ItemExistsException;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.jcr.nodetype.ConstraintViolationException;
import com.google.common.collect.ImmutableMap;
import org.apache.jackrabbit.oak.api.AuthInfo;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.QueryEngine;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.jcr.observation.EventFactory;
import org.apache.jackrabbit.oak.jcr.session.RefreshStrategy;
import org.apache.jackrabbit.oak.jcr.session.RefreshStrategy.Composite;
import org.apache.jackrabbit.oak.jcr.session.SessionNamespaces;
import org.apache.jackrabbit.oak.jcr.session.SessionStats;
import org.apache.jackrabbit.oak.jcr.session.SessionStats.Counters;
import org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation;
import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider;
import org.apache.jackrabbit.oak.stats.Clock;
import org.apache.jackrabbit.oak.stats.StatisticManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Internal delegate that mediates access from the JCR session layer to the
 * underlying Oak {@link ContentSession}. It serializes repository operations
 * through an explicit {@code WarningLock}, applies the configured
 * {@link RefreshStrategy}, and records per-session as well as repository-wide
 * operation statistics.
 */
public class SessionDelegate {
    static final Logger log = LoggerFactory.getLogger(SessionDelegate.class);
    // dedicated loggers: audit trail plus per-operation read/write tracing
    static final Logger auditLogger = LoggerFactory.getLogger("org.apache.jackrabbit.oak.audit");
    static final Logger readOperationLogger = LoggerFactory.getLogger("org.apache.jackrabbit.oak.jcr.operations.reads");
    static final Logger writeOperationLogger = LoggerFactory.getLogger("org.apache.jackrabbit.oak.jcr.operations.writes");
    private final ContentSession contentSession;
    private final SecurityProvider securityProvider;
    // internal refresh triggers combined with the externally supplied strategy in the constructor
    private final RefreshAtNextAccess refreshAtNextAccess = new RefreshAtNextAccess();
    private final SaveCountRefresh saveCountRefresh;
    private final RefreshStrategy refreshStrategy;
    private final Root root;
    private final IdentifierManager idManager;
    private final SessionStats sessionStats;
    private final Clock clock;
    // access time stamps and counters for statistics about this session
    private final Counters sessionCounters;
    // repository-wide counters for statistics about all sessions
    private final AtomicLong readCounter;
    private final AtomicLong readDuration;
    private final AtomicLong writeCounter;
    private final AtomicLong writeDuration;
    private boolean isAlive = true;
    // nesting depth of currently executing session operations (see perform())
    private int sessionOpCount;
    // NOTE(review): updateCount/userData semantics are not visible in this chunk;
    // presumably observation-related (userData feeds JCR ObservationManager) — confirm in full file.
    private long updateCount = 0;
    private String userData = null;
    private PermissionProvider permissionProvider;
    /**
     * The lock used to guarantee synchronized execution of repository
     * operations. An explicit lock is used instead of normal Java
     * synchronization in order to be able to log attempts to concurrently
     * use a session.
     */
    private final WarningLock lock = new WarningLock(new ReentrantLock());
    private final SessionNamespaces namespaces;
    /**
     * Create a new session delegate for a {@code ContentSession}. The refresh behaviour of the
     * session is governed by the given {@code refreshStrategy}, which is combined here with the
     * internal refresh-at-next-access, save-count and namespace-refresh triggers.
     * In addition a refresh can always be scheduled for the next access by an explicit call
     * to {@link #refreshAtNextAccess()}. This is typically done from within the observation event
     * dispatcher.
     *
     * @param contentSession the content session to delegate to
     * @param securityProvider the security provider
     * @param refreshStrategy the refresh strategy used for auto refreshing this session
     * @param threadSaveCount thread-local save counter backing the save-count refresh trigger
     * @param statisticManager the statistics manager for tracking session operations
     * @param clock the clock used to time session operations for the statistics
     */
    public SessionDelegate(
            @Nonnull ContentSession contentSession,
            @Nonnull SecurityProvider securityProvider,
            @Nonnull RefreshStrategy refreshStrategy,
            @Nonnull ThreadLocal<Long> threadSaveCount,
            @Nonnull StatisticManager statisticManager,
            @Nonnull Clock clock) {
        this.contentSession = checkNotNull(contentSession);
        this.securityProvider = checkNotNull(securityProvider);
        this.root = contentSession.getLatestRoot();
        this.namespaces = new SessionNamespaces(this.root);
        this.saveCountRefresh = new SaveCountRefresh(checkNotNull(threadSaveCount));
        // composite strategy: external strategy plus the three internal triggers
        this.refreshStrategy = Composite.create(checkNotNull(refreshStrategy),
                refreshAtNextAccess, saveCountRefresh, new RefreshNamespaces(
                        namespaces));
        this.idManager = new IdentifierManager(root);
        this.clock = checkNotNull(clock);
        checkNotNull(statisticManager);
        this.sessionStats = new SessionStats(contentSession.toString(),
                contentSession.getAuthInfo(), clock, refreshStrategy, this, statisticManager);
        this.sessionCounters = sessionStats.getCounters();
        // repository-wide counters shared across all sessions
        readCounter = statisticManager.getCounter(SESSION_READ_COUNTER);
        readDuration = statisticManager.getCounter(SESSION_READ_DURATION);
        writeCounter = statisticManager.getCounter(SESSION_WRITE_COUNTER);
        writeDuration = statisticManager.getCounter(SESSION_WRITE_DURATION);
    }
    /**
     * Statistics and monitoring information for this session.
     *
     * @return the {@link SessionStats} instance created for this delegate
     */
    @Nonnull
    public SessionStats getSessionStats() {
        return sessionStats;
    }
    /**
     * Schedule an implicit refresh of this session upon its next access.
     * The flag is flipped while holding the session lock so the update is
     * safe against concurrently executing session operations.
     */
    public void refreshAtNextAccess() {
        lock.lock();
        try {
            refreshAtNextAccess.refreshAtNextAccess(true);
        } finally {
            lock.unlock();
        }
    }
    /**
     * Wrap the passed {@code iterator} in an iterator that synchronizes
     * all access to the underlying session.
     * @param iterator iterator to synchronize
     * @param <T> element type of the iterator
     * @return synchronized iterator
     */
    public <T> Iterator<T> sync(Iterator<T> iterator) {
        return new SynchronizedIterator<T>(iterator, lock);
    }
    /**
     * Performs the passed {@code SessionOperation} in a safe execution context. This
     * context ensures that the session is refreshed if necessary and that refreshing
     * occurs before the session operation is performed and the refreshing is done only
     * once.
     *
     * @param sessionOperation the {@code SessionOperation} to perform
     * @param <T> return type of {@code sessionOperation}
     * @return the result of {@code sessionOperation.perform()}
     * @throws RepositoryException
     * @see #getRoot()
     */
    @Nonnull
    public <T> T perform(@Nonnull SessionOperation<T> sessionOperation) throws RepositoryException {
        long t0 = clock.getTime();
        // Acquire the exclusive lock for accessing session internals.
        // No other session should be holding the lock, so we log a
        // message to let the user know of such cases.
        lock.lock(sessionOperation);
        try {
            // prePerform/postPerform bracket the operation with the refresh and
            // statistics bookkeeping (their definitions are outside this chunk).
            prePerform(sessionOperation, t0);
            try {
                sessionOpCount++; // track nesting depth of in-flight operations
                T result = sessionOperation.perform();
                // trace the operation on the read/write operation loggers
                logOperationDetails(contentSession, sessionOperation);
                return result;
            } finally {
                // always runs, so counters/refresh state stay consistent even when perform() throws
                postPerform(sessionOperation, t0);
            }
        } finally {
            lock.unlock();
        }
    }
/**
 * Same as {@link #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)}
 * but with the option to return {@code null}; thus calling
 * {@link org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation#performNullable()}
 *
 * @param sessionOperation the {@code SessionOperation} to perform
 * @param <T> return type of {@code sessionOperation}
 * @return the result of {@code sessionOperation.performNullable()}, which
 * might also be {@code null}.
 * @throws RepositoryException
 * @see #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)
 */
@Nullable
public <T> T performNullable(@Nonnull SessionOperation<T> sessionOperation) throws RepositoryException {
    long t0 = clock.getTime();
    // Acquire the exclusive lock for accessing session internals.
    // No other session should be holding the lock, so we log a
    // message to let the user know of such cases.
    lock.lock(sessionOperation);
    try {
        // Refresh and precondition checks; only done for the outermost call.
        prePerform(sessionOperation, t0);
        try {
            // Track nesting depth (see perform); decremented by postPerform.
            sessionOpCount++;
            T result = sessionOperation.performNullable();
            logOperationDetails(contentSession, sessionOperation);
            return result;
        } finally {
            postPerform(sessionOperation, t0);
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Same as {@link #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)}
 * for calls that don't expect any return value; thus calling
 * {@link org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation#performVoid()}.
 *
 * @param sessionOperation the {@code SessionOperation} to perform.
 * @throws RepositoryException
 * @see #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)
 */
// Annotated @Nonnull for consistency with perform()/performNullable();
// the operation is dereferenced unconditionally below.
public void performVoid(@Nonnull SessionOperation<Void> sessionOperation) throws RepositoryException {
    long t0 = clock.getTime();
    // Acquire the exclusive lock for accessing session internals.
    // No other session should be holding the lock, so we log a
    // message to let the user know of such cases.
    lock.lock(sessionOperation);
    try {
        // Refresh and precondition checks; only done for the outermost call.
        prePerform(sessionOperation, t0);
        try {
            // Track nesting depth (see perform); decremented by postPerform.
            sessionOpCount++;
            sessionOperation.performVoid();
            logOperationDetails(contentSession, sessionOperation);
        } finally {
            postPerform(sessionOperation, t0);
        }
    } finally {
        lock.unlock();
    }
}
/**
 * Same as {@link #perform(SessionOperation)} unless this method expects
 * {@link SessionOperation#perform} <em>not</em> to throw a {@code RepositoryException}.
 * Such exceptions will be wrapped into a {@code RuntimeException} and rethrown as they
 * are considered an internal error.
 *
 * @param sessionOperation the {@code SessionOperation} to perform
 * @param <T> return type of {@code sessionOperation}
 * @return the result of {@code sessionOperation.perform()}
 * @see #getRoot()
 */
// @Nonnull annotations added for consistency with perform(), which this
// method delegates to (perform's return is itself @Nonnull).
@Nonnull
public <T> T safePerform(@Nonnull SessionOperation<T> sessionOperation) {
    try {
        return perform(sessionOperation);
    } catch (RepositoryException e) {
        // Preserve the original exception as the cause.
        throw new RuntimeException("Unexpected exception thrown by operation " + sessionOperation, e);
    }
}
/** @return the underlying Oak {@link ContentSession}. */
@Nonnull
public ContentSession getContentSession() {
    return contentSession;
}
/**
 * Determine whether this session is alive and has not been logged
 * out or become stale by other means.
 * @return {@code true} if this session is alive, {@code false} otherwise.
 */
public boolean isAlive() {
    // Flag is cleared by logout().
    return isAlive;
}
/**
 * Check that this session is alive.
 * @throws RepositoryException if this session is not alive
 * @see #isAlive()
 */
public void checkAlive() throws RepositoryException {
    if (isAlive()) {
        return;
    }
    throw new RepositoryException("This session has been closed.");
}
/**
 * @return session update counter; incremented whenever an update operation
 *         completes or the session is refreshed (see postPerform/prePerform).
 */
public long getUpdateCount() {
    return updateCount;
}
/**
 * Sets the user data attached to subsequent commits of this session
 * (stored under {@code EventFactory.USER_DATA} in the commit info).
 */
public void setUserData(String userData) {
    this.userData = userData;
}
/**
 * Commits the given root, attaching the commit path (unless it denotes the
 * repository root) and the session's user data (if set) as commit info,
 * then refreshes the permission provider so it sees the committed state.
 */
private void commit(Root root, String path) throws CommitFailedException {
    ImmutableMap.Builder<String, Object> info = ImmutableMap.builder();
    if (path != null && !denotesRoot(path)) {
        info.put(Root.COMMIT_PATH, path);
    }
    if (userData != null) {
        info.put(EventFactory.USER_DATA, userData);
    }
    root.commit(info.build());
    if (permissionProvider != null) {
        permissionProvider.refresh();
    }
}
/**
 * Commits the changes currently in the transient space.
 * TODO: Consolidate with save().
 *
 * @throws CommitFailedException if the commit failed
 */
public void commit() throws CommitFailedException {
    // Delegates to the private commit with no commit path.
    commit(root, null);
}
/**
 * Commits the changes applied to the given root. The user data (if any)
 * currently attached to this session is passed as the commit message.
 * Used both for normal save() calls and for the various
 * direct-to-workspace operations.
 *
 * @throws CommitFailedException if the commit failed
 */
public void commit(Root root) throws CommitFailedException {
    // No commit path: the whole transient space of the given root is committed.
    commit(root, null);
}
/**
 * Verifies that the node at {@code path} exists and is not protected.
 *
 * @throws PathNotFoundException if no node exists at {@code path}
 * @throws ConstraintViolationException if the node is protected
 */
public void checkProtectedNode(String path) throws RepositoryException {
    NodeDelegate node = getNode(path);
    if (node == null) {
        throw new PathNotFoundException("Node " + path + " does not exist.");
    }
    if (node.isProtected()) {
        throw new ConstraintViolationException("Node " + path + " is protected.");
    }
}
/** @return the {@link AuthInfo} of the underlying content session. */
@Nonnull
public AuthInfo getAuthInfo() {
    return contentSession.getAuthInfo();
}
/**
 * Logs this session out: closes the session statistics and the underlying
 * content session. Calling this on an already closed session is a no-op.
 */
public void logout() {
    if (isAlive) {
        isAlive = false;
        // TODO
        sessionStats.close();
        try {
            contentSession.close();
        } catch (IOException e) {
            log.warn("Error while closing connection", e);
        }
    }
}
/** @return the {@link IdentifierManager} bound to this session's root. */
@Nonnull
public IdentifierManager getIdManager() {
    return idManager;
}
/**
 * @return the delegate of the root node, or {@code null} if the root tree
 *         does not exist or is not accessible (see {@link #getNode(String)}).
 */
@CheckForNull
public NodeDelegate getRootNode() {
    return getNode("/");
}
/**
 * {@code NodeDelegate} at the given path
 * @param path Oak path
 * @return The {@code NodeDelegate} at {@code path} or {@code null} if
 * none exists or not accessible.
 */
@CheckForNull
public NodeDelegate getNode(String path) {
    Tree tree = root.getTree(path);
    if (!tree.exists()) {
        return null;
    }
    return new NodeDelegate(this, tree);
}
/**
 * Returns the node or property delegate at the given path.
 *
 * @param path Oak path
 * @return node or property delegate, or {@code null} if none exists
 */
@CheckForNull
public ItemDelegate getItem(String path) {
    String name = PathUtils.getName(path);
    // An empty name means the path denotes the root.
    if (name.isEmpty()) {
        return getRootNode();
    }
    Tree parent = root.getTree(PathUtils.getParentPath(path));
    // A child node of that name takes precedence over a property.
    Tree child = parent.getChild(name);
    if (child.exists()) {
        return new NodeDelegate(this, child);
    }
    if (parent.hasProperty(name)) {
        return new PropertyDelegate(this, parent, name);
    }
    return null;
}
/**
 * Resolves the node with the given identifier.
 *
 * @param id node identifier
 * @return the matching {@code NodeDelegate}, or {@code null} if the
 *         identifier does not resolve to an existing tree.
 */
@CheckForNull
public NodeDelegate getNodeByIdentifier(String id) {
    Tree tree = idManager.getTree(id);
    if (tree != null && tree.exists()) {
        return new NodeDelegate(this, tree);
    }
    return null;
}
/**
 * {@code PropertyDelegate} at the given path
 * @param path Oak path
 * @return The {@code PropertyDelegate} at {@code path} or {@code null} if
 * none exists or not accessible.
 */
@CheckForNull
public PropertyDelegate getProperty(String path) {
    String name = PathUtils.getName(path);
    Tree parent = root.getTree(PathUtils.getParentPath(path));
    if (!parent.hasProperty(name)) {
        return null;
    }
    return new PropertyDelegate(this, parent, name);
}
/** @return {@code true} if this session's root has uncommitted changes. */
public boolean hasPendingChanges() {
    return root.hasPendingChanges();
}
/**
 * Save the subtree rooted at the given {@code path}, or the entire
 * transient space if given the root path or {@code null}.
 * <p>
 * This implementation only performs the save if the subtree rooted
 * at {@code path} contains all transient changes and will throw an
 * {@link javax.jcr.UnsupportedRepositoryOperationException} otherwise.
 *
 * @param path
 * @throws RepositoryException
 */
public void save(String path) throws RepositoryException {
    // Record save statistics up front so failed saves are counted too.
    sessionCounters.saveTime = clock.getTime();
    sessionCounters.saveCount++;
    try {
        commit(root, path);
    } catch (CommitFailedException e) {
        // Translate to the matching RepositoryException subclass and report
        // the failure to the session statistics before rethrowing.
        RepositoryException repositoryException = newRepositoryException(e);
        sessionStats.failedSave(repositoryException);
        throw repositoryException;
    }
}
/**
 * Refreshes this session's root to the latest state.
 *
 * @param keepChanges if {@code true} and there are pending changes, they are
 *        rebased onto the latest state; otherwise a plain refresh is done.
 */
public void refresh(boolean keepChanges) {
    sessionCounters.refreshTime = clock.getTime();
    sessionCounters.refreshCount++;
    if (keepChanges && hasPendingChanges()) {
        root.rebase();
    } else {
        root.refresh();
    }
    // Keep the permission provider in sync with the refreshed content.
    if (permissionProvider != null) {
        permissionProvider.refresh();
    }
}
//----------------------------------------------------------< Workspace >---
/** @return the name of the workspace this session operates on. */
@Nonnull
public String getWorkspaceName() {
    return contentSession.getWorkspaceName();
}
/**
 * Move a node
 *
 * @param srcPath oak path to the source node to move
 * @param destPath oak path to the destination
 * @param transientOp whether or not to perform the move in transient space
 * @throws RepositoryException
 */
public void move(String srcPath, String destPath, boolean transientOp)
        throws RepositoryException {
    // Transient moves operate on this session's root; workspace moves use a
    // fresh root so the session's pending changes stay untouched.
    Root moveRoot = transientOp ? root : contentSession.getLatestRoot();
    // check destination
    Tree dest = moveRoot.getTree(destPath);
    if (dest.exists()) {
        throw new ItemExistsException(destPath);
    }
    // check parent of destination
    String destParentPath = PathUtils.getParentPath(destPath);
    Tree destParent = moveRoot.getTree(destParentPath);
    if (!destParent.exists()) {
        // Use the already computed parent path instead of recomputing it.
        throw new PathNotFoundException(destParentPath);
    }
    // check source exists
    Tree src = moveRoot.getTree(srcPath);
    if (!src.exists()) {
        throw new PathNotFoundException(srcPath);
    }
    try {
        if (!moveRoot.move(srcPath, destPath)) {
            throw new RepositoryException("Cannot move node at " + srcPath + " to " + destPath);
        }
        if (!transientOp) {
            // Workspace-level move: commit immediately and count it as a save,
            // then refresh this session so it sees the result.
            sessionCounters.saveTime = clock.getTime();
            sessionCounters.saveCount++;
            commit(moveRoot);
            refresh(true);
        }
    } catch (CommitFailedException e) {
        throw newRepositoryException(e);
    }
}
/** @return the query engine of this session's root. */
@Nonnull
public QueryEngine getQueryEngine() {
    return root.getQueryEngine();
}
/**
 * Lazily creates and returns the {@link PermissionProvider} for this session.
 * NOTE(review): initialization is not synchronized here — presumably this is
 * only called while holding the session lock; confirm before relying on it.
 */
@Nonnull
public PermissionProvider getPermissionProvider() {
    if (permissionProvider == null) {
        permissionProvider = checkNotNull(securityProvider)
                .getConfiguration(AuthorizationConfiguration.class)
                .getPermissionProvider(root, getWorkspaceName(), getAuthInfo().getPrincipals());
    }
    return permissionProvider;
}
/**
 * The current {@code Root} instance this session delegate instance operates on.
 * To ensure the returned root reflects the correct repository revision access
 * should only be done from within a {@link SessionOperation} closure through
 * {@link #perform(SessionOperation)}.
 *
 * @return current root
 */
@Nonnull
public Root getRoot() {
    return root;
}
/** Uses the content session's representation, as also used in log output. */
@Override
public String toString() {
    return contentSession.toString();
}
//-----------------------------------------------------------< internal >---
/**
 * Runs the refresh strategy and the operation's precondition checks before
 * a session operation, but only for the outermost (non re-entrant) call.
 */
private void prePerform(@Nonnull SessionOperation<?> op, long t0) throws RepositoryException {
    if (sessionOpCount == 0) {
        // Refresh and precondition checks only for non re-entrant
        // session operations. Don't refresh if this operation is a
        // refresh operation itself or a save operation, which does an
        // implicit refresh, or logout for obvious reasons.
        if (!op.isRefresh() && !op.isSave() && !op.isLogout() &&
                refreshStrategy.needsRefresh(SECONDS.convert(t0 - sessionCounters.accessTime, MILLISECONDS))) {
            refresh(true);
            refreshStrategy.refreshed();
            updateCount++;
        }
        op.checkPreconditions();
    }
}
/**
 * Updates statistics after a session operation completed and resets the
 * refresh strategies depending on the kind of operation performed.
 */
private void postPerform(@Nonnull SessionOperation<?> op, long t0) {
    sessionCounters.accessTime = t0;
    // Operation duration in nanoseconds (clock values are in milliseconds).
    long dt = NANOSECONDS.convert(clock.getTime() - t0, MILLISECONDS);
    sessionOpCount--;
    if (op.isUpdate()) {
        sessionCounters.writeTime = t0;
        sessionCounters.writeCount++;
        writeCounter.incrementAndGet();
        writeDuration.addAndGet(dt);
        updateCount++;
    } else {
        sessionCounters.readTime = t0;
        sessionCounters.readCount++;
        readCounter.incrementAndGet();
        readDuration.addAndGet(dt);
    }
    if (op.isSave()) {
        refreshAtNextAccess.refreshAtNextAccess(false);
        // Force refreshing on access through other sessions on the same thread
        saveCountRefresh.forceRefresh();
    } else if (op.isRefresh()) {
        refreshAtNextAccess.refreshAtNextAccess(false);
        saveCountRefresh.refreshed();
    }
}
/**
 * Writes trace and audit log entries for the given operation, provided the
 * corresponding loggers are enabled.
 */
private static <T> void logOperationDetails(ContentSession session, SessionOperation<T> ops) {
    boolean loggingWanted = readOperationLogger.isTraceEnabled()
            || writeOperationLogger.isTraceEnabled()
            || auditLogger.isDebugEnabled();
    if (!loggingWanted) {
        return;
    }
    Logger log = ops.isUpdate() ? writeOperationLogger : readOperationLogger;
    log.trace("[{}] {}", session, ops);
    //For a logout operation the auth info is not accessible
    if (!ops.isLogout() && !ops.isRefresh() && !ops.isSave() && ops.isUpdate()) {
        auditLogger.debug("[{}] [{}] {}", session.getAuthInfo().getUserID(), session, ops);
    }
}
/**
 * Wraps the given {@link CommitFailedException} instance using the
 * appropriate {@link RepositoryException} subclass based on the
 * {@link CommitFailedException#getType() type} of the given exception.
 *
 * @param exception typed commit failure exception
 * @return matching repository exception
 */
private static RepositoryException newRepositoryException(CommitFailedException exception) {
    // The exception itself knows the matching RepositoryException subclass.
    return exception.asRepositoryException();
}
//------------------------------------------------------------< SynchronizedIterator >---
/**
 * Iterator wrapper that routes every call through the session's
 * {@link WarningLock}, so concurrent use of the backing iterator from
 * multiple threads is serialized (and logged, see WarningLock).
 * @param <T>
 */
private static final class SynchronizedIterator<T> implements Iterator<T> {
    private final Iterator<T> delegate;
    private final WarningLock lock;
    SynchronizedIterator(Iterator<T> delegate, WarningLock lock) {
        this.delegate = delegate;
        this.lock = lock;
    }
    @Override
    public boolean hasNext() {
        // Read access: acquire as a non-update operation.
        lock.lock(false, "hasNext()");
        try {
            return delegate.hasNext();
        } finally {
            lock.unlock();
        }
    }
    @Override
    public T next() {
        lock.lock(false, "next()");
        try {
            return delegate.next();
        } finally {
            lock.unlock();
        }
    }
    @Override
    public void remove() {
        // Removal mutates state, so acquire as an update operation.
        lock.lock(true, "remove()");
        try {
            delegate.remove();
        } finally {
            lock.unlock();
        }
    }
}
/**
 * A {@link Lock} implementation that has additional methods
 * for acquiring the lock, which log a warning if the lock is
 * already held by another thread and was also acquired through
 * such a method.
 */
private static final class WarningLock implements Lock {
    private final Lock lock;
    // All access to members only *after* the lock has been acquired
    private boolean isUpdate;        // whether the current holder acquired for an update
    private Exception holderTrace;   // holder's stack trace; only captured when debug logging is on
    private String holderThread;     // name of the thread currently holding the lock
    private WarningLock(Lock lock) {
        this.lock = lock;
    }
    /**
     * Acquires the lock. If another thread currently holds it, logs a warning
     * (when the holder acquired for an update) or a debug message (otherwise),
     * then blocks until the lock becomes available.
     *
     * @param isUpdate whether the caller is performing an update
     * @param opName operation name used in the log messages
     */
    public void lock(boolean isUpdate, String opName) {
        if (!lock.tryLock()) {
            // Acquire the lock before logging the warnings. As otherwise race conditions
            // on the involved fields might lead to wrong warnings.
            lock.lock();
            if (holderThread != null) {
                if (this.isUpdate) {
                    warn(log, "Attempted to perform " + opName + " while thread " + holderThread +
                            " was concurrently writing to this session. Blocked until the " +
                            "other thread finished using this session. Please review your code " +
                            "to avoid concurrent use of a session.", holderTrace);
                } else if (log.isDebugEnabled()) {
                    log.debug("Attempted to perform " + opName + " while thread " + holderThread +
                            " was concurrently reading from this session. Blocked until the " +
                            "other thread finished using this session. Please review your code " +
                            "to avoid concurrent use of a session.", holderTrace);
                }
            }
        }
        this.isUpdate = isUpdate;
        if (log.isDebugEnabled()) {
            holderTrace = new Exception("Stack trace of concurrent access to session");
        } else {
            // Fix: clear any stale trace from a previous acquisition. Without
            // this, a later warning could attach a stack trace belonging to an
            // old, unrelated holder (e.g. after debug logging was turned off).
            holderTrace = null;
        }
        holderThread = Thread.currentThread().getName();
    }
    private static void warn(Logger logger, String message, Exception stackTrace) {
        if (stackTrace != null) {
            logger.warn(message, stackTrace);
        } else {
            logger.warn(message);
        }
    }
    /** Convenience overload taking the update flag and name from the operation. */
    public void lock(SessionOperation<?> sessionOperation) {
        lock(sessionOperation.isUpdate(), sessionOperation.toString());
    }
    @Override
    public void lock() {
        lock.lock();
        // Plain acquisitions do not participate in holder tracking.
        holderTrace = null;
        holderThread = null;
    }
    @Override
    public void lockInterruptibly() throws InterruptedException {
        lock.lockInterruptibly();
        holderTrace = null;
        holderThread = null;
    }
    @Override
    public boolean tryLock() {
        if (lock.tryLock()) {
            holderTrace = null;
            holderThread = null;
            return true;
        } else {
            return false;
        }
    }
    @Override
    public boolean tryLock(long time, @Nonnull TimeUnit unit) throws InterruptedException {
        if (lock.tryLock(time, unit)) {
            holderTrace = null;
            holderThread = null;
            return true;
        } else {
            return false;
        }
    }
    @Override
    public void unlock() {
        lock.unlock();
    }
    @Nonnull
    @Override
    public Condition newCondition() {
        return lock.newCondition();
    }
}
/**
 * Refresh strategy that triggers exactly when the pending flag has been
 * raised (done by the session on observation events) and clears it once
 * the refresh happened.
 */
private static class RefreshAtNextAccess implements RefreshStrategy {
    // Raised via refreshAtNextAccess(true); lowered by refreshed().
    private boolean pending;
    public void refreshAtNextAccess(boolean enable) {
        pending = enable;
    }
    @Override
    public boolean needsRefresh(long secondsSinceLastAccess) {
        return pending;
    }
    @Override
    public void refreshed() {
        pending = false;
    }
    @Override
    public String toString() {
        return "Refresh on observation event";
    }
}
private static class SaveCountRefresh implements RefreshStrategy {
    /**
     * The repository-wide {@link ThreadLocal} that keeps track of the number
     * of saves performed in each thread.
     */
    private final ThreadLocal<Long> threadSaveCount;
    /**
     * Local copy of the {@link #threadSaveCount} for the current thread.
     * When the two differ, some other session committed on this thread (or
     * this session is being used from another thread); in either case it is
     * best to refresh this session to avoid unexpected behaviour.
     */
    private long sessionSaveCount;
    public SaveCountRefresh(ThreadLocal<Long> threadSaveCount) {
        this.threadSaveCount = threadSaveCount;
        this.sessionSaveCount = currentCount();
    }
    /** Bumps the thread-wide save count and adopts it as our own. */
    public void forceRefresh() {
        long bumped = currentCount() + 1;
        sessionSaveCount = bumped;
        threadSaveCount.set(bumped);
    }
    @Override
    public boolean needsRefresh(long secondsSinceLastAccess) {
        return currentCount() != sessionSaveCount;
    }
    @Override
    public void refreshed() {
        sessionSaveCount = currentCount();
    }
    /** Thread-wide save count, treating an unset thread local as zero. */
    private long currentCount() {
        Long count = threadSaveCount.get();
        if (count == null) {
            return 0;
        }
        return count;
    }
    @Override
    public String toString() {
        return "Refresh after a save on the same thread from a different session";
    }
}
/**
 * Read-only RefreshStrategy responsible for notifying the SessionNamespaces
 * instance that a refresh was called
 */
private static class RefreshNamespaces implements RefreshStrategy {
    private final SessionNamespaces namespaces;
    public RefreshNamespaces(SessionNamespaces sessionNamespaces) {
        this.namespaces = sessionNamespaces;
    }
    /** Never triggers a refresh on its own. */
    @Override
    public boolean needsRefresh(long secondsSinceLastAccess) {
        return false;
    }
    /** Propagates the refresh notification to the session's namespaces. */
    @Override
    public void refreshed() {
        namespaces.onSessionRefresh();
    }
}
/** @return the session-local namespace mappings. */
public SessionNamespaces getNamespaces() {
    return namespaces;
}
}
| |
/*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.lint.CheckArrayWithGoogObject;
import com.google.javascript.jscomp.lint.CheckDuplicateCase;
import com.google.javascript.jscomp.lint.CheckEmptyStatements;
import com.google.javascript.jscomp.lint.CheckEnums;
import com.google.javascript.jscomp.lint.CheckInterfaces;
import com.google.javascript.jscomp.lint.CheckJSDocStyle;
import com.google.javascript.jscomp.lint.CheckMissingSemicolon;
import com.google.javascript.jscomp.lint.CheckNullableReturn;
import com.google.javascript.jscomp.lint.CheckPrimitiveAsObject;
import com.google.javascript.jscomp.lint.CheckPrototypeProperties;
import com.google.javascript.jscomp.lint.CheckRequiresAndProvidesSorted;
import com.google.javascript.jscomp.lint.CheckUnusedLabels;
import com.google.javascript.jscomp.lint.CheckUselessBlocks;
import com.google.javascript.jscomp.newtypes.JSTypeCreatorFromJSDoc;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
* Named groups of DiagnosticTypes exposed by Compiler.
* @author nicksantos@google.com (Nick Santos)
*/
public class DiagnosticGroups {
// Placeholder diagnostic backing deprecated groups
// (see registerDeprecatedGroup below).
static final DiagnosticType UNUSED =
    DiagnosticType.warning("JSC_UNUSED", "{0}");
// Group names excluded from wildcard handling — presumably not enabled
// by "*"; confirm against the wildcard-processing call sites.
public static final Set<String> wildcardExcludedGroups = ImmutableSet.of(
    "reportUnknownTypes", "analyzerChecks", "oldReportUnknownTypes",
    "newCheckTypes", "newCheckTypesCompatibility", "newCheckTypesExtraChecks");
public DiagnosticGroups() {}
// Registry of all groups keyed by name; populated by the registerGroup
// overloads as the static group constants below initialize.
private static final Map<String, DiagnosticGroup> groupsByName =
    new HashMap<>();
/**
 * Registers a group that is kept only so the name stays accepted; it is
 * backed solely by the {@link #UNUSED} placeholder diagnostic.
 */
static DiagnosticGroup registerDeprecatedGroup(String name) {
    return registerGroup(name, new DiagnosticGroup(name, UNUSED));
}
/** Registers the given group under {@code name} and returns it. */
static DiagnosticGroup registerGroup(String name,
    DiagnosticGroup group) {
  groupsByName.put(name, group);
  return group;
}
/** Registers a group under {@code name} made up of the given diagnostic types. */
static DiagnosticGroup registerGroup(String name,
    DiagnosticType ... types) {
  // Delegate to the single-group overload, which performs the registration.
  return registerGroup(name, new DiagnosticGroup(name, types));
}
/** Registers a group under {@code name} that aggregates the given groups. */
static DiagnosticGroup registerGroup(String name,
    DiagnosticGroup ... groups) {
  // Delegate to the single-group overload, which performs the registration.
  return registerGroup(name, new DiagnosticGroup(name, groups));
}
/** Get the registered diagnostic groups, indexed by name. */
public Map<String, DiagnosticGroup> getRegisteredGroups() {
  // Returns an immutable snapshot, not the live registry.
  return ImmutableMap.copyOf(groupsByName);
}
/**
 * Find the diagnostic group registered under the given name, or
 * {@code null} if no group with that name was registered.
 */
public DiagnosticGroup forName(String name) {
  return groupsByName.get(name);
}
// A bit of a hack to display the available groups on the command-line.
// New groups should be added to this list if they are public and should
// be listed on the command-line as an available option.
//
// If a group is suppressible on a per-file basis, it should be added
// to parser/ParserConfig.properties
// NOTE: this string is user-visible output; changing it changes the
// command-line help text.
static final String DIAGNOSTIC_GROUP_NAMES =
    "accessControls, "
    + "ambiguousFunctionDecl, "
    + "checkEventfulObjectDisposal, "
    + "checkRegExp, "
    + "checkTypes, "
    + "checkVars, "
    + "commonJsModuleLoad, "
    + "conformanceViolations, "
    + "const, "
    + "constantProperty, "
    + "deprecated, "
    + "deprecatedAnnotations, "
    + "duplicateMessage, "
    + "es3, "
    + "es5Strict, "
    + "externsValidation, "
    + "fileoverviewTags, "
    + "functionParams, "
    + "globalThis, "
    + "internetExplorerChecks, "
    + "invalidCasts, "
    + "misplacedTypeAnnotation, "
    + "missingGetCssName, "
    + "missingOverride, "
    + "missingPolyfill, "
    + "missingProperties, "
    + "missingProvide, "
    + "missingRequire, "
    + "missingReturn, "
    + "msgDescriptions, "
    + "newCheckTypes, "
    + "nonStandardJsDocs, "
    + "reportUnknownTypes, "
    + "suspiciousCode, "
    + "strictModuleDepCheck, "
    + "typeInvalidation, "
    + "undefinedNames, "
    + "undefinedVars, "
    + "unknownDefines, "
    + "unusedLocalVariables, "
    + "unusedPrivateMembers, "
    + "uselessCode, "
    + "useOfGoogBase, "
    + "underscore, "
    + "visibility";
// Group definitions. NOTE: declaration order matters — aggregate groups
// (e.g. "accessControls") reference groups declared above them.
public static final DiagnosticGroup COMMON_JS_MODULE_LOAD =
    DiagnosticGroups.registerGroup("commonJsModuleLoad",
        ProcessCommonJSModules.SUSPICIOUS_EXPORTS_ASSIGNMENT,
        ProcessCommonJSModules.UNKNOWN_REQUIRE_ENSURE);
public static final DiagnosticGroup GLOBAL_THIS =
    DiagnosticGroups.registerGroup("globalThis",
        CheckGlobalThis.GLOBAL_THIS);
public static final DiagnosticGroup DEPRECATED =
    DiagnosticGroups.registerGroup("deprecated",
        CheckAccessControls.DEPRECATED_NAME,
        CheckAccessControls.DEPRECATED_NAME_REASON,
        CheckAccessControls.DEPRECATED_PROP,
        CheckAccessControls.DEPRECATED_PROP_REASON,
        CheckAccessControls.DEPRECATED_CLASS,
        CheckAccessControls.DEPRECATED_CLASS_REASON);
public static final DiagnosticGroup UNDERSCORE =
    DiagnosticGroups.registerGroup("underscore", // undocumented
        CheckJSDocStyle.MUST_BE_PRIVATE,
        CheckJSDocStyle.MUST_HAVE_TRAILING_UNDERSCORE);
public static final DiagnosticGroup VISIBILITY =
    DiagnosticGroups.registerGroup("visibility",
        CheckAccessControls.BAD_PRIVATE_GLOBAL_ACCESS,
        CheckAccessControls.BAD_PRIVATE_PROPERTY_ACCESS,
        CheckAccessControls.BAD_PACKAGE_PROPERTY_ACCESS,
        CheckAccessControls.BAD_PROTECTED_PROPERTY_ACCESS,
        CheckAccessControls.EXTEND_FINAL_CLASS,
        CheckAccessControls.PRIVATE_OVERRIDE,
        CheckAccessControls.VISIBILITY_MISMATCH,
        CheckAccessControls.CONVENTION_MISMATCH);
// Aggregates DEPRECATED and VISIBILITY, which must be registered above.
public static final DiagnosticGroup ACCESS_CONTROLS =
    DiagnosticGroups.registerGroup("accessControls",
        DEPRECATED, VISIBILITY);
public static final DiagnosticGroup NON_STANDARD_JSDOC =
    DiagnosticGroups.registerGroup("nonStandardJsDocs",
        RhinoErrorReporter.BAD_JSDOC_ANNOTATION,
        RhinoErrorReporter.INVALID_PARAM,
        RhinoErrorReporter.JSDOC_IN_BLOCK_COMMENT);
public static final DiagnosticGroup INVALID_CASTS =
    DiagnosticGroups.registerGroup("invalidCasts",
        TypeValidator.INVALID_CAST,
        NewTypeInference.INVALID_CAST);
@Deprecated
public static final DiagnosticGroup INFERRED_CONST_CHECKS =
    DiagnosticGroups.registerDeprecatedGroup("inferredConstCheck");
public static final DiagnosticGroup FILEOVERVIEW_JSDOC =
    DiagnosticGroups.registerDeprecatedGroup("fileoverviewTags");
public static final DiagnosticGroup STRICT_MODULE_DEP_CHECK =
    DiagnosticGroups.registerGroup("strictModuleDepCheck",
        VarCheck.STRICT_MODULE_DEP_ERROR,
        CheckGlobalNames.STRICT_MODULE_DEP_QNAME);
public static final DiagnosticGroup VIOLATED_MODULE_DEP =
    DiagnosticGroups.registerGroup("violatedModuleDep",
        VarCheck.VIOLATED_MODULE_DEP_ERROR);
public static final DiagnosticGroup EXTERNS_VALIDATION =
    DiagnosticGroups.registerGroup("externsValidation",
        VarCheck.NAME_REFERENCE_IN_EXTERNS_ERROR,
        VarCheck.UNDEFINED_EXTERN_VAR_ERROR);
public static final DiagnosticGroup AMBIGUOUS_FUNCTION_DECL =
    DiagnosticGroups.registerGroup("ambiguousFunctionDecl",
        StrictModeCheck.BAD_FUNCTION_DECLARATION);
public static final DiagnosticGroup UNKNOWN_DEFINES =
    DiagnosticGroups.registerGroup("unknownDefines",
        ProcessDefines.UNKNOWN_DEFINE_WARNING);
public static final DiagnosticGroup TWEAKS =
    DiagnosticGroups.registerGroup("tweakValidation",
        ProcessTweaks.INVALID_TWEAK_DEFAULT_VALUE_WARNING,
        ProcessTweaks.TWEAK_WRONG_GETTER_TYPE_WARNING,
        ProcessTweaks.UNKNOWN_TWEAK_WARNING);
public static final DiagnosticGroup MISSING_OVERRIDE =
    DiagnosticGroups.registerGroup(
        "missingOverride",
        TypeCheck.HIDDEN_INTERFACE_PROPERTY,
        TypeCheck.HIDDEN_SUPERCLASS_PROPERTY);
public static final DiagnosticGroup MISSING_PROPERTIES =
    DiagnosticGroups.registerGroup("missingProperties",
        TypeCheck.INEXISTENT_PROPERTY,
        TypeCheck.INEXISTENT_PROPERTY_WITH_SUGGESTION,
        TypeCheck.POSSIBLE_INEXISTENT_PROPERTY,
        NewTypeInference.INEXISTENT_PROPERTY,
        NewTypeInference.POSSIBLY_INEXISTENT_PROPERTY);
public static final DiagnosticGroup J2CL_CHECKS =
    DiagnosticGroups.registerGroup("j2clChecks",
        J2clChecksPass.J2CL_REFERENCE_EQUALITY);
public static final DiagnosticGroup MISSING_RETURN =
    DiagnosticGroups.registerGroup("missingReturn",
        CheckMissingReturn.MISSING_RETURN_STATEMENT);
public static final DiagnosticGroup INTERNET_EXPLORER_CHECKS =
    DiagnosticGroups.registerGroup("internetExplorerChecks",
        RhinoErrorReporter.TRAILING_COMMA);
public static final DiagnosticGroup UNDEFINED_VARIABLES =
    DiagnosticGroups.registerGroup("undefinedVars",
        VarCheck.UNDEFINED_VAR_ERROR);
public static final DiagnosticGroup UNDEFINED_NAMES =
    DiagnosticGroups.registerGroup("undefinedNames",
        CheckGlobalNames.UNDEFINED_NAME_WARNING);
public static final DiagnosticGroup DEBUGGER_STATEMENT_PRESENT =
    DiagnosticGroups.registerGroup("checkDebuggerStatement",
        CheckDebuggerStatement.DEBUGGER_STATEMENT_PRESENT);
public static final DiagnosticGroup CHECK_REGEXP =
    DiagnosticGroups.registerGroup("checkRegExp",
        CheckRegExp.REGEXP_REFERENCE,
        CheckRegExp.MALFORMED_REGEXP);
// NOTE(dimvar): it'd be nice to add TypedScopeCreator.ALL_DIAGNOSTICS here,
// but we would first need to cleanup projects that would break because
// they set --jscomp_error=checkTypes.
public static final DiagnosticGroup OLD_CHECK_TYPES =
    DiagnosticGroups.registerGroup("oldCheckTypes", // undocumented
        TypeValidator.ALL_DIAGNOSTICS,
        TypeCheck.ALL_DIAGNOSTICS);
// Run the new type inference, but omit many warnings that are not
// found by the old type checker. This makes migration to NTI more manageable.
public static final DiagnosticGroup NEW_CHECK_TYPES_COMPATIBILITY_MODE =
    DiagnosticGroups.registerGroup("newCheckTypesCompatibility", // undocumented
        JSTypeCreatorFromJSDoc.COMPATIBLE_DIAGNOSTICS,
        GlobalTypeInfo.COMPATIBLE_DIAGNOSTICS,
        NewTypeInference.COMPATIBLE_DIAGNOSTICS);
public static final DiagnosticGroup NEW_CHECK_TYPES_EXTRA_CHECKS =
    DiagnosticGroups.registerGroup("newCheckTypesExtraChecks", // undocumented
        JSTypeCreatorFromJSDoc.NEW_DIAGNOSTICS,
        GlobalTypeInfo.NEW_DIAGNOSTICS,
        NewTypeInference.NEW_DIAGNOSTICS);
// Part of the new type inference
public static final DiagnosticGroup NEW_CHECK_TYPES =
    DiagnosticGroups.registerGroup("newCheckTypes",
        NEW_CHECK_TYPES_COMPATIBILITY_MODE,
        NEW_CHECK_TYPES_EXTRA_CHECKS);
// Aggregates the old and new type checkers' diagnostics.
public static final DiagnosticGroup CHECK_TYPES =
    DiagnosticGroups.registerGroup("checkTypes",
        OLD_CHECK_TYPES,
        NEW_CHECK_TYPES);
public static final DiagnosticGroup NEW_CHECK_TYPES_ALL_CHECKS =
    DiagnosticGroups.registerGroup("newCheckTypesAllChecks",
        NewTypeInference.NULLABLE_DEREFERENCE);
static {
    // Warnings that are absent in closure library
    // The commented-out diagnostics below are deliberately excluded from
    // this group; they are kept for reference.
    DiagnosticGroups.registerGroup("newCheckTypesClosureClean",
        JSTypeCreatorFromJSDoc.CONFLICTING_EXTENDED_TYPE,
        JSTypeCreatorFromJSDoc.CONFLICTING_IMPLEMENTED_TYPE,
        JSTypeCreatorFromJSDoc.DICT_IMPLEMENTS_INTERF,
        JSTypeCreatorFromJSDoc.EXTENDS_NON_OBJECT,
        JSTypeCreatorFromJSDoc.EXTENDS_NOT_ON_CTOR_OR_INTERF,
        JSTypeCreatorFromJSDoc.IMPLEMENTS_WITHOUT_CONSTRUCTOR,
        JSTypeCreatorFromJSDoc.INHERITANCE_CYCLE,
        JSTypeCreatorFromJSDoc.UNION_IS_UNINHABITABLE,
        GlobalTypeInfo.ABSTRACT_METHOD_IN_CONCRETE_CLASS,
        GlobalTypeInfo.ANONYMOUS_NOMINAL_TYPE,
        GlobalTypeInfo.CANNOT_INIT_TYPEDEF,
        GlobalTypeInfo.CANNOT_OVERRIDE_FINAL_METHOD,
        GlobalTypeInfo.CONST_WITHOUT_INITIALIZER,
        // GlobalTypeInfo.COULD_NOT_INFER_CONST_TYPE,
        GlobalTypeInfo.CTOR_IN_DIFFERENT_SCOPE,
        GlobalTypeInfo.DICT_WITHOUT_CTOR,
        GlobalTypeInfo.DUPLICATE_JSDOC,
        GlobalTypeInfo.DUPLICATE_PROP_IN_ENUM,
        GlobalTypeInfo.EXPECTED_CONSTRUCTOR,
        GlobalTypeInfo.EXPECTED_INTERFACE,
        GlobalTypeInfo.INEXISTENT_PARAM,
        GlobalTypeInfo.INTERFACE_METHOD_NOT_IMPLEMENTED,
        // GlobalTypeInfo.INVALID_PROP_OVERRIDE,
        GlobalTypeInfo.LENDS_ON_BAD_TYPE,
        GlobalTypeInfo.MALFORMED_ENUM,
        GlobalTypeInfo.MISPLACED_CONST_ANNOTATION,
        GlobalTypeInfo.ONE_TYPE_FOR_MANY_VARS,
        // GlobalTypeInfo.REDECLARED_PROPERTY,
        GlobalTypeInfo.STRUCT_WITHOUT_CTOR_OR_INTERF,
        GlobalTypeInfo.SUPER_INTERFACES_HAVE_INCOMPATIBLE_PROPERTIES,
        GlobalTypeInfo.UNKNOWN_OVERRIDE,
        GlobalTypeInfo.UNRECOGNIZED_TYPE_NAME,
        NewTypeInference.ABSTRACT_SUPER_METHOD_NOT_CALLABLE,
        NewTypeInference.ASSERT_FALSE,
        NewTypeInference.CANNOT_BIND_CTOR,
        NewTypeInference.CONST_REASSIGNED,
        NewTypeInference.CONSTRUCTOR_NOT_CALLABLE,
        NewTypeInference.CROSS_SCOPE_GOTCHA,
        // NewTypeInference.FAILED_TO_UNIFY,
        // NewTypeInference.FORIN_EXPECTS_OBJECT,
        NewTypeInference.FORIN_EXPECTS_STRING_KEY,
        // NewTypeInference.GLOBAL_THIS,
        // NewTypeInference.GOOG_BIND_EXPECTS_FUNCTION,
        NewTypeInference.ILLEGAL_OBJLIT_KEY,
        // NewTypeInference.ILLEGAL_PROPERTY_ACCESS,
        // NewTypeInference.ILLEGAL_PROPERTY_CREATION,
        NewTypeInference.IN_USED_WITH_STRUCT,
        // NewTypeInference.INEXISTENT_PROPERTY,
        // NewTypeInference.INVALID_ARGUMENT_TYPE,
        // NewTypeInference.INVALID_CAST,
        // NewTypeInference.INVALID_INDEX_TYPE,
        NewTypeInference.INVALID_INFERRED_RETURN_TYPE,
        NewTypeInference.INVALID_OBJLIT_PROPERTY_TYPE,
        // NewTypeInference.INVALID_OPERAND_TYPE,
        NewTypeInference.INVALID_THIS_TYPE_IN_BIND,
        NewTypeInference.MISSING_RETURN_STATEMENT,
        // NewTypeInference.MISTYPED_ASSIGN_RHS,
        // NewTypeInference.NOT_A_CONSTRUCTOR,
        // NewTypeInference.NOT_CALLABLE,
        // NewTypeInference.NOT_UNIQUE_INSTANTIATION,
        // NewTypeInference.POSSIBLY_INEXISTENT_PROPERTY,
        // NewTypeInference.PROPERTY_ACCESS_ON_NONOBJECT,
        // NewTypeInference.RETURN_NONDECLARED_TYPE,
        // NewTypeInference.WRONG_ARGUMENT_COUNT,
        NewTypeInference.UNKNOWN_ASSERTION_TYPE,
        NewTypeInference.UNKNOWN_TYPEOF_VALUE);
}
// Checks (from CheckEventfulObjectDisposal) for eventful objects that are
// never disposed, purely local, overwritten, or unlistened with an
// anonymously-bound function.
public static final DiagnosticGroup CHECK_EVENTFUL_OBJECT_DISPOSAL =
DiagnosticGroups.registerGroup("checkEventfulObjectDisposal",
CheckEventfulObjectDisposal.EVENTFUL_OBJECT_NOT_DISPOSED,
CheckEventfulObjectDisposal.EVENTFUL_OBJECT_PURELY_LOCAL,
CheckEventfulObjectDisposal.OVERWRITE_PRIVATE_EVENTFUL_OBJECT,
CheckEventfulObjectDisposal.UNLISTEN_WITH_ANONBOUND);
// Unknown-expression-type reporting from the old type checker only.
public static final DiagnosticGroup OLD_REPORT_UNKNOWN_TYPES =
DiagnosticGroups.registerGroup("oldReportUnknownTypes", // undocumented
TypeCheck.UNKNOWN_EXPR_TYPE);
// Unknown-expression-type reporting from both type checkers.
public static final DiagnosticGroup REPORT_UNKNOWN_TYPES =
DiagnosticGroups.registerGroup("reportUnknownTypes",
TypeCheck.UNKNOWN_EXPR_TYPE,
NewTypeInference.UNKNOWN_EXPR_TYPE);
// Variable checks: undefined variables, multiple declarations, and
// early references / redeclarations.
public static final DiagnosticGroup CHECK_VARIABLES =
DiagnosticGroups.registerGroup("checkVars",
VarCheck.UNDEFINED_VAR_ERROR,
VarCheck.VAR_MULTIPLY_DECLARED_ERROR,
VariableReferenceCheck.EARLY_REFERENCE,
VariableReferenceCheck.REDECLARED_VARIABLE);
// Useless and unreachable code.
public static final DiagnosticGroup CHECK_USELESS_CODE =
DiagnosticGroups.registerGroup("uselessCode",
CheckSideEffects.USELESS_CODE_ERROR,
CheckUnreachableCode.UNREACHABLE_CODE);
// Reassignment or deletion of values/properties marked constant,
// collected from all checkers that report them.
public static final DiagnosticGroup CONST =
DiagnosticGroups.registerGroup("const",
CheckAccessControls.CONST_PROPERTY_DELETED,
CheckAccessControls.CONST_PROPERTY_REASSIGNED_VALUE,
ConstCheck.CONST_REASSIGNED_VALUE_ERROR,
NewTypeInference.CONST_REASSIGNED,
NewTypeInference.CONST_PROPERTY_REASSIGNED,
NewTypeInference.CONST_PROPERTY_DELETED);
// Subset of the const checks reported by access-control analysis.
static final DiagnosticGroup ACCESS_CONTROLS_CONST =
DiagnosticGroups.registerGroup("accessControlsConst",
CheckAccessControls.CONST_PROPERTY_DELETED,
CheckAccessControls.CONST_PROPERTY_REASSIGNED_VALUE);
// Constant-property violations from both type checkers.
public static final DiagnosticGroup CONSTANT_PROPERTY =
DiagnosticGroups.registerGroup("constantProperty",
CheckAccessControls.CONST_PROPERTY_DELETED,
CheckAccessControls.CONST_PROPERTY_REASSIGNED_VALUE,
NewTypeInference.CONST_PROPERTY_REASSIGNED,
NewTypeInference.CONST_PROPERTY_DELETED);
// Warnings emitted when property disambiguation invalidates a type.
public static final DiagnosticGroup TYPE_INVALIDATION =
DiagnosticGroups.registerGroup("typeInvalidation",
DisambiguateProperties.Warnings.INVALIDATION,
DisambiguateProperties.Warnings.INVALIDATION_ON_TYPE);
// Duplicate declarations of variables and properties.
public static final DiagnosticGroup DUPLICATE_VARS =
DiagnosticGroups.registerGroup("duplicate",
VarCheck.VAR_MULTIPLY_DECLARED_ERROR,
TypeValidator.DUP_VAR_DECLARATION,
TypeValidator.DUP_VAR_DECLARATION_TYPE_MISMATCH,
VariableReferenceCheck.REDECLARED_VARIABLE,
GlobalTypeInfo.REDECLARED_PROPERTY);
// Constructs that are invalid in ES3 (property names, trailing commas).
public static final DiagnosticGroup ES3 =
DiagnosticGroups.registerGroup("es3",
RhinoErrorReporter.INVALID_ES3_PROP_NAME,
RhinoErrorReporter.TRAILING_COMMA);
// Less common ES5 strict-mode violations (with, eval/arguments misuse, etc.).
static final DiagnosticGroup ES5_STRICT_UNCOMMON =
DiagnosticGroups.registerGroup("es5StrictUncommon",
RhinoErrorReporter.INVALID_OCTAL_LITERAL,
RhinoErrorReporter.DUPLICATE_PARAM,
StrictModeCheck.USE_OF_WITH,
StrictModeCheck.EVAL_DECLARATION,
StrictModeCheck.EVAL_ASSIGNMENT,
StrictModeCheck.ARGUMENTS_DECLARATION,
StrictModeCheck.ARGUMENTS_ASSIGNMENT,
StrictModeCheck.DELETE_VARIABLE,
StrictModeCheck.DUPLICATE_OBJECT_KEY,
StrictModeCheck.BAD_FUNCTION_DECLARATION);
// ES5 strict-mode restrictions on reflection (arguments.callee/caller,
// Function.prototype.caller/arguments).
static final DiagnosticGroup ES5_STRICT_REFLECTION =
DiagnosticGroups.registerGroup("es5StrictReflection",
StrictModeCheck.ARGUMENTS_CALLEE_FORBIDDEN,
StrictModeCheck.ARGUMENTS_CALLER_FORBIDDEN,
StrictModeCheck.FUNCTION_CALLER_FORBIDDEN,
StrictModeCheck.FUNCTION_ARGUMENTS_PROP_FORBIDDEN);
// Union of both ES5 strict-mode sub-groups above.
public static final DiagnosticGroup ES5_STRICT =
DiagnosticGroups.registerGroup("es5Strict",
ES5_STRICT_UNCOMMON,
ES5_STRICT_REFLECTION);
// Missing goog.provide / goog.module declarations.
public static final DiagnosticGroup MISSING_PROVIDE =
DiagnosticGroups.registerGroup("missingProvide",
CheckProvides.MISSING_PROVIDE_WARNING,
ClosureRewriteModule.MISSING_MODULE_OR_PROVIDE);
// Missing goog.require for a used constructor.
public static final DiagnosticGroup MISSING_REQUIRE =
DiagnosticGroups.registerGroup("missingRequire",
CheckRequiresForConstructors.MISSING_REQUIRE_WARNING);
// Stricter missing-require checking, including goog.scope usage.
public static final DiagnosticGroup STRICT_MISSING_REQUIRE =
DiagnosticGroups.registerGroup("strictMissingRequire",
CheckRequiresForConstructors.MISSING_REQUIRE_WARNING,
CheckRequiresForConstructors.MISSING_REQUIRE_FOR_GOOG_SCOPE,
CheckRequiresForConstructors.MISSING_REQUIRE_STRICT_WARNING);
// Legacy goog.scope require checks plus extra-require detection.
public static final DiagnosticGroup STRICT_REQUIRES =
DiagnosticGroups.registerGroup("legacyGoogScopeRequire",
CheckRequiresForConstructors.MISSING_REQUIRE_FOR_GOOG_SCOPE,
CheckRequiresForConstructors.EXTRA_REQUIRE_WARNING);
// goog.require statements that are not needed.
public static final DiagnosticGroup EXTRA_REQUIRE =
DiagnosticGroups.registerGroup("extraRequire",
CheckRequiresForConstructors.EXTRA_REQUIRE_WARNING);
// Missing goog.getCssName wrapping of CSS class-name strings.
@GwtIncompatible("java.util.regex")
public static final DiagnosticGroup MISSING_GETCSSNAME =
DiagnosticGroups.registerGroup("missingGetCssName",
CheckMissingGetCssName.MISSING_GETCSSNAME);
// Duplicate translation-message keys.
@GwtIncompatible("JsMessage")
public static final DiagnosticGroup DUPLICATE_MESSAGE =
DiagnosticGroups.registerGroup("duplicateMessage",
JsMessageVisitor.MESSAGE_DUPLICATE_KEY);
// Translation messages that lack a description.
@GwtIncompatible("JsMessage")
public static final DiagnosticGroup MESSAGE_DESCRIPTIONS =
DiagnosticGroups.registerGroup("msgDescriptions",
JsMessageVisitor.MESSAGE_HAS_NO_DESCRIPTION);
/**
* Warnings that only apply to people who use MSG_ to denote
* messages. Note that this doesn't include warnings about
* proper use of goog.getMsg
*/
@GwtIncompatible("JsMessage")
public static final DiagnosticGroup MSG_CONVENTIONS =
DiagnosticGroups.registerGroup("messageConventions", // undocumented
JsMessageVisitor.MESSAGE_HAS_NO_DESCRIPTION,
JsMessageVisitor.MESSAGE_HAS_NO_TEXT,
JsMessageVisitor.MESSAGE_TREE_MALFORMED,
JsMessageVisitor.MESSAGE_HAS_NO_VALUE,
JsMessageVisitor.MESSAGE_DUPLICATE_KEY,
JsMessageVisitor.MESSAGE_NOT_INITIALIZED_USING_NEW_SYNTAX);
// JSDoc annotations that appear in a place where they are not allowed.
public static final DiagnosticGroup MISPLACED_TYPE_ANNOTATION =
DiagnosticGroups.registerGroup("misplacedTypeAnnotation",
CheckJSDoc.ARROW_FUNCTION_AS_CONSTRUCTOR,
CheckJSDoc.DEFAULT_PARAM_MUST_BE_MARKED_OPTIONAL,
CheckJSDoc.DISALLOWED_MEMBER_JSDOC,
CheckJSDoc.INVALID_NO_SIDE_EFFECT_ANNOTATION,
CheckJSDoc.INVALID_MODIFIES_ANNOTATION,
CheckJSDoc.MISPLACED_ANNOTATION,
CheckJSDoc.MISPLACED_MSG_ANNOTATION);
// Code patterns that are legal but usually indicate a bug
// (stray semicolons, NaN comparisons, misused operators, ...).
public static final DiagnosticGroup SUSPICIOUS_CODE =
DiagnosticGroups.registerGroup(
"suspiciousCode",
CheckDuplicateCase.DUPLICATE_CASE,
CheckSuspiciousCode.SUSPICIOUS_SEMICOLON,
CheckSuspiciousCode.SUSPICIOUS_COMPARISON_WITH_NAN,
CheckSuspiciousCode.SUSPICIOUS_IN_OPERATOR,
CheckSuspiciousCode.SUSPICIOUS_INSTANCEOF_LEFT_OPERAND,
CheckSuspiciousCode.SUSPICIOUS_NEGATED_LEFT_OPERAND_OF_IN_OPERATOR,
TypeCheck.DETERMINISTIC_TEST,
ProcessCommonJSModules.SUSPICIOUS_EXPORTS_ASSIGNMENT);
// Problems with declared function parameters.
public static final DiagnosticGroup FUNCTION_PARAMS =
DiagnosticGroups.registerGroup(
"functionParams",
FunctionTypeBuilder.INEXISTENT_PARAM,
FunctionTypeBuilder.OPTIONAL_ARG_AT_END);
// Use of deprecated JSDoc annotations.
public static final DiagnosticGroup DEPRECATED_ANNOTATIONS =
DiagnosticGroups.registerGroup("deprecatedAnnotations",
CheckJSDoc.ANNOTATION_DEPRECATED);
// Private properties that are never used.
public static final DiagnosticGroup UNUSED_PRIVATE_PROPERTY =
DiagnosticGroups.registerGroup("unusedPrivateMembers",
CheckUnusedPrivateProperties.UNUSED_PRIVATE_PROPERTY);
// Local variables that are assigned but never read.
public static final DiagnosticGroup UNUSED_LOCAL_VARIABLE =
DiagnosticGroups.registerGroup("unusedLocalVariables",
VariableReferenceCheck.UNUSED_LOCAL_ASSIGNMENT);
// These checks are not intended to be enabled as errors. It is
// recommended that you think of them as "linter" warnings that
// provide optional suggestions.
public static final DiagnosticGroup LINT_CHECKS =
DiagnosticGroups.registerGroup(
"lintChecks", // undocumented
CheckJSDocStyle.ALL_DIAGNOSTICS,
new DiagnosticGroup(
CheckEmptyStatements.USELESS_EMPTY_STATEMENT,
CheckEnums.COMPUTED_PROP_NAME_IN_ENUM,
CheckEnums.DUPLICATE_ENUM_VALUE,
CheckEnums.ENUM_PROP_NOT_CONSTANT,
CheckEnums.SHORTHAND_ASSIGNMENT_IN_ENUM,
// TODO(tbreisacher): Consider moving the CheckInterfaces warnings into the
// checkTypes DiagnosticGroup
CheckInterfaces.INTERFACE_FUNCTION_NOT_EMPTY,
CheckInterfaces.INTERFACE_SHOULD_NOT_TAKE_ARGS,
CheckMissingSemicolon.MISSING_SEMICOLON,
CheckPrimitiveAsObject.NEW_PRIMITIVE_OBJECT,
CheckPrimitiveAsObject.PRIMITIVE_OBJECT_DECLARATION,
CheckPrototypeProperties.ILLEGAL_PROTOTYPE_MEMBER,
CheckRequiresAndProvidesSorted.DUPLICATE_REQUIRE,
CheckRequiresAndProvidesSorted.REQUIRES_NOT_SORTED,
CheckRequiresAndProvidesSorted.PROVIDES_NOT_SORTED,
CheckRequiresAndProvidesSorted.PROVIDES_AFTER_REQUIRES,
CheckUnusedLabels.UNUSED_LABEL,
CheckUselessBlocks.USELESS_BLOCK,
ClosureCheckModule.GOOG_MODULE_IN_NON_MODULE,
ClosureCheckModule.LET_GOOG_REQUIRE,
ClosureCheckModule.JSDOC_REFERENCE_TO_FULLY_QUALIFIED_IMPORT_NAME,
ClosureCheckModule.JSDOC_REFERENCE_TO_SHORT_IMPORT_BY_LONG_NAME_INCLUDING_SHORT_NAME,
ClosureCheckModule.REFERENCE_TO_FULLY_QUALIFIED_IMPORT_NAME,
ClosureCheckModule.REFERENCE_TO_SHORT_IMPORT_BY_LONG_NAME_INCLUDING_SHORT_NAME,
ClosureRewriteModule.USELESS_USE_STRICT_DIRECTIVE,
RhinoErrorReporter.JSDOC_MISSING_BRACES_WARNING,
RhinoErrorReporter.JSDOC_MISSING_TYPE_WARNING,
RhinoErrorReporter.TOO_MANY_TEMPLATE_PARAMS));
// Stricter, opt-in style rules for goog.module files; overlaps with the
// ClosureCheckModule diagnostics in lintChecks above.
static final DiagnosticGroup STRICT_MODULE_CHECKS =
DiagnosticGroups.registerGroup(
"strictModuleChecks",
ClosureCheckModule.AT_EXPORT_IN_NON_LEGACY_GOOG_MODULE,
ClosureCheckModule.LET_GOOG_REQUIRE,
ClosureCheckModule.JSDOC_REFERENCE_TO_FULLY_QUALIFIED_IMPORT_NAME,
ClosureCheckModule.JSDOC_REFERENCE_TO_SHORT_IMPORT_BY_LONG_NAME_INCLUDING_SHORT_NAME,
ClosureCheckModule.REFERENCE_TO_FULLY_QUALIFIED_IMPORT_NAME,
ClosureCheckModule.REFERENCE_TO_SHORT_IMPORT_BY_LONG_NAME_INCLUDING_SHORT_NAME);
// A diagnostic group appears to be enabled if any of the DiagnosticTypes it
// contains are enabled. We need this group so we can distinguish whether
// ANALYZER_CHECKS was directly enabled or only appears to be, because
// UNUSED_PRIVATE_PROPERTY was enabled.
static final DiagnosticGroup ANALYZER_CHECKS_INTERNAL =
DiagnosticGroups.registerGroup("analyzerChecksInternal", // undocumented
CheckArrayWithGoogObject.ARRAY_PASSED_TO_GOOG_OBJECT,
CheckNullableReturn.NULLABLE_RETURN,
CheckNullableReturn.NULLABLE_RETURN_WITH_NAME,
ImplicitNullabilityCheck.IMPLICITLY_NULLABLE_JSDOC);
// Similar to the lintChecks group above, but includes things that cannot be done on a single
// file at a time, for example because they require typechecking.
public static final DiagnosticGroup ANALYZER_CHECKS =
DiagnosticGroups.registerGroup("analyzerChecks", // undocumented
ANALYZER_CHECKS_INTERNAL,
UNUSED_PRIVATE_PROPERTY);
// Flags use of goog.base.
public static final DiagnosticGroup USE_OF_GOOG_BASE =
DiagnosticGroups.registerGroup("useOfGoogBase",
ProcessClosurePrimitives.USE_OF_GOOG_BASE);
// Invalid calls to Closure dependency primitives.
public static final DiagnosticGroup CLOSURE_DEP_METHOD_USAGE_CHECKS =
DiagnosticGroups.registerGroup("closureDepMethodUsageChecks",
ProcessClosurePrimitives.INVALID_CLOSURE_CALL_ERROR);
// This group exists so that generated code can suppress these
// warnings. Not for general use. These diagnostics will most likely
// be moved to the suspiciousCode group.
// Registered for its side effect only; no constant is exposed.
static {
DiagnosticGroups.registerGroup("transitionalSuspiciousCodeWarnings",
PeepholeFoldConstants.INDEX_OUT_OF_BOUNDS_ERROR,
PeepholeFoldConstants.NEGATING_A_NON_NUMBER_ERROR,
PeepholeFoldConstants.FRACTIONAL_BITWISE_OPERAND);
}
// This diagnostic group is intentionally absent in ParserConfig.properties.
// Conformance checks are supposed to be enforced project-wide, so we don't
// allow suppressions on individual functions.
// In the future, we may carve out a subset of the conformance checks that is
// OK to suppress.
// For now, the only way to suppress a check at a granularity smaller than
// the file level is by using a whitelist file.
@GwtIncompatible("Conformance")
public static final DiagnosticGroup CONFORMANCE_VIOLATIONS =
DiagnosticGroups.registerGroup("conformanceViolations",
CheckConformance.CONFORMANCE_VIOLATION,
CheckConformance.CONFORMANCE_POSSIBLE_VIOLATION);
// Late goog.provide diagnostics from both the primitives pass and the
// module-rewriting pass.
public static final DiagnosticGroup LATE_PROVIDE =
DiagnosticGroups.registerGroup(
"lateProvide", // undocumented
ProcessClosurePrimitives.LATE_PROVIDE_ERROR,
ClosureRewriteModule.LATE_PROVIDE_ERROR);
// Polyfills that cannot be supplied for the configured output version.
public static final DiagnosticGroup MISSING_POLYFILL =
DiagnosticGroups.registerGroup(
"missingPolyfill",
RewritePolyfills.INSUFFICIENT_OUTPUT_VERSION_ERROR);
// For internal use only, so there are no constants for these groups.
static {
DiagnosticGroups.registerGroup("invalidProvide",
ProcessClosurePrimitives.INVALID_PROVIDE_ERROR);
DiagnosticGroups.registerGroup("es6Typed",
RhinoErrorReporter.MISPLACED_TYPE_SYNTAX);
DiagnosticGroups.registerGroup("duplicateZipContents",
SourceFile.DUPLICATE_ZIP_CONTENTS);
// NOTE(review): presumably this keeps "unnecessaryCasts" recognized as a
// (now no-op) suppression name — confirm against registerDeprecatedGroup.
DiagnosticGroups.registerDeprecatedGroup("unnecessaryCasts");
}
/**
 * Adds warning levels by name.
 *
 * @param options the compiler options to update
 * @param name the registered name of a diagnostic group; must resolve to a known group
 * @param level the check level to apply to every diagnostic in that group
 */
void setWarningLevel(CompilerOptions options, String name, CheckLevel level) {
  DiagnosticGroup resolved = forName(name);
  Preconditions.checkNotNull(resolved, "No warning class for name: %s", name);
  options.setWarningLevel(resolved, level);
}
}
| |
package com.wizzardo.http.framework.template;
import com.wizzardo.http.framework.Environment;
import com.wizzardo.http.framework.Holders;
import com.wizzardo.http.framework.di.Injectable;
import com.wizzardo.tools.interfaces.Consumer;
import com.wizzardo.tools.interfaces.Filter;
import com.wizzardo.tools.io.FileTools;
import com.wizzardo.tools.io.IOTools;
import com.wizzardo.tools.io.ZipTools;
import com.wizzardo.tools.misc.Unchecked;
import java.io.*;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
 * {@link ResourceTools} implementation backed by the local file system and
 * classpath. When the application is started from a jar file, the jar's
 * "public" entries are unzipped into a temporary directory so they can be
 * resolved as regular files.
 *
 * @author: moxa
 * Date: 11/23/12
 */
@Injectable
public class LocalResourcesTools implements ResourceTools {

    // Paths (directories and archives) that are scanned for classes.
    protected Set<String> classpath = new HashSet<>();
    // Directories searched by getResourceFile(), in registration order.
    protected List<File> resourcesDirs = new ArrayList<>();
    // A class name is accepted if at least one of these filters allows it.
    protected List<Filter<String>> classpathFilters = new ArrayList<>();
    // Directory the jar was unzipped to, or null when not running from a jar.
    protected File unzippedJar;

    {
        // On Java 8 the system class loader exposes its URLs directly;
        // on newer JVMs it is not a URLClassLoader and this branch is skipped.
        ClassLoader cl = ClassLoader.getSystemClassLoader();
        if (cl instanceof URLClassLoader) {
            URL[] urls = ((URLClassLoader) cl).getURLs();
            for (URL url : urls) {
                classpath.add(url.getFile());
            }
        }

        File src = new File("src");
        if (src.exists() && src.isDirectory()) {
            addResourcesDir(src.getAbsoluteFile().getParentFile());
        }

        File jarFile = new File(LocalResourcesTools.class.getProtectionDomain().getCodeSource().getLocation().getPath());
//        classpath.add(jarFile.getAbsolutePath());
        if (jarFile.isFile()) {
            // Running from a jar: unpack its "public" entries into a fresh temp directory.
            File outDir = new File(Unchecked.call(() -> File.createTempFile("---", null)).getParentFile(), jarFile.getName() + "_unzipped");
            if (outDir.exists())
                FileTools.deleteRecursive(outDir);

            ZipTools.unzip(jarFile, outDir, entry -> entry.getName().startsWith("public"));
            addResourcesDir(outDir);
            unzippedJar = outDir;
        }
    }

    /**
     * @return true if the application was started from a jar file.
     */
    public boolean isJar() {
        return unzippedJar != null;
    }

    /**
     * @return directory the jar was unzipped to, or null when not running from a jar.
     */
    public File getUnzippedJarDirectory() {
        return unzippedJar;
    }

    /**
     * Opens a resource as a stream, first via the class loader, then via the
     * registered resource directories.
     *
     * @param path resource path, with or without a leading slash
     * @return an open stream; the caller is responsible for closing it
     * @throws FileNotFoundException if the resource cannot be located
     */
    public InputStream getResource(String path) throws FileNotFoundException {
        InputStream in = LocalResourcesTools.class.getResourceAsStream(path.startsWith("/") ? path : "/" + path);
        if (in != null) {
            return in;
        }

        File f = getResourceFile(path);
        if (f == null || !f.exists())
            throw new FileNotFoundException("file " + path + " not found");
        return new FileInputStream(f);
    }

    /**
     * Passes the resolved resource file to the consumer; does nothing when
     * the resource cannot be found.
     */
    public void getResourceFile(String path, Consumer<File> consumer) {
        File file = getResourceFile(path);
        if (file != null && file.exists()) {
            consumer.consume(file);
        }
    }

    /**
     * Resolves a resource to a file by trying, in order: the class loader,
     * the path itself (when absolute), and the registered resource dirs.
     *
     * @return the resolved file, or null when not found
     */
    public File getResourceFile(String path) {
        File f;
        try {
            f = new File(LocalResourcesTools.class.getClassLoader().getResource(path).toURI());
            if (f.exists())
                return f;
        } catch (Exception ignored) {
            // getResource() may return null (causing an NPE) or yield an
            // unusable URI; fall through to the other lookup strategies.
        }

        f = path.startsWith("/") ? new File(path) : null;
        if (f != null && f.exists())
            return f;

        for (File dir : resourcesDirs) {
            f = new File(dir, path);
            if (f.exists())
                return f;
        }
        return null;
    }

    /**
     * Reads a resource fully and decodes it as UTF-8. Any exception is
     * swallowed by {@code Unchecked.ignore}.
     */
    public String getResourceAsString(String path) {
        return Unchecked.ignore(() -> new String(IOTools.bytes(getResource(path)), StandardCharsets.UTF_8));
    }

    /**
     * Scans every accepted classpath entry (directories and zip archives)
     * and returns all loadable classes allowed by the registered filters.
     */
    @Override
    public List<Class> getClasses() {
        List<Class> l = new ArrayList<Class>();
        File dir;
        if (Holders.getEnvironment() != Environment.TEST)
            System.out.println("classpath: " + classpath);
        for (String path : classpath) {
            dir = new File(path);
            if (!filterClasspath(dir))
                continue;

            if (Holders.getEnvironment() != Environment.TEST)
                System.out.println("searching for classes in " + dir.getAbsolutePath());
            if (!dir.exists())
                continue;

            if (dir.isDirectory())
                getClasses(dir, dir, l);
            else if (ZipTools.isZip(dir)) {
                getClasses(dir, l);
            }
        }
        return l;
    }

    @Override
    public ResourceTools addClasspathFilter(Filter<String> filter) {
        classpathFilters.add(filter);
        return this;
    }

    @Override
    public List<Filter<String>> getClasspathFilters() {
        return classpathFilters;
    }

    // Well-known JRE/IDE jars that never contain application classes and are
    // excluded from scanning (compared by path suffix).
    protected final static String[] classpathFiltersNotEndsWith = new String[]{
            "/jre/lib/charsets.jar",
            "/jre/lib/jfxswt.jar",
            "/jre/lib/resources.jar",
            "/jre/lib/jsse.jar",
            "/jre/lib/rt.jar",
            "/jre/lib/jce.jar",
            "/jre/lib/management-agent.jar",
            "/jre/lib/javaws.jar",
            "/jre/lib/plugin.jar",
            "/jre/lib/jfr.jar",
            "/jre/lib/deploy.jar",
            "/jre/lib/ext/sunjce_provider.jar",
            "/jre/lib/ext/sunec.jar",
            "/jre/lib/ext/localedata.jar",
            "/jre/lib/ext/jfxrt.jar",
            "/jre/lib/ext/dnsns.jar",
            "/jre/lib/ext/cldrdata.jar",
            "/jre/lib/ext/zipfs.jar",
            "/jre/lib/ext/nashorn.jar",
            "/jre/lib/ext/sunpkcs11.jar",
            "/jre/lib/ext/jaccess.jar",
            "/lib/idea_rt.jar",
            "/plugins/Groovy/lib/agent/gragent.jar",
    };

    /**
     * @return false for classpath entries matching a known JRE/IDE jar suffix
     */
    protected boolean filterClasspath(File file) {
        String abs = file.getAbsolutePath();
        for (String s : classpathFiltersNotEndsWith) {
            if (abs.endsWith(s))
                return false;
        }
        return true;
    }

    @Override
    public void addPathToClasses(String path) {
        classpath.add(path);
    }

    @Override
    public void addResourcesDir(File dir) {
        if (dir.isFile())
            throw new IllegalArgumentException(dir.getAbsolutePath() + " - not a dir");

        System.out.println("addResourcesDir: " + dir.getAbsolutePath());
        resourcesDirs.add(dir);
    }

    /**
     * Recursively collects classes from a directory tree; class names are
     * derived from the path relative to {@code homeDir}.
     */
    protected void getClasses(File homeDir, File f, List<Class> l) {
        if (f.isDirectory()) {
            // listFiles() returns null on I/O errors or if the directory
            // disappears mid-scan; treat that the same as an empty directory.
            File[] files = f.listFiles(f1 -> f1.isDirectory() || (f1.getName().endsWith(".class")));
            if (files == null)
                return;
            for (File file : files) {
                getClasses(homeDir, file, l);
            }
        } else {
            String clazz = f.getAbsolutePath().substring(homeDir.getAbsolutePath().length() + 1);
            Class c = getClass(clazz);
            if (c != null)
                l.add(c);
        }
    }

    /**
     * Collects classes from a zip/jar archive. WEB-INF/classes/ prefixes are
     * stripped so war layouts are handled too.
     */
    protected void getClasses(File archive, List<Class> l) {
        // try-with-resources guarantees the stream is closed even if
        // getNextEntry() or class loading throws (the original leaked it).
        try (ZipInputStream zip = new ZipInputStream(new FileInputStream(archive))) {
            ZipEntry entry;
            while ((entry = zip.getNextEntry()) != null) {
                String name = entry.toString();
                if (name.startsWith("WEB-INF/classes/"))
                    name = name.substring("WEB-INF/classes/".length());

                Class c = getClass(name);
                if (c != null)
                    l.add(c);
            }
        } catch (IOException ignored) {
            // best-effort scan: unreadable archives are silently skipped
        }
    }

    /**
     * Converts a ".class" file path to a dotted class name, applies the
     * filters, and loads it.
     *
     * @return the loaded class, or null when filtered out or not loadable
     */
    protected Class getClass(String name) {
        if (name.length() < 7 || !name.endsWith(".class"))
            return null;
        try {
            name = name
                    .substring(0, name.length() - 6)
                    .replace(File.separatorChar, '.')
                    .replace('/', '.')
            ;
            if (!filterClass(name))
                return null;

            return ClassLoader.getSystemClassLoader().loadClass(name);
        } catch (ClassNotFoundException | NoClassDefFoundError ignored) {
            // classes with unsatisfiable dependencies are skipped
        }
        return null;
    }

    /**
     * @return true if at least one registered filter allows the class name
     */
    protected boolean filterClass(String className) {
        for (Filter<String> filter : classpathFilters) {
            if (filter.allow(className))
                return true;
        }
        return false;
    }
}
| |
package org.diorite.scheduler;
import org.apache.commons.lang3.Validate;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.diorite.Diorite;
import org.diorite.plugin.DioritePlugin;
/**
* Simple builder class to build all types of tasks.
*
* @see DioriteTask
* @see Scheduler
*/
public class TaskBuilder
{
private final DioritePlugin dioritePlugin;
private final Runnable runnable;
private boolean async = false;
private boolean isRealTime = false;
private boolean safeMode = true;
private long delay = 0;
private TaskType type = TaskType.SINGLE;
private Synchronizable synchronizable = Diorite.getCore();
private String name; // optional
/**
* Creates a builder for the given plugin and runnable; both must be non-null.
*
* @param dioritePlugin plugin that wants to register the task.
* @param runnable runnable to use as task.
*/
private TaskBuilder(final DioritePlugin dioritePlugin, final Runnable runnable)
{
Validate.notNull(dioritePlugin, "Plugin can't by null.");
Validate.notNull(runnable, "Runnable can't be null.");
this.dioritePlugin = dioritePlugin;
this.runnable = runnable;
}
/**
* Change task to asynchronous, async tasks are running from separate threads
* not related to ticking threads.
*
* @return this same task builder for chaining method.
*/
public TaskBuilder async()
{
this.async = true;
return this;
}
/**
* <b>This is default value/state of builder.</b> <br>
* Change task type to sync, so it will be executed in one of main server threads.
*
* @return this same task builder for chaining method.
*
* @see #syncTo(Synchronizable)
*/
public TaskBuilder sync()
{
this.async = false;
return this;
}
/**
* <b>server instance is default value, don't work for async tasks.</b> <br>
* Default object is server instance, that means that task will be executed
* before ticking worlds in main thread. <br>
* Using other object (entity or chunk) will cause that task will be always executed in this same
* thread as object is ticked. <br>
* NOTE: Task stores a weak reference to the object; if the weak reference is freed, the task will be canceled.
* <p>
* This will also set async mode back to false if needed.
*
* @param synchronizable object to sync with it. (task will be executed in this same thread as object is ticked)
*
* @return this same task builder for chaining method.
*/
public TaskBuilder syncTo(final Synchronizable synchronizable)
{
Validate.notNull(synchronizable, "Can't synchronize to null object");
this.async = false;
this.synchronizable = synchronizable;
return this;
}
/**
* Change name of task, you don't need set it, it isn't used by any important code. <br>
* It may be used by some statistic/timing systems.
*
* @param name new name of task.
*
* @return this same task builder for chaining method.
*/
public TaskBuilder name(final String name)
{
this.name = name + "@" + System.identityHashCode(this.runnable);
return this;
}
/**
* Change task delay type to real-time, so it is milliseconds based. <br>
* Using real-time delay isn't always accurate for sync tasks, if you set
* delay to 200ms, but server is running with 2TPS, task can be only
* executed every 500ms.
*
* @return this same task builder for chaining method.
*/
public TaskBuilder realTime()
{
this.isRealTime = true;
return this;
}
/**
* <b>This is default value/state of builder.</b> <br>
* Change task delay type to game-time, so it is tick based. <br>
* PS: server lag may extend the duration of tick.
*
* @return this same task builder for chaining method.
*/
public TaskBuilder gameTime()
{
this.isRealTime = false;
return this;
}
/**
* <b>This is default value/state of builder.</b> <br>
* This works only for sync to object tasks. <br>
* Safe sync task will automatically unregister when
* sync object will be invalid, like after player
* logout or chunk unload. <br>
*
* @return this same task builder for chaining method.
*
* @see Synchronizable#isValidSynchronizable()
*/
public TaskBuilder safe()
{
this.safeMode = true;
return this;
}
/**
* This works only for sync to object tasks. <br>
* This will turn off safe mode! <br>
* Safe sync task will automatically unregister when
* sync object will be invalid, like after player
* logout or chunk unload. <br>
*
* @return this same task builder for chaining method.
*
* @see Synchronizable#isValidSynchronizable()
*/
public TaskBuilder unsafe()
{
this.safeMode = false;
return this;
}
/**
* <b>0 is default value.</b>
* Set delay of task, if task is game time then it is in ticks, <br>
* if task is real-time then it is in milliseconds. <br>
* Using real-time delay isn't always accurate for sync tasks, if you set
* delay to 200ms, but server is running with 2TPS, task can be only
* executed every 500ms.
*
* @param delay value of delay.
*
* @return this same task builder for chaining method.
*/
public TaskBuilder delay(final long delay)
{
this.delay = delay;
return this;
}
/**
* <b>This is default value/state of builder.</b> <br>
* Change task type to single, so it will be only executed once.
*
* @return this same task builder for chaining method.
*/
public TaskBuilder single()
{
this.type = TaskType.SINGLE;
return this;
}
/**
* Change task type to repeated, so it will be executed multiple
* times with given delay between each run. <br>
* It's possible to add extra delay before first run, see: {@link #start(long)}
*
* @return this same task builder for chaining method.
*/
public TaskBuilder repeated()
{
this.type = TaskType.REPEATED;
return this;
}
/**
* Finish and register task.
*
* @return finished and registered diorite task.
*/
public DioriteTask start()
{
return this.start(0);
}
/**
* Finish and register task with given delay. <br>
* If task is of single type, this delay will be added to task delay. <br>
* If it is repeated type, then first run of task will be delayed by this time. <br>
* This delay works like task delay, if task delay is real-time, then it is also real-time.
*
* @param startDelay delay to first run.
*
* @return finished and registered diorite task.
*/
public DioriteTask start(final long startDelay)
{
if (this.type == TaskType.SINGLE)
{
this.delay += startDelay;
return Diorite.getScheduler().runTask(this, 0);
}
if (this.name == null)
{
this.name = this.runnable.getClass().getName() + "@" + System.identityHashCode(this.runnable);
}
return Diorite.getScheduler().runTask(this, startDelay);
}
/**
* Create new TaskBuilder with selected runnable, it can't be null.
*
* @param dioritePlugin plugin that want register task.
* @param runnable runnable to use as task.
*
* @return new task builder.
*
* @see #async(DioritePlugin, Runnable)
* @see #sync(DioritePlugin, Runnable)
* @see #sync(DioritePlugin, Runnable, Synchronizable)
* @see #start()
*/
public static TaskBuilder start(final DioritePlugin dioritePlugin, final Runnable runnable)
{
return new TaskBuilder(dioritePlugin, runnable);
}
/**
* Simple method to create new sync task and run it. <br>
* Equal to: <br>
* <ol>
* <li>{@link #start(DioritePlugin, Runnable)}</li>
* <li>{@link #start()}</li>
* </ol>
*
* @param dioritePlugin plugin that want register task.
* @param runnable runnable to use as task.
*
* @return finished and registered diorite task.
*/
public static DioriteTask sync(final DioritePlugin dioritePlugin, final Runnable runnable)
{
return new TaskBuilder(dioritePlugin, runnable).start();
}
/**
* Simple method to create new sync task and run it. <br>
* Equal to: <br>
* <ol>
* <li>{@link #start(DioritePlugin, Runnable)}</li>
* <li>{@link #syncTo(Synchronizable)}</li>
* <li>{@link #start()}</li>
* </ol>
*
* @param dioritePlugin plugin that want register task.
* @param runnable runnable to use as task.
* @param synchronizable object to sync with it. (task will be executed in this same thread as object is ticked as long as object exist in memory)
*
* @return finished and registered diorite task.
*/
public static DioriteTask sync(final DioritePlugin dioritePlugin, final Runnable runnable, final Synchronizable synchronizable)
{
return new TaskBuilder(dioritePlugin, runnable).syncTo(synchronizable).start();
}
/**
* Simple method to create new async task and run it. <br>
* Equal to: <br>
* <ol>
* <li>{@link #start(DioritePlugin, Runnable)}</li>
* <li>{@link #async()}</li>
* <li>{@link #start()}</li>
* </ol>
*
* @param dioritePlugin plugin that want register task.
* @param runnable runnable to use as task.
*
* @return finished and registered diorite task.
*/
public static DioriteTask async(final DioritePlugin dioritePlugin, final Runnable runnable)
{
return new TaskBuilder(dioritePlugin, runnable).async().start();
}
/**
* Getters
*/
/**
* @return plugin that wanted to register this task.
*/
public DioritePlugin getPlugin()
{
return this.dioritePlugin;
}
/**
* @return name of task, or a generated default when none was set.
*/
public String getName()
{
if ((this.name == null))
{
return this.runnable.getClass().getName() + "@" + System.identityHashCode(this.runnable);
}
return this.name;
}
/**
* Check if task will be in safe mode. <br>
* This works only for sync to object tasks. <br>
* Safe sync task will automatically unregister when
* sync object will be invalid, like after player
* logout or chunk unload. <br>
*
* @return if task is in safe mode.
*
* @see Synchronizable#isValidSynchronizable()
*/
public boolean isSafeMode()
{
return this.safeMode;
}
/**
* Only for sync tasks, async task can't use synchronizable objects. <br>
* Default object is server instance, that means that task will be executed
* before ticking worlds in main thread. <br>
* Using other object (entity or chunk) will cause that task will be always executed in this same
* thread as object is ticked. <br>
* NOTE: Task stores a weak reference to the object; if the weak reference is freed, the task will be canceled.
*
* @return Synchronizable object, or null if task is async.
*/
public Synchronizable getSynchronizable()
{
if (this.async)
{
return null;
}
return this.synchronizable;
}
/**
* Asynchronous tasks are running in separate threads not related to
* server ticking threads.
*
* @return true if task is asynchronous.
*/
public boolean isAsync()
{
return this.async;
}
/**
* This runnable will be executed as task.
*
* @return runnable to use by task.
*/
public Runnable getRunnable()
{
return this.runnable;
}
/**
* Real time is in milliseconds, and game time is in ticks.
*
* @return true if task use real-time instead of game-time.
*/
public boolean isRealTime()
{
return this.isRealTime;
}
/**
* Delay can be in milliseconds if real-time is used, or in game
* ticks, if game time is used. <br>
* <p>
* If task is single, this is delay before run. <br>
* If task is repeated, this is delay between runs.
*
* @return delay of/between task.
*/
public long getDelay()
{
return this.delay;
}
/**
* Single task is executed only once.
*
* @return if task is Single.
*/
public boolean isSingle()
{
return this.type == TaskType.SINGLE;
}
/**
* Repeated task is executed multiple time with selected delay.
*
* @return if task is Repeated.
*/
public boolean isRepeated()
{
return this.type == TaskType.REPEATED;
}
@Override
public String toString()
{
return new ToStringBuilder(this, ToStringStyle.SHORT_PREFIX_STYLE).appendSuper(super.toString()).append("runnable", this.runnable).append("async", this.async).append("isRealTime", this.isRealTime).append("delay", this.delay).append("synchronizable", this.synchronizable).toString();
}
// Supported execution modes: run once, or repeat with a fixed delay.
private enum TaskType
{
SINGLE,
REPEATED
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.tx;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import javax.transaction.Status;
import org.apache.logging.log4j.Logger;
import org.apache.geode.GemFireException;
import org.apache.geode.annotations.internal.MutableForTesting;
import org.apache.geode.cache.CommitConflictException;
import org.apache.geode.cache.TransactionDataNodeHasDepartedException;
import org.apache.geode.cache.TransactionException;
import org.apache.geode.cache.TransactionInDoubtException;
import org.apache.geode.cache.client.internal.ServerRegionDataAccess;
import org.apache.geode.cache.client.internal.ServerRegionProxy;
import org.apache.geode.distributed.DistributedMember;
import org.apache.geode.distributed.internal.DistributionConfig;
import org.apache.geode.distributed.internal.DistributionManager;
import org.apache.geode.distributed.internal.ServerLocation;
import org.apache.geode.distributed.internal.membership.InternalDistributedMember;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.InternalRegion;
import org.apache.geode.internal.cache.LocalRegion;
import org.apache.geode.internal.cache.TXCommitMessage;
import org.apache.geode.internal.cache.TXLockRequest;
import org.apache.geode.internal.cache.TXRegionLockRequestImpl;
import org.apache.geode.internal.cache.TXStateProxy;
import org.apache.geode.internal.cache.TXStateStub;
import org.apache.geode.internal.cache.locks.TXRegionLockRequest;
import org.apache.geode.internal.cache.tx.TransactionalOperation.ServerRegionOperation;
import org.apache.geode.internal.logging.LogService;
/**
 * Client-side {@link TXStateStub}: every transactional operation is delegated to a
 * single server (pinned via pool server affinity), while local locks are taken on the
 * client just before commit so conflicts between client transactions fail fast.
 */
public class ClientTXStateStub extends TXStateStub {
  private static final Logger logger = LogService.getLogger();

  /** test hook - used to find out what operations were performed in the last tx */
  @MutableForTesting
  private static ThreadLocal<List<TransactionalOperation>> recordedTransactionalOperations = null;

  /**
   * System property to disable conflict checks on clients.
   */
  private static final boolean DISABLE_CONFLICT_CHECK_ON_CLIENT =
      Boolean.getBoolean(DistributionConfig.GEMFIRE_PREFIX + "disableConflictChecksOnClient");

  /**
   * @return true if transactional operation recording is enabled (test hook)
   */
  public static boolean transactionRecordingEnabled() {
    // Record unless conflict checks were disabled; the test-hook container forces recording on.
    return !DISABLE_CONFLICT_CHECK_ON_CLIENT || recordedTransactionalOperations != null;
  }

  /** proxy of the first region touched by this tx; owns the pool server-affinity for the tx */
  private final ServerRegionProxy firstProxy;
  private final InternalCache cache;
  private final DistributionManager dm;

  /** the operations performed in the current transaction are held in this list */
  private final List<TransactionalOperation> recordedOperations =
      Collections.synchronizedList(new LinkedList<TransactionalOperation>());

  /** affinity target saved by suspend() so resume() can re-pin the same server */
  private ServerLocation serverAffinityLocation;

  /** lock request for obtaining local locks */
  private TXLockRequest lockReq;

  /** test hook run right after local locks are obtained */
  private Runnable internalAfterLocalLocks;

  /** set once rollback() has notified the server, so afterCompletion() becomes a no-op */
  private boolean txRolledback = false;

  /**
   * test hook
   *
   * @param t a ThreadLocal to hold lists of TransactionalOperations
   */
  public static void setTransactionalOperationContainer(
      ThreadLocal<List<TransactionalOperation>> t) {
    recordedTransactionalOperations = t;
  }

  public ClientTXStateStub(InternalCache cache, DistributionManager dm, TXStateProxy stateProxy,
      DistributedMember target, InternalRegion firstRegion) {
    super(stateProxy, target);
    this.cache = cache;
    this.dm = dm;
    this.firstProxy = firstRegion.getServerProxy();
    // Pin all subsequent operations of this transaction to a single server.
    this.firstProxy.getPool().setupServerAffinity(true);
    if (recordedTransactionalOperations != null) {
      // Expose this tx's operation list to the test hook.
      recordedTransactionalOperations.set(this.recordedOperations);
    }
  }

  @Override
  public void commit() throws CommitConflictException {
    // Take client-side locks first; on conflict this throws (after rolling back on the server).
    obtainLocalLocks();
    try {
      TXCommitMessage txcm = null;
      try {
        txcm = firstProxy.commit(proxy.getTxId().getUniqId());
      } finally {
        // The tx is finished on the server whether commit succeeded or threw; unpin it.
        this.firstProxy.getPool().releaseServerAffinity();
      }
      // Apply the server's result to the local cache (fires no listeners, see afterServerCommit).
      afterServerCommit(txcm);
    } catch (TransactionDataNodeHasDepartedException e) {
      // The hosting server departed mid-commit; the outcome is unknown to this client.
      throw new TransactionInDoubtException(e);
    } finally {
      lockReq.releaseLocal();
    }
  }

  // Factory seam for tests.
  TXLockRequest createTXLockRequest() {
    return new TXLockRequest();
  }

  // Factory seam for tests.
  TXRegionLockRequestImpl createTXRegionLockRequestImpl(InternalCache cache, LocalRegion region) {
    return new TXRegionLockRequestImpl(cache, region);
  }

  /**
   * Lock the keys in a local transaction manager
   *
   * @throws CommitConflictException if the key is already locked by some other transaction
   */
  private void obtainLocalLocks() {
    lockReq = createTXLockRequest();
    for (TransactionalOperation txOp : this.recordedOperations) {
      // Only mutating operations require a key lock; reads are skipped.
      if (ServerRegionOperation.lockKeyForTx(txOp.getOperation())) {
        TXRegionLockRequest rlr = lockReq.getRegionLockRequest(txOp.getRegionName());
        if (rlr == null) {
          // First key for this region: create its per-region lock request lazily.
          rlr = createTXRegionLockRequestImpl(cache,
              (LocalRegion) cache.getRegionByPath(txOp.getRegionName()));
          lockReq.addLocalRequest(rlr);
        }
        if (txOp.getOperation() == ServerRegionOperation.PUT_ALL
            || txOp.getOperation() == ServerRegionOperation.REMOVE_ALL) {
          // Bulk ops carry multiple keys in a single recorded operation.
          rlr.addEntryKeys(txOp.getKeys());
        } else {
          rlr.addEntryKey(txOp.getKey());
        }
      }
    }
    if (logger.isDebugEnabled()) {
      logger.debug("TX: client localLockRequest: {}", lockReq);
    }
    try {
      lockReq.obtain(cache.getInternalDistributedSystem());
    } catch (CommitConflictException e) {
      rollback(); // cleanup tx artifacts on server
      throw e;
    }
    if (internalAfterLocalLocks != null) {
      internalAfterLocalLocks.run();
    }
  }

  /** perform local cache modifications using the server's TXCommitMessage */
  private void afterServerCommit(TXCommitMessage txcm) {
    if (this.internalAfterSendCommit != null) {
      this.internalAfterSendCommit.run();
    }
    if (cache == null) {
      // we can probably delete this block because cache is now a final var
      // fixes bug 42933
      return;
    }
    cache.getCancelCriterion().checkCancelInProgress(null);
    txcm.setDM(dm);
    // The changes were already acknowledged/applied on the server; replay them locally
    // without requiring acks and without re-firing cache listeners.
    txcm.setAckRequired(false);
    txcm.setDisableListeners(true);
    cache.getTxManager().setTXState(null);
    txcm.hookupRegions(dm);
    txcm.basicProcess();
  }

  @Override
  protected TXRegionStub generateRegionStub(InternalRegion region) {
    return new ClientTXRegionStub(region);
  }

  @Override
  protected void validateRegionCanJoinTransaction(InternalRegion region)
      throws TransactionException {
    // All regions in a client tx must be server-backed AND share the same pool,
    // otherwise operations could be routed to different servers.
    if (!region.hasServerProxy()) {
      throw new TransactionException("Region " + region.getName()
          + " is local to this client and cannot be used in a transaction.");
    } else if (this.firstProxy != null
        && this.firstProxy.getPool() != region.getServerProxy().getPool()) {
      throw new TransactionException("Region " + region.getName()
          + " is using a different server pool than other regions in this transaction.");
    }
  }

  @Override
  public void rollback() {
    if (this.internalAfterSendRollback != null) {
      this.internalAfterSendRollback.run();
    }
    try {
      // Mark first so a subsequent afterCompletion() call is a no-op even if rollback throws.
      txRolledback = true;
      this.firstProxy.rollback(proxy.getTxId().getUniqId());
    } finally {
      this.firstProxy.getPool().releaseServerAffinity();
    }
  }

  @Override
  public void afterCompletion(int status) {
    try {
      if (txRolledback) {
        // rollback() already notified the server and released server affinity.
        return;
      }
      TXCommitMessage txcm = this.firstProxy.afterCompletion(status, proxy.getTxId().getUniqId());
      if (status == Status.STATUS_COMMITTED) {
        if (txcm == null) {
          throw new TransactionInDoubtException(
              "Commit failed on cache server");
        } else {
          afterServerCommit(txcm);
        }
      } else if (status == Status.STATUS_ROLLEDBACK) {
        if (this.internalAfterSendRollback != null) {
          this.internalAfterSendRollback.run();
        }
        // NOTE(review): affinity is released again in the finally block below;
        // confirm the pool tolerates a double releaseServerAffinity().
        this.firstProxy.getPool().releaseServerAffinity();
      }
    } finally {
      if (status == Status.STATUS_COMMITTED) {
        // rollback does not grab locks
        // NOTE(review): lockReq is assigned by beforeCompletion()/commit(); this would
        // NPE if afterCompletion(COMMITTED) arrived without beforeCompletion — verify
        // the JTA coordinator always calls beforeCompletion first.
        this.lockReq.releaseLocal();
      }
      this.firstProxy.getPool().releaseServerAffinity();
    }
  }

  @Override
  public void beforeCompletion() {
    // JTA prepare phase: take local locks, then prepare on the server.
    obtainLocalLocks();
    try {
      this.firstProxy.beforeCompletion(proxy.getTxId().getUniqId());
    } catch (GemFireException e) {
      // Prepare failed: release everything we acquired before propagating.
      this.lockReq.releaseLocal();
      this.firstProxy.getPool().releaseServerAffinity();
      throw e;
    }
  }

  @Override
  public InternalDistributedMember getOriginatingMember() {
    /*
     * Client member id is implied from the connection so we don't need this
     */
    return null;
  }

  @Override
  public boolean isMemberIdForwardingRequired() {
    /*
     * Client member id is implied from the connection so we don't need this Forwarding will occur
     * on the server-side stub
     */
    return false;
  }

  @Override
  public TXCommitMessage getCommitMessage() {
    /* client gets the txcommit message during Op processing and doesn't need it here */
    return null;
  }

  @Override
  public void suspend() {
    // Remember which server the tx was pinned to so resume() can re-pin it.
    this.serverAffinityLocation = this.firstProxy.getPool().getServerAffinityLocation();
    this.firstProxy.getPool().releaseServerAffinity();
    if (logger.isDebugEnabled()) {
      logger.debug("TX: suspending transaction: {} server delegate: {}", getTransactionId(),
          this.serverAffinityLocation);
    }
  }

  @Override
  public void resume() {
    // Re-pin the pool to the server recorded by suspend().
    this.firstProxy.getPool().setupServerAffinity(true);
    this.firstProxy.getPool().setServerAffinityLocation(this.serverAffinityLocation);
    if (logger.isDebugEnabled()) {
      logger.debug("TX: resuming transaction: {} server delegate: {}", getTransactionId(),
          this.serverAffinityLocation);
    }
  }

  /**
   * test hook - maintain a list of tx operations
   */
  @Override
  public void recordTXOperation(ServerRegionDataAccess region, ServerRegionOperation op, Object key,
      Object arguments[]) {
    if (ClientTXStateStub.transactionRecordingEnabled()) {
      this.recordedOperations
          .add(new TransactionalOperation(this, region.getRegionName(), op, key, arguments));
    }
  }

  /**
   * Add an internal callback which is run after the the local locks are obtained
   */
  public void setAfterLocalLocks(Runnable afterLocalLocks) {
    this.internalAfterLocalLocks = afterLocalLocks;
  }

  public ServerLocation getServerAffinityLocation() {
    return serverAffinityLocation;
  }
}
| |
package ws.zettabyte.weirdscience.machine;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntityFurnace;
import net.minecraftforge.common.util.ForgeDirection;
import ws.zettabyte.zettalib.initutils.Conf;
import ws.zettabyte.zettalib.initutils.Configgable;
import ws.zettabyte.zettalib.inventory.ItemSlot;
import ws.zettabyte.zettalib.inventory.SimpleInvComponent;
import ws.zettabyte.zettalib.inventory.SlotInput;
import ws.zettabyte.zettalib.thermal.HeatRegistry;
import java.util.ArrayList;
/**
* Created by Sam "Gyro" Cutlip on 12/29/2015.
*/
@Configgable(section="Machine")
public class TEBurnerSolid extends TEBurnerBase {

    /** The nine fuel slots; also registered in the shared {@code slots} list. */
    protected ArrayList<ItemSlot> fuelSlots = new ArrayList<ItemSlot>(9);
    /**
     * How many ticks the associated slot will continue burning for.
     */
    public int[] burnRemain = new int[9];
    /**
     * How many ticks our last consumed item will / had burned for.
     */
    public int[] prevBurnTime = new int[9];
    /**
     * How many of these slots have "remaining fuel" (read: non-zero burnRemain)?
     */
    protected short activeCount = 0;

    @Conf(name="Burners: How hot the solid fuel burner has to be in order to self-ignite", def="200")
    protected static int ignitionHeat = 200;

    @Conf(name="Burners: mC (thousandths of degrees celsius) per furnace fuel value for Solid Fuel Burner", def="25")
    protected static int mcPerFuelTick = 25;

    //For our friends in GUI land: per-slot fraction of burn time remaining, for progress bars.
    protected ArrayList<SimpleInvComponent<Float>> remainingDisplay = new ArrayList<SimpleInvComponent<Float>>(9);

    /** Creates the nine fuel slots plus their matching GUI progress components. */
    protected void setupSlots() {
        for(int i = 0; i < 9; ++i) {
            ItemSlot slot = new ItemSlot(this, i, "fuel" + i);
            fuelSlots.add(slot);
            slots.add(slot);
            SimpleInvComponent<Float> sInv = new SimpleInvComponent<Float>("slotFuel" + i);
            sInv.val = 0.0F; // autobox instead of the deprecated new Float(...) constructor
            remainingDisplay.add(sInv);
            this.fullComponentList.add(slot);
            this.fullComponentList.add(sInv);
        }
    }

    /** Persists the active-slot count and the per-slot burn timers. */
    @Override
    public void writeToNBT(NBTTagCompound nbt) {
        super.writeToNBT(nbt);
        nbt.setShort("ActiveSlotCount", activeCount); //Save the count
        for(int i = 0; i < 9; ++i) {
            nbt.setInteger("RemainingBurn" + i, burnRemain[i]);
            nbt.setInteger("PrevRemainingBurn" + i, prevBurnTime[i]);
        }
    }

    /** Restores the active-slot count and the per-slot burn timers. */
    @Override
    public void readFromNBT(NBTTagCompound nbt) {
        super.readFromNBT(nbt);
        activeCount = nbt.getShort("ActiveSlotCount"); //read the count
        for(int i = 0; i < 9; ++i) {
            burnRemain[i] = nbt.getInteger("RemainingBurn" + i);
            prevBurnTime[i] = nbt.getInteger("PrevRemainingBurn" + i);
        }
    }

    public TEBurnerSolid() {
        super();
        setupSlots();
    }

    @Override
    public void updateEntity () {
        /* TEBurnerBase.updateEntity() handles a bunch of tick-counting logic and will call doBurnTick. However we need
         * to count ticks remaining for fuel in the fuel slots. Here: */
        if(activeCount > 0) {
            int tempUp = 0;
            for (int i = 0; i < 9; ++i) {
                //Is this, before our process, a still-burning slot?
                if (burnRemain[i] > 0) {
                    burnRemain[i] -= HeatRegistry.getInstance().burnSpeedMult;
                    //A fuel slot "goes out".
                    if (burnRemain[i] <= 0) activeCount -= 1;
                    //If they all go out, it is no longer burning.
                    //if (activeCount <= 0) burning = false;
                    tempUp += mcPerFuelTick * HeatRegistry.getInstance().burnSpeedMult; /*We have just lost a tick of burn time, add the
                     * conversion ratio to our temperature.*/
                }
            }
            // Heat changes are authoritative on the server side only.
            if (!worldObj.isRemote && tempUp != 0) {
                heat.modifyHeat(tempUp); //Increase heat by cumulative fuel-time converted to heat.
            }
            //Are there no embers in this hearth?
            if (activeCount <= 0) {
                activeCount = 0; //Prevent shenanigans
                //We are technically no longer burning, then.
                //burning = false;
                // ^ Commented out: Engine stays warm until the Burn Tick happens.
            }
            refreshProgressValues();
        }
        //We can now try the burn tick.
        super.updateEntity();
    }

    @Override
    protected void doBurnTick() {
        if(!this.canBurn()) return; //We need to ignite this thing before we can do anything of the sort.
        for(int i = 0; i < 9; ++i) tryBurnSlot(i);
        //If fuel-time ran out and there is no more fuel to burn, the fire goes out. Check for that.
        verifyBurning();
        //this.markDirty();
    }

    /**
     * Attempts to start consuming the item in slot {@code sl} as fuel.
     * No-op unless the slot's previous fuel has fully burned out.
     */
    protected void tryBurnSlot(int sl) {
        //Only consume fuel if the previous fuel in the slot has finished burning.
        if(burnRemain[sl] == 0) {
            if(fuelSlots.get(sl) != null) {
                ItemSlot slot = fuelSlots.get(sl);
                if(slot.getStack() != null) {
                    ItemStack stack = slot.getStack();
                    /*We cannot deal with burning, say, a stack of lava buckets, so if our item has a container and
                     * we have more than one of our item, discontinue this plan. */
                    if(stack.getItem().hasContainerItem(stack) && (stack.stackSize != 1)) return;
                    int bTime = TileEntityFurnace.getItemBurnTime(stack);
                    if(bTime > 0) {
                        //Congrats, we can burn this item.
                        //Setup time properly.
                        activeCount += 1;
                        burnRemain[sl] = bTime;
                        prevBurnTime[sl] = bTime;
                        if(!this.worldObj.isRemote) {
                            //Do removal / replacement logic on burnable item
                            if (stack.getItem().getContainerItem(stack) != null) {
                                //Buckets and such
                                ItemStack replacementStack = stack.getItem().getContainerItem(stack);
                                slot.setStackForce(replacementStack);
                            } else {
                                //Coal, wood, etc.
                                slot.decrStackSize(1);
                            }
                            this.markDirty();
                        }
                    }
                }
            }
        }
    }

    /**
     * Recounts slots with remaining fuel and updates the {@code burning} flag.
     *
     * @return true if at least one slot still has fuel burning.
     */
    protected boolean verifyBurning() {
        activeCount = 0;
        for(int i = 0; i < 9; ++i) {
            if(burnRemain[i] != 0) {
                activeCount += 1;
            }
        }
        burning = (activeCount > 0);
        return burning;
    }

    /** Pushes burnRemain/prevBurnTime ratios into the GUI progress components. */
    protected void refreshProgressValues() {
        for(int i = 0; i < 9; ++i) {
            if(this.prevBurnTime[i] == 0) {
                this.remainingDisplay.get(i).val = 0.0F;
            }
            else {
                this.remainingDisplay.get(i).val = ((float)this.burnRemain[i]) / ((float)this.prevBurnTime[i]);
            }
        }
    }

    @Override
    public void ignite() {
        super.ignite();
        verifyBurning(); //If we try to use an F&S or an igniter on an empty burner, don't consider this active.
    }

    //------- Inventory stuff --------
    @Override
    public String getInventoryName() {
        return "SolidFuelBurner";
    }

    @Override
    public boolean canExtractItem(int s, ItemStack stack, int fromDirection) {
        //Only allow extracting from bottom.
        if(ForgeDirection.VALID_DIRECTIONS[fromDirection] != ForgeDirection.DOWN) {
            return false;
        }
        else {
            return super.canExtractItem(s, stack, fromDirection);
        }
    }

    @Override
    public boolean canInsertItem(int s, ItemStack stack, int fromDirection) {
        //Is it a valid furnace fuel?
        if(stack != null) {
            int bTime = TileEntityFurnace.getItemBurnTime(stack);
            if(bTime <= 0) return false; //If no, reject it.
        }
        //If it is a valid furnace fuel, try but still respect whitelists / blacklists / etc
        return super.canInsertItem(s, stack, fromDirection);
    }

    /** @return true if already lit, or hot enough to self-ignite (ignitionHeat is in C, heat in mC). */
    protected boolean canBurn() {
        return this.burning || (this.heat.getHeat() > (ignitionHeat * 1000));
    }
}
| |
package org.ry8.external.imagezoom;
import android.content.Context;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.util.AttributeSet;
import android.util.Log;
import android.view.GestureDetector;
import android.view.GestureDetector.OnGestureListener;
import android.view.MotionEvent;
import android.view.ScaleGestureDetector;
import android.view.ScaleGestureDetector.OnScaleGestureListener;
import android.view.ViewConfiguration;
/**
 * An {@link ImageViewTouchBase} that adds pinch-to-zoom, double-tap zoom,
 * scroll and fling gesture handling.
 */
public class ImageViewTouch extends ImageViewTouchBase {

    /** Minimum pixel delta before the bitmap is considered scrollable. */
    static final float SCROLL_DELTA_THRESHOLD = 1.0f;

    protected ScaleGestureDetector mScaleDetector;
    protected GestureDetector mGestureDetector;
    protected int mTouchSlop;
    /** Scale increment applied per double-tap zoom step. */
    protected float mScaleFactor;
    /** 1 = next double-tap zooms in; -1 = next double-tap resets toward 1x. */
    protected int mDoubleTapDirection;
    protected OnGestureListener mGestureListener;
    protected OnScaleGestureListener mScaleListener;
    protected boolean mDoubleTapEnabled = true;
    protected boolean mScaleEnabled = true;
    protected boolean mScrollEnabled = true;

    private OnImageViewTouchDoubleTapListener mDoubleTapListener;
    private OnImageViewTouchSingleTapListener mSingleTapListener;

    public ImageViewTouch ( Context context) {
        super( context);
    }

    public ImageViewTouch ( Context context, AttributeSet attrs ) {
        this( context, attrs, 0 );
    }

    public ImageViewTouch ( Context context, AttributeSet attrs, int defStyle ) {
        super( context, attrs, defStyle );
    }

    @Override
    protected void init(Context context, AttributeSet attrs, int defStyle) {
        super.init( context, attrs, defStyle );
        mTouchSlop = ViewConfiguration.get( getContext() ).getScaledTouchSlop();
        mGestureListener = getGestureListener();
        mScaleListener = getScaleListener();
        mScaleDetector = new ScaleGestureDetector( getContext(), mScaleListener );
        mGestureDetector = new GestureDetector( getContext(), mGestureListener, null, true );
        mDoubleTapDirection = 1;
    }

    public void setDoubleTapListener( OnImageViewTouchDoubleTapListener listener ) {
        mDoubleTapListener = listener;
    }

    public void setSingleTapListener( OnImageViewTouchSingleTapListener listener ) {
        mSingleTapListener = listener;
    }

    public void setDoubleTapEnabled( boolean value ) {
        mDoubleTapEnabled = value;
    }

    public void setScaleEnabled( boolean value ) {
        mScaleEnabled = value;
    }

    public void setScrollEnabled( boolean value ) {
        mScrollEnabled = value;
    }

    public boolean getDoubleTapEnabled() {
        return mDoubleTapEnabled;
    }

    /** Override point for supplying a custom gesture listener. */
    protected OnGestureListener getGestureListener() {
        return new GestureListener();
    }

    /** Override point for supplying a custom scale listener. */
    protected OnScaleGestureListener getScaleListener() {
        return new ScaleListener();
    }

    @Override
    protected void _setImageDrawable( final Drawable drawable, final Matrix initial_matrix, float min_zoom, float max_zoom ) {
        super._setImageDrawable( drawable, initial_matrix, min_zoom, max_zoom );
        // Double-tap steps a third of the way to max zoom each time.
        mScaleFactor = getMaxScale() / 3;
    }

    // NOTE(review): 'i' appears unused within this class; kept because it is
    // package-visible and may be referenced externally — confirm and remove.
    float i;
    public Handler parentHandler;

    @Override
    public boolean onTouchEvent( MotionEvent event ) {
        // Feed the scale detector first; only route to the gesture detector
        // when a pinch is not in progress, so scroll/fling don't fight the pinch.
        mScaleDetector.onTouchEvent( event );

        if ( !mScaleDetector.isInProgress() ) {
            mGestureDetector.onTouchEvent( event );
        }

        int action = event.getAction();
        switch ( action & MotionEvent.ACTION_MASK ) {
            case MotionEvent.ACTION_UP:
                return onUp( event );
        }
        return true;
    }

    @Override
    protected void onZoomAnimationCompleted( float scale ) {
        if( LOG_ENABLED ) {
            Log.d( LOG_TAG, "onZoomAnimationCompleted. scale: " + scale + ", minZoom: " + getMinScale() );
        }
        // Snap back if the animation left us below the minimum zoom.
        if ( scale < getMinScale() ) {
            zoomTo( getMinScale(), 50 );
        }
    }

    /**
     * Computes the next zoom level for a double tap: step toward max zoom,
     * then reset to 1x once the next step would exceed it.
     */
    protected float onDoubleTapPost( float scale, float maxZoom ) {
        if ( mDoubleTapDirection == 1 ) {
            if ( ( scale + ( mScaleFactor * 2 ) ) <= maxZoom ) {
                return scale + mScaleFactor;
            } else {
                mDoubleTapDirection = -1;
                return maxZoom;
            }
        } else {
            mDoubleTapDirection = 1;
            return 1f;
        }
    }

    public boolean onSingleTapConfirmed( MotionEvent e ) {
        return true;
    }

    public boolean onScroll( MotionEvent e1, MotionEvent e2, float distanceX, float distanceY ) {
        // No panning at base zoom.
        if ( getScale() == 1f ) return false;
        mUserScaled = true;
        scrollBy( -distanceX, -distanceY );
        invalidate();
        return true;
    }

    public boolean onFling( MotionEvent e1, MotionEvent e2, float velocityX, float velocityY ) {
        float diffX = e2.getX() - e1.getX();
        float diffY = e2.getY() - e1.getY();

        if ( Math.abs( velocityX ) > 800 || Math.abs( velocityY ) > 800 ) {
            mUserScaled = true;
            scrollBy( diffX / 2, diffY / 2, 300 );
            invalidate();
            return true;
        }
        return false;
    }

    public boolean onDown( MotionEvent e ) {
        return true;
    }

    public boolean onUp( MotionEvent e ) {
        // Snap back if a pinch left us below minimum zoom.
        if ( getScale() < getMinScale() ) {
            zoomTo( getMinScale(), 50 );
        }
        return true;
    }

    public boolean onSingleTapUp( MotionEvent e ) {
        return true;
    }

    /**
     * Determines whether this ImageViewTouch can be scrolled.
     *
     * @param direction
     *            - positive direction value means scroll from right to left,
     *            negative value means scroll from left to right
     *
     * @return true if there is some more place to scroll, false - otherwise.
     */
    public boolean canScroll( int direction ) {
        RectF bitmapRect = getBitmapRect();
        // Fix: check for null BEFORE using the rect (the original dereferenced it first).
        if( null == bitmapRect ) {
            return false;
        }
        updateRect( bitmapRect, mScrollRect );
        Rect imageViewRect = new Rect();
        getGlobalVisibleRect( imageViewRect );

        if ( bitmapRect.right >= imageViewRect.right ) {
            if ( direction < 0 ) {
                return Math.abs( bitmapRect.right - imageViewRect.right ) > SCROLL_DELTA_THRESHOLD;
            }
        }

        double bitmapScrollRectDelta = Math.abs( bitmapRect.left - mScrollRect.left );
        return bitmapScrollRectDelta > SCROLL_DELTA_THRESHOLD;
    }

    /** Routes detector callbacks to the overridable methods on the outer view. */
    public class GestureListener extends GestureDetector.SimpleOnGestureListener {

        @Override
        public boolean onSingleTapConfirmed( MotionEvent e ) {
            if ( null != mSingleTapListener ) {
                mSingleTapListener.onSingleTapConfirmed();
            }
            return ImageViewTouch.this.onSingleTapConfirmed( e );
        }

        @Override
        public boolean onDoubleTap( MotionEvent e ) {
            Log.i( LOG_TAG, "onDoubleTap. double tap enabled? " + mDoubleTapEnabled );
            if ( mDoubleTapEnabled ) {
                mUserScaled = true;
                float scale = getScale();
                float targetScale = onDoubleTapPost( scale, getMaxScale() );
                targetScale = Math.min( getMaxScale(), Math.max( targetScale, getMinScale() ) );
                zoomTo( targetScale, e.getX(), e.getY(), DEFAULT_ANIMATION_DURATION );
                invalidate();
            }

            if ( null != mDoubleTapListener ) {
                mDoubleTapListener.onDoubleTap();
            }

            return super.onDoubleTap( e );
        }

        @Override
        public void onLongPress( MotionEvent e ) {
            if ( isLongClickable() ) {
                if ( !mScaleDetector.isInProgress() ) {
                    setPressed( true );
                    performLongClick();
                }
            }
        }

        @Override
        public boolean onScroll( MotionEvent e1, MotionEvent e2, float distanceX, float distanceY ) {
            if ( !mScrollEnabled ) return false;
            if ( e1 == null || e2 == null ) return false;
            if ( e1.getPointerCount() > 1 || e2.getPointerCount() > 1 ) return false;
            if ( mScaleDetector.isInProgress() ) return false;
            return ImageViewTouch.this.onScroll( e1, e2, distanceX, distanceY );
        }

        @Override
        public boolean onFling( MotionEvent e1, MotionEvent e2, float velocityX, float velocityY ) {
            if ( !mScrollEnabled ) return false;
            // Fix: guard against null events, consistent with onScroll (e1 can be
            // null when the gesture started outside this view).
            if ( e1 == null || e2 == null ) return false;
            if ( e1.getPointerCount() > 1 || e2.getPointerCount() > 1 ) return false;
            if ( mScaleDetector.isInProgress() ) return false;
            if ( getScale() == 1f ) return false;
            return ImageViewTouch.this.onFling( e1, e2, velocityX, velocityY );
        }

        @Override
        public boolean onSingleTapUp( MotionEvent e ) {
            return ImageViewTouch.this.onSingleTapUp( e );
        }

        @Override
        public boolean onDown( MotionEvent e ) {
            return ImageViewTouch.this.onDown( e );
        }
    }

    /** Handles pinch-to-zoom via the scale detector. */
    public class ScaleListener extends ScaleGestureDetector.SimpleOnScaleGestureListener {

        protected boolean mScaled = false;

        @Override
        public boolean onScale( ScaleGestureDetector detector ) {
            float span = detector.getCurrentSpan() - detector.getPreviousSpan();
            float targetScale = getScale() * detector.getScaleFactor();

            if ( mScaleEnabled ) {
                if( mScaled && span != 0 ) {
                    mUserScaled = true;
                    targetScale = Math.min( getMaxScale(), Math.max( targetScale, getMinScale() - 0.1f ) );
                    zoomTo( targetScale, detector.getFocusX(), detector.getFocusY() );
                    mDoubleTapDirection = 1;
                    invalidate();
                    return true;
                }

                // This is to prevent a glitch the first time
                // image is scaled.
                if( !mScaled ) mScaled = true;
            }
            return true;
        }
    }

    public interface OnImageViewTouchDoubleTapListener {
        void onDoubleTap();
    }

    public interface OnImageViewTouchSingleTapListener {
        void onSingleTapConfirmed();
    }
}
| |
/*
* Copyright 2017-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.distributed.build_slave;
import com.facebook.buck.command.BuildExecutor;
import com.facebook.buck.core.build.engine.BuildEngineResult;
import com.facebook.buck.core.build.engine.BuildResult;
import com.facebook.buck.core.rulekey.RuleKey;
import com.facebook.buck.core.rules.BuildRule;
import com.facebook.buck.distributed.build_slave.HeartbeatService.HeartbeatCallback;
import com.facebook.buck.distributed.thrift.BuildSlaveRunId;
import com.facebook.buck.distributed.thrift.GetWorkResponse;
import com.facebook.buck.distributed.thrift.MinionType;
import com.facebook.buck.distributed.thrift.StampedeId;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.log.CommandThreadFactory;
import com.facebook.buck.log.Logger;
import com.facebook.buck.slb.ThriftException;
import com.facebook.buck.util.ExitCode;
import com.facebook.buck.util.concurrent.MostExecutors;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import java.io.Closeable;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.List;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
/** {@link DistBuildModeRunner} implementation for running a distributed build as minion only. */
public class MinionModeRunner extends AbstractDistBuildModeRunner {
private static final Logger LOG = Logger.get(MinionModeRunner.class);

/** Host the coordinator is reachable on; the port may be supplied later. */
private final String coordinatorAddress;
private volatile OptionalInt coordinatorPort;
private final int coordinatorConnectionTimeoutMillis;

/** Completed asynchronously; runAndReturnExitCode() blocks on it before building. */
private final ListenableFuture<BuildExecutor> buildExecutorFuture;
private final StampedeId stampedeId;
private final MinionType minionType;
private final BuildSlaveRunId buildSlaveRunId;

/** Sleep interval between polls of the coordinator for more work. */
private final long minionPollLoopIntervalMillis;
private final BuildCompletionChecker buildCompletionChecker;

/** Executor that runs the actual builds of fetched work units. */
private final ExecutorService buildExecutorService;
private final BuckEventBus eventBus;
private final MinionLocalBuildStateTracker buildTracker;

// Signals to the main loop that it can stop requesting new work.
private final AtomicBoolean finished = new AtomicBoolean(false);

// Aggregate exit code for the minion. Non-zero if any set of build targets failed.
private AtomicReference<ExitCode> exitCode = new AtomicReference<>(ExitCode.SUCCESS);

// Resolved from buildExecutorFuture at the start of runAndReturnExitCode().
@Nullable private volatile BuildExecutor buildExecutor = null;

/** Callback when the build has completed. */
public interface BuildCompletionChecker {

  boolean hasBuildFinished() throws IOException;
}
/**
 * Production constructor: delegates to the testing constructor, supplying a build
 * executor sized to the maximum capacity this minion can build in parallel.
 */
public MinionModeRunner(
    String coordinatorAddress,
    OptionalInt coordinatorPort,
    ListenableFuture<BuildExecutor> buildExecutorFuture,
    StampedeId stampedeId,
    MinionType minionType,
    BuildSlaveRunId buildSlaveRunId,
    CapacityTracker capacityTracker,
    BuildCompletionChecker buildCompletionChecker,
    long minionPollLoopIntervalMillis,
    MinionBuildProgressTracker minionBuildProgressTracker,
    int coordinatorConnectionTimeoutMillis,
    BuckEventBus eventBus) {
  this(
      coordinatorAddress,
      coordinatorPort,
      coordinatorConnectionTimeoutMillis,
      buildExecutorFuture,
      stampedeId,
      minionType,
      buildSlaveRunId,
      capacityTracker,
      buildCompletionChecker,
      minionPollLoopIntervalMillis,
      minionBuildProgressTracker,
      // One builder thread per unit of available capacity.
      MostExecutors.newMultiThreadExecutor(
          new CommandThreadFactory("MinionBuilderThread"),
          capacityTracker.getMaxAvailableCapacity()),
      eventBus);
}
/**
 * Testing constructor: lets callers inject the build executor service directly.
 * Validates the coordinator port (when present) before storing it.
 */
@VisibleForTesting
public MinionModeRunner(
    String coordinatorAddress,
    OptionalInt coordinatorPort,
    int coordinatorConnectionTimeoutMillis,
    ListenableFuture<BuildExecutor> buildExecutorFuture,
    StampedeId stampedeId,
    MinionType minionType,
    BuildSlaveRunId buildSlaveRunId,
    CapacityTracker capacityTracker,
    BuildCompletionChecker buildCompletionChecker,
    long minionPollLoopIntervalMillis,
    MinionBuildProgressTracker minionBuildProgressTracker,
    ExecutorService buildExecutorService,
    BuckEventBus eventBus) {
  this.coordinatorConnectionTimeoutMillis = coordinatorConnectionTimeoutMillis;
  this.minionPollLoopIntervalMillis = minionPollLoopIntervalMillis;
  this.buildExecutorFuture = buildExecutorFuture;
  this.stampedeId = stampedeId;
  this.minionType = minionType;
  this.buildSlaveRunId = buildSlaveRunId;
  this.coordinatorAddress = coordinatorAddress;
  // Reject an invalid port before it becomes visible to other threads.
  coordinatorPort.ifPresent(CoordinatorModeRunner::validatePort);
  this.coordinatorPort = coordinatorPort;
  this.buildCompletionChecker = buildCompletionChecker;
  this.buildExecutorService = buildExecutorService;
  this.eventBus = eventBus;
  this.buildTracker =
      new MinionLocalBuildStateTracker(minionBuildProgressTracker, capacityTracker);

  LOG.info(
      String.format(
          "Started new minion that can build [%d] work units in parallel",
          capacityTracker.getMaxAvailableCapacity()));
}
@Override
public ListenableFuture<?> getAsyncPrepFuture() {
    // Callers can block on this before entering the run loop.
    return this.buildExecutorFuture;
}
/**
 * Main minion loop: waits for the build executor to be ready, registers a
 * keep-alive heartbeat with the coordinator, then repeatedly signals finished
 * targets and fetches more work until told to stop. Finally drains the build
 * executor and returns the aggregate exit code.
 */
@Override
public ExitCode runAndReturnExitCode(HeartbeatService heartbeatService)
    throws IOException, InterruptedException {

  Preconditions.checkState(coordinatorPort.isPresent(), "Coordinator port has not been set.");

  try {
    // Block until async preparation (see getAsyncPrepFuture) has produced the executor.
    buildExecutor = buildExecutorFuture.get();
  } catch (ExecutionException e) {
    String msg = String.format("Failed to get the BuildExecutor.");
    LOG.error(e, msg);
    throw new RuntimeException(msg, e);
  }

  String minionId = generateMinionId(buildSlaveRunId);
  // Both the thrift client and the heartbeat registration are closed when the loop exits.
  try (ThriftCoordinatorClient client = newStartedThriftCoordinatorClient();
      Closeable healthCheck =
          heartbeatService.addCallback(
              "MinionIsAlive", createHeartbeatCallback(client, minionId, buildSlaveRunId))) {

    while (!finished.get()) {
      signalFinishedTargetsAndFetchMoreWork(minionId, client);
      Thread.sleep(minionPollLoopIntervalMillis);
    }

    LOG.info(String.format("Minion [%s] has exited signal/fetch work loop.", minionId));
  }

  // At this point there is no more work to schedule, so wait for the build to finish.
  buildExecutorService.shutdown();
  buildExecutorService.awaitTermination(30, TimeUnit.MINUTES);

  Preconditions.checkNotNull(buildExecutor).shutdown();

  return exitCode.get();
}
/**
 * Creates a thrift client for the coordinator and starts it on the configured port.
 * Thrift failures are routed through handleThriftException (defined later in this class).
 */
private ThriftCoordinatorClient newStartedThriftCoordinatorClient() throws IOException {
  ThriftCoordinatorClient client =
      new ThriftCoordinatorClient(
          coordinatorAddress, stampedeId, coordinatorConnectionTimeoutMillis);
  try {
    // Safe: runAndReturnExitCode checked coordinatorPort.isPresent() before calling this.
    client.start(coordinatorPort.getAsInt());
  } catch (ThriftException exception) {
    handleThriftException(exception);
  }
  return client;
}
/** Builds the periodic keep-alive callback that tells the coordinator this minion lives. */
private HeartbeatCallback createHeartbeatCallback(
    ThriftCoordinatorClient client, String minionId, BuildSlaveRunId runId) {
  return () -> {
    LOG.debug(String.format("About to send keep alive heartbeat for Minion [%s]", minionId));
    client.reportMinionAlive(minionId, runId);
  };
}
/** Sets the coordinator port once it is known, validating it first. */
public void setCoordinatorPort(int port) {
  CoordinatorModeRunner.validatePort(port);
  this.coordinatorPort = OptionalInt.of(port);
}
/**
 * One iteration of the minion protocol: report targets finished since the last call,
 * reserve local capacity, ask the coordinator for more work, and kick off a build
 * of any newly received work units on the build executor.
 */
private void signalFinishedTargetsAndFetchMoreWork(
    String minionId, ThriftCoordinatorClient client) throws IOException {
  List<String> targetsToSignal = buildTracker.getTargetsToSignal();

  // Try to reserve available capacity
  int reservedCapacity = buildTracker.reserveAllAvailableCapacity();
  if (reservedCapacity == 0
      && exitCode.get() == ExitCode.SUCCESS
      && targetsToSignal.size() == 0) {
    return; // Making a request will not move the build forward, so wait a while and try again.
  }

  LOG.info(
      String.format(
          "Minion [%s] fetching work. Signalling [%d] finished targets",
          minionId, targetsToSignal.size()));

  try {
    GetWorkResponse response =
        client.getWork(
            minionId, minionType, exitCode.get().getCode(), targetsToSignal, reservedCapacity);
    if (!response.isContinueBuilding()) {
      LOG.info(String.format("Minion [%s] told to stop building.", minionId));
      finished.set(true);
    }

    // Commits the reserved capacity against the actual work units received.
    buildTracker.enqueueWorkUnitsForBuildingAndCommitCapacity(response.getWorkUnits());
  } catch (ThriftException ex) {
    handleThriftException(ex);
    return;
  }

  if (!buildTracker.outstandingWorkUnitsToBuild()) {
    return; // Nothing new to build
  }

  buildExecutorService.execute(
      () -> {
        try {
          performBuildOfWorkUnits(minionId);
        } catch (Exception e) {
          // Any uncaught build failure terminates the whole minion, not just this unit.
          LOG.error(e, "Failed whilst building targets. Terminating build. ");
          exitCode.set(ExitCode.FATAL_GENERIC);
          finished.set(true);
        }
      });
}
/**
 * Builds all targets currently queued in the build tracker and records the resulting exit code.
 *
 * @param minionId identifier of this minion, used for logging only.
 * @throws IOException if the underlying build engine fails with an I/O problem.
 */
private void performBuildOfWorkUnits(String minionId) throws IOException {
  List<String> targets = buildTracker.getTargetsToBuild();
  if (targets.isEmpty()) {
    // All outstanding targets have already been picked up by an earlier build thread.
    return;
  }
  LOG.info(
      String.format("Minion [%s] is about to build [%d] targets", minionId, targets.size()));
  LOG.debug(String.format("Targets: [%s]", Joiner.on(", ").join(targets)));

  // Kick off the build; each future corresponds to one build rule's result.
  List<BuildEngineResult> engineResults =
      Preconditions.checkNotNull(buildExecutor).initializeBuild(targets);

  // Register handlers that will ensure we free up cores as soon as a work unit is complete,
  // and signal built targets as soon as they are uploaded to the cache.
  for (BuildEngineResult engineResult : engineResults) {
    registerBuildRuleCompletionHandler(engineResult);
  }

  // Block until everything has built, then fold the result into the shared exit code.
  ExitCode buildExitCode =
      Preconditions.checkNotNull(buildExecutor)
          .waitForBuildToFinish(targets, engineResults, Optional.empty());
  LOG.info(
      String.format(
          "Minion [%s] finished with exit code [%d].", minionId, buildExitCode.getCode()));
  if (buildExitCode != ExitCode.SUCCESS) {
    exitCode.set(buildExitCode);
  }
}
/**
 * Attaches a callback to a build rule's result future that records successful targets in the
 * build tracker (freeing capacity) and chains cache-upload tracking. Any failure marks the
 * whole build as failed via the shared exit code so the minion loop can terminate.
 *
 * @param resultFuture pending result of a single build rule.
 */
private void registerBuildRuleCompletionHandler(BuildEngineResult resultFuture) {
  Futures.addCallback(
      resultFuture.getResult(),
      new FutureCallback<BuildResult>() {
        @Override
        public void onSuccess(@Nullable BuildResult result) {
          Preconditions.checkNotNull(result);
          String fullyQualifiedName = result.getRule().getFullyQualifiedName();
          if (!result.isSuccess()) {
            LOG.error(String.format("Building of target [%s] failed.", fullyQualifiedName));
            // Ensure the build doesn't deadlock
            exitCode.set(ExitCode.BUILD_ERROR);
            return;
          }
          LOG.info(String.format("Building of target [%s] completed.", fullyQualifiedName));
          buildTracker.recordFinishedTarget(result);
          // Already null-checked above; no need for a second Preconditions call.
          registerUploadCompletionHandler(result);
        }

        @Override
        public void onFailure(Throwable t) {
          // No-arg String.format removed; the message is a plain constant.
          LOG.error(t, "Building of unknown target failed.");
          // Fail the Stampede build, and ensure it doesn't deadlock.
          exitCode.set(ExitCode.BUILD_ERROR);
        }
      },
      MoreExecutors.directExecutor());
}
/**
 * Attaches a callback to a built rule's cache-upload future so the target is only signalled to
 * the coordinator once its artifact is (believed to be) in the cache.
 *
 * <p>Rules with no upload future (e.g. cache misses disabled) are treated as already uploaded
 * via the immediate null future.
 *
 * @param buildResult successful build result whose upload should be tracked.
 */
private void registerUploadCompletionHandler(BuildResult buildResult) {
  String fullyQualifiedName = buildResult.getRule().getFullyQualifiedName();
  Futures.addCallback(
      buildResult.getUploadCompleteFuture().orElse(Futures.immediateFuture(null)),
      new FutureCallback<Void>() {
        @Override
        public void onSuccess(@Nullable Void result) {
          buildTracker.recordUploadedTarget(fullyQualifiedName);
        }

        @Override
        public void onFailure(Throwable t) {
          // TODO(alisdair,ruibm,msienkiewicz): We used to have async upload confirmations from
          // cache which made this codepath (almost) never get triggered - we would crash the
          // build if it happened. We need to now look at error rate and decide on a retry/crash
          // policy. Until then, log and progress as if upload was successful.
          registerFailedUploadHandler(t, buildResult.getRule(), fullyQualifiedName);
          buildTracker.recordUploadedTarget(fullyQualifiedName);
        }
      },
      MoreExecutors.directExecutor());
}
/**
 * Logs a failed cache upload together with the rule's rule key. The rule key is recomputed
 * asynchronously (it should already be cached by the engine) purely so the log line can
 * include it; upload failure handling itself happens in the caller.
 *
 * @param uploadThrowable error that failed the upload.
 * @param buildRule rule whose artifact failed to upload.
 * @param fullyQualifiedName fully qualified target name, for log messages.
 */
private void registerFailedUploadHandler(
    Throwable uploadThrowable, BuildRule buildRule, String fullyQualifiedName) {
  Futures.addCallback(
      Preconditions.checkNotNull(buildExecutor)
          .getCachingBuildEngine()
          .getRuleKeyCalculator()
          .calculate(eventBus, buildRule),
      new FutureCallback<RuleKey>() {
        @Override
        public void onSuccess(RuleKey ruleKey) {
          String message =
              String.format(
                  "Cache upload failed for target [%s] with rulekey [%s].",
                  fullyQualifiedName, ruleKey);
          LOG.error(uploadThrowable, message);
        }

        @Override
        public void onFailure(Throwable t) {
          String message =
              String.format(
                  "Cache upload failed for target [%s] with unknown rulekey (calculation failed).",
                  fullyQualifiedName);
          LOG.error(t, message);
        }
      },
      // Rulekey should have already been computed so direct executor is fine.
      MoreExecutors.directExecutor());
}
/**
 * Decides whether a Thrift failure is fatal. If the overall build is already finished the
 * minion was only idling, so we shut down gracefully; otherwise the exception propagates.
 *
 * @param e the Thrift failure from a coordinator RPC.
 * @throws IOException rethrows {@code e} when the build is still in progress.
 */
private void handleThriftException(ThriftException e) throws IOException {
  if (!buildCompletionChecker.hasBuildFinished()) {
    throw e;
  }
  // If the build has finished and this minion was not doing anything and was just
  // waiting for work, just exit gracefully with return code 0.
  LOG.warn(
      e,
      ("Minion failed to connect to coordinator, "
          + "but build already finished, so shutting down."));
  finished.set(true);
}
/**
 * Generates a unique, human-readable id for this minion of the form
 * {@code minion:<hostname>:<buildSlaveRunId>}.
 *
 * @param buildSlaveRunId run id of this build slave; must have a non-empty id.
 * @return the generated minion id.
 */
private static String generateMinionId(BuildSlaveRunId buildSlaveRunId) {
  Preconditions.checkState(!buildSlaveRunId.getId().isEmpty());
  String hostname = "Unknown";
  try {
    hostname = InetAddress.getLocalHost().getHostName();
  } catch (UnknownHostException ex) {
    // Use the project logger instead of System.out, consistent with the rest of this class.
    LOG.warn(ex, "Hostname can not be resolved");
  }
  return String.format("minion:%s:%s", hostname, buildSlaveRunId);
}
}
| |
/**
MIT License
Copyright (c) 2018 juhaku
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
package db.juhaku.juhakudb.core.android.transaction;
import android.content.ContentValues;
import android.util.Log;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import db.juhaku.juhakudb.core.schema.Reference;
import db.juhaku.juhakudb.core.schema.Schema;
import db.juhaku.juhakudb.filter.Filter;
import db.juhaku.juhakudb.filter.PredicateBuilder;
import db.juhaku.juhakudb.filter.Query;
import db.juhaku.juhakudb.filter.Root;
import db.juhaku.juhakudb.util.ReflectionUtils;
import db.juhaku.juhakudb.util.StringUtils;
/**
* Created by juha on 20/05/16.
*
* <p>Store operation transaction template is used when one or multiple items are being stored
* to database. All operations will cascade if items contains other entities.</p>
*
* @author juha
*
* @since 1.0.2
*/
public class StoreTransactionTemplate<T> extends TransactionTemplate {

    private Collection<T> items;

    public void setItems(Collection<T> items) {
        this.items = new ArrayList<>(items); // transform collection of items to a list.
    }

    @Override
    void onTransaction() {
        store(items, null);
        setResult(items);
        commit();
    }

    /**
     * Store given items to database. All store operations will be cascaded to referenced tables if
     * given items has child entities.
     *
     * @param items {@link Collection} of items to store.
     * @param parent Object parent entity that is being used to make foreign key relation to parent table.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private void store(Collection<T> items, Object parent) {
        for (T item : items) {
            cascadeStoreBefore(item);
            ContentValues values = getConverter().entityToContentValues(item);
            // If parent is specified add parent id to content values as it references to child.
            if (parent != null) {
                values.put(resolveReverseJoinColumnName(item.getClass(), parent.getClass()),
                        ReflectionUtils.getIdFieldValue(parent).toString());
            }
            Long id = insertOrReplace(resolveTableName(item.getClass()), values);
            // If storing was successful populate object with the database row id.
            if (id > -1) {
                ReflectionUtils.setFieldValue(ReflectionUtils.findIdField(item.getClass()), item, id);
                cascadeStoreAfter(item);
            } else {
                // Some general logging if storing fails.
                Log.v(getClass().getName(), "Failed to store item: " + item + " to database!");
            }
        }
    }

    /**
     * Store cascade before the actual item is being stored. This stores items that the item itself
     * should refer to when being stored.
     *
     * @param item T item that is being cascade stored.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private void cascadeStoreBefore(T item) {
        for (Field field : item.getClass().getDeclaredFields()) {
            Object value = ReflectionUtils.getFieldValue(item, field);
            /*
             * If field has foreign key relation it should be stored before the actual item is being
             * stored.
             */
            if (field.isAnnotationPresent(ManyToOne.class) ||
                    (field.isAnnotationPresent(OneToOne.class) && StringUtils.isBlank(field.getAnnotation(OneToOne.class).mappedBy()))) {
                // Check that there is actually something to store.
                if (value != null) {
                    store((Collection<T>) toCollection(value), null);
                }
            }
        }
    }

    /**
     * Store cascade after the item was stored to database. This stores items that are referenced
     * by the item itself.
     *
     * @param item T item that is being cascade stored.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private void cascadeStoreAfter(T item) {
        for (Field field : item.getClass().getDeclaredFields()) {
            Object value = ReflectionUtils.getFieldValue(item, field);
            /*
             * If field has primary key relation referenced item will be stored after the actual item
             * is being stored.
             */
            if (field.isAnnotationPresent(ManyToMany.class) || field.isAnnotationPresent(OneToMany.class)
                    || (field.isAnnotationPresent(OneToOne.class) && !StringUtils.isBlank(field.getAnnotation(OneToOne.class).mappedBy()))) {
                // Check that there is actually something to store.
                if (value != null) {
                    if (field.isAnnotationPresent(ManyToMany.class)) {
                        store((Collection<T>) toCollection(value), null);
                        // Update middle table reference for many to many relations.
                        storeMiddleTable(item, (Collection<T>) value);
                    } else {
                        store((Collection<T>) toCollection(value), item);
                    }
                }
            }
        }
    }

    /**
     * Store middle table joins for given item. This is special processing that is being
     * executed after both join parties are stored to database. First existing references is being
     * deleted and then newly coming references is being stored to database for the item.
     *
     * <p>References is being created for given item from given collection of items.</p>
     *
     * @param item T item from table item.
     * @param items {@link Collection} of to table items.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private void storeMiddleTable(final T item, Collection<T> items) {
        // Guard: an empty collection has no joins to write, and items.iterator().next()
        // below would throw NoSuchElementException.
        if (items.isEmpty()) {
            return;
        }
        final Schema middleTable = findMiddleTable(item.getClass(), items.iterator().next().getClass());
        // Guard: findMiddleTable may return null if no middle table exists in the schema.
        if (middleTable == null) {
            Log.v(getClass().getName(), "No middle table found for item: " + item
                    + ", skipping join references update!");
            return;
        }
        /*
         * Delete existing references by id.
         */
        Query where = getProcessor().createWhere(null, new Filter() {
            @Override
            public void filter(Root root, PredicateBuilder builder) {
                String middleTableJoinColumn = null;
                for (Reference reference : middleTable.getReferences()) {
                    if (reference.getReferenceTableName().equals(resolveTableName(item.getClass()))) {
                        middleTableJoinColumn = reference.getColumnName();
                    }
                }
                builder.eq(middleTableJoinColumn, ReflectionUtils.getIdFieldValue(item));
            }
        });
        getDb().delete(middleTable.getName(), where.getSql(), where.getArgs());
        String fromTable = resolveTableName(item.getClass());
        /*
         * Store middle table references.
         */
        for (T joinItem : items) {
            // Create content value for each join item as each item represents one row in database.
            ContentValues values = new ContentValues();
            for (Reference reference : middleTable.getReferences()) {
                Object value;
                /*
                 * Determine which id is being used to to which reference. If reference table equals
                 * from table get id of the from item otherwise use to id.
                 */
                if (reference.getReferenceTableName().equals(fromTable)) {
                    value = ReflectionUtils.getIdFieldValue(item).toString();
                } else {
                    value = ReflectionUtils.getIdFieldValue(joinItem);
                }
                values.put(reference.getColumnName(), value.toString());
            }
            insertOrReplace(middleTable.getName(), values);
        }
    }

    /**
     * Find middle table by model class and reference model class.
     * @param model Entity class that join is made from.
     * @param joinModel Entity class that join is made to.
     *
     * @return Schema found middle table or null if not found.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private Schema findMiddleTable(Class<?> model, Class<?> joinModel) {
        String tableName = resolveTableName(model);
        String joinTable = resolveTableName(joinModel);
        Schema middleTable;
        // The middle table may be named either from_to or to_from; try both orders.
        if ((middleTable = getSchema().getElement(tableName.concat("_").concat(joinTable))) == null) {
            middleTable = getSchema().getElement(joinTable.concat("_").concat(tableName));
        }
        return middleTable;
    }

    /**
     * Inserts or replaces given content values in given table. If SQL was executed successfully the
     * id of database row will be returned. If execution fails -1 will be returned.
     *
     * @param tableName String name of the table to store content values to.
     * @param values {@link ContentValues} that is being stored to given table.
     * @return Long id of stored row in database table or -1 if storing will fail.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private Long insertOrReplace(String tableName, ContentValues values) {
        return getDb().replace(tableName, null, values);
    }

    /**
     * Maps given value to collection if value itself is not assignable from collection.
     * @param value Object value to map.
     * @return Collection containing given value or if value is collection then itself will be returned.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private static <T> Collection<T> toCollection(T value) {
        if (Collection.class.isAssignableFrom(value.getClass())) {
            return (Collection<T>) value;
        } else {
            return Arrays.asList(value);
        }
    }

    /**
     * Resolves reverse join column name from given model class's table. Reverse join column name
     * is returned if reverse join table name is same as provided reverse join model.
     *
     * <p>Column is resolved by looking it for from {@link Schema} in order to maintain integrity.</p>
     *
     * @param model Instance of {@link Class} of model class of table where the join is made from.
     * @param reverseModel Instance of {@link Class} of reverse join model of table where the join is made to.
     * @return String reverse join column name from join table if found.
     *
     * @since 1.2.0
     *
     * @hide
     */
    private String resolveReverseJoinColumnName(Class<?> model, Class<?> reverseModel) {
        Schema table = getSchema().getElement(resolveTableName(model));
        if (table != null) {
            String reverseJoinTableName = resolveTableName(reverseModel);
            for (Reference reference : table.getReferences()) {
                if (reference.getReferenceTableName().equals(reverseJoinTableName)) {
                    return reference.getColumnName();
                }
            }
        }
        return null;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.registry.client;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.extension.ExtensionLoader;
import org.apache.dubbo.common.extension.SPI;
import org.apache.dubbo.common.logger.Logger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.metadata.MappingChangedEvent;
import org.apache.dubbo.metadata.MappingListener;
import org.apache.dubbo.metadata.ServiceNameMapping;
import org.apache.dubbo.metadata.WritableMetadataService;
import org.apache.dubbo.registry.NotifyListener;
import org.apache.dubbo.registry.Registry;
import org.apache.dubbo.registry.client.event.ServiceInstancesChangedEvent;
import org.apache.dubbo.registry.client.event.listener.ServiceInstancesChangedListener;
import org.apache.dubbo.registry.client.metadata.SubscribedURLsSynthesizer;
import org.apache.dubbo.registry.support.AbstractRegistryFactory;
import org.apache.dubbo.registry.support.FailbackRegistry;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.TreeSet;
import java.util.stream.Collectors;
import static java.lang.String.format;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
import static java.util.stream.Collectors.toSet;
import static java.util.stream.Stream.of;
import static org.apache.dubbo.common.constants.CommonConstants.DUBBO;
import static org.apache.dubbo.common.constants.CommonConstants.GROUP_CHAR_SEPARATOR;
import static org.apache.dubbo.common.constants.CommonConstants.GROUP_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.INTERFACE_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.MAPPING_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.PROTOCOL_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.PROVIDER_SIDE;
import static org.apache.dubbo.common.constants.CommonConstants.SIDE_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.VERSION_KEY;
import static org.apache.dubbo.common.constants.RegistryConstants.PROVIDED_BY;
import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_CLUSTER_KEY;
import static org.apache.dubbo.common.constants.RegistryConstants.REGISTRY_TYPE_KEY;
import static org.apache.dubbo.common.constants.RegistryConstants.SERVICE_REGISTRY_TYPE;
import static org.apache.dubbo.common.constants.RegistryConstants.SUBSCRIBED_SERVICE_NAMES_KEY;
import static org.apache.dubbo.common.function.ThrowableAction.execute;
import static org.apache.dubbo.common.utils.CollectionUtils.isEmpty;
import static org.apache.dubbo.common.utils.StringUtils.isBlank;
import static org.apache.dubbo.registry.client.ServiceDiscoveryFactory.getExtension;
import static org.apache.dubbo.rpc.Constants.ID_KEY;
/**
* Being different to the traditional registry, {@link ServiceDiscoveryRegistry} that is a new service-oriented
* {@link Registry} based on {@link ServiceDiscovery}, it will not interact in the external registry directly,
* but store the {@link URL urls} that Dubbo services exported and referenced into {@link WritableMetadataService}
* when {@link #register(URL)} and {@link #subscribe(URL, NotifyListener)} methods are executed. After that the exported
* {@link URL urls} can be get from {@link WritableMetadataService#getExportedURLs()} and its variant methods. In contrast,
* {@link WritableMetadataService#getSubscribedURLs()} method offers the subscribed {@link URL URLs}.
* <p>
* Every {@link ServiceDiscoveryRegistry} object has its own {@link ServiceDiscovery} instance that was initialized
* under {@link #ServiceDiscoveryRegistry(URL) the construction}. As the primary argument of constructor , the
* {@link URL} of connection the registry decides what the kind of ServiceDiscovery is. Generally, each
* protocol associates with a kind of {@link ServiceDiscovery}'s implementation if present, or the
* {@link FileSystemServiceDiscovery} will be the default one. Obviously, it's also allowed to extend
* {@link ServiceDiscovery} using {@link SPI the Dubbo SPI}.
* In contrast, current {@link ServiceInstance service instance} will not be registered to the registry whether any
* Dubbo service is exported or not.
* <p>
*
* @see ServiceDiscovery
* @see FailbackRegistry
* @see WritableMetadataService
* @since 2.7.5
*/
public class ServiceDiscoveryRegistry implements Registry {
protected final Logger logger = LoggerFactory.getLogger(getClass());
private final ServiceDiscovery serviceDiscovery;
private final Set<String> subscribedServices;
private final ServiceNameMapping serviceNameMapping;
private final WritableMetadataService writableMetadataService;
private final Set<String> registeredListeners = new LinkedHashSet<>();
/* apps - listener */
private final Map<String, ServiceInstancesChangedListener> serviceListeners = new HashMap<>();
private final Map<String, String> serviceToAppsMapping = new HashMap<>();
private URL registryURL;
/**
* A cache for all URLs of services that the subscribed services exported
* The key is the service name
* The value is a nested {@link Map} whose key is the revision and value is all URLs of services
*/
private final Map<String, Map<String, List<URL>>> serviceRevisionExportedURLsCache = new LinkedHashMap<>();
/**
 * Creates a service-oriented registry for the given registry URL.
 *
 * @param registryURL the URL used to connect the registry; its parameters choose the
 *                    ServiceDiscovery implementation, the statically subscribed services
 *                    and the service-name mapping extension.
 */
public ServiceDiscoveryRegistry(URL registryURL) {
    this.registryURL = registryURL;
    // ServiceDiscovery implementation is selected from the registry URL (SPI + event publishing wrapper).
    this.serviceDiscovery = createServiceDiscovery(registryURL);
    // Comma-separated service names configured directly on the registry URL, if any.
    this.subscribedServices = parseServices(registryURL.getParameter(SUBSCRIBED_SERVICE_NAMES_KEY));
    this.serviceNameMapping = ServiceNameMapping.getExtension(registryURL.getParameter(MAPPING_KEY));
    this.writableMetadataService = WritableMetadataService.getDefaultExtension();
}
/** Returns the ServiceDiscovery instance backing this registry. */
public ServiceDiscovery getServiceDiscovery() {
    return serviceDiscovery;
}
/**
 * Create the {@link ServiceDiscovery} from the registry {@link URL}.
 *
 * <p>The SPI-resolved instance is wrapped for event publishing and then initialized with a
 * URL carrying the ServiceDiscovery interface name and no registry-type parameter.
 *
 * @param registryURL the {@link URL} to connect the registry
 * @return non-null
 */
protected ServiceDiscovery createServiceDiscovery(URL registryURL) {
    ServiceDiscovery discovery = enhanceEventPublishing(getServiceDiscovery(registryURL));
    URL initURL =
            registryURL.addParameter(INTERFACE_KEY, ServiceDiscovery.class.getName())
                    .removeParameter(REGISTRY_TYPE_KEY);
    execute(() -> discovery.initialize(initURL));
    return discovery;
}
/** Loads every supported SubscribedURLsSynthesizer extension as an unmodifiable list. */
private List<SubscribedURLsSynthesizer> initSubscribedURLsSynthesizers() {
    return Collections.unmodifiableList(
            new ArrayList<>(
                    ExtensionLoader.getExtensionLoader(SubscribedURLsSynthesizer.class)
                            .getSupportedExtensionInstances()));
}
/**
 * Get the instance {@link ServiceDiscovery} from the registry {@link URL} using
 * {@link ServiceDiscoveryFactory} SPI.
 *
 * @param registryURL the {@link URL} to connect the registry
 * @return the factory-provided ServiceDiscovery for this URL
 */
private ServiceDiscovery getServiceDiscovery(URL registryURL) {
    ServiceDiscoveryFactory factory = getExtension(registryURL);
    ServiceDiscovery discovery = factory.getServiceDiscovery(registryURL);
    return discovery;
}
/**
 * Enhance the original {@link ServiceDiscovery} with event publishing feature
 *
 * @param original the original {@link ServiceDiscovery}
 * @return {@link EventPublishingServiceDiscovery} instance wrapping the original
 */
private ServiceDiscovery enhanceEventPublishing(ServiceDiscovery original) {
    return new EventPublishingServiceDiscovery(original);
}
/**
 * Whether the given URL should be registered: only provider-side URLs are.
 *
 * @param providerURL the candidate URL
 * @return true when the URL's "side" parameter marks it as a provider
 */
protected boolean shouldRegister(URL providerURL) {
    boolean isProvider = PROVIDER_SIDE.equals(providerURL.getParameter(SIDE_KEY));
    if (!isProvider && logger.isDebugEnabled()) {
        logger.debug(String.format("The URL[%s] should not be registered.", providerURL.toString()));
    }
    return isProvider;
}
/** A URL is subscribed exactly when it is not provider-side (i.e. not registered). */
protected boolean shouldSubscribe(URL subscribedURL) {
    return !shouldRegister(subscribedURL);
}
/** Registers the URL if it is provider-side; consumer-side URLs are ignored. */
@Override
public final void register(URL url) {
    if (shouldRegister(url)) {
        doRegister(url);
    }
}
/**
 * Stores the exported URL into the local writable metadata service, tagging it with the
 * registry cluster id when one is configured on the ServiceDiscovery URL.
 *
 * @param url the provider URL to register
 */
public void doRegister(URL url) {
    String registryCluster = serviceDiscovery.getUrl().getParameter(ID_KEY);
    if (registryCluster != null && url.getParameter(REGISTRY_CLUSTER_KEY) == null) {
        url = url.addParameter(REGISTRY_CLUSTER_KEY, registryCluster);
    }
    if (writableMetadataService.exportURL(url)) {
        if (logger.isInfoEnabled()) {
            logger.info(format("The URL[%s] registered successfully.", url.toString()));
        }
    } else {
        if (logger.isWarnEnabled()) {
            // Fixed: this branch logged at info level while being guarded by isWarnEnabled().
            logger.warn(format("The URL[%s] has been registered.", url.toString()));
        }
    }
}
/** Unregisters the URL if it is provider-side; consumer-side URLs are ignored. */
@Override
public final void unregister(URL url) {
    if (shouldRegister(url)) {
        doUnregister(url);
    }
}
/**
 * Removes the exported URL from the local writable metadata service, tagging it with the
 * registry cluster id when one is configured on the ServiceDiscovery URL.
 *
 * @param url the provider URL to deregister
 */
public void doUnregister(URL url) {
    String registryCluster = serviceDiscovery.getUrl().getParameter(ID_KEY);
    if (registryCluster != null && url.getParameter(REGISTRY_CLUSTER_KEY) == null) {
        url = url.addParameter(REGISTRY_CLUSTER_KEY, registryCluster);
    }
    if (writableMetadataService.unexportURL(url)) {
        if (logger.isInfoEnabled()) {
            logger.info(format("The URL[%s] deregistered successfully.", url.toString()));
        }
    } else {
        if (logger.isWarnEnabled()) {
            // Fixed: this branch logged at info level while being guarded by isWarnEnabled().
            logger.warn(format("The URL[%s] has been deregistered.", url.toString()));
        }
    }
}
/** Subscribes the URL (consumer-side only), tagging it with the registry cluster id first. */
@Override
public final void subscribe(URL url, NotifyListener listener) {
    if (!shouldSubscribe(url)) { // Should Not Subscribe
        return;
    }
    String cluster = serviceDiscovery.getUrl().getParameter(ID_KEY);
    boolean clusterKeyMissing = url.getParameter(REGISTRY_CLUSTER_KEY) == null;
    if (cluster != null && clusterKeyMissing) {
        url = url.addParameter(REGISTRY_CLUSTER_KEY, cluster);
    }
    doSubscribe(url, listener);
}
/**
 * Records the subscription in metadata and subscribes to all application names that can be
 * resolved for the interface; warns when no application can be determined.
 */
public void doSubscribe(URL url, NotifyListener listener) {
    writableMetadataService.subscribeURL(url);
    Set<String> serviceNames = getServices(url, listener);
    if (CollectionUtils.isNotEmpty(serviceNames)) {
        subscribeURLs(url, listener, serviceNames);
    } else {
        logger.warn("Should has at least one way to know which services this interface belongs to, subscription url: " + url);
    }
}
/** Unsubscribes the URL (consumer-side only), tagging it with the registry cluster id first. */
@Override
public final void unsubscribe(URL url, NotifyListener listener) {
    if (!shouldSubscribe(url)) { // Should Not Subscribe
        return;
    }
    String cluster = serviceDiscovery.getUrl().getParameter(ID_KEY);
    boolean clusterKeyMissing = url.getParameter(REGISTRY_CLUSTER_KEY) == null;
    if (cluster != null && clusterKeyMissing) {
        url = url.addParameter(REGISTRY_CLUSTER_KEY, cluster);
    }
    doUnsubscribe(url, listener);
}
/**
 * Removes the subscription from metadata and detaches this listener from the shared
 * per-application-set instances-changed listener.
 */
public void doUnsubscribe(URL url, NotifyListener listener) {
    writableMetadataService.unsubscribeURL(url);
    String protocolServiceKey = url.getServiceKey() + GROUP_CHAR_SEPARATOR + url.getParameter(PROTOCOL_KEY, DUBBO);
    String serviceNamesKey = serviceToAppsMapping.remove(protocolServiceKey);
    if (serviceNamesKey == null) {
        return;
    }
    ServiceInstancesChangedListener instancesChangedListener = serviceListeners.get(serviceNamesKey);
    // Guard against NPE: the listener may already have been removed from serviceListeners.
    if (instancesChangedListener != null) {
        instancesChangedListener.removeListener(protocolServiceKey);
    }
}
/**
 * Not supported by the service-oriented registry; use subscribe/notify instead.
 *
 * @throws UnsupportedOperationException always
 */
@Override
public List<URL> lookup(URL url) {
    // Previously thrown with an empty message, which gives no hint to the caller.
    throw new UnsupportedOperationException(
            "lookup is not supported by ServiceDiscoveryRegistry");
}
/** Returns the registry URL this instance was constructed with. */
@Override
public URL getUrl() {
    return registryURL;
}
/** Available as long as the discovery backend knows about at least one service name. */
@Override
public boolean isAvailable() {
    return !serviceDiscovery.getServices().isEmpty();
}
/** Removes this registry from the global cache, then shuts down ServiceDiscovery. */
@Override
public void destroy() {
    AbstractRegistryFactory.removeDestroyedRegistry(this);
    // stop ServiceDiscovery
    execute(serviceDiscovery::destroy);
}
/**
 * Wires the NotifyListener to a shared per-application-set {@link ServiceInstancesChangedListener},
 * seeds it with the currently known instances of each application, and delivers the initial
 * notification of matching URLs.
 *
 * @param url the subscribed (consumer) URL
 * @param listener the Dubbo notify listener to attach
 * @param serviceNames the application names this interface maps to
 */
protected void subscribeURLs(URL url, NotifyListener listener, Set<String> serviceNames) {
    String serviceNamesKey = serviceNames.toString();
    String protocolServiceKey = url.getServiceKey() + GROUP_CHAR_SEPARATOR + url.getParameter(PROTOCOL_KEY, DUBBO);
    serviceToAppsMapping.put(protocolServiceKey, serviceNamesKey);
    // register ServiceInstancesChangedListener
    ServiceInstancesChangedListener serviceListener = serviceListeners.computeIfAbsent(serviceNamesKey,
            k -> new ServiceInstancesChangedListener(serviceNames, serviceDiscovery));
    serviceListener.setUrl(url);
    listener.addServiceListener(serviceListener);
    serviceListener.addListener(protocolServiceKey, listener);
    registerServiceInstancesChangedListener(url, serviceListener);
    // FIXME: This will cause redundant duplicate notifications
    serviceNames.forEach(serviceName -> {
        // Seed the listener with current instances so the first notify is not empty.
        List<ServiceInstance> serviceInstances = serviceDiscovery.getInstances(serviceName);
        if (CollectionUtils.isNotEmpty(serviceInstances)) {
            serviceListener.onEvent(new ServiceInstancesChangedEvent(serviceName, serviceInstances));
        } else {
            logger.info("getInstances by serviceName=" + serviceName + " is empty, waiting for serviceListener callback. url=" + url);
        }
    });
    listener.notify(serviceListener.getUrls(protocolServiceKey));
}
/**
 * Register the {@link ServiceInstancesChangedListener} if absent.
 *
 * @param url {@link URL}
 * @param listener the {@link ServiceInstancesChangedListener}
 */
private void registerServiceInstancesChangedListener(URL url, ServiceInstancesChangedListener listener) {
    String listenerId = createListenerId(url, listener);
    boolean firstRegistration = registeredListeners.add(listenerId);
    if (firstRegistration) {
        serviceDiscovery.addServiceInstancesChangedListener(listener);
    }
}
/** Builds a dedup key from the listener's application names plus the URL's version/group/protocol. */
private String createListenerId(URL url, ServiceInstancesChangedListener listener) {
    return listener.getServiceNames() + ":" + url.toString(VERSION_KEY, GROUP_KEY, PROTOCOL_KEY);
}
/**
 * 1.developer explicitly specifies the application name this interface belongs to
 * 2.check Interface-App mapping
 * 3.use the services specified in registry url.
 *
 * @param subscribedURL the consumer URL being subscribed
 * @param listener notified when the interface-app mapping changes remotely
 * @return the application names the subscribed interface maps to (possibly empty)
 */
protected Set<String> getServices(URL subscribedURL, final NotifyListener listener) {
    Set<String> subscribedServices = new TreeSet<>();
    String serviceNames = subscribedURL.getParameter(PROVIDED_BY);
    if (StringUtils.isNotEmpty(serviceNames)) {
        logger.info(subscribedURL.getServiceInterface() + " mapping to " + serviceNames + " instructed by provided-by set by user.");
        subscribedServices.addAll(parseServices(serviceNames));
    }
    if (isEmpty(subscribedServices)) {
        Set<String> mappedServices = findMappedServices(subscribedURL, new DefaultMappingListener(subscribedURL, subscribedServices, listener));
        // Fixed: previously logged the stale provided-by value (usually null here)
        // instead of the services actually resolved from the metadata center.
        logger.info(subscribedURL.getServiceInterface() + " mapping to " + mappedServices + " instructed by remote metadata center.");
        subscribedServices.addAll(mappedServices);
        if (isEmpty(subscribedServices)) {
            // Fixed: log the registry-URL defaults being applied, not the provided-by value.
            logger.info(subscribedURL.getServiceInterface() + " mapping to " + getSubscribedServices() + " by default.");
            subscribedServices.addAll(getSubscribedServices());
        }
    }
    return subscribedServices;
}
/**
 * Parses a comma-separated list of service (application) names into an unmodifiable set,
 * trimming entries and dropping empty ones. A blank input yields the empty set.
 */
public static Set<String> parseServices(String literalServices) {
    if (isBlank(literalServices)) {
        return emptySet();
    }
    Set<String> services =
            of(literalServices.split(","))
                    .map(String::trim)
                    .filter(StringUtils::isNotEmpty)
                    .collect(toSet());
    return unmodifiableSet(services);
}
/**
 * Get the subscribed service names configured on the registry URL.
 *
 * @return non-null
 */
public Set<String> getSubscribedServices() {
    return subscribedServices;
}
/**
 * Get the mapped services name by the specified {@link URL} via the service-name mapping
 * extension, registering the listener for future mapping changes.
 *
 * @param subscribedURL the consumer URL whose interface-to-application mapping is looked up
 * @param listener invoked when the remote mapping changes
 * @return the mapped application names (as provided by the mapping extension)
 */
protected Set<String> findMappedServices(URL subscribedURL, MappingListener listener) {
    return serviceNameMapping.getAndListen(subscribedURL, listener);
}
/**
 * Create an instance of {@link ServiceDiscoveryRegistry} if supported
 *
 * @param registryURL the {@link URL url} of registry
 * @return <code>null</code> if not supported
 */
public static ServiceDiscoveryRegistry create(URL registryURL) {
    if (!supports(registryURL)) {
        return null;
    }
    return new ServiceDiscoveryRegistry(registryURL);
}
/**
 * Supports or not ? True when the registry URL's "registry-type" parameter equals the
 * service-discovery registry type (case-insensitive).
 *
 * @param registryURL the {@link URL url} of registry
 * @return if supported, return <code>true</code>, or <code>false</code>
 */
public static boolean supports(URL registryURL) {
    return SERVICE_REGISTRY_TYPE.equalsIgnoreCase(registryURL.getParameter(REGISTRY_TYPE_KEY));
}
/**
 * Keeps only the exported URLs that match the subscribed URL on service interface,
 * version, group, and (when the consumer specifies one) protocol.
 */
private static List<URL> filterSubscribedURLs(URL subscribedURL, List<URL> exportedURLs) {
    return exportedURLs.stream()
            .filter(url -> isSameServiceInterface(subscribedURL, url)
                    && isSameParameter(subscribedURL, url, VERSION_KEY)
                    && isSameParameter(subscribedURL, url, GROUP_KEY)
                    && isCompatibleProtocol(subscribedURL, url))
            .collect(Collectors.toList());
}
/** True when both URLs declare the same service interface (null-safe comparison). */
private static boolean isSameServiceInterface(URL one, URL another) {
    return Objects.equals(one.getServiceInterface(), another.getServiceInterface());
}
/** True when both URLs carry the same value (possibly null) for the given parameter key. */
private static boolean isSameParameter(URL one, URL another, String key) {
    return Objects.equals(one.getParameter(key), another.getParameter(key));
}
/** Checks protocol compatibility using the consumer URL's "protocol" parameter as the constraint. */
private static boolean isCompatibleProtocol(URL one, URL another) {
    String protocol = one.getParameter(PROTOCOL_KEY);
    return isCompatibleProtocol(protocol, another);
}
/**
 * True when no protocol constraint is given, or when the constraint matches either the
 * target URL's "protocol" parameter or its actual protocol.
 */
private static boolean isCompatibleProtocol(String protocol, URL targetURL) {
    if (protocol == null) {
        // No protocol constraint on the consumer side: accept any provider protocol.
        return true;
    }
    return Objects.equals(protocol, targetURL.getParameter(PROTOCOL_KEY))
            || Objects.equals(protocol, targetURL.getProtocol());
}
/**
 * Listens for interface-to-application mapping changes and re-subscribes the consumer
 * URL whenever the new snapshot contains an application name not seen before.
 */
private class DefaultMappingListener implements MappingListener {

    private final URL url;
    // Last observed mapping snapshot; replaced on every event.
    private Set<String> oldApps;
    private final NotifyListener listener;

    public DefaultMappingListener(URL subscribedURL, Set<String> serviceNames, NotifyListener listener) {
        this.url = subscribedURL;
        this.oldApps = serviceNames;
        this.listener = listener;
    }

    @Override
    public void onEvent(MappingChangedEvent event) {
        Set<String> newApps = event.getApps();
        Set<String> previousApps = oldApps;
        // Record the latest snapshot before deciding whether to act on it.
        oldApps = newApps;
        if (CollectionUtils.isEmpty(newApps)) {
            return;
        }
        // Re-subscribe when there was no previous mapping, or when at least one
        // application in the new snapshot was not present in the previous one.
        if (CollectionUtils.isEmpty(previousApps) || !previousApps.containsAll(newApps)) {
            subscribeURLs(url, listener, newApps);
        }
    }
}
}
| |
/**
* Copyright (c) 2016 Apereo Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/ecl2
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.elfinder.sakai.samigo;
import cn.bluejoe.elfinder.service.FsItem;
import org.sakaiproject.tool.assessment.services.assessment.PublishedAssessmentService;
import org.sakaiproject.tool.assessment.facade.PublishedAssessmentFacade;
import org.sakaiproject.tool.assessment.data.dao.assessment.PublishedAssessmentData;
import org.sakaiproject.tool.assessment.data.ifc.assessment.AssessmentMetaDataIfc;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.exception.PermissionException;
import org.sakaiproject.elfinder.sakai.ReadOnlyFsVolume;
import org.sakaiproject.elfinder.sakai.SiteVolumeFactory;
import org.sakaiproject.elfinder.sakai.SakaiFsService;
import org.sakaiproject.elfinder.sakai.SiteVolume;
import org.sakaiproject.elfinder.sakai.site.SiteFsItem;
import org.sakaiproject.elfinder.sakai.site.SiteFsVolume;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.component.api.ServerConfigurationService;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
/**
* Created by ddelblanco on 03/16.
*/
/**
 * ElFinder {@link SiteVolumeFactory} that exposes the published Samigo
 * (Tests &amp; Quizzes) assessments of a site as a read-only virtual folder.
 */
public class SamSiteVolumeFactory implements SiteVolumeFactory {

    private static final Log LOG = LogFactory.getLog(SamSiteVolumeFactory.class);

    private PublishedAssessmentService publishedAssessmentService;
    private ServerConfigurationService serverConfigurationService;

    public void setPublishedAssessmentService(PublishedAssessmentService publishedAssessmentService) {
        this.publishedAssessmentService = publishedAssessmentService;
    }

    public void setServerConfigurationService(ServerConfigurationService serverConfigurationService) {
        this.serverConfigurationService = serverConfigurationService;
    }

    @Override
    public String getPrefix() {
        return "samigo";
    }

    @Override
    public SiteVolume getVolume(SakaiFsService sakaiFsService, String siteId) {
        return new SamSiteVolume(sakaiFsService, siteId);
    }

    @Override
    public String getToolId() {
        return "sakai.samigo";
    }

    /**
     * Read-only volume over the published assessments of a single site. The volume
     * root is a {@code SamFsItem} with an empty id; each child represents one
     * published assessment.
     */
    public class SamSiteVolume extends ReadOnlyFsVolume implements SiteVolume {

        private SakaiFsService service;
        private String siteId;

        public SamSiteVolume(SakaiFsService service, String siteId) {
            this.service = service;
            this.siteId = siteId;
        }

        public String getSiteId() {
            return this.siteId;
        }

        @Override
        public SiteVolumeFactory getSiteVolumeFactory() {
            return SamSiteVolumeFactory.this;
        }

        /** Always false: this read-only volume never reports new files as existing. */
        public boolean exists(FsItem newFile) {
            return false;
        }

        /**
         * Resolves a relative path of the form "siteId/publishedAssessmentId" to an item;
         * any other input falls back to the volume root.
         */
        public FsItem fromPath(String relativePath) {
            LOG.debug("relativePath=" + relativePath);
            if (relativePath != null && !relativePath.isEmpty()) {
                String[] parts = relativePath.split("/");
                if (parts.length == 2 && (parts[0].equals(siteId))) {
                    LOG.debug("parts[1]=" + parts[1]);
                    LOG.debug("parts[0]=" + parts[0]);
                    PublishedAssessmentFacade test = publishedAssessmentService.getPublishedAssessment(parts[1]);
                    // NOTE(review): the returned item carries only the assessment id, not the
                    // facade, so getPath()/getName() on it would hit a null assessment —
                    // confirm callers only rely on the id.
                    return new SamFsItem(test.getPublishedAssessmentId().toString(), this);
                }
            }
            return this.getRoot();
        }

        /**
         * Builds the virtual path: "/samigo/siteId" for the root, or
         * "/samigo/siteId/alias" for an assessment item.
         */
        public String getPath(FsItem fsi) throws IOException {
            if (this.getRoot().equals(fsi)) {
                // FIX: the debug message lacked a separator ("...siteId<value>").
                LOG.debug("getPath returns siteId=" + ((SamFsItem) fsi).getId());
                return "/samigo/" + siteId;
            } else if (fsi instanceof SamFsItem) {
                SamFsItem samFsItem1 = (SamFsItem) fsi;
                PublishedAssessmentFacade assessment = samFsItem1.getAssessment();
                String alias = assessment.getAssessmentMetaDataByLabel(AssessmentMetaDataIfc.ALIAS);
                LOG.debug("getPath returns =" + "/samigo/" + siteId + "/" + alias);
                return "/samigo/" + siteId + "/" + alias;
            } else {
                throw new IllegalArgumentException("Wrong type: " + fsi);
            }
        }

        /** Assessments have no image dimensions. */
        public String getDimensions(FsItem fsi) {
            return null;
        }

        /** Last-modified time is not tracked for assessments. */
        public long getLastModified(FsItem fsi) {
            return 0L;
        }

        /** The root appears as a directory; every child uses a custom assessment MIME type. */
        public String getMimeType(FsItem fsi) {
            return this.isFolder(fsi) ? "directory" : "sakai/assessments";
        }

        public String getName() {
            return null;
        }

        /** Display name: fixed label for the root, the assessment title otherwise. */
        public String getName(FsItem fsi) {
            if (this.getRoot().equals(fsi)) {
                //TO DO: I18n
                return "Test & Quizzes";
            } else if (fsi instanceof SamFsItem) {
                // FIX: dropped the redundant cast — getAssessment() already returns the facade.
                SamFsItem samFsItem1 = (SamFsItem) fsi;
                PublishedAssessmentFacade assessment = samFsItem1.getAssessment();
                return assessment.getTitle();
            } else {
                throw new IllegalArgumentException("Could not get title for: " + fsi.toString());
            }
        }

        /** The root's parent is the site volume root; everything else hangs off this root. */
        public FsItem getParent(FsItem fsi) {
            if (this.getRoot().equals(fsi)) {
                return service.getSiteVolume(siteId).getRoot();
            } else {
                return this.getRoot();
            }
        }

        /** The root is a SamFsItem with an empty id. */
        public FsItem getRoot() {
            return new SamFsItem("", this);
        }

        /** Assessments report no byte size. */
        public long getSize(FsItem fsi) throws IOException {
            return 0L;
        }

        /** No thumbnails for assessments. */
        public String getThumbnailFileName(FsItem fsi) {
            return null;
        }

        /** Flat hierarchy: the root has children but no child folders. */
        public boolean hasChildFolder(FsItem fsi) {
            return false;
        }

        /** Only the root (empty-id SamFsItem) is a folder. */
        public boolean isFolder(FsItem fsi) {
            if (fsi instanceof SamFsItem && ((SamFsItem) fsi).getId().equals("")) {
                return true;
            } else {
                return false;
            }
        }

        public boolean isRoot(FsItem fsi) {
            return false;
        }

        /**
         * Lists the site's published assessments under the root; a leaf item lists itself.
         */
        public FsItem[] listChildren(FsItem fsi) {
            List<FsItem> items = new ArrayList<>();
            if (this.getRoot().equals(fsi)) {
                //GET SAMIGO LIST
                // FIX: iterate with an enhanced for loop instead of a raw Iterator.
                List<?> tests = publishedAssessmentService.getBasicInfoOfAllPublishedAssessments("","title",true,this.siteId);
                for (Object element : tests) {
                    PublishedAssessmentFacade pubAssessment = (PublishedAssessmentFacade) element;
                    //we need the FULL data in the assessment, not only the basic info
                    pubAssessment = publishedAssessmentService.getPublishedAssessment(pubAssessment.getPublishedAssessmentId().toString());
                    // TO DO: At this moment getBasicInfoOfAllPublishedAssessments returns
                    // the ones "takeable" for the user (or all for the instructors roles)
                    // Maybe we don't want the students to know the links to the
                    // assessments. Of course they can't access to assessments that
                    // they don't have permissions, so it is not a security problem
                    // but maybe in the future we want to filter this list by the role or other
                    // parameters
                    SamFsItem test = new SamFsItem(pubAssessment, pubAssessment.getPublishedAssessmentId().toString(), this);
                    LOG.debug("listing children " + test.getId());
                    items.add(test);
                }
            } else if (fsi instanceof SamFsItem) {
                items.add(fsi);
            }
            return items.toArray(new FsItem[0]);
        }

        /** No raw content stream is exposed for assessments. */
        public InputStream openInputStream(FsItem fsi) throws IOException {
            return null;
        }

        /** Builds the Samigo login URL for an assessment item, or null for the root. */
        public String getURL(FsItem f) {
            String serverUrlPrefix = serverConfigurationService.getServerUrl();
            if (f instanceof SamFsItem) {
                SamFsItem samFsItem1 = (SamFsItem) f;
                if (!(samFsItem1.getId().equals(""))) {
                    PublishedAssessmentFacade pubAssessment = samFsItem1.getAssessment();
                    String alias = pubAssessment.getAssessmentMetaDataByLabel(AssessmentMetaDataIfc.ALIAS);
                    return serverUrlPrefix + "/samigo-app/servlet/Login?id=" + alias;
                } else {
                    return null;
                }
            } else {
                return null;
            }
        }

        /** Read-only volume: nothing is writeable. */
        public boolean isWriteable(FsItem fsi) {
            return false;
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.lakeformation.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
/**
 * <p>
 * Statistics related to the processing of a query statement.
 * </p>
 * <p>
 * NOTE: this class is produced by the AWS Java SDK code generator (see the annotation below);
 * hand edits will be overwritten on the next regeneration.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/lakeformation-2017-03-31/ExecutionStatistics" target="_top">AWS
 *      API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ExecutionStatistics implements Serializable, Cloneable, StructuredPojo {

    /**
     * <p>
     * The average time the request took to be executed.
     * </p>
     */
    private Long averageExecutionTimeMillis;

    /**
     * <p>
     * The amount of data that was scanned in bytes.
     * </p>
     */
    private Long dataScannedBytes;

    /**
     * <p>
     * The number of work units executed.
     * </p>
     */
    private Long workUnitsExecutedCount;

    /**
     * <p>
     * The average time the request took to be executed.
     * </p>
     *
     * @param averageExecutionTimeMillis
     *        The average time the request took to be executed.
     */
    public void setAverageExecutionTimeMillis(Long averageExecutionTimeMillis) {
        this.averageExecutionTimeMillis = averageExecutionTimeMillis;
    }

    /**
     * <p>
     * The average time the request took to be executed.
     * </p>
     *
     * @return The average time the request took to be executed.
     */
    public Long getAverageExecutionTimeMillis() {
        return this.averageExecutionTimeMillis;
    }

    /**
     * <p>
     * The average time the request took to be executed.
     * </p>
     *
     * @param averageExecutionTimeMillis
     *        The average time the request took to be executed.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ExecutionStatistics withAverageExecutionTimeMillis(Long averageExecutionTimeMillis) {
        setAverageExecutionTimeMillis(averageExecutionTimeMillis);
        return this;
    }

    /**
     * <p>
     * The amount of data that was scanned in bytes.
     * </p>
     *
     * @param dataScannedBytes
     *        The amount of data that was scanned in bytes.
     */
    public void setDataScannedBytes(Long dataScannedBytes) {
        this.dataScannedBytes = dataScannedBytes;
    }

    /**
     * <p>
     * The amount of data that was scanned in bytes.
     * </p>
     *
     * @return The amount of data that was scanned in bytes.
     */
    public Long getDataScannedBytes() {
        return this.dataScannedBytes;
    }

    /**
     * <p>
     * The amount of data that was scanned in bytes.
     * </p>
     *
     * @param dataScannedBytes
     *        The amount of data that was scanned in bytes.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ExecutionStatistics withDataScannedBytes(Long dataScannedBytes) {
        setDataScannedBytes(dataScannedBytes);
        return this;
    }

    /**
     * <p>
     * The number of work units executed.
     * </p>
     *
     * @param workUnitsExecutedCount
     *        The number of work units executed.
     */
    public void setWorkUnitsExecutedCount(Long workUnitsExecutedCount) {
        this.workUnitsExecutedCount = workUnitsExecutedCount;
    }

    /**
     * <p>
     * The number of work units executed.
     * </p>
     *
     * @return The number of work units executed.
     */
    public Long getWorkUnitsExecutedCount() {
        return this.workUnitsExecutedCount;
    }

    /**
     * <p>
     * The number of work units executed.
     * </p>
     *
     * @param workUnitsExecutedCount
     *        The number of work units executed.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ExecutionStatistics withWorkUnitsExecutedCount(Long workUnitsExecutedCount) {
        setWorkUnitsExecutedCount(workUnitsExecutedCount);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getAverageExecutionTimeMillis() != null)
            sb.append("AverageExecutionTimeMillis: ").append(getAverageExecutionTimeMillis()).append(",");
        if (getDataScannedBytes() != null)
            sb.append("DataScannedBytes: ").append(getDataScannedBytes()).append(",");
        if (getWorkUnitsExecutedCount() != null)
            sb.append("WorkUnitsExecutedCount: ").append(getWorkUnitsExecutedCount());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (obj instanceof ExecutionStatistics == false)
            return false;
        ExecutionStatistics other = (ExecutionStatistics) obj;
        // For each field: "a == null ^ b == null" is true when exactly one side is null,
        // i.e. the null-ness differs, so the objects cannot be equal.
        if (other.getAverageExecutionTimeMillis() == null ^ this.getAverageExecutionTimeMillis() == null)
            return false;
        if (other.getAverageExecutionTimeMillis() != null && other.getAverageExecutionTimeMillis().equals(this.getAverageExecutionTimeMillis()) == false)
            return false;
        if (other.getDataScannedBytes() == null ^ this.getDataScannedBytes() == null)
            return false;
        if (other.getDataScannedBytes() != null && other.getDataScannedBytes().equals(this.getDataScannedBytes()) == false)
            return false;
        if (other.getWorkUnitsExecutedCount() == null ^ this.getWorkUnitsExecutedCount() == null)
            return false;
        if (other.getWorkUnitsExecutedCount() != null && other.getWorkUnitsExecutedCount().equals(this.getWorkUnitsExecutedCount()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        // Standard 31-multiplier accumulation; null fields contribute 0, consistent with equals().
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + ((getAverageExecutionTimeMillis() == null) ? 0 : getAverageExecutionTimeMillis().hashCode());
        hashCode = prime * hashCode + ((getDataScannedBytes() == null) ? 0 : getDataScannedBytes().hashCode());
        hashCode = prime * hashCode + ((getWorkUnitsExecutedCount() == null) ? 0 : getWorkUnitsExecutedCount().hashCode());
        return hashCode;
    }

    @Override
    public ExecutionStatistics clone() {
        try {
            // Shallow copy is sufficient: all fields are immutable Long references.
            return (ExecutionStatistics) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        // Delegates wire-format serialization to the generated marshaller.
        com.amazonaws.services.lakeformation.model.transform.ExecutionStatisticsMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.sql.calcite.http;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.calcite.tools.ValidationException;
import org.apache.druid.common.config.NullHandling;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.QueryInterruptedException;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.ResourceLimitExceededException;
import org.apache.druid.server.security.AllowAllAuthenticator;
import org.apache.druid.server.security.AuthConfig;
import org.apache.druid.server.security.AuthTestUtils;
import org.apache.druid.sql.calcite.planner.DruidOperatorTable;
import org.apache.druid.sql.calcite.planner.PlannerConfig;
import org.apache.druid.sql.calcite.planner.PlannerContext;
import org.apache.druid.sql.calcite.planner.PlannerFactory;
import org.apache.druid.sql.calcite.schema.DruidSchema;
import org.apache.druid.sql.calcite.schema.SystemSchema;
import org.apache.druid.sql.calcite.util.CalciteTestBase;
import org.apache.druid.sql.calcite.util.CalciteTests;
import org.apache.druid.sql.calcite.util.QueryLogHook;
import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
import org.apache.druid.sql.http.ResultFormat;
import org.apache.druid.sql.http.SqlQuery;
import org.apache.druid.sql.http.SqlResource;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import javax.servlet.http.HttpServletRequest;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.StreamingOutput;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
import java.util.stream.Collectors;
/**
 * Functional tests for {@link SqlResource}: query execution, every result format,
 * and error translation, driven through a mocked {@link HttpServletRequest}.
 */
public class SqlResourceTest extends CalciteTestBase
{
  private static final ObjectMapper JSON_MAPPER = new DefaultObjectMapper();

  // Shared by all tests; created once in setUpClass and released in tearDownClass.
  private static QueryRunnerFactoryConglomerate conglomerate;
  private static Closer resourceCloser;
/**
 * Builds the query-runner conglomerate shared by every test in this class and keeps
 * the closer that releases its resources.
 */
@BeforeClass
public static void setUpClass()
{
  final Pair<QueryRunnerFactoryConglomerate, Closer> pair =
      CalciteTests.createQueryRunnerFactoryConglomerate();
  conglomerate = pair.lhs;
  resourceCloser = pair.rhs;
}
/** Releases the conglomerate resources created in {@link #setUpClass()}. */
@AfterClass
public static void tearDownClass() throws IOException
{
  resourceCloser.close();
}
@Rule
public TemporaryFolder temporaryFolder = new TemporaryFolder();

@Rule
public QueryLogHook queryLogHook = QueryLogHook.create();

// Per-test state, rebuilt in setUp() and torn down in tearDown().
private SpecificSegmentsQuerySegmentWalker walker = null;
private SqlResource resource;
private HttpServletRequest req;
/**
 * Builds a fresh mock segment walker, planner stack, SqlResource, and a strict mock
 * request carrying an allow-all authentication result for each test.
 */
@Before
public void setUp() throws Exception
{
  walker = CalciteTests.createMockWalker(conglomerate, temporaryFolder.newFolder());

  final PlannerConfig plannerConfig = new PlannerConfig();
  final DruidSchema druidSchema = CalciteTests.createMockSchema(conglomerate, walker, plannerConfig);
  final SystemSchema systemSchema = CalciteTests.createMockSystemSchema(druidSchema, walker);
  final DruidOperatorTable operatorTable = CalciteTests.createOperatorTable();
  final ExprMacroTable macroTable = CalciteTests.createExprMacroTable();

  req = EasyMock.createStrictMock(HttpServletRequest.class);
  EasyMock.expect(req.getAttribute(AuthConfig.DRUID_ALLOW_UNSECURED_PATH)).andReturn(null).anyTimes();
  EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED))
          .andReturn(null)
          .anyTimes();
  EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
          .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT)
          .anyTimes();
  req.setAttribute(AuthConfig.DRUID_AUTHORIZATION_CHECKED, true);
  EasyMock.expectLastCall().anyTimes();
  // NOTE(review): DRUID_AUTHENTICATION_RESULT is expected a second time here. It looks
  // redundant, but this is a strict mock (ordered expectations), so the duplicate
  // presumably allows the attribute to be read again after setAttribute — confirm
  // before removing.
  EasyMock.expect(req.getAttribute(AuthConfig.DRUID_AUTHENTICATION_RESULT))
          .andReturn(AllowAllAuthenticator.ALLOW_ALL_RESULT)
          .anyTimes();
  EasyMock.replay(req);

  resource = new SqlResource(
      JSON_MAPPER,
      new PlannerFactory(
          druidSchema,
          systemSchema,
          CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
          operatorTable,
          macroTable,
          plannerConfig,
          AuthTestUtils.TEST_AUTHORIZER_MAPPER,
          CalciteTests.getJsonMapper()
      )
  );
}
/** Closes the per-test segment walker created in {@link #setUp()}. */
@After
public void tearDown() throws Exception
{
  walker.close();
  walker = null;
}
/**
 * Sanity check: COUNT(*) plus a constant column over the mock datasource.
 */
@Test
public void testCountStar() throws Exception
{
  final SqlQuery sqlQuery =
      new SqlQuery("SELECT COUNT(*) AS cnt, 'foo' AS TheFoo FROM druid.foo", null, false, null);
  final List<Map<String, Object>> rows = doPost(sqlQuery).rhs;

  final List<Map<String, Object>> expectedRows = ImmutableList.of(
      ImmutableMap.<String, Object>of("cnt", 6, "TheFoo", "foo")
  );
  Assert.assertEquals(expectedRows, rows);
}
/** Timestamps and dates should serialize as ISO-8601 UTC strings by default. */
@Test
public void testTimestampsInResponse() throws Exception
{
  final List<Map<String, Object>> rows = doPost(
      new SqlQuery(
          "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1",
          ResultFormat.OBJECT,
          false,
          null
      )
  ).rhs;

  Assert.assertEquals(
      ImmutableList.of(
          ImmutableMap.of("__time", "2000-01-01T00:00:00.000Z", "t2", "2000-01-01T00:00:00.000Z")
      ),
      rows
  );
}
/** The sqlTimeZone context parameter should shift serialized timestamps to that zone. */
@Test
public void testTimestampsInResponseLosAngelesTimeZone() throws Exception
{
  final List<Map<String, Object>> rows = doPost(
      new SqlQuery(
          "SELECT __time, CAST(__time AS DATE) AS t2 FROM druid.foo LIMIT 1",
          ResultFormat.OBJECT,
          false,
          ImmutableMap.of(PlannerContext.CTX_SQL_TIME_ZONE, "America/Los_Angeles")
      )
  ).rhs;

  Assert.assertEquals(
      ImmutableList.of(
          ImmutableMap.of("__time", "1999-12-31T16:00:00.000-08:00", "t2", "1999-12-31T00:00:00.000-08:00")
      ),
      rows
  );
}
/** Quoted column aliases must be honored in plain SELECT results. */
@Test
public void testFieldAliasingSelect() throws Exception
{
  final List<Map<String, Object>> rows = doPost(
      new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo LIMIT 1", ResultFormat.OBJECT, false, null)
  ).rhs;

  Assert.assertEquals(
      ImmutableList.of(
          ImmutableMap.of("x", "a", "y", "a")
      ),
      rows
  );
}
/**
 * Quoted column aliases must be honored in GROUP BY results; the expected rows differ
 * depending on whether null handling replaces nulls with default (empty) strings.
 */
@Test
public void testFieldAliasingGroupBy() throws Exception
{
  final List<Map<String, Object>> rows = doPost(
      new SqlQuery("SELECT dim2 \"x\", dim2 \"y\" FROM druid.foo GROUP BY dim2", ResultFormat.OBJECT, false, null)
  ).rhs;

  Assert.assertEquals(
      NullHandling.replaceWithDefault() ?
      ImmutableList.of(
          ImmutableMap.of("x", "", "y", ""),
          ImmutableMap.of("x", "a", "y", "a"),
          ImmutableMap.of("x", "abc", "y", "abc")
      ) :
      ImmutableList.of(
          // x and y both should be null instead of empty string
          Maps.transformValues(ImmutableMap.of("x", "", "y", ""), (val) -> null),
          ImmutableMap.of("x", "", "y", ""),
          ImmutableMap.of("x", "a", "y", "a"),
          ImmutableMap.of("x", "abc", "y", "abc")
      ),
      rows
  );
}
/** ARRAY result format: each row serializes as a JSON array of column values, no header. */
@Test
public void testArrayResultFormat() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  // Null-vs-default-string behavior depends on the NullHandling mode.
  final String nullStr = NullHandling.replaceWithDefault() ? "" : null;

  Assert.assertEquals(
      ImmutableList.of(
          Arrays.asList(
              "2000-01-01T00:00:00.000Z",
              1,
              "",
              "a",
              1.0,
              1.0,
              "org.apache.druid.hll.VersionOneHyperLogLogCollector",
              nullStr
          ),
          Arrays.asList(
              "2000-01-02T00:00:00.000Z",
              1,
              "10.1",
              nullStr,
              2.0,
              2.0,
              "org.apache.druid.hll.VersionOneHyperLogLogCollector",
              nullStr
          )
      ),
      doPost(new SqlQuery(query, ResultFormat.ARRAY, false, null), new TypeReference<List<List<Object>>>() {}).rhs
  );
}
/** ARRAY result format with header=true: the first array is the list of column names. */
@Test
public void testArrayResultFormatWithHeader() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String nullStr = NullHandling.replaceWithDefault() ? "" : null;

  Assert.assertEquals(
      ImmutableList.of(
          Arrays.asList("__time", "cnt", "dim1", "dim2", "m1", "m2", "unique_dim1", "EXPR$7"),
          Arrays.asList(
              "2000-01-01T00:00:00.000Z",
              1,
              "",
              "a",
              1.0,
              1.0,
              "org.apache.druid.hll.VersionOneHyperLogLogCollector",
              nullStr
          ),
          Arrays.asList(
              "2000-01-02T00:00:00.000Z",
              1,
              "10.1",
              nullStr,
              2.0,
              2.0,
              "org.apache.druid.hll.VersionOneHyperLogLogCollector",
              nullStr
          )
      ),
      doPost(new SqlQuery(query, ResultFormat.ARRAY, true, null), new TypeReference<List<List<Object>>>() {}).rhs
  );
}
/**
 * ARRAYLINES result format: one JSON array per line, terminated by two blank lines.
 */
@Test
public void testArrayLinesResultFormat() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String response = doPostRaw(new SqlQuery(query, ResultFormat.ARRAYLINES, false, null)).rhs;
  final String nullStr = NullHandling.replaceWithDefault() ? "" : null;
  final List<String> lines = Splitter.on('\n').splitToList(response);

  // Two data rows plus the two trailing blank lines.
  Assert.assertEquals(4, lines.size());
  Assert.assertEquals(
      Arrays.asList(
          "2000-01-01T00:00:00.000Z",
          1,
          "",
          "a",
          1.0,
          1.0,
          "org.apache.druid.hll.VersionOneHyperLogLogCollector",
          nullStr
      ),
      JSON_MAPPER.readValue(lines.get(0), List.class)
  );
  Assert.assertEquals(
      Arrays.asList(
          "2000-01-02T00:00:00.000Z",
          1,
          "10.1",
          nullStr,
          2.0,
          2.0,
          "org.apache.druid.hll.VersionOneHyperLogLogCollector",
          nullStr
      ),
      JSON_MAPPER.readValue(lines.get(1), List.class)
  );
  Assert.assertEquals("", lines.get(2));
  Assert.assertEquals("", lines.get(3));
}
/**
 * ARRAYLINES with header=true: the first line is the column-name array, then one JSON
 * array per row, terminated by two blank lines.
 */
@Test
public void testArrayLinesResultFormatWithHeader() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String response = doPostRaw(new SqlQuery(query, ResultFormat.ARRAYLINES, true, null)).rhs;
  final String nullStr = NullHandling.replaceWithDefault() ? "" : null;
  final List<String> lines = Splitter.on('\n').splitToList(response);

  // Header line, two data rows, and the two trailing blank lines.
  Assert.assertEquals(5, lines.size());
  Assert.assertEquals(
      Arrays.asList("__time", "cnt", "dim1", "dim2", "m1", "m2", "unique_dim1", "EXPR$7"),
      JSON_MAPPER.readValue(lines.get(0), List.class)
  );
  Assert.assertEquals(
      Arrays.asList(
          "2000-01-01T00:00:00.000Z",
          1,
          "",
          "a",
          1.0,
          1.0,
          "org.apache.druid.hll.VersionOneHyperLogLogCollector",
          nullStr
      ),
      JSON_MAPPER.readValue(lines.get(1), List.class)
  );
  Assert.assertEquals(
      Arrays.asList(
          "2000-01-02T00:00:00.000Z",
          1,
          "10.1",
          nullStr,
          2.0,
          2.0,
          "org.apache.druid.hll.VersionOneHyperLogLogCollector",
          nullStr
      ),
      JSON_MAPPER.readValue(lines.get(2), List.class)
  );
  Assert.assertEquals("", lines.get(3));
  Assert.assertEquals("", lines.get(4));
}
/**
 * OBJECT result format: rows serialize as JSON objects keyed by column name. The
 * transformer rewrites expected empty strings to the mode-dependent null representation.
 */
@Test
public void testObjectResultFormat() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String nullStr = NullHandling.replaceWithDefault() ? "" : null;
  // Maps EXPR$7 (always null) and empty dim2 to the mode-appropriate null value.
  final Function<Map<String, Object>, Map<String, Object>> transformer = m -> {
    return Maps.transformEntries(
        m,
        (k, v) -> "EXPR$7".equals(k) || ("dim2".equals(k) && v.toString().isEmpty()) ? nullStr : v
    );
  };

  Assert.assertEquals(
      ImmutableList.of(
          ImmutableMap
              .<String, Object>builder()
              .put("__time", "2000-01-01T00:00:00.000Z")
              .put("cnt", 1)
              .put("dim1", "")
              .put("dim2", "a")
              .put("m1", 1.0)
              .put("m2", 1.0)
              .put("unique_dim1", "org.apache.druid.hll.VersionOneHyperLogLogCollector")
              .put("EXPR$7", "")
              .build(),
          ImmutableMap
              .<String, Object>builder()
              .put("__time", "2000-01-02T00:00:00.000Z")
              .put("cnt", 1)
              .put("dim1", "10.1")
              .put("dim2", "")
              .put("m1", 2.0)
              .put("m2", 2.0)
              .put("unique_dim1", "org.apache.druid.hll.VersionOneHyperLogLogCollector")
              .put("EXPR$7", "")
              .build()
      ).stream().map(transformer).collect(Collectors.toList()),
      doPost(
          new SqlQuery(query, ResultFormat.OBJECT, false, null),
          new TypeReference<List<Map<String, Object>>>() {}
      ).rhs
  );
}
/**
 * OBJECTLINES result format: one JSON object per line, terminated by two blank lines.
 */
@Test
public void testObjectLinesResultFormat() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String response = doPostRaw(new SqlQuery(query, ResultFormat.OBJECTLINES, false, null)).rhs;
  final String nullStr = NullHandling.replaceWithDefault() ? "" : null;
  // Maps EXPR$7 (always null) and empty dim2 to the mode-appropriate null value.
  final Function<Map<String, Object>, Map<String, Object>> transformer = m -> {
    return Maps.transformEntries(
        m,
        (k, v) -> "EXPR$7".equals(k) || ("dim2".equals(k) && v.toString().isEmpty()) ? nullStr : v
    );
  };
  final List<String> lines = Splitter.on('\n').splitToList(response);

  // Two data rows plus the two trailing blank lines.
  Assert.assertEquals(4, lines.size());
  Assert.assertEquals(
      transformer.apply(
          ImmutableMap
              .<String, Object>builder()
              .put("__time", "2000-01-01T00:00:00.000Z")
              .put("cnt", 1)
              .put("dim1", "")
              .put("dim2", "a")
              .put("m1", 1.0)
              .put("m2", 1.0)
              .put("unique_dim1", "org.apache.druid.hll.VersionOneHyperLogLogCollector")
              .put("EXPR$7", "")
              .build()
      ),
      JSON_MAPPER.readValue(lines.get(0), Object.class)
  );
  Assert.assertEquals(
      transformer.apply(
          ImmutableMap
              .<String, Object>builder()
              .put("__time", "2000-01-02T00:00:00.000Z")
              .put("cnt", 1)
              .put("dim1", "10.1")
              .put("dim2", "")
              .put("m1", 2.0)
              .put("m2", 2.0)
              .put("unique_dim1", "org.apache.druid.hll.VersionOneHyperLogLogCollector")
              .put("EXPR$7", "")
              .build()
      ),
      JSON_MAPPER.readValue(lines.get(1), Object.class)
  );
  Assert.assertEquals("", lines.get(2));
  Assert.assertEquals("", lines.get(3));
}
/**
 * CSV result format: one comma-separated line per row, terminated by two blank lines.
 */
@Test
public void testCsvResultFormat() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String response = doPostRaw(new SqlQuery(query, ResultFormat.CSV, false, null)).rhs;

  final List<String> expectedLines = ImmutableList.of(
      "2000-01-01T00:00:00.000Z,1,,a,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
      "2000-01-02T00:00:00.000Z,1,10.1,,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
      "",
      ""
  );
  Assert.assertEquals(expectedLines, Splitter.on('\n').splitToList(response));
}
/** CSV with header=true: the first line lists the column names. */
@Test
public void testCsvResultFormatWithHeaders() throws Exception
{
  final String query = "SELECT *, CASE dim2 WHEN '' THEN dim2 END FROM foo LIMIT 2";
  final String response = doPostRaw(new SqlQuery(query, ResultFormat.CSV, true, null)).rhs;
  final List<String> lines = Splitter.on('\n').splitToList(response);

  Assert.assertEquals(
      ImmutableList.of(
          "__time,cnt,dim1,dim2,m1,m2,unique_dim1,EXPR$7",
          "2000-01-01T00:00:00.000Z,1,,a,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
          "2000-01-02T00:00:00.000Z,1,10.1,,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
          "",
          ""
      ),
      lines
  );
}
/** EXPLAIN PLAN FOR should return a single row containing the serialized native query plan. */
@Test
public void testExplainCountStar() throws Exception
{
  final List<Map<String, Object>> rows = doPost(
      new SqlQuery("EXPLAIN PLAN FOR SELECT COUNT(*) AS cnt FROM druid.foo", ResultFormat.OBJECT, false, null)
  ).rhs;

  Assert.assertEquals(
      ImmutableList.of(
          ImmutableMap.<String, Object>of(
              "PLAN",
              "DruidQueryRel(query=[{\"queryType\":\"timeseries\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"descending\":false,\"virtualColumns\":[],\"filter\":null,\"granularity\":{\"type\":\"all\"},\"aggregations\":[{\"type\":\"count\",\"name\":\"a0\"}],\"postAggregations\":[],\"limit\":2147483647,\"context\":{\"skipEmptyBuckets\":true}}], signature=[{a0:LONG}])\n"
          )
      ),
      rows
  );
}
@Test
public void testCannotValidate() throws Exception
{
  // "dim3" does not exist, so SQL validation must fail with an error response.
  final SqlQuery badQuery = new SqlQuery("SELECT dim3 FROM druid.foo", ResultFormat.OBJECT, false, null);
  final QueryInterruptedException exception = doPost(badQuery).lhs;
  Assert.assertNotNull(exception);
  Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, exception.getErrorCode());
  Assert.assertEquals(ValidationException.class.getName(), exception.getErrorClass());
  Assert.assertTrue(exception.getMessage().contains("Column 'dim3' not found in any table"));
}
@Test
public void testCannotConvert() throws Exception
{
  // Plain SELECT combined with ORDER BY is not plannable here, so the
  // response must carry an ISE wrapped in a QueryInterruptedException.
  final QueryInterruptedException exception = doPost(
      new SqlQuery("SELECT dim1 FROM druid.foo ORDER BY dim1", ResultFormat.OBJECT, false, null)
  ).lhs;
  Assert.assertNotNull(exception);
  Assert.assertEquals(QueryInterruptedException.UNKNOWN_EXCEPTION, exception.getErrorCode());
  Assert.assertEquals(ISE.class.getName(), exception.getErrorClass());
  final String message = exception.getMessage();
  Assert.assertTrue(message.contains("Cannot build plan for query: SELECT dim1 FROM druid.foo ORDER BY dim1"));
}
@Test
public void testResourceLimitExceeded() throws Exception
{
  // A one-byte merging-dictionary budget forces the DISTINCT query over its
  // resource limit, so the response must be a resource-limit error.
  final QueryInterruptedException exception = doPost(
      new SqlQuery(
          "SELECT DISTINCT dim1 FROM foo",
          ResultFormat.OBJECT,
          false,
          ImmutableMap.of("maxMergingDictionarySize", 1)
      )
  ).lhs;
  Assert.assertNotNull(exception);
  // Fix: JUnit's assertEquals is (expected, actual). The previous argument
  // order was reversed here (unlike every other test in this file), which
  // produced misleading "expected X but was Y" failure messages.
  Assert.assertEquals(QueryInterruptedException.RESOURCE_LIMIT_EXCEEDED, exception.getErrorCode());
  Assert.assertEquals(ResourceLimitExceededException.class.getName(), exception.getErrorClass());
}
// Returns either an error or a result, assuming the result is a JSON object.
private <T> Pair<QueryInterruptedException, T> doPost(
    final SqlQuery query,
    final TypeReference<T> typeReference
) throws Exception
{
  final Pair<QueryInterruptedException, String> raw = doPostRaw(query);
  if (raw.rhs != null) {
    // Success: deserialize the raw JSON body into the requested type.
    return Pair.of(raw.lhs, JSON_MAPPER.readValue(raw.rhs, typeReference));
  }
  // Error: the pair already holds (exception, null), which matches the
  // target signature, so a cast is all that is needed.
  //noinspection unchecked
  return (Pair<QueryInterruptedException, T>) raw;
}
// Returns either an error or a result.
private Pair<QueryInterruptedException, String> doPostRaw(final SqlQuery query) throws Exception
{
  final Response response = resource.doPost(query, req);
  if (response.getStatus() != 200) {
    // Error responses carry a serialized QueryInterruptedException entity.
    final QueryInterruptedException qie =
        JSON_MAPPER.readValue((byte[]) response.getEntity(), QueryInterruptedException.class);
    return Pair.of(qie, null);
  }
  // Success responses stream their payload; capture it into memory and
  // decode it as UTF-8.
  final StreamingOutput output = (StreamingOutput) response.getEntity();
  final ByteArrayOutputStream baos = new ByteArrayOutputStream();
  output.write(baos);
  return Pair.of(null, new String(baos.toByteArray(), StandardCharsets.UTF_8));
}
// Convenience overload: deserialize the default array-of-objects result shape.
private Pair<QueryInterruptedException, List<Map<String, Object>>> doPost(final SqlQuery query) throws Exception
{
  final TypeReference<List<Map<String, Object>>> rowsType = new TypeReference<List<Map<String, Object>>>() {};
  return doPost(query, rowsType);
}
}
| |
/**
* AccountProperties.java
* created: 19.06.2009
* (c) 2008 by <a href="http://Wolschon.biz">Wolschon Softwaredesign und Beratung</a>
 * This file is part of jgnucashLib-GPL by Marcus Wolschon <a href="mailto:Marcus@Wolschon.biz">Marcus@Wolschon.biz</a>.
* You can purchase support for a sensible hourly rate or
* a commercial license of this file (unless modified by others) by contacting him directly.
*
* jgnucashLib-GPL is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* jgnucashLib-GPL is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with jgnucashLib-V1. If not, see <http://www.gnu.org/licenses/>.
*
***********************************
* Editing this file:
* -For consistent code-quality this file should be checked with the
* checkstyle-ruleset enclosed in this project.
* -After the design of this file has settled it should get it's own
* JUnit-Test that shall be executed regularly. It is best to write
* the test-case BEFORE writing this class and to run it on every build
* as a regression-test.
*/
package biz.wolschon.finance.jgnucash.accountProperties;
import java.awt.BorderLayout;
import java.awt.GridLayout;
import java.awt.Toolkit;
import java.awt.datatransfer.Clipboard;
import java.awt.datatransfer.ClipboardOwner;
import java.awt.datatransfer.StringSelection;
import java.awt.datatransfer.Transferable;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.beans.PropertyChangeSupport;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import javax.swing.Action;
import javax.swing.JButton;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JMenuItem;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JTextField;
import javax.swing.WindowConstants;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import biz.wolschon.fileformats.gnucash.GnucashAccount;
import biz.wolschon.fileformats.gnucash.GnucashWritableAccount;
import biz.wolschon.finance.jgnucash.actions.AccountAction;
import com.l2fprod.common.propertysheet.DefaultProperty;
import com.l2fprod.common.propertysheet.Property;
import com.l2fprod.common.propertysheet.PropertySheetPanel;
import com.l2fprod.common.propertysheet.PropertySheetTableModel;
/**
 * (c) 2009 by <a href="http://Wolschon.biz">Wolschon Softwaredesign und Beratung</a>.<br/>
* Project: jgnucashLib-GPL<br/>
* AccountProperties<br/>
* created: 19.06.2009 <br/>
*<br/><br/>
 * <b>Action to open the properties of an account in a new tab.</b>
* @author <a href="mailto:Marcus@Wolschon.biz">Marcus Wolschon</a>
*/
public class AccountProperties implements AccountAction, ClipboardOwner {

    /**
     * Our logger for debug- and error-output.
     */
    private static final Log LOGGER = LogFactory.getLog(AccountProperties.class);

    /**
     * The account we open.
     */
    private GnucashWritableAccount myAccount;

    /**
     * Action-values (NAME, descriptions, ...) stored via {@link #putValue(String, Object)}.
     * @see #getValue(String)
     */
    private final Map<String, Object> myAddedTags = new HashMap<String, Object>();

    /**
     * Delegate that manages our PropertyChangeListeners.
     * @see #addPropertyChangeListener(PropertyChangeListener)
     */
    private final PropertyChangeSupport myPropertyChangeSupport = new PropertyChangeSupport(this);

    /**
     * The JPanel with the slot-values.
     */
    private JPanel mySettingsPanel;

    /**
     * The Panel with the controls to add a custom attribute.
     */
    private JPanel myAddCustomAttrPanel;

    /**
     * The text-field to enter the name of a new custom attribute.
     * @see #myAddCustomAttrPanel
     */
    private JTextField myCustomAttributeName;

    /**
     * The text-field to enter the value of a new custom attribute.
     * @see #myAddCustomAttrPanel
     */
    private JTextField myCustomAttributeValue;

    /**
     * The button to add a new custom attribute.
     */
    private JButton myAddCustomAttributeButton;

    /**
     * The PropertySheet with the custom attributes.
     */
    private PropertySheetPanel myPropertySheet;

    /**
     * The panel with the close-button.
     */
    private JPanel myButtonsPanel;

    /**
     * The frame we show everything in.
     */
    private JFrame myFrame;

    /**
     * The button to close the frame.
     */
    private JButton myCloseButton;

    /**
     * Popup-menu on properties.
     */
    private JPopupMenu myPropertyMenu;

    /**
     * The menu-item in the {@link #myPropertyMenu} to remove a custom attribute.
     */
    private JMenuItem myRemoveMenuItem;

    /**
     * Initialize the action-metadata (name and descriptions) only;
     * no account is attached yet, so {@link #isEnabled()} returns false.
     */
    public AccountProperties() {
        this.putValue(Action.NAME, "Account Properties...");
        this.putValue(Action.LONG_DESCRIPTION, "Show the properties of an account.");
        this.putValue(Action.SHORT_DESCRIPTION, "Show the properties of an account.");
    }

    /**
     * @param anAccount the account to show.
     */
    public AccountProperties(final GnucashAccount anAccount) {
        this();
        setAccount(anAccount);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void setAccount(final GnucashAccount anAccount) {
        // NOTE(review): assumes every account handed in is writable -
        // a read-only GnucashAccount would fail this cast. TODO confirm callers.
        myAccount = (GnucashWritableAccount) anAccount;
        if (myAccount != null) {
            LOGGER.debug("setAccount(" + myAccount.getName() + ")");
            updateCustomAttributesPanel();
        }
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void addPropertyChangeListener(final PropertyChangeListener aListener) {
        myPropertyChangeSupport.addPropertyChangeListener(aListener);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Object getValue(final String aKey) {
        return myAddedTags.get(aKey);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public boolean isEnabled() {
        return myAccount != null;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void putValue(final String aKey, final Object aValue) {
        myAddedTags.put(aKey, aValue);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void removePropertyChangeListener(final PropertyChangeListener aListener) {
        myPropertyChangeSupport.removePropertyChangeListener(aListener);
    }

    /**
     * {@inheritDoc}
     * Enabledness is derived from having an account set, so this is a no-op.
     */
    @Override
    public void setEnabled(final boolean aB) {
    }

    /**
     * {@inheritDoc}
     * Builds and shows the properties-frame: a header with the (read-only,
     * copyable) GUID and the editable name, the custom-attribute sheet in the
     * center and a close-button at the bottom.
     */
    @Override
    public void actionPerformed(final ActionEvent aE) {
        JPanel newPanel = new JPanel(new GridLayout(2, 2));
        newPanel.add(new JLabel("GUID:"));
        final JTextField disabledIDInput = new JTextField(myAccount.getId());
        final JPopupMenu accountIDPopupMenu = createAccountIDPopupMenu();
        disabledIDInput.setEditable(false);
        // Check the popup-trigger in both pressed and released since the
        // trigger event differs between platforms.
        disabledIDInput.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseReleased(final MouseEvent arg0) {
                if (arg0.isPopupTrigger()) {
                    accountIDPopupMenu.show(disabledIDInput, arg0.getX(), arg0.getY());
                }
            }
            @Override
            public void mousePressed(final MouseEvent arg0) {
                if (arg0.isPopupTrigger()) {
                    accountIDPopupMenu.show(disabledIDInput, arg0.getX(), arg0.getY());
                }
            }
        });
        newPanel.add(disabledIDInput);
        newPanel.add(new JLabel("name:"));
        final JTextField nameInput = new JTextField(myAccount.getName());
        nameInput.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(final ActionEvent anEvent) {
                // Rename the account when the user presses enter in the field.
                myAccount.setName(nameInput.getText());
            }
        });
        newPanel.add(nameInput);
        myFrame = new JFrame(myAccount.getName());
        myFrame.getContentPane().setLayout(new BorderLayout());
        myFrame.getContentPane().add(newPanel, BorderLayout.NORTH);
        myFrame.getContentPane().add(getButtonsPanel(), BorderLayout.SOUTH);
        myFrame.getContentPane().add(getMySettingsPanel(), BorderLayout.CENTER);
        myFrame.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        myFrame.pack();
        myFrame.setVisible(true);
    }

    /**
     * @return a popup-menu that copies the account-GUID to the system-clipboard
     */
    private JPopupMenu createAccountIDPopupMenu() {
        final JPopupMenu accountIDPopupMenu = new JPopupMenu();
        JMenuItem copyAccountIDMenuItem = new JMenuItem("copy");
        copyAccountIDMenuItem.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(final ActionEvent arg0) {
                Clipboard clipboard = Toolkit.getDefaultToolkit().getSystemClipboard();
                clipboard.setContents(new StringSelection(myAccount.getId()), AccountProperties.this);
            }
        });
        accountIDPopupMenu.add(copyAccountIDMenuItem);
        return accountIDPopupMenu;
    }

    /**
     * @return The panel with the close-button.
     */
    private JPanel getButtonsPanel() {
        if (myButtonsPanel == null) {
            myButtonsPanel = new JPanel(new BorderLayout());
            myButtonsPanel.add(getCloseButton(), BorderLayout.CENTER);
        }
        return myButtonsPanel;
    }

    /**
     * @return the close-button
     */
    private JButton getCloseButton() {
        if (myCloseButton == null) {
            myCloseButton = new JButton("close");
            myCloseButton.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(final ActionEvent aE) {
                    myFrame.setVisible(false);
                }
            });
        }
        return myCloseButton;
    }

    /**
     * @return a panel to edit the settings of this section
     */
    private JPanel getMySettingsPanel() {
        if (mySettingsPanel == null) {
            mySettingsPanel = new JPanel();
            mySettingsPanel.setLayout(new BorderLayout());
            myPropertySheet = new PropertySheetPanel();
            myPropertySheet.setToolBarVisible(true);
            myPropertySheet.setSorting(false);
            myPropertySheet.setMode(PropertySheetPanel.VIEW_AS_CATEGORIES);
            myPropertySheet.setDescriptionVisible(true);
            // Persist every edited property back into the account's
            // user-defined attributes.
            myPropertySheet.addPropertySheetChangeListener(new PropertyChangeListener() {
                @Override
                public void propertyChange(final PropertyChangeEvent aEvt) {
                    Object property = aEvt.getSource();
                    if (property instanceof DefaultProperty) {
                        DefaultProperty prop = (DefaultProperty) property;
                        try {
                            myAccount.setUserDefinedAttribute(prop.getName(),
                                    prop.getValue().toString());
                        } catch (Exception e) {
                            LOGGER.error("error in writing userDefinedAttribute", e);
                        }
                    }
                }
            });
            myPropertySheet.getTable().addMouseListener(new MouseAdapter() {
                /** show popup if mouseReleased is a popupTrigger on this platform.
                 * @see java.awt.event.MouseAdapter#mouseReleased(java.awt.event.MouseEvent)
                 */
                @Override
                public void mouseReleased(final MouseEvent aE) {
                    if (aE.isPopupTrigger()) {
                        JPopupMenu menu = getPropertyPopup();
                        menu.show(myPropertySheet, aE.getX(), aE.getY());
                    }
                    // Fix: delegate to the matching super-method. The previous
                    // code called super.mouseClicked(aE) here (copy-paste error);
                    // harmless with MouseAdapter's empty bodies, but wrong.
                    super.mouseReleased(aE);
                }
                /** show popup if mousePressed is a popupTrigger on this platform.
                 * @see java.awt.event.MouseAdapter#mousePressed(java.awt.event.MouseEvent)
                 */
                @Override
                public void mousePressed(final MouseEvent aE) {
                    if (aE.isPopupTrigger()) {
                        JPopupMenu menu = getPropertyPopup();
                        menu.show(myPropertySheet, aE.getX(), aE.getY());
                    }
                    super.mousePressed(aE);
                }
            });
            updateCustomAttributesPanel();
            mySettingsPanel.add(new JLabel("custom attributes:"), BorderLayout.NORTH);
            mySettingsPanel.add(myPropertySheet, BorderLayout.CENTER);
            mySettingsPanel.add(getAddCustomAttrPanel(), BorderLayout.SOUTH);
        }
        return mySettingsPanel;
    }

    /**
     * Update the PropertySheet: clear it and re-populate it with the
     * current user-defined attributes of {@link #myAccount}.
     */
    private void updateCustomAttributesPanel() {
        if (myPropertySheet == null) {
            LOGGER.debug("updateCustomAttributesPanel() myPropertySheet is null");
            return;
        }
        // Fix: guard against being called before an account is set -
        // the code below dereferences myAccount unconditionally.
        if (myAccount == null) {
            LOGGER.debug("updateCustomAttributesPanel() myAccount is null");
            return;
        }
        Property[] properties = myPropertySheet.getProperties();
        if (properties != null) {
            LOGGER.debug("updateCustomAttributesPanel() "
                    + properties.length + " attributes to remove from panel");
            for (Property property : properties) {
                myPropertySheet.removeProperty(property);
            }
        } else {
            LOGGER.debug("updateCustomAttributesPanel() "
                    + "no attributes to remove from panel");
        }
        Collection<String> keys = myAccount.getUserDefinedAttributeKeys();
        LOGGER.debug("updateCustomAttributesPanel() #UserDefinedAttributeKeys=" + keys.size());
        for (String key : keys) {
            DefaultProperty property = new DefaultProperty();
            property.setName(key);
            property.setDisplayName(key);
            property.setEditable(true);
            property.setType(String.class);
            property.setValue(myAccount.getUserDefinedAttribute(key));
            myPropertySheet.addProperty(property);
        }
        // remove the dummy-slots created in GnucashObjectImpl (cannot save empty xml-slots-entity)
        if (myPropertySheet.getProperties().length > 0
                && myPropertySheet.getProperties()[0].getName().equals("dummy")) {
            myPropertySheet.removeProperty(myPropertySheet.getProperties()[0]);
        }
    }

    /**
     * @return The Panel with the controls to add a custom attribute.
     */
    private JPanel getAddCustomAttrPanel() {
        if (myAddCustomAttrPanel == null) {
            final int rowCount = 3;
            myAddCustomAttrPanel = new JPanel(new GridLayout(rowCount, 2));
            myAddCustomAttrPanel.add(new JLabel("Name:"));
            myAddCustomAttrPanel.add(getCustomAttributeName());
            myAddCustomAttrPanel.add(new JLabel("Value:"));
            myAddCustomAttrPanel.add(getCustomAttributeValue());
            myAddCustomAttrPanel.add(new JLabel(""));
            myAddCustomAttrPanel.add(getAddCustomAttributeButton());
        }
        return myAddCustomAttrPanel;
    }

    /**
     * @return The text-field to enter the name of a new custom attribute.
     */
    private JTextField getCustomAttributeName() {
        if (myCustomAttributeName == null) {
            myCustomAttributeName = new JTextField();
        }
        return myCustomAttributeName;
    }

    /**
     * @return The text-field to enter the value of a new custom attribute.
     */
    private JTextField getCustomAttributeValue() {
        if (myCustomAttributeValue == null) {
            myCustomAttributeValue = new JTextField();
        }
        return myCustomAttributeValue;
    }

    /**
     * @return the button to add a new custom attribute.
     */
    private JButton getAddCustomAttributeButton() {
        if (myAddCustomAttributeButton == null) {
            myAddCustomAttributeButton = new JButton("add");
            myAddCustomAttributeButton.setEnabled(myAccount instanceof GnucashWritableAccount);
            myAddCustomAttributeButton.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(final ActionEvent aE) {
                    try {
                        LOGGER.debug("adding user-defined attribute '"
                                + getCustomAttributeName().getText() + "' to '"
                                + getCustomAttributeValue().getText() + "'");
                        myAccount.setUserDefinedAttribute(getCustomAttributeName().getText(), getCustomAttributeValue().getText());
                        updateCustomAttributesPanel();
                    } catch (Exception e) {
                        LOGGER.error("error in updateCustomAttributesPanel", e);
                    }
                }
            });
        }
        return myAddCustomAttributeButton;
    }

    /**
     * @return Popup-menu on properties.
     */
    protected JPopupMenu getPropertyPopup() {
        if (myPropertyMenu == null) {
            myPropertyMenu = new JPopupMenu();
            myPropertyMenu.add(getRemoveMenuItem());
        }
        return myPropertyMenu;
    }

    /**
     * @return The menu-item in the {@link #myPropertyMenu} to remove a custom attribute.
     */
    private JMenuItem getRemoveMenuItem() {
        if (myRemoveMenuItem == null) {
            myRemoveMenuItem = new JMenuItem("remove");
            myRemoveMenuItem.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(final ActionEvent aE) {
                    int selectedRow = myPropertySheet.getTable().getSelectedRow();
                    PropertySheetTableModel model = ((PropertySheetTableModel) myPropertySheet.getTable().getModel());
                    LOGGER.debug("selected for deletion: #" + selectedRow + " of " + model.getProperties().length);
                    // Fix: guard against no selection (-1) and row 0 - the
                    // "selectedRow - 1" lookup below would otherwise throw an
                    // ArrayIndexOutOfBoundsException. The -1 offset presumably
                    // accounts for a non-property header row - TODO confirm.
                    if (selectedRow < 1 || selectedRow > model.getProperties().length) {
                        LOGGER.debug("no removable property selected");
                        return;
                    }
                    Property property = model.getProperties()[selectedRow - 1];
                    LOGGER.debug("selected for deletion: " + property.getName());
                    model.removeProperty(property);
                }
            });
        }
        return myRemoveMenuItem;
    }

    /* (non-Javadoc)
     * @see java.awt.datatransfer.ClipboardOwner#lostOwnership(java.awt.datatransfer.Clipboard, java.awt.datatransfer.Transferable)
     */
    @Override
    public void lostOwnership(final Clipboard arg0, final Transferable arg1) {
        // ignored - we do not care when another application takes the clipboard
    }
}
| |
package shijimi.gui.launch;
import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CancellationException;
import javax.swing.Icon;
import javax.swing.ImageIcon;
import javax.swing.SwingUtilities;
import shijimi.base.NotificationCenter;
import shijimi.base.ReflectionShell;
import shijimi.gui.resource.GuiResources;
import shijimi.gui.swing.ApplicationIconSetter;
import shijimi.gui.swing.SearchPopupPane;
import shijimi.gui.swing.SearchPopupPane.SearchPopupItem;
import shijimi.json.JsonFromArgumentsMapper;
import shijimi.json.JsonShell;
import shijimi.launch.eclipse.ClasspathConfig;
import shijimi.launch.JavaCommandLine;
import shijimi.launch.eclipse.LaunchConfig;
import shijimi.launch.eclipse.ProjectConfig;
import shijimi.launch.eclipse.WorkspaceConfig;
import shijimi.launch.eclipse.ProjectConfig.ProjectClassConfig;
/**
 * Search-model over an eclipse workspace: finds launch-configurations and
 * compiled classes by (camel-case aware) name fragments and can launch the
 * matching JavaCommandLine.
 */
public class LaunchSearchModel implements SearchPopupPane.SearchPopupModel {
    /** The workspace whose launch-configs and classes are searched; may be null until configured. */
    protected WorkspaceConfig workspace;
    /** Cache of icons keyed by launch-config / class-config object. */
    protected Map<Object, Icon> icons = new HashMap<Object, Icon>();
    /** The currently selected popup-item. */
    protected SearchPopupItem selected;
    /** Persisted settings (currently only the workspace directory). */
    protected Setting setting;
    /** JSON helper used for loading/saving the settings from the preferences. */
    protected JsonShell jSh = JsonShell.v();
    /** If false, only launch-configs (not .class files) are searched. */
    protected boolean searchClasses = true;
    /** The pane we notify when the settings change; may be null. */
    protected SearchPopupPane pane;

    /**
     * Command-line entry point: resolve the workspace (from -workspace/-w or
     * the saved preferences), search for the first bare argument and run the
     * best-matching launch-config with the remaining arguments.
     */
    @SuppressWarnings("unchecked")
    public static void main(String[] args) {
        LaunchSearchModel m = new LaunchSearchModel();
        Map<String, Object> argMap = new JsonFromArgumentsMapper(args).mapFromArgs();
        String w = (String) argMap.get("workspace");
        if (w == null) {
            w = (String) argMap.get("w");
        }
        if (w != null) {
            m.setWorkspaceDir(new File(w));
        } else {
            m.load();
        }
        List<String> nameAndParams = (List<String>) argMap.get("");
        // Fix: no bare argument means there is nothing to search for;
        // previously this dereferenced null / called subList on an empty list.
        if (nameAndParams == null || nameAndParams.isEmpty()) {
            System.exit(0);
        }
        List<String> params = nameAndParams.subList(1, nameAndParams.size());
        List<SearchPopupItem> is = m.searchLaunchConfigs(nameAndParams.get(0));
        if (is.size() > 0) {
            JavaCommandLine com = m.getCommandLine(is.get(0));
            if (com != null) {
                if (params.size() > 0) {
                    com.setArguments(params);
                }
                m.setMacAppName(com, m.getName(is.get(0)));
                com.run();
            }
        }
        System.exit(0);
    }

    public LaunchSearchModel() {
        setting = new Setting(this);
    }

    @Override
    public void setPane(SearchPopupPane pane) {
        this.pane = pane;
    }

    public void setSearchClasses(boolean searchClasses) {
        this.searchClasses = searchClasses;
    }

    public boolean isSearchClasses() {
        return searchClasses;
    }

    public void setWorkspace(WorkspaceConfig workspace) {
        this.workspace = workspace;
    }

    public WorkspaceConfig getWorkspace() {
        return workspace;
    }

    public void setWorkspaceDir(File dir) {
        WorkspaceConfig w = new WorkspaceConfig(dir);
        setWorkspace(w);
    }

    public Setting getSetting() {
        return setting;
    }

    /** Load the settings from the java preferences. */
    public void load() {
        jSh.loadAndMapFromPrefs(setting, LaunchSearchModel.class);
    }

    /** Save the settings to the java preferences. */
    public void save() {
        jSh.saveAndMapToPrefs(setting, LaunchSearchModel.class);
    }

    /** Notify the attached pane (on the EDT) that the settings changed. */
    public void settingUpdated() {
        SwingUtilities.invokeLater(new Runnable() {public void run() {
            if (pane != null) {
                pane.contentEdited(true);
            }
        }});
    }

    //@Override
    public Iterable<SearchPopupItem> getCandidates(String text) {
        List<SearchPopupItem> os = new ArrayList<SearchPopupItem>();
        try {
            searchLaunchConfigs(text, os);
            if (!text.equals("") && isSearchClasses()) {
                searchProjectClasses(text, os);
            }
        } catch (RuntimeException e) {
            NotificationCenter.v().addException(e);
        }
        return os;
    }

    /**
     * Abort the current search if this thread was interrupted.
     * (Name kept - including its misspelling - for compatibility with callers.)
     */
    public void checkInterrution() {
        if (Thread.interrupted()) {
            throw new CancellationException();
        }
    }

    /**
     * Add all launch-configs whose name or main-type matches the text to confs.
     * An all-lowercase query is upper-cased first so that e.g. "abc" matches
     * the camel-case fragments "A", "B", "C".
     */
    public void searchLaunchConfigs(String text, List<SearchPopupItem> confs) {
        text = text.trim();
        boolean alllower = true;
        for (char c : text.toCharArray()) {
            if (Character.isUpperCase(c)) {
                alllower = false;
                break;
            }
        }
        if (alllower) { //abc => ABC
            text = text.toUpperCase();
        }
        List<String> texts = ReflectionShell.v().divideForSearch(text);
        if (workspace == null) {
            return;
        }
        for (LaunchConfig launch : workspace.getLaunchConfigs()) {
            checkInterrution();
            String name = launch.getName();
            boolean matched = false;
            if (match(name, texts)) {
                matched = true;
            }
            checkInterrution();
            if (!matched) {
                String main = launch.getAttrMainType();
                if (match(main, texts)) {
                    matched = true;
                }
            }
            if (matched) {
                confs.add(new LaunchSearchItem(launch, getIcon(launch)));
            }
        }
    }

    /**
     * @return true if every query-fragment prefix-matches (in order) one of
     *         the fragments of name; false for a null name.
     */
    public boolean match(String name, List<String> texts) {
        if (name == null) {
            return false;
        }
        List<String> names = ReflectionShell.v().divideForSearch(name);
        boolean fail = false;
        Iterator<String> ni = names.iterator();
        for (String tc : texts) {
            boolean found = false;
            while (ni.hasNext()) {
                String nc = ni.next();
                if (nc.trim().toLowerCase().startsWith(tc.trim().toLowerCase())) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                fail = true;
                break;
            }
        }
        return !fail;
    }

    /**
     * Search all class-files in all output-directories of all projects of the
     * workspace for names matching the text.
     */
    public void searchProjectClasses(String text, List<SearchPopupItem> cs) {
        text = text.trim();
        List<String> texts = ReflectionShell.v().divideForSearch(text);
        // Fix: searchLaunchConfigs guards against a missing workspace, but
        // this method did not and threw a NullPointerException.
        if (workspace == null) {
            return;
        }
        for (ProjectConfig project : workspace.getProjects()) {
            checkInterrution();
            ClasspathConfig cp = project.getClasspath();
            if (cp != null) {
                for (ClasspathConfig.ClasspathEntryConfig e : cp.getEntries()) {
                    if (e.isOutput()) {
                        File dir = e.getClasspathFile();
                        NotificationCenter.v().add("search " + dir);
                        searchProjectClasses(dir, texts, null, project, cs);
                    }
                }
            }
        }
    }

    /**
     * Recursively collect matching .class files under file, building the
     * dotted package path as it descends. Hidden (dot-prefixed) entries are
     * skipped.
     */
    public void searchProjectClasses(File file, List<String> texts, String path, ProjectConfig project, List<SearchPopupItem> result) {
        if (file == null) {
            return;
        }
        String name = file.getName();
        if (name.startsWith(".")) {
            return;
        }
        checkInterrution();
        if (file.isFile()) {
            if (name.endsWith(".class")) {
                int i = name.lastIndexOf('.');
                String clsName = name.substring(0, i);
                if (path != null && !path.equals("")) {
                    clsName = path + "." + clsName;
                }
                if (match(clsName, texts)) {
                    ProjectClassConfig conf = project.getClassConfig(clsName);
                    result.add(new ClassSearchItem(conf, getIcon(conf)));
                }
            }
        } else if (file.isDirectory()) {
            String nextPath;
            if (path == null) {
                nextPath = "";
            } else if (path.equals("")) {
                nextPath = name;
            } else {
                nextPath = path + "." + name;
            }
            // Fix: listFiles() returns null for unreadable directories;
            // iterating it directly threw a NullPointerException.
            File[] children = file.listFiles();
            if (children != null) {
                for (File i : children) {
                    searchProjectClasses(i, texts, nextPath, project, result);
                }
            }
        }
    }

    @Override
    public void select(SearchPopupItem item) {
        selected = item;
    }

    @Override
    public SearchPopupItem getSelection() {
        return selected;
    }

    /**
     * @return a cached or freshly generated icon for the given config object;
     *         the no-file icon for null.
     */
    public Icon getIcon(Object obj) {
        if (obj == null) {
            return GuiResources.v().getNofileIcon();
        }
        Icon cached = icons.get(obj);
        if (cached != null) {
            return cached;
        }
        String name = null;
        if (obj instanceof LaunchConfig) {
            name = ((LaunchConfig) obj).getName();
        } else if (obj instanceof ProjectClassConfig) {
            name = ReflectionShell.v().getPackageAndClassNames(((ProjectClassConfig) obj).getClassName())[1];
        }
        Icon icon;
        if (name != null) {
            ApplicationIconSetter s = new ApplicationIconSetter();
            icon = new ImageIcon(s.getApplicationIcon(name, 16, s.getNameColorHue(name)));
        } else {
            icon = GuiResources.v().getNofileIcon();
        }
        icons.put(obj, icon);
        return icon;
    }

    /**
     * Launch the first launch-config matching the text.
     * @return true if a match was found and launched
     */
    public boolean launchTopOne(String text) {
        List<SearchPopupItem> os = searchLaunchConfigs(text);
        if (os.size() >= 1) {
            launch(os.get(0));
            return true;
        } else {
            return false;
        }
    }

    public List<SearchPopupItem> searchLaunchConfigs(String text) {
        List<SearchPopupItem> os = new ArrayList<SearchPopupItem>();
        searchLaunchConfigs(text, os);
        return os;
    }

    public boolean launchSelection() {
        SearchPopupItem item = getSelection();
        return launch(item);
    }

    /**
     * Run the command-line behind the item.
     * @return true if the item resolved to a command-line
     */
    public boolean launch(SearchPopupItem item) {
        JavaCommandLine com = getCommandLine(item);
        String name = getName(item);
        if (com != null) {
            setMacAppName(com, name);
            run(com);
            return true;
        } else {
            return false;
        }
    }

    /** @return the command-line for a launch- or class-item, null otherwise. */
    public JavaCommandLine getCommandLine(SearchPopupItem item) {
        if (item != null) {
            if (item instanceof LaunchSearchItem) {
                LaunchSearchItem conf = (LaunchSearchItem) item;
                return conf.getConf().toCommandLine();
            } else if (item instanceof ClassSearchItem) {
                ClassSearchItem conf = (ClassSearchItem) item;
                return conf.getConf().getCommandLine();
            }
        }
        return null;
    }

    /** @return the display-name for a launch- or class-item, null otherwise. */
    public String getName(SearchPopupItem item) {
        if (item != null) {
            if (item instanceof LaunchSearchItem) {
                LaunchSearchItem conf = (LaunchSearchItem) item;
                return conf.getConf().getName();
            } else if (item instanceof ClassSearchItem) {
                ClassSearchItem conf = (ClassSearchItem) item;
                return conf.getConf().getName();
            }
        }
        return null;
    }

    /** On Mac OS X, set the dock-name of the launched process. */
    public void setMacAppName(JavaCommandLine com, String name) {
        if (name != null && System.getProperty("os.name", "").indexOf("Mac OS X") != -1) {
            com.getVmArguments().add("-Xdock:name=" + name);
        }
    }

    public void run(JavaCommandLine com) {
        com.run();
    }

    /** A popup-item wrapping an eclipse launch-configuration. */
    public static class LaunchSearchItem implements SearchPopupItem {
        protected LaunchConfig conf;
        protected Icon icon;
        public LaunchSearchItem(LaunchConfig conf, Icon icon) {
            this.conf = conf;
            this.icon = icon;
        }
        @Override
        public String getName() {
            return conf.getName();
        }
        @Override
        public String getCategory() {
            return "Launch Config";
        }
        @Override
        public Icon getIcon() {
            return icon;
        }
        public LaunchConfig getConf() {
            return conf;
        }
    }

    /** A popup-item wrapping a compiled class of a project. */
    public static class ClassSearchItem implements SearchPopupItem {
        protected ProjectClassConfig conf;
        protected Icon icon;
        protected String name;
        public ClassSearchItem(ProjectClassConfig conf, Icon icon) {
            this.conf = conf;
            this.icon = icon;
            name = conf.getClassName() + " - " + conf.getProject().getDir().getName();
        }
        @Override
        public String getName() {
            return name;
        }
        @Override
        public String getCategory() {
            return "Class Config";
        }
        @Override
        public Icon getIcon() {
            return icon;
        }
        public ProjectClassConfig getConf() {
            return conf;
        }
    }

    /** Settings bean mapped to/from the java preferences via JsonShell. */
    public static class Setting {
        protected LaunchSearchModel model;
        public Setting(LaunchSearchModel model) {
            this.model = model;
        }
        public void setWorkspace(File workspace) {
            model.setWorkspaceDir(workspace);
            model.settingUpdated();
        }
        public File getWorkspace() {
            WorkspaceConfig conf = model.getWorkspace();
            if (conf != null) {
                return conf.getDir();
            } else {
                return null;
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.cas.pge;
//OODT static imports
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.ACTION_IDS;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.ATTEMPT_INGEST_ALL;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.CRAWLER_CONFIG_FILE;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.CRAWLER_CRAWL_FOR_DIRS;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.CRAWLER_RECUR;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.DUMP_METADATA;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.FILE_STAGER;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.INGEST_CLIENT_TRANSFER_SERVICE_FACTORY;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.INGEST_FILE_MANAGER_URL;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.LOG_FILENAME_PATTERN;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.MET_FILE_EXT;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.NAME;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.MIME_EXTRACTOR_REPO;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.PGE_CONFIG_BUILDER;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.PGE_RUNTIME;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.PROPERTY_ADDERS;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.REQUIRED_METADATA;
import static org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys.WORKFLOW_MANAGER_URL;
import static org.apache.oodt.cas.pge.metadata.PgeTaskStatus.CONF_FILE_BUILD;
import static org.apache.oodt.cas.pge.metadata.PgeTaskStatus.CRAWLING;
import static org.apache.oodt.cas.pge.metadata.PgeTaskStatus.RUNNING_PGE;
import static org.apache.oodt.cas.pge.metadata.PgeTaskStatus.STAGING_INPUT;
import static org.apache.oodt.cas.pge.util.GenericPgeObjectFactory.createConfigFilePropertyAdder;
import static org.apache.oodt.cas.pge.util.GenericPgeObjectFactory.createFileStager;
import static org.apache.oodt.cas.pge.util.GenericPgeObjectFactory.createPgeConfigBuilder;
import static org.apache.oodt.cas.pge.util.GenericPgeObjectFactory.createSciPgeConfigFileWriter;
//JDK imports
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URL;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import java.util.regex.Pattern;
//Apache imports
import org.apache.commons.lang.Validate;
//OODT imports
import org.apache.oodt.cas.crawl.AutoDetectProductCrawler;
import org.apache.oodt.cas.crawl.ProductCrawler;
import org.apache.oodt.cas.crawl.StdProductCrawler;
import org.apache.oodt.cas.crawl.status.IngestStatus;
import org.apache.oodt.cas.metadata.Metadata;
import org.apache.oodt.cas.metadata.SerializableMetadata;
import org.apache.oodt.cas.metadata.filenaming.PathUtilsNamingConvention;
import org.apache.oodt.cas.pge.config.DynamicConfigFile;
import org.apache.oodt.cas.pge.config.OutputDir;
import org.apache.oodt.cas.pge.config.PgeConfig;
import org.apache.oodt.cas.pge.config.RegExprOutputFiles;
import org.apache.oodt.cas.pge.config.XmlFilePgeConfigBuilder;
import org.apache.oodt.cas.pge.metadata.PgeMetadata;
import org.apache.oodt.cas.pge.metadata.PgeTaskMetKeys;
import org.apache.oodt.cas.pge.staging.FileManagerFileStager;
import org.apache.oodt.cas.pge.staging.FileStager;
import org.apache.oodt.cas.pge.writers.PcsMetFileWriter;
import org.apache.oodt.cas.pge.writers.SciPgeConfigFileWriter;
import org.apache.oodt.cas.workflow.metadata.CoreMetKeys;
import org.apache.oodt.cas.workflow.structs.WorkflowTaskConfiguration;
import org.apache.oodt.cas.workflow.structs.WorkflowTaskInstance;
import org.apache.oodt.cas.workflow.structs.exceptions.WorkflowTaskInstanceException;
import org.apache.oodt.cas.workflow.system.XmlRpcWorkflowManagerClient;
import org.apache.oodt.cas.workflow.util.ScriptFile;
import org.apache.oodt.commons.exec.ExecUtils;
//Spring imports
import org.springframework.context.support.FileSystemXmlApplicationContext;
//Google imports
import com.google.common.base.Splitter;
import com.google.common.base.Strings;
import com.google.common.collect.Lists;
/**
* Runs a CAS-style Product Generation Executive based on the PCS Wrapper
* Architecture from mattmann et al. on OCO.
*
* @author mattmann (Chris Mattmann)
* @author bfoster (Brian Foster)
*/
public class PGETaskInstance implements WorkflowTaskInstance {

   /**
    * Class-level logger; replaced with a workflow-instance-specific logger
    * (see {@link #createLogger()}) once the workflow instance id is known.
    */
   protected Logger logger = Logger.getLogger(PGETaskInstance.class.getName());

   /** Client used to report status and metadata updates to the Workflow Manager. */
   protected XmlRpcWorkflowManagerClient wm;

   /** Id of the workflow instance this task runs under. */
   protected String workflowInstId;

   /** Merged static (task configuration) and dynamic (workflow context) metadata. */
   protected PgeMetadata pgeMetadata;

   /** Parsed CAS-PGE configuration (exe dir, commands, output dirs, staging info). */
   protected PgeConfig pgeConfig;

   protected PGETaskInstance() {}

   /**
    * Runs this CAS-PGE task end-to-end: builds configuration, stages input
    * files, executes the PGE, crawls/ingests its products, and commits
    * dynamic metadata back to the Workflow Manager.
    *
    * @param metadata the dynamic workflow context metadata
    * @param config the static workflow task configuration
    * @throws WorkflowTaskInstanceException if any phase fails
    */
   @Override
   public void run(Metadata metadata, WorkflowTaskConfiguration config)
         throws WorkflowTaskInstanceException {
      try {
         // Initialize CAS-PGE.
         pgeMetadata = createPgeMetadata(metadata, config);
         pgeConfig = createPgeConfig();
         runPropertyAdders();
         wm = createWorkflowManagerClient();
         workflowInstId = getWorkflowInstanceId();
         logger = createLogger(); // use workflow ID specific logger from now on

         // Write out PgeMetadata.
         dumpMetadataIfRequested();

         // Setup the PGE.
         createExeDir();
         createOuputDirsIfRequested();
         updateStatus(CONF_FILE_BUILD.getWorkflowStatusName());
         createDynamicConfigFiles();
         updateStatus(STAGING_INPUT.getWorkflowStatusName());
         stageFiles();

         // Run the PGE.
         runPge();

         // Ingest products.
         runIngestCrawler(createProductCrawler());

         // Commit dynamic metadata.
         updateDynamicMetadata();
      } catch (Exception e) {
         logger.log(Level.SEVERE, "PGETask FAILED!!! : " + e.getMessage(), e);
         throw new WorkflowTaskInstanceException("PGETask FAILED!!! : "
               + e.getMessage(), e);
      }
   }

   /**
    * Reports the given status string to the Workflow Manager.
    *
    * @param status the workflow status name to set
    * @throws Exception if the Workflow Manager rejects the update
    */
   protected void updateStatus(String status) throws Exception {
      logger.info("Updating status to workflow as [" + status + "]");
      if (!wm.updateWorkflowInstanceStatus(workflowInstId, status)) {
         throw new Exception(
               "Failed to update workflow status : client returned false");
      }
   }

   /**
    * Creates a workflow-instance-specific logger writing to a file under
    * {@code <exeDir>/logs}.
    *
    * NOTE(review): the FileHandler is never closed; the JVM flushes it at
    * exit, but long-lived processes may accumulate open handles - confirm.
    *
    * @return the per-instance logger
    * @throws Exception if the logs directory cannot be created
    */
   protected Logger createLogger() throws Exception {
      File logDir = new File(pgeConfig.getExeDir(), "logs");
      if (!(logDir.exists() || logDir.mkdirs())) {
         throw new Exception("mkdirs for logs directory return false");
      }

      Logger logger = Logger.getLogger(PGETaskInstance.class.getName()
            + "." + workflowInstId);
      FileHandler handler = new FileHandler(
            new File(logDir, createLogFileName()).getAbsolutePath());
      handler.setEncoding("UTF-8");
      handler.setFormatter(new SimpleFormatter());
      logger.addHandler(handler);
      return logger;
   }

   /**
    * Determines the log file name: the LOG_FILENAME_PATTERN metadata value if
    * set, otherwise {@code <name>.<currentTimeMillis>.log}.
    *
    * @return the log file name
    */
   protected String createLogFileName() throws Exception {
      String filenamePattern = pgeMetadata.getMetadata(LOG_FILENAME_PATTERN);
      if (filenamePattern != null) {
         return filenamePattern;
      } else {
         return pgeMetadata.getMetadata(NAME) + "." + System.currentTimeMillis()
               + ".log";
      }
   }

   /**
    * Builds a {@link PgeMetadata} by converting the task configuration into
    * static metadata (splitting vector-typed keys on commas) and pairing it
    * with the dynamic workflow context metadata.
    *
    * @param dynMetadata the dynamic workflow metadata
    * @param config the static task configuration
    * @return the combined PgeMetadata
    */
   protected PgeMetadata createPgeMetadata(Metadata dynMetadata,
         WorkflowTaskConfiguration config) throws Exception {
      logger.info("Converting workflow configuration to static metadata...");
      Metadata staticMetadata = new Metadata();
      for (Object objKey : config.getProperties().keySet()) {
         String key = (String) objKey;
         PgeTaskMetKeys metKey = PgeTaskMetKeys.getByName(key);
         if (metKey != null && metKey.isVector()) {
            // Vector keys hold comma-separated lists; split into values.
            List<String> values = Lists.newArrayList(
                  Splitter.on(",").trimResults()
                        .omitEmptyStrings()
                        .split(config.getProperty(key)));
            logger.finest("Adding static metadata: key = [" + key
                  + "] value = " + values);
            staticMetadata.addMetadata(key, values);
         } else {
            String value = config.getProperty(key);
            logger.finest("Adding static metadata: key = [" + key
                  + "] value = [" + value + "]");
            staticMetadata.addMetadata(key, value);
         }
      }

      logger.info("Loading workflow context metadata...");
      for (String key : dynMetadata.getAllKeys()) {
         logger.finest(
               "Adding dynamic metadata: key = [" + key + "] value = "
                     + dynMetadata.getAllMetadata(key));
      }
      return new PgeMetadata(staticMetadata, dynMetadata);
   }

   /**
    * Builds the {@link PgeConfig}, using the PGE_CONFIG_BUILDER metadata key
    * to select a builder class, defaulting to {@link XmlFilePgeConfigBuilder}.
    *
    * @return the built PgeConfig
    */
   protected PgeConfig createPgeConfig() throws Exception {
      logger.info("Create PgeConfig...");
      String pgeConfigBuilderClass = pgeMetadata
            .getMetadata(PGE_CONFIG_BUILDER);
      if (pgeConfigBuilderClass != null) {
         logger.info("Using PgeConfigBuilder: " + pgeConfigBuilderClass);
         return createPgeConfigBuilder(pgeConfigBuilderClass, logger)
               .build(pgeMetadata);
      } else {
         logger.info("Using default PgeConfigBuilder: "
               + XmlFilePgeConfigBuilder.class.getCanonicalName());
         return new XmlFilePgeConfigBuilder().build(pgeMetadata);
      }
   }

   /**
    * Loads and runs every property adder listed under the PROPERTY_ADDERS
    * metadata key, if any.
    *
    * @throws Exception if any adder fails to load or run
    */
   protected void runPropertyAdders() throws Exception {
      try {
         logger.info("Loading/Running property adders...");
         List<String> propertyAdders = pgeMetadata
               .getAllMetadata(PROPERTY_ADDERS);
         if (propertyAdders != null) {
            for (String propertyAdder : propertyAdders) {
               runPropertyAdder(loadPropertyAdder(propertyAdder));
            }
         } else {
            logger.info("No property adders specified");
         }
      } catch (Exception e) {
         throw new Exception("Failed to instantiate/run Property Adders : "
               + e.getMessage(), e);
      }
   }

   /**
    * Instantiates a property adder from its classpath name.
    *
    * @param propertyAdderClasspath fully-qualified adder class name
    * @return the instantiated adder
    */
   protected ConfigFilePropertyAdder loadPropertyAdder(
         String propertyAdderClasspath) throws Exception {
      logger.fine("Loading property adder: " + propertyAdderClasspath);
      return createConfigFilePropertyAdder(propertyAdderClasspath, logger);
   }

   /**
    * Runs one property adder against this task's metadata.
    *
    * @param propAdder the adder to run
    */
   protected void runPropertyAdder(ConfigFilePropertyAdder propAdder)
         throws Exception {
      logger.info("Running property adder: "
            + propAdder.getClass().getCanonicalName());
      propAdder.addConfigProperties(pgeMetadata,
            pgeConfig.getPropertyAdderCustomArgs());
   }

   /**
    * Creates the Workflow Manager client from the WORKFLOW_MANAGER_URL
    * metadata key.
    *
    * @return the XML-RPC client
    * @throws Exception if the URL is missing or malformed
    */
   protected XmlRpcWorkflowManagerClient createWorkflowManagerClient()
         throws Exception {
      String url = pgeMetadata.getMetadata(WORKFLOW_MANAGER_URL);
      logger.info("Creating WorkflowManager client for url [" + url + "]");
      Validate.notNull(url, "Must specify " + WORKFLOW_MANAGER_URL);
      return new XmlRpcWorkflowManagerClient(new URL(url));
   }

   /**
    * Reads the workflow instance id from metadata.
    *
    * @return the instance id
    * @throws Exception if the id is not set
    */
   protected String getWorkflowInstanceId() throws Exception {
      String instanceId = pgeMetadata.getMetadata(CoreMetKeys.WORKFLOW_INST_ID);
      logger.info("Workflow instanceId is [" + instanceId + "]");
      Validate.notNull(instanceId, "Must specify "
            + CoreMetKeys.WORKFLOW_INST_ID);
      return instanceId;
   }

   /**
    * Serializes the combined metadata to an XML file in the exe dir when the
    * DUMP_METADATA metadata key is true.
    */
   protected void dumpMetadataIfRequested() throws Exception {
      if (Boolean.parseBoolean(pgeMetadata
            .getMetadata(DUMP_METADATA))) {
         new SerializableMetadata(pgeMetadata.asMetadata())
               .writeMetadataToXmlStream(new FileOutputStream(
                     getDumpMetadataPath()));
      }
   }

   /** @return absolute path of the metadata dump file inside the exe dir. */
   protected String getDumpMetadataPath() throws Exception {
      return new File(pgeConfig.getExeDir()).getAbsolutePath() + "/"
            + getDumpMetadataName();
   }

   /** @return file name used for the metadata dump. */
   protected String getDumpMetadataName() throws Exception {
      return "pgetask-metadata.xml";
   }

   /**
    * Creates the PGE execution working directory if it does not exist.
    *
    * @throws Exception if directory creation fails
    */
   protected void createExeDir() throws Exception {
      logger.info("Creating PGE execution working directory: ["
            + pgeConfig.getExeDir() + "]");
      File executionDir = new File(pgeConfig.getExeDir());
      if (!(executionDir.exists() || executionDir.mkdirs())) {
         throw new Exception("mkdirs returned false for creating ["
               + pgeConfig.getExeDir() + "]");
      }
   }

   /**
    * Creates each configured output directory that is flagged to be created
    * before PGE execution.
    *
    * @throws Exception if any directory creation fails
    */
   protected void createOuputDirsIfRequested() throws Exception {
      for (OutputDir outputDir : pgeConfig.getOuputDirs()) {
         if (outputDir.isCreateBeforeExe()) {
            logger.info("Creating PGE file ouput directory: ["
                  + outputDir.getPath() + "]");
            File dir = new File(outputDir.getPath());
            if (!(dir.exists() || dir.mkdirs())) {
               throw new Exception("mkdir returned false for creating ["
                     + outputDir.getPath() + "]");
            }
         }
      }
   }

   /**
    * Stages input files via the configured {@link FileStager}, if any file
    * staging info is present.
    */
   protected void stageFiles() throws Exception {
      if (pgeConfig.getFileStagingInfo() != null) {
         FileStager fileStager = getFileStager();
         logger.info("Starting file staging...");
         fileStager.stageFiles(
               pgeConfig.getFileStagingInfo(), pgeMetadata, logger);
      } else {
         logger.info("No files to stage.");
      }
   }

   /**
    * Selects the {@link FileStager}: the class named by the FILE_STAGER
    * metadata key, defaulting to {@link FileManagerFileStager}.
    *
    * @return the file stager
    */
   protected FileStager getFileStager() throws Exception {
      String fileStagerClass = pgeMetadata.getMetadata(FILE_STAGER);
      if (fileStagerClass != null) {
         logger.info("Loading FileStager [" + fileStagerClass + "]");
         return createFileStager(fileStagerClass, logger);
      } else {
         logger.info("Using default FileStager ["
               + FileManagerFileStager.class.getCanonicalName() + "]");
         return new FileManagerFileStager();
      }
   }

   /** Writes every dynamic (science PGE) config file declared in the PgeConfig. */
   protected void createDynamicConfigFiles() throws Exception {
      logger.info("Starting creation of sci pge config files...");
      for (DynamicConfigFile dynamicConfigFile : pgeConfig
            .getDynamicConfigFiles()) {
         createDynamicConfigFile(dynamicConfigFile);
      }
      logger.info("Successfully wrote all sci pge config files!");
   }

   /**
    * Writes one dynamic config file using its declared writer class.
    *
    * @param dynamicConfigFile the file spec (path, writer class, args)
    * @throws Exception if the parent dir cannot be created or the writer
    *         fails to produce the file
    */
   protected void createDynamicConfigFile(DynamicConfigFile dynamicConfigFile)
         throws Exception {
      Validate.notNull(dynamicConfigFile, "dynamicConfigFile cannot be null");

      logger.fine("Starting creation of sci pge config file ["
            + dynamicConfigFile.getFilePath() + "]...");

      // Create parent directory if it doesn't exist.
      File parentDir = new File(dynamicConfigFile.getFilePath())
            .getParentFile();
      if (!(parentDir.exists() || parentDir.mkdirs())) {
         throw new Exception("Failed to create directory where sci pge config file ["
               + dynamicConfigFile.getFilePath() + "] was to be written");
      }

      // Load writer and write file.
      logger.fine("Loading writer class for sci pge config file ["
            + dynamicConfigFile.getFilePath() + "]...");
      SciPgeConfigFileWriter writer = createSciPgeConfigFileWriter(
            dynamicConfigFile.getWriterClass(), logger);
      logger.fine("Loaded writer [" + writer.getClass().getCanonicalName()
            + "] for sci pge config file [" + dynamicConfigFile.getFilePath()
            + "]...");
      logger.info("Writing sci pge config file [" + dynamicConfigFile.getFilePath()
            + "]...");
      File configFile = writer.createConfigFile(dynamicConfigFile.getFilePath(),
            pgeMetadata.asMetadata(), dynamicConfigFile.getArgs());
      if (!configFile.exists()) {
         throw new Exception("Writer failed to create config file ["
               + configFile + "], exists returned false");
      }
   }

   /**
    * Builds the shell script that will execute the PGE's commands.
    *
    * @return the script file holding the configured exe commands
    */
   protected ScriptFile buildPgeRunScript() {
      logger.fine("Creating PGE run script for shell [" + pgeConfig.getShellType()
            + "] with contents " + pgeConfig.getExeCmds());
      ScriptFile sf = new ScriptFile(pgeConfig.getShellType());
      sf.setCommands(pgeConfig.getExeCmds());
      return sf;
   }

   /** @return the path where the run script will be written (inside the exe dir). */
   protected File getScriptPath() {
      File script = new File(pgeConfig.getExeDir(), getPgeScriptName());
      // Fixed typo in the log message ("with be" -> "will be").
      logger.fine("Script file will be written to [" + script + "]");
      return script;
   }

   /** @return the run script's file name, derived from the PGE's NAME metadata. */
   protected String getPgeScriptName() {
      String pgeScriptName = "sciPgeExeScript_" + pgeMetadata.getMetadata(NAME);
      logger.fine("Generated script file name [" + pgeScriptName + "]");
      return pgeScriptName;
   }

   /**
    * Writes the run script, executes it, verifies the exit code, and records
    * the PGE runtime under the PGE_RUNTIME metadata key.
    *
    * @throws Exception if the PGE exits non-zero or execution fails
    */
   protected void runPge() throws Exception {
      ScriptFile sf = null;
      try {
         long startTime = System.currentTimeMillis();
         logger.info("PGE start time [" + new Date(startTime) + "]");

         // create script to run
         sf = buildPgeRunScript();
         sf.writeScriptFile(getScriptPath().getAbsolutePath());

         // run script and evaluate whether success or failure
         updateStatus(RUNNING_PGE.getWorkflowStatusName());
         logger.info("Starting execution of PGE...");
         if (!wasPgeSuccessful(ExecUtils.callProgram(
               pgeConfig.getShellType() + " " + getScriptPath(), logger,
               new File(pgeConfig.getExeDir()).getAbsoluteFile()))) {
            throw new RuntimeException("Pge didn't finish successfully");
         } else {
            logger.info(
                  "Successfully completed running: '" + sf.getCommands() + "'");
         }

         long endTime = System.currentTimeMillis();
         // BUG FIX: previously logged new Date(startTime) as the end time.
         logger.info("PGE end time [" + new Date(endTime) + "]");

         long runTime = endTime - startTime;
         logger.info("PGE runtime in millis [" + runTime + "]");

         pgeMetadata.replaceMetadata(PGE_RUNTIME, Long.toString(runTime));

      } catch (Exception e) {
         throw new Exception("Exception when executing PGE commands '"
               + (sf != null ? sf.getCommands() : "NULL") + "' : "
               + e.getMessage(), e);
      }
   }

   /**
    * @param returnCode the PGE process exit code
    * @return true when the exit code signals success (zero)
    */
   protected boolean wasPgeSuccessful(int returnCode) {
      return returnCode == 0;
   }

   /**
    * Scans every output directory for created files and, for each file
    * matching a configured regex, runs the associated met writer (optionally
    * renaming the file first) and writes a companion metadata XML file.
    */
   protected void processOutput() throws FileNotFoundException, IOException {
      for (final OutputDir outputDir : this.pgeConfig.getOuputDirs()) {
         File[] createdFiles = new File(outputDir.getPath()).listFiles();
         if (createdFiles == null) {
            // BUG FIX: listFiles() returns null when the path is not a
            // directory or an I/O error occurs; previously this NPE'd.
            logger.warning("Could not list files in output dir ["
                  + outputDir.getPath() + "]; skipping");
            continue;
         }
         for (File createdFile : createdFiles) {
            Metadata outputMetadata = new Metadata();
            for (RegExprOutputFiles regExprFiles : outputDir
                  .getRegExprOutputFiles()) {
               if (Pattern.matches(regExprFiles.getRegExp(), createdFile
                     .getName())) {
                  try {
                     PcsMetFileWriter writer = (PcsMetFileWriter) Class
                           .forName(regExprFiles.getConverterClass())
                           .newInstance();
                     // If a renaming convention is configured, rename the
                     // file first and extract metadata from the new path.
                     outputMetadata.replaceMetadata(this.getMetadataForFile(
                           (regExprFiles.getRenamingConv() != null)
                                 ? createdFile = this.renameFile(createdFile, regExprFiles.getRenamingConv())
                                 : createdFile, writer, regExprFiles.getArgs()));
                  } catch (Exception e) {
                     logger.severe(
                           "Failed to create metadata file for '"
                                 + createdFile + "' : "
                                 + e.getMessage());
                  }
               }
            }
            if (outputMetadata.getAllKeys().size() > 0)
               this.writeFromMetadata(outputMetadata, createdFile.getAbsolutePath()
                     + "." + this.pgeMetadata.getMetadata(MET_FILE_EXT));
         }
      }
   }

   /**
    * Renames a product file per the given naming convention, using the
    * current task metadata (plus the convention's temp replacements).
    *
    * @param file the file to rename
    * @param renamingConv the naming convention to apply
    * @return the renamed file
    */
   protected File renameFile(File file, PathUtilsNamingConvention renamingConv)
         throws Exception {
      Metadata curMetadata = this.pgeMetadata.asMetadata();
      curMetadata.replaceMetadata(renamingConv.getTmpReplaceMet());
      return renamingConv.rename(file, curMetadata);
   }

   /**
    * Extracts metadata for one PGE-created data file via the given writer.
    */
   protected Metadata getMetadataForFile(File sciPgeCreatedDataFile,
         PcsMetFileWriter writer, Object[] args) throws Exception {
      return writer.getMetadataForFile(sciPgeCreatedDataFile,
            this.pgeMetadata, args);
   }

   /**
    * Serializes the given metadata to an XML met file at the given path.
    */
   protected void writeFromMetadata(Metadata metadata, String toMetFilePath)
         throws FileNotFoundException, IOException {
      new SerializableMetadata(metadata, "UTF-8", false)
            .writeMetadataToXmlStream(new FileOutputStream(toMetFilePath));
   }

   /**
    * Configures the product crawler: auto-detect (when MIME_EXTRACTOR_REPO is
    * set) or standard, plus transferer, filemgr URL, optional Spring config
    * with crawler actions, required metadata, and crawl options.
    *
    * @return the configured crawler
    */
   protected ProductCrawler createProductCrawler() throws Exception {
      /* create a ProductCrawler based on whether or not the output dir specifies a MIME_EXTRACTOR_REPO */
      logger.info("Configuring ProductCrawler...");
      ProductCrawler crawler = null;
      if (pgeMetadata.getMetadata(MIME_EXTRACTOR_REPO) != null &&
          !pgeMetadata.getMetadata(MIME_EXTRACTOR_REPO).equals("")){
         crawler = new AutoDetectProductCrawler();
         ((AutoDetectProductCrawler)crawler).
            setMimeExtractorRepo(pgeMetadata.getMetadata(MIME_EXTRACTOR_REPO));
      }
      else{
         crawler = new StdProductCrawler();
      }

      crawler.setClientTransferer(pgeMetadata
            .getMetadata(INGEST_CLIENT_TRANSFER_SERVICE_FACTORY));
      crawler.setFilemgrUrl(pgeMetadata.getMetadata(INGEST_FILE_MANAGER_URL));
      String crawlerConfigFile = pgeMetadata.getMetadata(CRAWLER_CONFIG_FILE);
      if (!Strings.isNullOrEmpty(crawlerConfigFile)) {
         crawler.setApplicationContext(
               new FileSystemXmlApplicationContext(crawlerConfigFile));
         List<String> actionIds = pgeMetadata.getAllMetadata(ACTION_IDS);
         if (actionIds != null) {
            crawler.setActionIds(actionIds);
         }
      }
      crawler.setRequiredMetadata(pgeMetadata.getAllMetadata(REQUIRED_METADATA));
      crawler.setCrawlForDirs(Boolean.parseBoolean(pgeMetadata
            .getMetadata(CRAWLER_CRAWL_FOR_DIRS)));
      crawler.setNoRecur(!Boolean.parseBoolean(
            pgeMetadata.getMetadata(CRAWLER_RECUR)));
      logger.fine(
            "Passing Workflow Metadata to CAS-Crawler as global metadata . . .");
      crawler.setGlobalMetadata(pgeMetadata.asMetadata(PgeMetadata.Type.DYNAMIC));
      logger.fine("Created ProductCrawler ["
            + crawler.getClass().getCanonicalName() + "]");
      return crawler;
   }

   /**
    * Crawls every output directory for products. For a StdProductCrawler,
    * met files are generated first via {@link #processOutput()}. Ingests are
    * verified per-directory, or once at the end when ATTEMPT_INGEST_ALL is
    * set (so one failure doesn't abort the remaining directories).
    *
    * @param crawler the configured product crawler
    * @throws Exception if any ingest failed verification
    */
   protected void runIngestCrawler(ProductCrawler crawler) throws Exception {
      // Determine if we need to create Metadata files
      if (crawler instanceof StdProductCrawler){
         this.processOutput();
      }

      // Determine directories to crawl.
      List<File> crawlDirs = new LinkedList<File>();
      for (OutputDir outputDir : pgeConfig.getOuputDirs()) {
         crawlDirs.add(new File(outputDir.getPath()));
      }

      // Start crawlin...
      updateStatus(CRAWLING.getWorkflowStatusName());
      boolean attemptIngestAll = Boolean.parseBoolean(pgeMetadata
            .getMetadata(ATTEMPT_INGEST_ALL));
      for (File crawlDir : crawlDirs) {
         logger.info("Crawling for products in [" + crawlDir + "]");
         crawler.crawl(crawlDir);
         if (!attemptIngestAll) {
            verifyIngests(crawler);
         }
      }
      if (attemptIngestAll) {
         verifyIngests(crawler);
      }
   }

   /**
    * Checks the crawler's ingest statuses: any FAILURE aborts with an
    * exception that aggregates all failure messages; non-SUCCESS,
    * non-FAILURE results (e.g. skipped) are only logged as warnings.
    *
    * @param crawler the crawler whose ingest statuses to verify
    * @throws Exception if any ingest failed
    */
   protected void verifyIngests(ProductCrawler crawler) throws Exception {
      logger.info("Verifying ingests successful...");
      boolean ingestsSuccess = true;
      String exceptionMsg = "";
      for (IngestStatus status : crawler.getIngestStatus()) {
         if (status.getResult().equals(IngestStatus.Result.FAILURE)) {
            exceptionMsg += (exceptionMsg.equals("") ? "" : " : ")
                  + "Failed to ingest product [file='"
                  + status.getProduct().getAbsolutePath() + "',result='"
                  + status.getResult() + "',msg='" + status.getMessage() + "']";
            ingestsSuccess = false;
         } else if (!status.getResult().equals(IngestStatus.Result.SUCCESS)) {
            logger.warning("Product was not ingested [file='"
                  + status.getProduct().getAbsolutePath() + "',result='"
                  + status.getResult() + "',msg='" + status.getMessage() + "']");
         }
      }
      if (!ingestsSuccess) {
         throw new Exception(exceptionMsg);
      } else {
         logger.info("Ingests were successful");
      }
   }

   /**
    * Commits marked dynamic metadata keys and pushes the resulting dynamic
    * metadata back to the Workflow Manager.
    */
   protected void updateDynamicMetadata() throws Exception {
      pgeMetadata.commitMarkedDynamicMetadataKeys();
      wm.updateMetadataForWorkflow(workflowInstId,
            pgeMetadata.asMetadata(PgeMetadata.Type.DYNAMIC));
   }
}
| |
/*
* Copyright 2016-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://aws.amazon.com/apache2.0
*
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.datamodeling;
import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.services.dynamodbv2.datamodeling.StandardAnnotationMaps.AnnotationMap;
import com.amazonaws.util.StringUtils;
import java.lang.reflect.Field;
import java.lang.reflect.Method;
import java.util.LinkedHashMap;
import java.util.Map;
/**
* Reflection assistant for {@link DynamoDBMapper}
*/
@SdkInternalApi
final class StandardBeanProperties {

    /**
     * Gets the bean properties for a given class.
     *
     * Iterates the class's getters and builds a name-to-Bean mapping,
     * expanding flattened properties via {@link #flatten}.
     *
     * @param clazz The class.
     * @return The bean properties.
     */
    static final <T,V> Map<String,Bean<T,V>> of(final Class<T> clazz) {
        final Map<String,Bean<T,V>> map = new LinkedHashMap<String,Bean<T,V>>();
        for (final Method m : ReflectionUtils.getters(clazz)) {
            // Getters inherited from a superclass are only mapped when that
            // superclass is itself annotated as a table or document.
            if (m.getDeclaringClass() != clazz) {
                final AnnotationMap annotations = StandardAnnotationMaps.of(m.getDeclaringClass());
                if (annotations.table() == null && annotations.document() == null) {
                    continue;
                }
            }
            final String name = ReflectionUtils.getFieldNameByGetter(m, true);
            // Raw construction is deliberate: the actual value type varies per
            // getter and cannot be expressed as V here (see also flatten()).
            final Bean<T,V> bean = new Bean(m, new MethodReflect(m));
            flatten(bean, bean.annotations.attributeName(name), map);
        }
        return map;
    }

    /**
     * Flattens or adds the bean to the mapping.
     *
     * Ignored beans are dropped. Flattened beans are expanded: each nested
     * getter named in the flattened-attributes map is recursively registered
     * under its override name, reached through a DeclaringMethodReflect so
     * get/set traverse the nested object. Otherwise the bean is put into the
     * map, rejecting duplicate names and ALWAYS-generated keys.
     *
     * Note: both the {@code bean} and {@code name} parameters are reassigned
     * inside the flattened branch.
     *
     * @param bean The bean property.
     * @param name The attribute name override.
     * @param map The bean mappings.
     */
    private static <T,V> void flatten(Bean<T,V> bean, String name, final Map<String,Bean<T,V>> map) {
        if (bean.annotations.ignored()) {
            return;
        } else if (bean.annotations.flattened() != null) {
            final Map<String,String> attributes = bean.annotations.attributes();
            final Reflect<T,T> declaring = (Reflect<T,T>)bean.reflect;
            for (final Method m : ReflectionUtils.getters(bean.reflect.valueType())) {
                name = ReflectionUtils.getFieldNameByGetter(m, true);
                // Consume the mapping entry; anything left over afterwards is
                // an unknown flattened attribute and is reported below.
                if ((name = attributes.remove(name)) != null) {
                    bean = new Bean(m, new DeclaringMethodReflect(m, declaring));
                    flatten(bean, name, map);
                }
            }
            if (!attributes.isEmpty()) { //<- this should be empty by now
                throw new DynamoDBMappingException("contains unknown flattened attribute(s): " + attributes);
            }
        } else if (map.put(name, bean) != null) {
            throw new DynamoDBMappingException("must not duplicate attribute named " + name);
        } else if (bean.annotations.keyType() != null && bean.getGenerateStrategy() == DynamoDBAutoGenerateStrategy.ALWAYS) {
            throw new DynamoDBMappingException("must not have auto-generated key with ALWAYS strategy");
        }
    }

    /**
     * Holds the reflection bean properties for a given property.
     */
    static final class Bean<T,V> implements DynamoDBAutoGenerator<V>, Reflect<T,V> {
        // Auto-generator derived from the annotations; may be null when the
        // property is not auto-generated (see getGenerateStrategy()).
        private final DynamoDBAutoGenerator<V> generator;
        private final MethodReflect<T,V> reflect;
        private final AnnotationMap annotations;

        /**
         * Constructs an object property mapping for the specified method.
         * @param getter The getter method.
         * @param reflect The reflection property.
         */
        private Bean(final Method getter, final MethodReflect<T,V> reflect) {
            this.annotations = StandardAnnotationMaps.of(getter, ReflectionUtils.getDeclaredFieldByGetter(getter));
            this.generator = annotations.autoGenerator(reflect.valueType());
            this.reflect = reflect;
        }

        /**
         * Gets the annotations map.
         * @return The annotations map.
         */
        final AnnotationMap annotations() {
            return this.annotations;
        }

        /**
         * Gets the property's value type.
         * @return The value type.
         */
        final Reflect<T,V> reflect() {
            return this.reflect;
        }

        /**
         * Gets the getter method for this property.
         * @return The getter method.
         */
        final Method getter() {
            return reflect.getter;
        }

        /**
         * Gets the setter method for this property.
         * @return The setter method.
         * @throws DynamoDBMappingException if no public one-argument setter exists.
         */
        final Method setter() {
            if (reflect.setter == null) {
                throw new DynamoDBMappingException("no access to public/one-argument setter for " + reflect.getter);
            }
            return reflect.setter;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public Class<V> valueType() {
            return reflect.valueType();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public V get(final T object) {
            return reflect.get(object);
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public void set(final T object, final V value) {
            reflect.set(object, value);
        }

        /**
         * {@inheritDoc}
         * Returns null when the property has no auto-generator.
         */
        @Override
        public final DynamoDBAutoGenerateStrategy getGenerateStrategy() {
            return generator == null ? null : generator.getGenerateStrategy();
        }

        /**
         * {@inheritDoc}
         * NOTE(review): throws NullPointerException if called when generator
         * is null; callers appear expected to check getGenerateStrategy() first.
         */
        @Override
        public final V generate(final V currentValue) {
            return generator.generate(currentValue);
        }
    }

    /**
     * Get/set reflection operations.
     * @param <T> The object type.
     * @param <V> The value type.
     */
    static interface Reflect<T,V> {
        /**
         * Gets the property's value type.
         * @return The value type.
         */
        public Class<V> valueType();

        /**
         * Gets the value from the object instance.
         * @param object The object instance.
         * @return The value.
         */
        public V get(T object);

        /**
         * Sets the value on the object instance.
         * @param object The object instance.
         * @param value The value.
         */
        public void set(T object, V value);
    }

    /**
     * Get/set reflection operations backed by a getter/setter method pair.
     */
    private static class MethodReflect<T,V> implements Reflect<T,V> {
        // setter may be null when the bean exposes no matching setter.
        private final Method getter, setter;

        private MethodReflect(final Method getter) {
            this.setter = ReflectionUtils.getDeclaredSetterByGetter(getter);
            this.getter = getter;
        }

        @Override
        public Class<V> valueType() {
            return (Class<V>)getter.getReturnType();
        }

        @Override
        public V get(final T object) {
            try {
                return (V)getter.invoke(object);
            } catch (final Exception e) {
                throw new DynamoDBMappingException("could not invoke " + getter + " on " + object.getClass(), e);
            }
        }

        @Override
        public void set(T object, final V value) {
            try {
                setter.invoke(object, value);
            } catch (final Exception e) {
                // A null setter surfaces here as a caught NullPointerException;
                // the check below distinguishes "no setter" from a real
                // invocation failure.
                if (setter == null) {
                    throw new DynamoDBMappingException("no access to public/one-argument setter for " + getter);
                }
                throw new DynamoDBMappingException("could not invoke " + setter + " on " + object.getClass(), e);
            }
        }
    }

    /**
     * Get/set reflection operations with a declaring property; used for
     * flattened properties, where access goes through the nested object.
     */
    private static class DeclaringMethodReflect<T,V> extends MethodReflect<T,V> {
        // Reflect for the nested (flattened) object holding this property.
        private final Reflect<T,T> declaring;

        private DeclaringMethodReflect(final Method getter, final Reflect<T,T> declaring) {
            super(getter);
            this.declaring = declaring;
        }

        @Override
        public V get(final T object) {
            // Null nested object means the property is absent.
            final T declaringObject = declaring.get(object);
            if (declaringObject == null) {
                return null;
            }
            return super.get(declaringObject);
        }

        @Override
        public void set(final T object, final V value) {
            T declaringObject = declaring.get(object);
            if (declaringObject == null) {
                // Lazily instantiate the nested object before setting into it.
                try {
                    declaringObject = declaring.valueType().newInstance();
                } catch (final Exception e) {
                    throw new DynamoDBMappingException("could not instantiate " + declaring.valueType(), e);
                }
                declaring.set(object, declaringObject);
            }
            super.set(declaringObject, value);
        }
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Skills;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2017-2022 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Skill_Shush extends StdSkill
{
	@Override
	public String ID()
	{
		return "Skill_Shush";
	}

	private final static String localizedName = CMLib.lang().L("Shush");

	@Override
	public String name()
	{
		return localizedName;
	}

	private final static String localizedStaticDisplay = CMLib.lang().L("(Feeling shushed)");

	@Override
	public String displayText()
	{
		return localizedStaticDisplay;
	}

	@Override
	protected int canAffectCode()
	{
		return CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return CAN_MOBS;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_MALICIOUS;
	}

	private static final String[] triggerStrings = I(new String[] { "SHUSH" });

	@Override
	public String[] triggerStrings()
	{
		return triggerStrings;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_SKILL | Ability.DOMAIN_INFLUENTIAL;
	}

	@Override
	public int maxRange()
	{
		return adjustedMaxInvokerRange(1);
	}

	@Override
	public int usageType()
	{
		return USAGE_MANA|USAGE_MOVEMENT;
	}

	@Override
	public long flags()
	{
		return super.flags() | Ability.FLAG_MINDALTERING;
	}

	/**
	 * While shushed: any non-whisper SPEAK from the affected mob is converted
	 * into a forced WHISPER command (preserving the target, if any) and the
	 * original message is blocked; verbal spellcasting is blocked outright.
	 */
	@Override
	public boolean okMessage(final Environmental myHost, final CMMsg msg)
	{
		if(msg.source()==affected)
		{
			// Only intercept speech that isn't already a whisper.
			if((msg.sourceMinor()==CMMsg.TYP_SPEAK)
			&&(msg.sourceMessage()!=null)
			&&(msg.sourceMessage().indexOf(L(" whisper(s) "))<0))
			{
				final String say=CMStrings.getSayFromMessage(msg.sourceMessage());
				if(say!=null)
				{
					if(msg.target()!=null)
					{
						// Redirect targeted speech as a whisper to the same target.
						CMLib.commands().forceStandardCommand(msg.source(), "WHISPER", new XVector<String>(new String[]{
							"WHISPER",msg.target().Name(),say
						}));
					}
					else
					{
						CMLib.commands().forceStandardCommand(msg.source(), "WHISPER", new XVector<String>(new String[]{
							"WHISPER",say
						}));
					}
				}
				// Block the original SPEAK message.
				return false;
			}
			else
			// Verbal spell components require speaking aloud; disallow.
			if((msg.sourceMinor()==CMMsg.TYP_CAST_SPELL)
			&&(msg.sourceMajor(CMMsg.MSK_CAST_VERBAL)))
			{
				msg.source().tell(L("You don't feel comfortable making loud noises right now."));
				return false;
			}
		}
		return super.okMessage(myHost, msg);
	}

	/**
	 * The effect ends as soon as the affected mob enters combat.
	 */
	@Override
	public boolean tick(final Tickable ticking, final int tickID)
	{
		if(!super.tick(ticking, tickID))
			return false;
		if((affected instanceof MOB)
		&&(((MOB)affected).isInCombat()))
			unInvoke();
		return true;
	}

	@Override
	public void unInvoke()
	{
		if(!(affected instanceof MOB))
			return;
		final MOB mob=(MOB)affected;

		super.unInvoke();
		if(canBeUninvoked())
			mob.tell(L("You feel free to speak again."));
	}

	/**
	 * Not worth casting while the invoker is already in combat.
	 */
	@Override
	public int castingQuality(final MOB mob, final Physical target)
	{
		if((mob!=null)&&(target!=null))
		{
			if(mob.isInCombat())
				return Ability.QUALITY_INDIFFERENT;
		}
		return super.castingQuality(mob,target);
	}

	@Override
	public boolean invoke(final MOB mob, final List<String> commands, final Physical givenTarget, final boolean auto, final int asLevel)
	{
		final Room R=mob.location();
		final MOB target=this.getTarget(mob,commands,givenTarget);
		if((target==null)||(R==null))
			return false;

		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;

		// Opposed check: target wisdom vs invoker charisma, scaled by expertise.
		final int twis = target.charStats().getStat(CharStats.STAT_WISDOM);
		final int scha = mob.charStats().getStat(CharStats.STAT_CHARISMA);
		final boolean success=(!target.isInCombat()) && proficiencyCheck(mob,-((twis-scha)*2)+getXLEVELLevel(mob),auto);

		// Snapshot everyone's combat victim so any combat-targeting side
		// effects of the justice message below can be undone afterwards.
		final Map<MOB,MOB> vics=new HashMap<MOB,MOB>();
		for(final Enumeration<MOB> m=R.inhabitants();m.hasMoreElements();)
		{
			final MOB M=m.nextElement();
			vics.put(M, M.getVictim());
		}
		// If a shopkeeper based in this room sells books/readables (a library
		// or bookstore), shushing is treated as non-malicious.
		boolean bookDealer=false;
		for(int m=0;m<R.numInhabitants();m++)
		{
			final MOB M=R.fetchInhabitant(m);
			if((M instanceof ShopKeeper)
			&&(M.getStartRoom()==R))
			{
				if((((ShopKeeper)M).isSold(ShopKeeper.DEAL_BOOKS))
				||(((ShopKeeper)M).isSold(ShopKeeper.DEAL_READABLES)))
				{
					bookDealer=true;
				}
			}
		}
		final int malicious=bookDealer?0:CMMsg.MASK_MALICIOUS;
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,malicious|CMMsg.MASK_HANDS|CMMsg.TYP_JUSTICE|(auto?CMMsg.MASK_ALWAYS:0),auto?"":L("<S-NAME> shush(es) <T-NAMESELF>."));
			if(R.okMessage(mob,msg))
			{
				R.send(mob,msg);
				if(bookDealer)
					// NOTE(review): (3*x)/3 == x; possibly intended to differ
					// from the malicious branch's (2*x)/3 -- confirm.
					beneficialAffect(mob, target, asLevel, (adjustedLevel(mob,asLevel)/5)+1+((3*getXLEVELLevel(mob))/3));
				else
					maliciousAffect(mob,target,asLevel,(adjustedLevel(mob,asLevel)/10)+1+((2*getXLEVELLevel(mob))/3),CMMsg.TYP_MIND|(auto?CMMsg.MASK_ALWAYS:0));
				if(target.fetchEffect(ID())!=null)
					R.show(target,null,CMMsg.MSG_OK_VISUAL,L("<S-NAME> <S-IS-ARE> shushed!"));
			}
		}
		else
			return maliciousFizzle(mob,target,L("<S-NAME> shush(es) <T-NAMESELF>, but <T-NAME> just seem(s) annoyed."));
		// Restore the pre-invocation victims captured above.
		for(final Enumeration<MOB> m=R.inhabitants();m.hasMoreElements();)
		{
			final MOB M=m.nextElement();
			final MOB vicM=vics.get(M);
			if((vicM != null) && (M.getVictim() != vicM))
				M.setVictim(vicM);
		}
		return success;
	}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.redshift.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* The snapshot copy grant that grants Amazon Redshift permission to encrypt copied snapshots with the specified
* customer master key (CMK) from AWS KMS in the destination region.
* </p>
* <p>
* For more information about managing snapshot copy grants, go to <a
* href="https://docs.aws.amazon.com/redshift/latest/mgmt/working-with-db-encryption.html">Amazon Redshift Database
* Encryption</a> in the <i>Amazon Redshift Cluster Management Guide</i>.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/SnapshotCopyGrant" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SnapshotCopyGrant implements Serializable, Cloneable {

    /**
     * <p>
     * The name of the snapshot copy grant.
     * </p>
     */
    private String snapshotCopyGrantName;
    /**
     * <p>
     * The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted permission.
     * </p>
     */
    private String kmsKeyId;
    /**
     * <p>
     * A list of tag instances.
     * </p>
     */
    private com.amazonaws.internal.SdkInternalList<Tag> tags;

    /**
     * <p>
     * The name of the snapshot copy grant.
     * </p>
     *
     * @param snapshotCopyGrantName
     *        The name of the snapshot copy grant.
     */
    public void setSnapshotCopyGrantName(String snapshotCopyGrantName) {
        this.snapshotCopyGrantName = snapshotCopyGrantName;
    }

    /**
     * <p>
     * The name of the snapshot copy grant.
     * </p>
     *
     * @return The name of the snapshot copy grant.
     */
    public String getSnapshotCopyGrantName() {
        return this.snapshotCopyGrantName;
    }

    /**
     * <p>
     * The name of the snapshot copy grant.
     * </p>
     *
     * @param snapshotCopyGrantName
     *        The name of the snapshot copy grant.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withSnapshotCopyGrantName(String snapshotCopyGrantName) {
        setSnapshotCopyGrantName(snapshotCopyGrantName);
        return this;
    }

    /**
     * <p>
     * The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted permission.
     * </p>
     *
     * @param kmsKeyId
     *        The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     *        permission.
     */
    public void setKmsKeyId(String kmsKeyId) {
        this.kmsKeyId = kmsKeyId;
    }

    /**
     * <p>
     * The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted permission.
     * </p>
     *
     * @return The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     *         permission.
     */
    public String getKmsKeyId() {
        return this.kmsKeyId;
    }

    /**
     * <p>
     * The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted permission.
     * </p>
     *
     * @param kmsKeyId
     *        The unique identifier of the customer master key (CMK) in AWS KMS to which Amazon Redshift is granted
     *        permission.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withKmsKeyId(String kmsKeyId) {
        setKmsKeyId(kmsKeyId);
        return this;
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     * Note: lazily initializes (and thereby mutates) the internal list on first call; never returns {@code null}.
     *
     * @return A list of tag instances.
     */
    public java.util.List<Tag> getTags() {
        if (tags == null) {
            tags = new com.amazonaws.internal.SdkInternalList<Tag>();
        }
        return tags;
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     *
     * @param tags
     *        A list of tag instances.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        if (tags == null) {
            this.tags = null;
            return;
        }
        // Defensive copy so later mutation of the caller's collection cannot leak in.
        this.tags = new com.amazonaws.internal.SdkInternalList<Tag>(tags);
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setTags(java.util.Collection)} or {@link #withTags(java.util.Collection)} if you want to override the
     * existing values.
     * </p>
     *
     * @param tags
     *        A list of tag instances.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withTags(Tag... tags) {
        if (this.tags == null) {
            setTags(new com.amazonaws.internal.SdkInternalList<Tag>(tags.length));
        }
        for (Tag ele : tags) {
            this.tags.add(ele);
        }
        return this;
    }

    /**
     * <p>
     * A list of tag instances.
     * </p>
     *
     * @param tags
     *        A list of tag instances.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public SnapshotCopyGrant withTags(java.util.Collection<Tag> tags) {
        setTags(tags);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getSnapshotCopyGrantName() != null)
            sb.append("SnapshotCopyGrantName: ").append(getSnapshotCopyGrantName()).append(",");
        if (getKmsKeyId() != null)
            sb.append("KmsKeyId: ").append(getKmsKeyId()).append(",");
        if (getTags() != null)
            sb.append("Tags: ").append(getTags());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        // instanceof is null-safe, so no separate null check is needed.
        if (!(obj instanceof SnapshotCopyGrant))
            return false;
        SnapshotCopyGrant other = (SnapshotCopyGrant) obj;
        // Compare through the getters so the lazily-initialized tags list is
        // materialized consistently on both sides, exactly as before.
        return Objects.equals(getSnapshotCopyGrantName(), other.getSnapshotCopyGrantName())
                && Objects.equals(getKmsKeyId(), other.getKmsKeyId())
                && Objects.equals(getTags(), other.getTags());
    }

    @Override
    public int hashCode() {
        // Objects.hash applies the same 31-based combination (seeded with 1) that the
        // previous hand-rolled implementation used, so hash values are unchanged.
        return Objects.hash(getSnapshotCopyGrantName(), getKmsKeyId(), getTags());
    }

    @Override
    public SnapshotCopyGrant clone() {
        try {
            return (SnapshotCopyGrant) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }

}
| |
package course.labs.locationlab;
import java.util.ArrayList;
import android.app.ListActivity;
import android.content.Context;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.os.Bundle;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.TextView;
import android.widget.Toast;
public class PlaceViewActivity extends ListActivity implements LocationListener {

    // Readings older than this many milliseconds are considered stale.
    private static final long FIVE_MINS = 5 * 60 * 1000;

    private static final String TAG = "Lab-Location";

    // Freshest location reading seen so far (seeded from the provider cache,
    // then kept up to date by onLocationChanged()).
    private Location mLastLocationReading;

    private PlaceViewAdapter mAdapter;

    // default minimum time between new readings
    private long mMinTime = 5000;

    // default minimum distance between old and new readings.
    private float mMinDistance = 1000.0f;

    private LocationManager mLocationManager;

    // A fake location provider used for testing
    private MockLocationProvider mMockLocationProvider;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        mLocationManager = (LocationManager) getSystemService(Context.LOCATION_SERVICE);

        // This class is a ListActivity; its built-in ListView is backed by a
        // PlaceViewAdapter and gets a clickable footer defined in footer_view.xml.
        mAdapter = new PlaceViewAdapter(getApplicationContext());
        getListView().setFooterDividersEnabled(true);

        LayoutInflater inflater = (LayoutInflater) getApplicationContext().getSystemService(LAYOUT_INFLATER_SERVICE);
        TextView footerView = (TextView) inflater.inflate(R.layout.footer_view, null);
        getListView().addFooterView(footerView);

        // Footer clicks handle three cases:
        // 1) current location is new           -> download a new Place Badge
        // 2) location already has a badge      -> show a Toast
        // 3) no current location is available  -> log and do nothing
        footerView.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                log("Entered footerView.OnClickListener.onClick()");

                // NOTE(review): this refreshes from the provider cache on every
                // click, which can overwrite a fresher reading delivered through
                // onLocationChanged() -- confirm this is the intended behavior.
                if (mLocationManager != null) {
                    mLastLocationReading = mLocationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
                }
                if (mLastLocationReading != null) {
                    onLocationChanged(mLastLocationReading);
                    if (mAdapter.intersects(mLastLocationReading)) {
                        log("You already have this location badge");
                        Toast.makeText(PlaceViewActivity.this,
                                "You already have this location badge",
                                Toast.LENGTH_LONG).show();
                    } else {
                        log("Starting Place Download");
                        new PlaceDownloaderTask(PlaceViewActivity.this).execute(mLastLocationReading);
                    }
                } else {
                    log("Location data is not available");
                }
            }
        });

        getListView().setAdapter(mAdapter);
    }

    @Override
    protected void onResume() {
        super.onResume();

        mMockLocationProvider = new MockLocationProvider(LocationManager.NETWORK_PROVIDER, this);

        // Seed mLastLocationReading from the provider's cached fix, but only keep
        // it if that cached fix exists and is fresh (< 5 minutes old).
        // The previous code tested the age of mLastLocationReading itself and then
        // blindly wrapped getLastKnownLocation() -- which may return null -- in
        // new Location(...), risking a NullPointerException and never seeding an
        // initially-null reading at all.
        if (mLocationManager != null) {
            Location lastKnown = mLocationManager.getLastKnownLocation(LocationManager.NETWORK_PROVIDER);
            if (lastKnown != null && age(lastKnown) < FIVE_MINS) {
                mLastLocationReading = lastKnown;
            }
            // Register to receive location updates from NETWORK_PROVIDER.
            mLocationManager.requestLocationUpdates(LocationManager.NETWORK_PROVIDER, mMinTime, mMinDistance, this);
        }
    }

    @Override
    protected void onPause() {
        mMockLocationProvider.shutdown();
        // Stop receiving location updates while the Activity is not visible.
        mLocationManager.removeUpdates(this);
        super.onPause();
    }

    // Callback method used by PlaceDownloaderTask
    public void addNewPlace(PlaceRecord place) {
        log("Entered addNewPlace()");
        mAdapter.add(place);
    }

    @Override
    public void onLocationChanged(Location currentLocation) {
        // Keep only the newest reading:
        // 1) no previous reading               -> keep the current one
        // 2) current reading older than stored -> ignore it
        // 3) current reading newer than stored -> keep it
        // The previous code compared age(current) > age(last), which kept the
        // OLDER of the two readings (a newer fix has a SMALLER age).
        if (currentLocation == null) {
            return;
        }
        if (mLastLocationReading == null
                || currentLocation.getTime() > mLastLocationReading.getTime()) {
            mLastLocationReading = currentLocation;
        }
    }

    @Override
    public void onProviderDisabled(String provider) {
        // not implemented
    }

    @Override
    public void onProviderEnabled(String provider) {
        // not implemented
    }

    @Override
    public void onStatusChanged(String provider, int status, Bundle extras) {
        // not implemented
    }

    // Age of a reading in milliseconds relative to the current wall clock.
    private long age(Location location) {
        return System.currentTimeMillis() - location.getTime();
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        MenuInflater inflater = getMenuInflater();
        inflater.inflate(R.menu.main, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.print_badges:
            ArrayList<PlaceRecord> currData = mAdapter.getList();
            for (int i = 0; i < currData.size(); i++) {
                log(currData.get(i).toString());
            }
            return true;
        case R.id.delete_badges:
            mAdapter.removeAllViews();
            return true;
        case R.id.place_one:
            mMockLocationProvider.pushLocation(37.422, -122.084);
            return true;
        case R.id.place_invalid:
            mMockLocationProvider.pushLocation(0, 0);
            return true;
        case R.id.place_two:
            mMockLocationProvider.pushLocation(38.996667, -76.9275);
            return true;
        default:
            return super.onOptionsItemSelected(item);
        }
    }

    private static void log(String msg) {
        // NOTE(review): the 500 ms sleep looks like pacing for the course's
        // auto-grader -- confirm before removing it.
        try {
            Thread.sleep(500);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
        Log.i(TAG, msg);
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.gateway;
import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateApplier;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.metadata.MetaDataIndexUpgradeService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.routing.RoutingNode;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.IndexFolderUpgrader;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.index.Index;
import org.elasticsearch.plugins.MetaDataUpgrader;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.UnaryOperator;
import static java.util.Collections.emptySet;
import static java.util.Collections.unmodifiableSet;
// Persists cluster meta data (global state plus per-index state) on master-eligible
// and data nodes, and upgrades any on-disk state left by older versions at startup.
public class GatewayMetaState extends AbstractComponent implements ClusterStateApplier {

    private final NodeEnvironment nodeEnv;
    private final MetaStateService metaStateService;

    // Meta data of the last cluster state that was persisted without any failure;
    // null until the first fully successful write round (or after persistence was
    // disabled via a cluster block).
    @Nullable
    private volatile MetaData previousMetaData;

    // Indices whose on-disk state is known to be current after the last write round.
    private volatile Set<Index> previouslyWrittenIndices = emptySet();

    // Constructor performs startup-time upgrade of on-disk state: rejects pre-0.19
    // leftovers, upgrades index folders and meta data, and rewrites anything the
    // upgrade changed before the node joins the cluster.
    public GatewayMetaState(Settings settings, NodeEnvironment nodeEnv, MetaStateService metaStateService,
                            MetaDataIndexUpgradeService metaDataIndexUpgradeService, MetaDataUpgrader metaDataUpgrader) throws IOException {
        super(settings);
        this.nodeEnv = nodeEnv;
        this.metaStateService = metaStateService;

        if (DiscoveryNode.isDataNode(settings)) {
            ensureNoPre019ShardState(nodeEnv);
        }

        if (DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings)) {
            nodeEnv.ensureAtomicMoveSupported();
        }
        if (DiscoveryNode.isMasterNode(settings) || DiscoveryNode.isDataNode(settings)) {
            try {
                ensureNoPre019State();
                IndexFolderUpgrader.upgradeIndicesIfNeeded(settings, nodeEnv);
                final MetaData metaData = metaStateService.loadFullState();
                final MetaData upgradedMetaData = upgradeMetaData(metaData, metaDataIndexUpgradeService, metaDataUpgrader);
                // We finished global state validation and successfully checked all indices for backward compatibility
                // and found no non-upgradable indices, which means the upgrade can continue.
                // Now it's safe to overwrite global and index metadata.
                // Reference comparison: upgradeMetaData returns the same instance when nothing changed.
                if (metaData != upgradedMetaData) {
                    if (MetaData.isGlobalStateEquals(metaData, upgradedMetaData) == false) {
                        metaStateService.writeGlobalState("upgrade", upgradedMetaData);
                    }
                    for (IndexMetaData indexMetaData : upgradedMetaData) {
                        if (metaData.hasIndexMetaData(indexMetaData) == false) {
                            metaStateService.writeIndex("upgrade", indexMetaData);
                        }
                    }
                }
                // NOTE(review): this second loadFullState() appears to exist only to time
                // a fresh (post-upgrade) load for the debug log -- confirm before removing.
                long startNS = System.nanoTime();
                metaStateService.loadFullState();
                logger.debug("took {} to load state", TimeValue.timeValueMillis(TimeValue.nsecToMSec(System.nanoTime() - startNS)));
            } catch (Exception e) {
                logger.error("failed to read local state, exiting...", e);
                throw e;
            }
        }
    }

    // Loads the complete (global + per-index) state currently on disk.
    public MetaData loadMetaState() throws IOException {
        return metaStateService.loadFullState();
    }

    // Writes changed global state and changed index states to disk on every applied
    // cluster state. Only advances previousMetaData/previouslyWrittenIndices when
    // the whole round succeeded, so failed writes are retried on the next apply.
    @Override
    public void applyClusterState(ClusterChangedEvent event) {

        final ClusterState state = event.state();
        if (state.blocks().disableStatePersistence()) {
            // reset the current metadata, we need to start fresh...
            this.previousMetaData = null;
            previouslyWrittenIndices = emptySet();
            return;
        }

        MetaData newMetaData = state.metaData();
        // we don't check if metaData changed, since we might be called several times and we need to check dangling...
        Set<Index> relevantIndices = Collections.emptySet();
        boolean success = true;
        // write the state if this node is a master eligible node or if it is a data node and has shards allocated on it
        if (state.nodes().getLocalNode().isMasterNode() || state.nodes().getLocalNode().isDataNode()) {
            if (previousMetaData == null) {
                try {
                    // we determine if or if not we write meta data on data only nodes by looking at the shard routing
                    // and only write if a shard of this index is allocated on this node
                    // however, closed indices do not appear in the shard routing. if the meta data for a closed index is
                    // updated it will therefore not be written in case the list of previouslyWrittenIndices is empty (because state
                    // persistence was disabled or the node was restarted), see getRelevantIndicesOnDataOnlyNode().
                    // we therefore have to check here if we have shards on disk and add their indices to the previouslyWrittenIndices list
                    if (isDataOnlyNode(state)) {
                        Set<Index> newPreviouslyWrittenIndices = new HashSet<>(previouslyWrittenIndices.size());
                        for (IndexMetaData indexMetaData : newMetaData) {
                            IndexMetaData indexMetaDataOnDisk = null;
                            if (indexMetaData.getState().equals(IndexMetaData.State.CLOSE)) {
                                indexMetaDataOnDisk = metaStateService.loadIndexState(indexMetaData.getIndex());
                            }
                            if (indexMetaDataOnDisk != null) {
                                newPreviouslyWrittenIndices.add(indexMetaDataOnDisk.getIndex());
                            }
                        }
                        newPreviouslyWrittenIndices.addAll(previouslyWrittenIndices);
                        previouslyWrittenIndices = unmodifiableSet(newPreviouslyWrittenIndices);
                    }
                } catch (Exception e) {
                    // NOTE(review): the failure is swallowed here and only recorded via
                    // 'success', which blocks advancing previousMetaData below -- confirm
                    // whether this should also be logged.
                    success = false;
                }
            }
            // check if the global state changed?
            if (previousMetaData == null || !MetaData.isGlobalStateEquals(previousMetaData, newMetaData)) {
                try {
                    metaStateService.writeGlobalState("changed", newMetaData);
                } catch (Exception e) {
                    // Swallowed; 'success = false' keeps previousMetaData unchanged so the
                    // write is retried on the next applied cluster state.
                    success = false;
                }
            }
            relevantIndices = getRelevantIndices(event.state(), event.previousState(), previouslyWrittenIndices);
            final Iterable<IndexMetaWriteInfo> writeInfo = resolveStatesToBeWritten(previouslyWrittenIndices, relevantIndices, previousMetaData, event.state().metaData());
            // check and write changes in indices
            for (IndexMetaWriteInfo indexMetaWrite : writeInfo) {
                try {
                    metaStateService.writeIndex(indexMetaWrite.reason, indexMetaWrite.newMetaData);
                } catch (Exception e) {
                    // Swallowed for the same retry-on-next-apply reason as above.
                    success = false;
                }
            }
        }

        if (success) {
            previousMetaData = newMetaData;
            previouslyWrittenIndices = unmodifiableSet(relevantIndices);
        }
    }

    // Picks which indices need their state considered for writing, depending on the
    // local node's role (data-only vs master-eligible vs neither).
    public static Set<Index> getRelevantIndices(ClusterState state, ClusterState previousState, Set<Index> previouslyWrittenIndices) {
        Set<Index> relevantIndices;
        if (isDataOnlyNode(state)) {
            relevantIndices = getRelevantIndicesOnDataOnlyNode(state, previousState, previouslyWrittenIndices);
        } else if (state.nodes().getLocalNode().isMasterNode()) {
            relevantIndices = getRelevantIndicesForMasterEligibleNode(state);
        } else {
            relevantIndices = Collections.emptySet();
        }
        return relevantIndices;
    }

    // True when the local node holds data but is not master-eligible.
    protected static boolean isDataOnlyNode(ClusterState state) {
        return ((state.nodes().getLocalNode().isMasterNode() == false) && state.nodes().getLocalNode().isDataNode());
    }

    /**
     * Throws an IAE if a pre 0.19 state is detected
     */
    private void ensureNoPre019State() throws IOException {
        for (Path dataLocation : nodeEnv.nodeDataPaths()) {
            final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME);
            if (!Files.exists(stateLocation)) {
                continue;
            }
            try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation)) {
                for (Path stateFile : stream) {
                    if (logger.isTraceEnabled()) {
                        logger.trace("[upgrade]: processing [{}]", stateFile.getFileName());
                    }
                    final String name = stateFile.getFileName().toString();
                    // "metadata-" files are the pre-0.19 naming scheme.
                    if (name.startsWith("metadata-")) {
                        throw new IllegalStateException("Detected pre 0.19 metadata file please upgrade to a version before "
                            + Version.CURRENT.minimumIndexCompatibilityVersion()
                            + " first to upgrade state structures - metadata found: [" + stateFile.getParent().toAbsolutePath());
                    }
                }
            }
        }
    }

    /**
     * Elasticsearch 2.0 removed several deprecated features and as well as support for Lucene 3.x. This method calls
     * {@link MetaDataIndexUpgradeService} to makes sure that indices are compatible with the current version. The
     * MetaDataIndexUpgradeService might also update obsolete settings if needed.
     * Allows upgrading global custom meta data via {@link MetaDataUpgrader#customMetaDataUpgraders}
     *
     * @return input <code>metaData</code> if no upgrade is needed or an upgraded metaData
     */
    static MetaData upgradeMetaData(MetaData metaData,
                                    MetaDataIndexUpgradeService metaDataIndexUpgradeService,
                                    MetaDataUpgrader metaDataUpgrader) throws IOException {
        // upgrade index meta data
        boolean changed = false;
        final MetaData.Builder upgradedMetaData = MetaData.builder(metaData);
        for (IndexMetaData indexMetaData : metaData) {
            IndexMetaData newMetaData = metaDataIndexUpgradeService.upgradeIndexMetaData(indexMetaData,
                    Version.CURRENT.minimumIndexCompatibilityVersion());
            // Reference inequality signals that the upgrade service produced a new instance.
            changed |= indexMetaData != newMetaData;
            upgradedMetaData.put(newMetaData, false);
        }
        // upgrade global custom meta data
        if (applyPluginUpgraders(metaData.getCustoms(), metaDataUpgrader.customMetaDataUpgraders,
                upgradedMetaData::removeCustom,upgradedMetaData::putCustom)) {
            changed = true;
        }
        // upgrade current templates
        if (applyPluginUpgraders(metaData.getTemplates(), metaDataUpgrader.indexTemplateMetaDataUpgraders,
                upgradedMetaData::removeTemplate, (s, indexTemplateMetaData) -> upgradedMetaData.put(indexTemplateMetaData))) {
            changed = true;
        }
        return changed ? upgradedMetaData.build() : metaData;
    }

    // Runs a plugin-provided upgrader over a map of custom data (or templates);
    // returns true and rewrites the data through removeData/putData only when the
    // upgrader actually changed something.
    private static <Data> boolean applyPluginUpgraders(ImmutableOpenMap<String, Data> existingData,
                                                       UnaryOperator<Map<String, Data>> upgrader,
                                                       Consumer<String> removeData,
                                                       BiConsumer<String, Data> putData) {
        // collect current data
        Map<String, Data> existingMap = new HashMap<>();
        for (ObjectObjectCursor<String, Data> customCursor : existingData) {
            existingMap.put(customCursor.key, customCursor.value);
        }
        // upgrade global custom meta data
        Map<String, Data> upgradedCustoms = upgrader.apply(existingMap);
        if (upgradedCustoms.equals(existingMap) == false) {
            // remove all data first so a plugin can remove custom metadata or templates if needed
            existingMap.keySet().forEach(removeData);
            for (Map.Entry<String, Data> upgradedCustomEntry : upgradedCustoms.entrySet()) {
                putData.accept(upgradedCustomEntry.getKey(), upgradedCustomEntry.getValue());
            }
            return true;
        }
        return false;
    }

    // shard state BWC
    // Rejects startup when any pre-0.19 "shards-*" state file is still on disk.
    private void ensureNoPre019ShardState(NodeEnvironment nodeEnv) throws IOException {
        for (Path dataLocation : nodeEnv.nodeDataPaths()) {
            final Path stateLocation = dataLocation.resolve(MetaDataStateFormat.STATE_DIR_NAME);
            if (Files.exists(stateLocation)) {
                try (DirectoryStream<Path> stream = Files.newDirectoryStream(stateLocation, "shards-*")) {
                    for (Path stateFile : stream) {
                        throw new IllegalStateException("Detected pre 0.19 shard state file please upgrade to a version before "
                            + Version.CURRENT.minimumIndexCompatibilityVersion()
                            + " first to upgrade state structures - shard state found: [" + stateFile.getParent().toAbsolutePath());
                    }
                }
            }
        }
    }

    /**
     * Loads the current meta state for each index in the new cluster state and checks if it has to be persisted.
     * Each index state that should be written to disk will be returned. This is only run for data only nodes.
     * It will return only the states for indices that actually have a shard allocated on the current node.
     *
     * @param previouslyWrittenIndices    A list of indices for which the state was already written before
     * @param potentiallyUnwrittenIndices The list of indices for which state should potentially be written
     * @param previousMetaData            The last meta data we know of. meta data for all indices in previouslyWrittenIndices list is persisted now
     * @param newMetaData                 The new metadata
     * @return iterable over all indices states that should be written to disk
     */
    public static Iterable<GatewayMetaState.IndexMetaWriteInfo> resolveStatesToBeWritten(Set<Index> previouslyWrittenIndices, Set<Index> potentiallyUnwrittenIndices, MetaData previousMetaData, MetaData newMetaData) {
        List<GatewayMetaState.IndexMetaWriteInfo> indicesToWrite = new ArrayList<>();
        for (Index index : potentiallyUnwrittenIndices) {
            IndexMetaData newIndexMetaData = newMetaData.getIndexSafe(index);
            IndexMetaData previousIndexMetaData = previousMetaData == null ? null : previousMetaData.index(index);
            String writeReason = null;
            if (previouslyWrittenIndices.contains(index) == false || previousIndexMetaData == null) {
                writeReason = "freshly created";
            } else if (previousIndexMetaData.getVersion() != newIndexMetaData.getVersion()) {
                writeReason = "version changed from [" + previousIndexMetaData.getVersion() + "] to [" + newIndexMetaData.getVersion() + "]";
            }
            if (writeReason != null) {
                indicesToWrite.add(new GatewayMetaState.IndexMetaWriteInfo(newIndexMetaData, previousIndexMetaData, writeReason));
            }
        }
        return indicesToWrite;
    }

    // Relevant indices on a data-only node: every index with a shard routed here,
    // plus indices that are (or just were) closed and were previously written.
    public static Set<Index> getRelevantIndicesOnDataOnlyNode(ClusterState state, ClusterState previousState, Set<Index> previouslyWrittenIndices) {
        RoutingNode newRoutingNode = state.getRoutingNodes().node(state.nodes().getLocalNodeId());
        if (newRoutingNode == null) {
            throw new IllegalStateException("cluster state does not contain this node - cannot write index meta state");
        }
        Set<Index> indices = new HashSet<>();
        for (ShardRouting routing : newRoutingNode) {
            indices.add(routing.index());
        }
        // we have to check the meta data also: closed indices will not appear in the routing table, but we must still write the state if we have it written on disk previously
        for (IndexMetaData indexMetaData : state.metaData()) {
            boolean isOrWasClosed = indexMetaData.getState().equals(IndexMetaData.State.CLOSE);
            // if the index is open we might still have to write the state if it just transitioned from closed to open
            // so we have to check for that as well.
            IndexMetaData previousMetaData = previousState.metaData().index(indexMetaData.getIndex());
            if (previousMetaData != null) {
                isOrWasClosed = isOrWasClosed || previousMetaData.getState().equals(IndexMetaData.State.CLOSE);
            }
            if (previouslyWrittenIndices.contains(indexMetaData.getIndex()) && isOrWasClosed) {
                indices.add(indexMetaData.getIndex());
            }
        }
        return indices;
    }

    // Master-eligible nodes persist the state of every index in the cluster.
    public static Set<Index> getRelevantIndicesForMasterEligibleNode(ClusterState state) {
        Set<Index> relevantIndices;
        relevantIndices = new HashSet<>();
        // we have to iterate over the metadata to make sure we also capture closed indices
        for (IndexMetaData indexMetaData : state.metaData()) {
            relevantIndices.add(indexMetaData.getIndex());
        }
        return relevantIndices;
    }

    // Value object describing one index state write: the new meta data, the
    // previous meta data (may be null), and a human-readable reason string.
    public static class IndexMetaWriteInfo {
        final IndexMetaData newMetaData;
        final String reason;
        final IndexMetaData previousMetaData;

        public IndexMetaWriteInfo(IndexMetaData newMetaData, IndexMetaData previousMetaData, String reason) {
            this.newMetaData = newMetaData;
            this.reason = reason;
            this.previousMetaData = previousMetaData;
        }

        public IndexMetaData getNewMetaData() {
            return newMetaData;
        }

        public String getReason() {
            return reason;
        }
    }
}
| |
/*
* Copyright (c) jmelzer 2012.
* All rights reserved.
*/
package com.jmelzer.data.util;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
public class DateUtilsJm {
/**
* <p>Checks if two dates are on the same day ignoring time.</p>
* @param date1 the first date, not altered, not null
* @param date2 the second date, not altered, not null
* @return true if they represent the same day
* @throws IllegalArgumentException if either date is <code>null</code>
*/
/**
 * <p>Tells whether the two given dates fall on the same calendar day,
 * disregarding the time-of-day portion of both values.</p>
 * @param date1 the first date, not altered, not null
 * @param date2 the second date, not altered, not null
 * @return true if both dates fall on the same day
 * @throws IllegalArgumentException if either date is <code>null</code>
 */
public static boolean isSameDay(Date date1, Date date2) {
    if (date1 == null || date2 == null) {
        throw new IllegalArgumentException("The dates must not be null");
    }
    final Calendar first = Calendar.getInstance();
    final Calendar second = Calendar.getInstance();
    first.setTime(date1);
    second.setTime(date2);
    return isSameDay(first, second);
}
/**
* <p>Checks if two calendars represent the same day ignoring time.</p>
* @param cal1 the first calendar, not altered, not null
* @param cal2 the second calendar, not altered, not null
* @return true if they represent the same day
* @throws IllegalArgumentException if either calendar is <code>null</code>
*/
/**
 * <p>Tells whether two calendars point at the same calendar day,
 * ignoring their time-of-day fields.</p>
 * @param cal1 the first calendar, not altered, not null
 * @param cal2 the second calendar, not altered, not null
 * @return true if both calendars fall on the same day
 * @throws IllegalArgumentException if either calendar is <code>null</code>
 */
public static boolean isSameDay(Calendar cal1, Calendar cal2) {
    if (cal1 == null || cal2 == null) {
        throw new IllegalArgumentException("The dates must not be null");
    }
    // A day is identified by the (era, year, day-of-year) triple.
    final boolean sameEra = cal1.get(Calendar.ERA) == cal2.get(Calendar.ERA);
    final boolean sameYear = cal1.get(Calendar.YEAR) == cal2.get(Calendar.YEAR);
    final boolean sameDayOfYear = cal1.get(Calendar.DAY_OF_YEAR) == cal2.get(Calendar.DAY_OF_YEAR);
    return sameEra && sameYear && sameDayOfYear;
}
/**
* <p>Checks if a date is today.</p>
* @param date the date, not altered, not null.
* @return true if the date is today.
* @throws IllegalArgumentException if the date is <code>null</code>
*/
/**
 * <p>Tells whether the given date falls on today's calendar day.</p>
 * @param date the date, not altered, not null.
 * @return true if the date is today.
 * @throws IllegalArgumentException if the date is <code>null</code>
 */
public static boolean isToday(Date date) {
    final Date now = Calendar.getInstance().getTime();
    return isSameDay(date, now);
}
/**
* <p>Checks if a calendar date is today.</p>
* @param cal the calendar, not altered, not null
* @return true if cal date is today
* @throws IllegalArgumentException if the calendar is <code>null</code>
*/
/**
 * <p>Tells whether the given calendar's date falls on today's calendar day.</p>
 * @param cal the calendar, not altered, not null
 * @return true if cal date is today
 * @throws IllegalArgumentException if the calendar is <code>null</code>
 */
public static boolean isToday_(Calendar cal) {
    final Calendar now = Calendar.getInstance();
    return isSameDay(cal, now);
}
/**
* <p>Checks if the first date is before the second date ignoring time.</p>
* @param date1 the first date, not altered, not null
* @param date2 the second date, not altered, not null
* @return true if the first date day is before the second date day.
* @throws IllegalArgumentException if the date is <code>null</code>
*/
/**
 * <p>Tells whether {@code date1} falls on an earlier calendar day than
 * {@code date2}, disregarding the time-of-day portion of both values.</p>
 * @param date1 the first date, not altered, not null
 * @param date2 the second date, not altered, not null
 * @return true if the first date day is before the second date day.
 * @throws IllegalArgumentException if the date is <code>null</code>
 */
public static boolean isBeforeDay(Date date1, Date date2) {
    if (date1 == null || date2 == null) {
        throw new IllegalArgumentException("The dates must not be null");
    }
    final Calendar first = Calendar.getInstance();
    final Calendar second = Calendar.getInstance();
    first.setTime(date1);
    second.setTime(date2);
    return isBeforeDay(first, second);
}
/**
* <p>Checks if the first calendar date is before the second calendar date ignoring time.</p>
* @param cal1 the first calendar, not altered, not null.
* @param cal2 the second calendar, not altered, not null.
* @return true if cal1 date is before cal2 date ignoring time.
* @throws IllegalArgumentException if either of the calendars are <code>null</code>
*/
/**
 * <p>Tells whether {@code cal1} falls on an earlier calendar day than
 * {@code cal2}, ignoring the time-of-day fields of both.</p>
 * @param cal1 the first calendar, not altered, not null.
 * @param cal2 the second calendar, not altered, not null.
 * @return true if cal1 date is before cal2 date ignoring time.
 * @throws IllegalArgumentException if either of the calendars are <code>null</code>
 */
public static boolean isBeforeDay(Calendar cal1, Calendar cal2) {
    if (cal1 == null || cal2 == null) {
        throw new IllegalArgumentException("The dates must not be null");
    }
    // Compare (era, year, day-of-year) lexicographically.
    final int byEra = Integer.compare(cal1.get(Calendar.ERA), cal2.get(Calendar.ERA));
    if (byEra != 0) {
        return byEra < 0;
    }
    final int byYear = Integer.compare(cal1.get(Calendar.YEAR), cal2.get(Calendar.YEAR));
    if (byYear != 0) {
        return byYear < 0;
    }
    return cal1.get(Calendar.DAY_OF_YEAR) < cal2.get(Calendar.DAY_OF_YEAR);
}
/**
* <p>Checks if the first date is after the second date ignoring time.</p>
* @param date1 the first date, not altered, not null
* @param date2 the second date, not altered, not null
* @return true if the first date day is after the second date day.
* @throws IllegalArgumentException if the date is <code>null</code>
*/
public static boolean isAfterDay(Date date1, Date date2) {
        if (date1 == null || date2 == null) {
            throw new IllegalArgumentException("The dates must not be null");
        }
        // Convert both dates to calendars and delegate to the calendar overload.
        Calendar first = Calendar.getInstance();
        Calendar second = Calendar.getInstance();
        first.setTime(date1);
        second.setTime(date2);
        return isAfterDay(first, second);
    }
/**
* <p>Checks if the first calendar date is after the second calendar date ignoring time.</p>
* @param cal1 the first calendar, not altered, not null.
* @param cal2 the second calendar, not altered, not null.
* @return true if cal1 date is after cal2 date ignoring time.
* @throws IllegalArgumentException if either of the calendars are <code>null</code>
*/
public static boolean isAfterDay(Calendar cal1, Calendar cal2) {
        if (cal1 == null || cal2 == null) {
            throw new IllegalArgumentException("The dates must not be null");
        }
        // Compare era, then year, then day-of-year; the first non-tie decides.
        int eraCmp = Integer.compare(cal1.get(Calendar.ERA), cal2.get(Calendar.ERA));
        if (eraCmp != 0) {
            return eraCmp > 0;
        }
        int yearCmp = Integer.compare(cal1.get(Calendar.YEAR), cal2.get(Calendar.YEAR));
        if (yearCmp != 0) {
            return yearCmp > 0;
        }
        return cal1.get(Calendar.DAY_OF_YEAR) > cal2.get(Calendar.DAY_OF_YEAR);
    }
/**
* <p>Checks if a date is after today and within a number of days in the future.</p>
* @param date the date to check, not altered, not null.
* @param days the number of days.
 * @return true if the date day is after today and within days in the future.
* @throws IllegalArgumentException if the date is <code>null</code>
*/
public static boolean isWithinDaysFuture(Date date, int days) {
        if (date == null) {
            throw new IllegalArgumentException("The date must not be null");
        }
        // Convert to a calendar and delegate to the calendar overload.
        Calendar asCalendar = Calendar.getInstance();
        asCalendar.setTime(date);
        return isWithinDaysFuture(asCalendar, days);
    }
/**
* <p>Checks if a calendar date is after today and within a number of days in the future.</p>
* @param cal the calendar, not altered, not null
* @param days the number of days.
 * @return true if the calendar date day is after today and within days in the future.
* @throws IllegalArgumentException if the calendar is <code>null</code>
*/
public static boolean isWithinDaysFuture(Calendar cal, int days) {
        if (cal == null) {
            throw new IllegalArgumentException("The date must not be null");
        }
        // Window is (today, today + days]: strictly after today, but not after the limit.
        Calendar today = Calendar.getInstance();
        Calendar limit = Calendar.getInstance();
        limit.add(Calendar.DAY_OF_YEAR, days);
        boolean afterToday = isAfterDay(cal, today);
        boolean beyondLimit = isAfterDay(cal, limit);
        return afterToday && !beyondLimit;
    }
/** Returns the given date with the time set to the start of the day. */
public static Date getStart(Date date) {
        // Equivalent to clearTime(date): midnight (00:00:00.000) of the given day,
        // or null if the input is null.
        return clearTime(date);
    }
/** Returns the given date with the time values cleared. */
public static Date clearTime(Date date) {
        if (date == null) {
            return null;
        }
        Calendar cal = Calendar.getInstance();
        cal.setTime(date);
        // Zero out every time-of-day field, leaving only the date portion.
        int[] timeFields = { Calendar.HOUR_OF_DAY, Calendar.MINUTE, Calendar.SECOND, Calendar.MILLISECOND };
        for (int field : timeFields) {
            cal.set(field, 0);
        }
        return cal.getTime();
    }
/** Determines whether or not a date has any time values (hour, minute,
 * seconds or milliseconds) set to a non-zero value. */
/**
* Determines whether or not a date has any time values.
* @param date The date.
* @return true iff the date is not null and any of the date's hour, minute,
* seconds or millisecond values are greater than zero.
*/
public static boolean hasTime(Date date) {
        if (date == null) {
            return false;
        }
        Calendar c = Calendar.getInstance();
        c.setTime(date);
        // True as soon as any time-of-day field is non-zero.
        return c.get(Calendar.HOUR_OF_DAY) > 0
                || c.get(Calendar.MINUTE) > 0
                || c.get(Calendar.SECOND) > 0
                || c.get(Calendar.MILLISECOND) > 0;
    }
/** Returns the given date with time set to the end of the day */
public static Date getEnd(Date date) {
        if (date == null) {
            return null;
        }
        Calendar c = Calendar.getInstance();
        c.setTime(date);
        // Push every time-of-day field to its maximum: 23:59:59.999.
        int[][] endOfDay = {
                { Calendar.HOUR_OF_DAY, 23 },
                { Calendar.MINUTE, 59 },
                { Calendar.SECOND, 59 },
                { Calendar.MILLISECOND, 999 }
        };
        for (int[] fieldAndValue : endOfDay) {
            c.set(fieldAndValue[0], fieldAndValue[1]);
        }
        return c.getTime();
    }
/**
* Returns the maximum of two dates. A null date is treated as being less
* than any non-null date.
*/
public static Date max(Date d1, Date d2) {
        // A null date is treated as less than any non-null date.
        if (d1 == null) {
            return d2; // also covers the both-null case (returns null)
        }
        if (d2 == null) {
            return d1;
        }
        return d1.after(d2) ? d1 : d2;
    }
/**
* Returns the minimum of two dates. A null date is treated as being greater
* than any non-null date.
*/
public static Date min(Date d1, Date d2) {
        // A null date is treated as greater than any non-null date.
        if (d1 == null) {
            return d2; // also covers the both-null case (returns null)
        }
        if (d2 == null) {
            return d1;
        }
        return d1.before(d2) ? d1 : d2;
    }
/** The maximum date possible. Declared {@code final} so the reference cannot be
 * reassigned. Note that {@link Date} itself is mutable, so callers must not invoke
 * setters on this shared instance. */
public static final Date MAX_DATE = new Date(Long.MAX_VALUE);
public static String getActualYear() {
        // Current calendar year, rendered as a decimal string.
        int year = Calendar.getInstance().get(Calendar.YEAR);
        return Integer.toString(year);
    }
/**
     * Parses a date string written in the German date format (e.g. {@code "31.12.2020"})
     * and returns its epoch time in milliseconds.
     *
     * <p>Fix: the previous implementation used the JVM default locale, so "German"
     * input only parsed correctly on machines running a German locale. The locale is
     * now pinned to {@code Locale.GERMAN}.</p>
     *
     * @param date the date string to parse, not null
     * @return the epoch millis of the parsed date, or {@code 0} if the string could
     *         not be parsed (the failure is printed to stderr, preserving the legacy
     *         best-effort contract)
     */
public static long parseGermanTime(String date) {
        DateFormat format = DateFormat.getDateInstance(DateFormat.DEFAULT, java.util.Locale.GERMAN);
        try {
            return format.parse(date).getTime();
        } catch (ParseException e) {
            // Legacy behavior kept on purpose: log and return 0 instead of throwing.
            e.printStackTrace();
            return 0;
        }
    }
public static long diffSince(Date date) {
        // Millis from now to the given date: positive when the date lies in the
        // future, negative when it lies in the past.
        long nowMillis = new Date().getTime();
        return date.getTime() - nowMillis;
    }
}
| |
/*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.cassandra.db.lifecycle;
import java.io.File;
import java.nio.file.Path;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Iterables;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.db.compaction.OperationType;
import org.apache.cassandra.db.lifecycle.LogRecord.Type;
import org.apache.cassandra.io.sstable.SSTable;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.sstable.format.big.BigFormat;
import org.apache.cassandra.utils.Throwables;
import static org.apache.cassandra.utils.Throwables.merge;
/**
* A transaction log file. We store transaction records into a log file, which is
* copied into multiple identical replicas on different disks, @see LogFileReplica.
*
* This class supports the transactional logic of LogTransaction and the removing
* of unfinished leftovers when a transaction is completed, or aborted, or when
* we clean up on start-up.
*
* @see LogTransaction
*/
final class LogFile implements AutoCloseable
{
    private static final Logger logger = LoggerFactory.getLogger(LogFile.class);

    // Constants describing the txn log file name format. Declared final: they are
    // never reassigned, and a mutable static invites accidental writes.
    static final String EXT = ".log";
    static final char SEP = '_';
    // cc_txn_opname_id.log (where cc is one of the sstable versions defined in BigVersion)
    static final Pattern FILE_REGEX = Pattern.compile(String.format("^(.{2})_txn_(.*)_(.*)%s$", EXT));

    // A set of physical files on disk, each file is an identical replica
    private final LogReplicaSet replicas = new LogReplicaSet();

    // The transaction records, this set must be ORDER PRESERVING
    private final LinkedHashSet<LogRecord> records = new LinkedHashSet<>();

    // The type of the transaction
    private final OperationType type;

    // The unique id of the transaction
    private final UUID id;

    /** Builds a LogFile from a single on-disk replica, parsing type and id from its name. */
    static LogFile make(File logReplica)
    {
        return make(logReplica.getName(), Collections.singletonList(logReplica));
    }

    /** Builds a LogFile from a file name plus the identical replicas that carry it. */
    static LogFile make(String fileName, List<File> logReplicas)
    {
        Matcher matcher = LogFile.FILE_REGEX.matcher(fileName);
        boolean matched = matcher.matches();
        assert matched && matcher.groupCount() == 3;

        // For now we don't need this but it is there in case we need to change
        // file format later on, the version is the sstable version as defined in BigFormat
        //String version = matcher.group(1);

        OperationType operationType = OperationType.fromFileName(matcher.group(2));
        UUID id = UUID.fromString(matcher.group(3));
        return new LogFile(operationType, id, logReplicas);
    }

    /** Syncs the parent directories of all replicas, merging any failure into {@code accumulate}. */
    Throwable syncDirectory(Throwable accumulate)
    {
        return replicas.syncDirectory(accumulate);
    }

    OperationType type()
    {
        return type;
    }

    UUID id()
    {
        return id;
    }

    /**
     * Deletes the files this transaction obsoleted (the files of REMOVE records when
     * committed, of ADD records otherwise) and then the log replicas themselves.
     * Failures are merged into {@code accumulate}.
     */
    Throwable removeUnfinishedLeftovers(Throwable accumulate)
    {
        try
        {
            // we sync the parent directories before content deletion to ensure
            // any previously deleted files (see SSTableTider) are not
            // incorrectly picked up by record.getExistingFiles() in
            // deleteRecordFiles(), see CASSANDRA-12261
            Throwables.maybeFail(syncDirectory(accumulate));

            deleteFilesForRecordsOfType(committed() ? Type.REMOVE : Type.ADD);

            // we sync the parent directories between contents and log deletion
            // to ensure there is a happens before edge between them
            Throwables.maybeFail(syncDirectory(accumulate));

            accumulate = replicas.delete(accumulate);
        }
        catch (Throwable t)
        {
            accumulate = merge(accumulate, t);
        }
        return accumulate;
    }

    /** Returns true if the file name matches the txn log naming pattern. */
    static boolean isLogFile(File file)
    {
        return LogFile.FILE_REGEX.matcher(file.getName()).matches();
    }

    LogFile(OperationType type, UUID id, List<File> replicas)
    {
        this(type, id);
        this.replicas.addReplicas(replicas);
    }

    LogFile(OperationType type, UUID id)
    {
        this.type = type;
        this.id = id;
    }

    /**
     * Re-reads the records from the replicas and verifies their integrity.
     * @return true if all records are valid, or if only the very last record is
     *         corrupt/incomplete while every previous record matches the on-disk state
     *         (interpreted as a crash while serializing the final record).
     */
    boolean verify()
    {
        records.clear();
        if (!replicas.readRecords(records))
        {
            logger.error("Failed to read records for transaction log {}", this);
            return false;
        }

        records.forEach(LogFile::verifyRecord);

        Optional<LogRecord> firstInvalid = records.stream().filter(LogRecord::isInvalidOrPartial).findFirst();
        if (!firstInvalid.isPresent())
            return true;

        LogRecord failedOn = firstInvalid.get();
        if (getLastRecord() != failedOn)
        {
            setErrorInReplicas(failedOn);
            return false;
        }

        records.stream().filter((r) -> r != failedOn).forEach(LogFile::verifyRecordWithCorruptedLastRecord);
        if (records.stream()
                   .filter((r) -> r != failedOn)
                   .filter(LogRecord::isInvalid)
                   .map(this::setErrorInReplicas)
                   .findFirst().isPresent())
        {
            setErrorInReplicas(failedOn);
            return false;
        }

        // if only the last record is corrupt and all other records have matching files on disk, @see verifyRecord,
        // then we simply exited whilst serializing the last record and we carry on
        logger.warn("Last record of transaction {} is corrupt or incomplete [{}], " +
                    "but all previous records match state on disk; continuing",
                    id, failedOn.error());
        return true;
    }

    /** Propagates a record's error state to the replicas; returns the record for chaining. */
    LogRecord setErrorInReplicas(LogRecord record)
    {
        replicas.setErrorInReplicas(record);
        return record;
    }

    /** Checks a record's checksum and, for REMOVE records, that the on-disk files still match it. */
    static void verifyRecord(LogRecord record)
    {
        if (record.checksum != record.computeChecksum())
        {
            record.setError(String.format("Invalid checksum for sstable [%s]: [%d] should have been [%d]",
                                          record.fileName(),
                                          record.checksum,
                                          record.computeChecksum()));
            return;
        }

        if (record.type != Type.REMOVE)
            return;

        // Paranoid sanity checks: we create another record by looking at the files as they are
        // on disk right now and make sure the information still matches. We don't want to delete
        // files by mistake if the user has copied them from backup and forgot to remove a txn log
        // file that obsoleted the very same files. So we check the latest update time and make sure
        // it matches. Because we delete files from oldest to newest, the latest update time should
        // always match.
        record.status.onDiskRecord = record.withExistingFiles();
        if (record.updateTime != record.status.onDiskRecord.updateTime && record.status.onDiskRecord.updateTime > 0)
        {
            record.setError(String.format("Unexpected files detected for sstable [%s]: " +
                                          "last update time [%tT] should have been [%tT]",
                                          record.fileName(),
                                          record.status.onDiskRecord.updateTime,
                                          record.updateTime));
        }
    }

    static void verifyRecordWithCorruptedLastRecord(LogRecord record)
    {
        if (record.type == Type.REMOVE && record.status.onDiskRecord.numFiles < record.numFiles)
        { // if we found a corruption in the last record, then we continue only
          // if the number of files matches exactly for all previous records.
            record.setError(String.format("Incomplete fileset detected for sstable [%s]: " +
                                          "number of files [%d] should have been [%d].",
                                          record.fileName(),
                                          record.status.onDiskRecord.numFiles,
                                          record.numFiles));
        }
    }

    /** Appends a COMMIT record; the transaction must not already be completed. */
    void commit()
    {
        assert !completed() : "Already completed!";
        addRecord(LogRecord.makeCommit(System.currentTimeMillis()));
    }

    /** Appends an ABORT record; the transaction must not already be completed. */
    void abort()
    {
        assert !completed() : "Already completed!";
        addRecord(LogRecord.makeAbort(System.currentTimeMillis()));
    }

    private boolean isLastRecordValidWithType(Type type)
    {
        LogRecord lastRecord = getLastRecord();
        return lastRecord != null &&
               lastRecord.type == type &&
               lastRecord.isValid();
    }

    boolean committed()
    {
        return isLastRecordValidWithType(Type.COMMIT);
    }

    boolean aborted()
    {
        return isLastRecordValidWithType(Type.ABORT);
    }

    boolean completed()
    {
        return committed() || aborted();
    }

    void add(Type type, SSTable table)
    {
        add(makeRecord(type, table));
    }

    /** Adds a record, throwing if it is already tracked. */
    void add(LogRecord record)
    {
        if (!addRecord(record))
            throw new IllegalStateException();
    }

    /** Bulk variant of {@link #add(Type, SSTable)}; throws if any record is already tracked. */
    public void addAll(Type type, Iterable<SSTableReader> toBulkAdd)
    {
        for (LogRecord record : makeRecords(type, toBulkAdd).values())
            if (!addRecord(record))
                throw new IllegalStateException();
    }

    Map<SSTable, LogRecord> makeRecords(Type type, Iterable<SSTableReader> tables)
    {
        assert type == Type.ADD || type == Type.REMOVE;

        // Make sure a log replica exists in every directory that hosts one of the sstables.
        for (SSTableReader sstable : tables)
        {
            File directory = sstable.descriptor.directory;
            String fileName = StringUtils.join(directory, File.separator, getFileName());
            replicas.maybeCreateReplica(directory, fileName, records);
        }
        return LogRecord.make(type, tables);
    }

    private LogRecord makeRecord(Type type, SSTable table)
    {
        assert type == Type.ADD || type == Type.REMOVE;

        File directory = table.descriptor.directory;
        String fileName = StringUtils.join(directory, File.separator, getFileName());
        replicas.maybeCreateReplica(directory, fileName, records);
        return LogRecord.make(type, table);
    }

    /**
     * this version of makeRecord takes an existing LogRecord and converts it to a
     * record with the given type. This avoids listing the directory and if the
     * LogRecord already exists, we have all components for the sstable
     */
    private LogRecord makeRecord(Type type, SSTable table, LogRecord record)
    {
        assert type == Type.ADD || type == Type.REMOVE;

        File directory = table.descriptor.directory;
        String fileName = StringUtils.join(directory, File.separator, getFileName());
        replicas.maybeCreateReplica(directory, fileName, records);
        return record.asType(type);
    }

    /** Appends the record to the replicas, then tracks it; returns false if already tracked. */
    private boolean addRecord(LogRecord record)
    {
        if (records.contains(record))
            return false;

        replicas.append(record);
        return records.add(record);
    }

    /** Deletes the files of a tracked record and stops tracking it. */
    void remove(Type type, SSTable table)
    {
        LogRecord record = makeRecord(type, table);
        assert records.contains(record) : String.format("[%s] is not tracked by %s", record, id);

        deleteRecordFiles(record);
        records.remove(record);
    }

    boolean contains(Type type, SSTable table)
    {
        return contains(makeRecord(type, table));
    }

    boolean contains(Type type, SSTable sstable, LogRecord record)
    {
        return contains(makeRecord(type, sstable, record));
    }

    private boolean contains(LogRecord record)
    {
        return records.contains(record);
    }

    /** Deletes the files of every record matching {@code type}, then clears all records. */
    void deleteFilesForRecordsOfType(Type type)
    {
        records.stream()
               .filter(type::matches)
               .forEach(LogFile::deleteRecordFiles);
        records.clear();
    }

    private static void deleteRecordFiles(LogRecord record)
    {
        List<File> files = record.getExistingFiles();

        // we sort the files in ascending update time order so that the last update time
        // stays the same even if we only partially delete files, see comment in isInvalid()
        files.sort((f1, f2) -> Long.compare(f1.lastModified(), f2.lastModified()));
        files.forEach(LogTransaction::delete);
    }

    /**
     * Extract from the files passed in all those that are of the given type.
     *
     * Scan all records and select those that are of the given type, valid, and
     * located in the same folder. For each such record extract from the files passed in
     * those that belong to this record.
     *
     * @return a map linking each mapped record to its files, where the files where passed in as parameters.
     */
    Map<LogRecord, Set<File>> getFilesOfType(Path folder, NavigableSet<File> files, Type type)
    {
        Map<LogRecord, Set<File>> ret = new HashMap<>();

        records.stream()
               .filter(type::matches)
               .filter(LogRecord::isValid)
               .filter(r -> r.isInFolder(folder))
               .forEach((r) -> ret.put(r, getRecordFiles(files, r)));

        return ret;
    }

    /** Returns the most recently added record, or null if there are none. */
    LogRecord getLastRecord()
    {
        return Iterables.getLast(records, null);
    }

    private static Set<File> getRecordFiles(NavigableSet<File> files, LogRecord record)
    {
        String fileName = record.fileName();
        return files.stream().filter(f -> f.getName().startsWith(fileName)).collect(Collectors.toSet());
    }

    boolean exists()
    {
        return replicas.exists();
    }

    public void close()
    {
        replicas.close();
    }

    @Override
    public String toString()
    {
        return toString(false);
    }

    public String toString(boolean showContents)
    {
        StringBuilder str = new StringBuilder();
        str.append('[');
        str.append(getFileName());
        str.append(" in ");
        str.append(replicas.getDirectories());
        str.append(']');
        if (showContents)
        {
            str.append(System.lineSeparator());
            str.append("Files and contents follow:");
            str.append(System.lineSeparator());
            replicas.printContentsWithAnyErrors(str);
        }
        return str.toString();
    }

    @VisibleForTesting
    List<File> getFiles()
    {
        return replicas.getFiles();
    }

    @VisibleForTesting
    List<String> getFilePaths()
    {
        return replicas.getFilePaths();
    }

    /** Builds the txn log file name: {@code <version>_txn_<optype>_<id>.log}. */
    private String getFileName()
    {
        return StringUtils.join(BigFormat.latestVersion,
                                LogFile.SEP,
                                "txn",
                                LogFile.SEP,
                                type.fileName,
                                LogFile.SEP,
                                id.toString(),
                                LogFile.EXT);
    }

    public boolean isEmpty()
    {
        return records.isEmpty();
    }
}
| |
/*
* Copyright 2017 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.centraldogma.server;
import static com.google.common.base.MoreObjects.firstNonNull;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Strings.isNullOrEmpty;
import static com.linecorp.centraldogma.internal.api.v1.HttpApiV1Constants.API_V0_PATH_PREFIX;
import static com.linecorp.centraldogma.internal.api.v1.HttpApiV1Constants.API_V1_PATH_PREFIX;
import static com.linecorp.centraldogma.internal.api.v1.HttpApiV1Constants.HEALTH_CHECK_PATH;
import static com.linecorp.centraldogma.internal.api.v1.HttpApiV1Constants.METRICS_PATH;
import static com.linecorp.centraldogma.server.auth.AuthProvider.BUILTIN_WEB_BASE_PATH;
import static com.linecorp.centraldogma.server.auth.AuthProvider.LOGIN_API_ROUTES;
import static com.linecorp.centraldogma.server.auth.AuthProvider.LOGIN_PATH;
import static com.linecorp.centraldogma.server.auth.AuthProvider.LOGOUT_API_ROUTES;
import static com.linecorp.centraldogma.server.auth.AuthProvider.LOGOUT_PATH;
import static com.linecorp.centraldogma.server.internal.storage.project.ProjectInitializer.initializeInternalProject;
import static java.util.Objects.requireNonNull;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionStage;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ForkJoinPool;
import java.util.concurrent.LinkedTransferQueue;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BiFunction;
import java.util.function.Consumer;
import java.util.function.Function;
import javax.annotation.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.module.SimpleModule;
import com.github.benmanes.caffeine.cache.Caffeine;
import com.github.benmanes.caffeine.cache.stats.CacheStats;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.linecorp.armeria.common.HttpData;
import com.linecorp.armeria.common.HttpHeaderNames;
import com.linecorp.armeria.common.HttpHeaders;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpStatus;
import com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.common.ServerCacheControl;
import com.linecorp.armeria.common.metric.MeterIdPrefixFunction;
import com.linecorp.armeria.common.metric.PrometheusMeterRegistries;
import com.linecorp.armeria.common.util.EventLoopGroups;
import com.linecorp.armeria.common.util.Exceptions;
import com.linecorp.armeria.common.util.StartStopSupport;
import com.linecorp.armeria.common.util.SystemInfo;
import com.linecorp.armeria.server.AbstractHttpService;
import com.linecorp.armeria.server.HttpService;
import com.linecorp.armeria.server.Route;
import com.linecorp.armeria.server.Server;
import com.linecorp.armeria.server.ServerBuilder;
import com.linecorp.armeria.server.ServerPort;
import com.linecorp.armeria.server.ServiceNaming;
import com.linecorp.armeria.server.ServiceRequestContext;
import com.linecorp.armeria.server.auth.AuthService;
import com.linecorp.armeria.server.auth.Authorizer;
import com.linecorp.armeria.server.docs.DocService;
import com.linecorp.armeria.server.encoding.EncodingService;
import com.linecorp.armeria.server.file.FileService;
import com.linecorp.armeria.server.file.HttpFile;
import com.linecorp.armeria.server.healthcheck.HealthCheckService;
import com.linecorp.armeria.server.logging.AccessLogWriter;
import com.linecorp.armeria.server.metric.MetricCollectingService;
import com.linecorp.armeria.server.metric.PrometheusExpositionService;
import com.linecorp.armeria.server.thrift.THttpService;
import com.linecorp.armeria.server.thrift.ThriftCallService;
import com.linecorp.centraldogma.common.ShuttingDownException;
import com.linecorp.centraldogma.internal.CsrfToken;
import com.linecorp.centraldogma.internal.Jackson;
import com.linecorp.centraldogma.internal.thrift.CentralDogmaService;
import com.linecorp.centraldogma.server.auth.AuthConfig;
import com.linecorp.centraldogma.server.auth.AuthProvider;
import com.linecorp.centraldogma.server.auth.AuthProviderParameters;
import com.linecorp.centraldogma.server.auth.SessionManager;
import com.linecorp.centraldogma.server.command.Command;
import com.linecorp.centraldogma.server.command.CommandExecutor;
import com.linecorp.centraldogma.server.command.StandaloneCommandExecutor;
import com.linecorp.centraldogma.server.internal.admin.auth.CachedSessionManager;
import com.linecorp.centraldogma.server.internal.admin.auth.CsrfTokenAuthorizer;
import com.linecorp.centraldogma.server.internal.admin.auth.ExpiredSessionDeletingSessionManager;
import com.linecorp.centraldogma.server.internal.admin.auth.FileBasedSessionManager;
import com.linecorp.centraldogma.server.internal.admin.auth.OrElseDefaultHttpFileService;
import com.linecorp.centraldogma.server.internal.admin.auth.SessionTokenAuthorizer;
import com.linecorp.centraldogma.server.internal.admin.service.DefaultLogoutService;
import com.linecorp.centraldogma.server.internal.admin.service.RepositoryService;
import com.linecorp.centraldogma.server.internal.admin.service.UserService;
import com.linecorp.centraldogma.server.internal.admin.util.RestfulJsonResponseConverter;
import com.linecorp.centraldogma.server.internal.api.AdministrativeService;
import com.linecorp.centraldogma.server.internal.api.ContentServiceV1;
import com.linecorp.centraldogma.server.internal.api.MetadataApiService;
import com.linecorp.centraldogma.server.internal.api.ProjectServiceV1;
import com.linecorp.centraldogma.server.internal.api.RepositoryServiceV1;
import com.linecorp.centraldogma.server.internal.api.TokenService;
import com.linecorp.centraldogma.server.internal.api.WatchService;
import com.linecorp.centraldogma.server.internal.api.auth.ApplicationTokenAuthorizer;
import com.linecorp.centraldogma.server.internal.api.converter.HttpApiRequestConverter;
import com.linecorp.centraldogma.server.internal.api.converter.HttpApiResponseConverter;
import com.linecorp.centraldogma.server.internal.mirror.DefaultMirroringServicePlugin;
import com.linecorp.centraldogma.server.internal.replication.ZooKeeperCommandExecutor;
import com.linecorp.centraldogma.server.internal.storage.project.DefaultProjectManager;
import com.linecorp.centraldogma.server.internal.storage.project.SafeProjectManager;
import com.linecorp.centraldogma.server.internal.thrift.CentralDogmaExceptionTranslator;
import com.linecorp.centraldogma.server.internal.thrift.CentralDogmaServiceImpl;
import com.linecorp.centraldogma.server.internal.thrift.CentralDogmaTimeoutScheduler;
import com.linecorp.centraldogma.server.internal.thrift.TokenlessClientLogger;
import com.linecorp.centraldogma.server.metadata.MetadataService;
import com.linecorp.centraldogma.server.metadata.MetadataServiceInjector;
import com.linecorp.centraldogma.server.plugin.Plugin;
import com.linecorp.centraldogma.server.plugin.PluginTarget;
import com.linecorp.centraldogma.server.storage.project.ProjectManager;
import io.micrometer.core.instrument.MeterRegistry;
import io.micrometer.core.instrument.binder.jvm.ClassLoaderMetrics;
import io.micrometer.core.instrument.binder.jvm.DiskSpaceMetrics;
import io.micrometer.core.instrument.binder.jvm.ExecutorServiceMetrics;
import io.micrometer.core.instrument.binder.jvm.JvmGcMetrics;
import io.micrometer.core.instrument.binder.jvm.JvmMemoryMetrics;
import io.micrometer.core.instrument.binder.jvm.JvmThreadMetrics;
import io.micrometer.core.instrument.binder.system.FileDescriptorMetrics;
import io.micrometer.core.instrument.binder.system.ProcessorMetrics;
import io.micrometer.core.instrument.binder.system.UptimeMetrics;
import io.micrometer.prometheus.PrometheusMeterRegistry;
import io.netty.util.concurrent.DefaultThreadFactory;
import io.netty.util.concurrent.GlobalEventExecutor;
/**
* Central Dogma server.
*
* @see CentralDogmaBuilder
*/
public class CentralDogma implements AutoCloseable {
private static final Logger logger = LoggerFactory.getLogger(CentralDogma.class);
static {
Jackson.registerModules(new SimpleModule().addSerializer(CacheStats.class, new CacheStatsSerializer()));
}
/**
* Creates a new instance from the given configuration file.
*
* @throws IOException if failed to load the configuration from the specified file
*/
public static CentralDogma forConfig(File configFile) throws IOException {
requireNonNull(configFile, "configFile");
return new CentralDogma(Jackson.readValue(configFile, CentralDogmaConfig.class));
}
private final CentralDogmaStartStop startStop;
private final AtomicInteger numPendingStopRequests = new AtomicInteger();
@Nullable
private final PluginGroup pluginsForAllReplicas;
@Nullable
private final PluginGroup pluginsForLeaderOnly;
private final CentralDogmaConfig cfg;
@Nullable
private volatile ProjectManager pm;
@Nullable
private volatile Server server;
@Nullable
private ExecutorService repositoryWorker;
@Nullable
private ScheduledExecutorService purgeWorker;
@Nullable
private CommandExecutor executor;
@Nullable
private PrometheusMeterRegistry meterRegistry;
@Nullable
private SessionManager sessionManager;
CentralDogma(CentralDogmaConfig cfg) {
this.cfg = requireNonNull(cfg, "cfg");
pluginsForAllReplicas = PluginGroup.loadPlugins(
CentralDogma.class.getClassLoader(), PluginTarget.ALL_REPLICAS, cfg);
pluginsForLeaderOnly = PluginGroup.loadPlugins(
CentralDogma.class.getClassLoader(), PluginTarget.LEADER_ONLY, cfg);
startStop = new CentralDogmaStartStop(pluginsForAllReplicas);
}
/**
* Returns the configuration of the server.
*
* @return the {@link CentralDogmaConfig} instance which is used for configuring this {@link CentralDogma}.
*/
public CentralDogmaConfig config() {
return cfg;
}
/**
* Returns the primary port of the server.
*
     * @return the primary {@link ServerPort} if the server is started, or {@code null} otherwise.
*/
@Nullable
public ServerPort activePort() {
final Server server = this.server;
return server != null ? server.activePort() : null;
}
/**
* Returns the ports of the server.
*
     * @return the {@link Map} which contains the pairs of local {@link InetSocketAddress} and
     * {@link ServerPort} if the server is started; an empty {@link Map} otherwise.
*/
public Map<InetSocketAddress, ServerPort> activePorts() {
final Server server = this.server;
if (server != null) {
return server.activePorts();
} else {
return Collections.emptyMap();
}
}
/**
* Returns the {@link MirroringService} of the server.
*
* @return the {@link MirroringService} if the server is started and mirroring is enabled.
* {@link Optional#empty()} otherwise.
*/
public Optional<MirroringService> mirroringService() {
if (pluginsForLeaderOnly == null) {
return Optional.empty();
}
return pluginsForLeaderOnly.findFirstPlugin(DefaultMirroringServicePlugin.class)
.map(DefaultMirroringServicePlugin::mirroringService);
}
/**
* Returns the {@link Plugin}s which have been loaded.
*
* @param target the {@link PluginTarget} of the {@link Plugin}s to be returned
*/
public List<Plugin> plugins(PluginTarget target) {
switch (requireNonNull(target, "target")) {
case LEADER_ONLY:
return pluginsForLeaderOnly != null ? ImmutableList.copyOf(pluginsForLeaderOnly.plugins())
: ImmutableList.of();
case ALL_REPLICAS:
return pluginsForAllReplicas != null ? ImmutableList.copyOf(pluginsForAllReplicas.plugins())
: ImmutableList.of();
default:
// Should not reach here.
throw new Error("Unknown plugin target: " + target);
}
}
/**
* Returns the {@link MeterRegistry} that contains the stats related with the server.
*/
public Optional<MeterRegistry> meterRegistry() {
return Optional.ofNullable(meterRegistry);
}
/**
* Starts the server.
*/
public CompletableFuture<Void> start() {
return startStop.start(true);
}
/**
* Stops the server. This method does nothing if the server is stopped already.
*/
public CompletableFuture<Void> stop() {
numPendingStopRequests.incrementAndGet();
return startStop.stop().thenRun(numPendingStopRequests::decrementAndGet);
}
@Override
public void close() {
startStop.close();
}
    private void doStart() throws Exception {
        // Every resource created below is held in a local first and only published
        // to the corresponding field once the whole startup sequence succeeded;
        // on failure, the finally block tears the partially-started pieces down.
        boolean success = false;
        ExecutorService repositoryWorker = null;
        ScheduledExecutorService purgeWorker = null;
        ProjectManager pm = null;
        CommandExecutor executor = null;
        PrometheusMeterRegistry meterRegistry = null;
        Server server = null;
        SessionManager sessionManager = null;
        try {
            meterRegistry = PrometheusMeterRegistries.newRegistry();
            logger.info("Starting the Central Dogma ..");
            // Fixed-size pool for repository work; core threads may time out so an
            // idle server does not pin cfg.numRepositoryWorkers() threads forever.
            final ThreadPoolExecutor repositoryWorkerImpl = new ThreadPoolExecutor(
                    cfg.numRepositoryWorkers(), cfg.numRepositoryWorkers(),
                    60, TimeUnit.SECONDS, new LinkedTransferQueue<>(),
                    new DefaultThreadFactory("repository-worker", true));
            repositoryWorkerImpl.allowCoreThreadTimeOut(true);
            repositoryWorker = ExecutorServiceMetrics.monitor(meterRegistry, repositoryWorkerImpl,
                                                              "repositoryWorker");
            logger.info("Starting the project manager: {}", cfg.dataDir());
            purgeWorker = Executors.newSingleThreadScheduledExecutor(
                    new DefaultThreadFactory("purge-worker", true));
            pm = new DefaultProjectManager(cfg.dataDir(), repositoryWorker, purgeWorker,
                                           meterRegistry, cfg.repositoryCacheSpec());
            logger.info("Started the project manager: {}", pm);
            logger.info("Current settings:\n{}", cfg);
            // May return null when authentication is disabled.
            sessionManager = initializeSessionManager();
            logger.info("Starting the command executor ..");
            executor = startCommandExecutor(pm, repositoryWorker, purgeWorker,
                                            meterRegistry, sessionManager);
            // The executor stays read-only when it failed to start (see
            // startCommandExecutor()); only a writable replica initializes the
            // internal project.
            if (executor.isWritable()) {
                logger.info("Started the command executor.");
                initializeInternalProject(executor);
            }
            logger.info("Starting the RPC server.");
            server = startServer(pm, executor, meterRegistry, sessionManager);
            logger.info("Started the RPC server at: {}", server.activePorts());
            logger.info("Started the Central Dogma successfully.");
            success = true;
        } finally {
            if (success) {
                this.repositoryWorker = repositoryWorker;
                this.purgeWorker = purgeWorker;
                this.pm = pm;
                this.executor = executor;
                this.meterRegistry = meterRegistry;
                this.server = server;
                this.sessionManager = sessionManager;
            } else {
                // Roll back whatever was started before the failure.
                doStop(server, executor, pm, repositoryWorker, purgeWorker, sessionManager);
            }
        }
    }
    /**
     * Creates and starts the {@link CommandExecutor} appropriate for the configured replication
     * method. Never throws on startup failure: the executor is returned in read-only mode instead,
     * so the server can still serve reads.
     */
    private CommandExecutor startCommandExecutor(
            ProjectManager pm, Executor repositoryWorker,
            ScheduledExecutorService purgeWorker, MeterRegistry meterRegistry,
            @Nullable SessionManager sessionManager) {

        // Leader-only plugins follow this replica's leadership: started when it
        // takes leadership, stopped when it releases it. Both callbacks are
        // asynchronous and merely log the outcome.
        final Consumer<CommandExecutor> onTakeLeadership = exec -> {
            if (pluginsForLeaderOnly != null) {
                logger.info("Starting plugins on the leader replica ..");
                pluginsForLeaderOnly
                        .start(cfg, pm, exec, meterRegistry, purgeWorker).handle((unused, cause) -> {
                            if (cause == null) {
                                logger.info("Started plugins on the leader replica.");
                            } else {
                                logger.error("Failed to start plugins on the leader replica..", cause);
                            }
                            return null;
                        });
            }
        };
        final Consumer<CommandExecutor> onReleaseLeadership = exec -> {
            if (pluginsForLeaderOnly != null) {
                logger.info("Stopping plugins on the leader replica ..");
                pluginsForLeaderOnly.stop(cfg, pm, exec, meterRegistry, purgeWorker).handle((unused, cause) -> {
                    if (cause == null) {
                        logger.info("Stopped plugins on the leader replica.");
                    } else {
                        logger.error("Failed to stop plugins on the leader replica.", cause);
                    }
                    return null;
                });
            }
        };

        final CommandExecutor executor;
        final ReplicationMethod replicationMethod = cfg.replicationConfig().method();
        switch (replicationMethod) {
            case ZOOKEEPER:
                executor = newZooKeeperCommandExecutor(pm, repositoryWorker, meterRegistry, sessionManager,
                                                       onTakeLeadership, onReleaseLeadership);
                break;
            case NONE:
                logger.info("No replication mechanism specified; entering standalone");
                executor = new StandaloneCommandExecutor(pm, repositoryWorker, sessionManager,
                                                         cfg.writeQuotaPerRepository(),
                                                         onTakeLeadership, onReleaseLeadership);
                break;
            default:
                throw new Error("unknown replication method: " + replicationMethod);
        }

        try {
            final CompletableFuture<Void> startFuture = executor.start();
            // Poll rather than block indefinitely so that a concurrent stop()
            // request (signalled via numPendingStopRequests) can abort the
            // startup promptly.
            while (!startFuture.isDone()) {
                if (numPendingStopRequests.get() > 0) {
                    // Stop request has been issued.
                    executor.stop().get();
                    break;
                }
                try {
                    startFuture.get(100, TimeUnit.MILLISECONDS);
                } catch (TimeoutException unused) {
                    // Taking long time ..
                }
            }
            // Trigger the exception if any.
            startFuture.get();
        } catch (Exception e) {
            // Deliberate: a failed start leaves the executor read-only instead of
            // failing the whole server startup.
            logger.warn("Failed to start the command executor. Entering read-only.", e);
        }
        return executor;
    }
    /**
     * Builds the session-manager chain used for authentication.
     *
     * @return the decorated {@link SessionManager}, or {@code null} when authentication
     *         is disabled (no {@link AuthConfig}).
     * @throws Exception if the underlying file-based manager cannot be created.
     */
    @Nullable
    private SessionManager initializeSessionManager() throws Exception {
        final AuthConfig authCfg = cfg.authConfig();
        if (authCfg == null) {
            return null;
        }

        // Wrap in order: file-backed store -> cache -> expired-session deletion.
        // The success flag ensures a partially-built chain is closed on failure.
        boolean success = false;
        SessionManager manager = null;
        try {
            manager = new FileBasedSessionManager(new File(cfg.dataDir(), "_sessions").toPath(),
                                                  authCfg.sessionValidationSchedule());
            manager = new CachedSessionManager(manager, Caffeine.from(authCfg.sessionCacheSpec()).build());
            manager = new ExpiredSessionDeletingSessionManager(manager);
            success = true;
            return manager;
        } finally {
            if (!success && manager != null) {
                try {
                    // It will eventually close FileBasedSessionManager because the other managers just forward
                    // the close method call to their delegate.
                    manager.close();
                } catch (Exception e) {
                    logger.warn("Failed to close a session manager.", e);
                }
            }
        }
    }
    /**
     * Builds, configures and synchronously starts the Armeria RPC/HTTP server.
     * All Thrift, HTTP API, web-app, docs, health-check and metrics services are
     * registered here before the server is started.
     */
    private Server startServer(ProjectManager pm, CommandExecutor executor,
                               PrometheusMeterRegistry meterRegistry, @Nullable SessionManager sessionManager) {
        final ServerBuilder sb = Server.builder();
        sb.verboseResponses(true);
        cfg.ports().forEach(sb::port);
        // TLS is configured only when at least one port requires it; without an
        // explicit TLS config a self-signed certificate is generated.
        if (cfg.ports().stream().anyMatch(ServerPort::hasTls)) {
            try {
                final TlsConfig tlsConfig = cfg.tls();
                if (tlsConfig != null) {
                    sb.tls(tlsConfig.keyCertChainFile(), tlsConfig.keyFile(), tlsConfig.keyPassword());
                } else {
                    logger.warn(
                            "Missing TLS configuration. Generating a self-signed certificate for TLS support.");
                    sb.tlsSelfSigned();
                }
            } catch (Exception e) {
                Exceptions.throwUnsafely(e);
            }
        }
        sb.clientAddressSources(cfg.clientAddressSourceList());
        sb.clientAddressTrustedProxyFilter(cfg.trustedProxyAddressPredicate());
        // Optional tuning knobs; each applies only when configured.
        cfg.numWorkers().ifPresent(
                numWorkers -> sb.workerGroup(EventLoopGroups.newEventLoopGroup(numWorkers), true));
        cfg.maxNumConnections().ifPresent(sb::maxNumConnections);
        cfg.idleTimeoutMillis().ifPresent(sb::idleTimeoutMillis);
        cfg.requestTimeoutMillis().ifPresent(sb::requestTimeoutMillis);
        cfg.maxFrameLength().ifPresent(sb::maxRequestLength);
        cfg.gracefulShutdownTimeout().ifPresent(
                t -> sb.gracefulShutdownTimeoutMillis(t.quietPeriodMillis(), t.timeoutMillis()));

        final MetadataService mds = new MetadataService(pm, executor);
        final WatchService watchService = new WatchService(meterRegistry);
        final AuthProvider authProvider = createAuthProvider(executor, sessionManager, mds);

        configureThriftService(sb, pm, executor, watchService, mds);
        sb.service("/title", webAppTitleFile(cfg.webAppTitle(), SystemInfo.hostname()).asService());
        sb.service(HEALTH_CHECK_PATH, HealthCheckService.of());
        sb.serviceUnder("/docs/",
                        DocService.builder()
                                  .exampleHeaders(CentralDogmaService.class,
                                                  HttpHeaders.of(HttpHeaderNames.AUTHORIZATION,
                                                                 "Bearer " + CsrfToken.ANONYMOUS))
                                  .build());
        configureHttpApi(sb, pm, executor, watchService, mds, authProvider, sessionManager);
        configureMetrics(sb, meterRegistry);

        // Configure access log format: disabled by default, with 'common' and
        // 'combined' as named presets; anything else is a custom format string.
        final String accessLogFormat = cfg.accessLogFormat();
        if (isNullOrEmpty(accessLogFormat)) {
            sb.accessLogWriter(AccessLogWriter.disabled(), true);
        } else if ("common".equals(accessLogFormat)) {
            sb.accessLogWriter(AccessLogWriter.common(), true);
        } else if ("combined".equals(accessLogFormat)) {
            sb.accessLogWriter(AccessLogWriter.combined(), true);
        } else {
            sb.accessLogFormat(accessLogFormat);
        }

        final Server s = sb.build();
        // Block until the server is up so callers observe a fully-started server.
        s.start().join();
        return s;
    }
static HttpFile webAppTitleFile(@Nullable String webAppTitle, String hostname) {
requireNonNull(hostname, "hostname");
final Map<String, String> titleAndHostname = ImmutableMap.of(
"title", firstNonNull(webAppTitle, "Central Dogma at {{hostname}}"),
"hostname", hostname);
try {
final HttpData data = HttpData.ofUtf8(Jackson.writeValueAsString(titleAndHostname));
return HttpFile.builder(data)
.contentType(MediaType.JSON_UTF_8)
.cacheControl(ServerCacheControl.REVALIDATED)
.build();
} catch (JsonProcessingException e) {
throw new Error("Failed to encode the title and hostname:", e);
}
}
    /**
     * Creates the {@link AuthProvider} from the configured {@link AuthConfig}.
     *
     * @return the provider, or {@code null} when authentication is disabled.
     */
    @Nullable
    private AuthProvider createAuthProvider(
            CommandExecutor commandExecutor, @Nullable SessionManager sessionManager, MetadataService mds) {
        final AuthConfig authCfg = cfg.authConfig();
        if (authCfg == null) {
            return null;
        }

        // Auth being enabled implies initializeSessionManager() returned non-null.
        checkState(sessionManager != null, "SessionManager is null");
        final AuthProviderParameters parameters = new AuthProviderParameters(
                // Find application first, then find the session token.
                new ApplicationTokenAuthorizer(mds::findTokenBySecret).orElse(
                        new SessionTokenAuthorizer(sessionManager, authCfg.administrators())),
                cfg,
                sessionManager::generateSessionId,
                // Propagate login and logout events to the other replicas.
                session -> commandExecutor.execute(Command.createSession(session)),
                sessionId -> commandExecutor.execute(Command.removeSession(sessionId)));
        return authCfg.factory().create(parameters);
    }
    /**
     * Creates a ZooKeeper-replicated {@link CommandExecutor} wrapping a standalone executor.
     * Leadership callbacks are handled by the ZooKeeper layer, so the inner standalone
     * executor gets none.
     */
    private CommandExecutor newZooKeeperCommandExecutor(
            ProjectManager pm, Executor repositoryWorker, MeterRegistry meterRegistry,
            @Nullable SessionManager sessionManager,
            @Nullable Consumer<CommandExecutor> onTakeLeadership,
            @Nullable Consumer<CommandExecutor> onReleaseLeadership) {
        final ZooKeeperReplicationConfig zkCfg = (ZooKeeperReplicationConfig) cfg.replicationConfig();

        // Delete the old UUID replica ID which is not used anymore.
        // Best-effort: the delete() result is ignored — the file may not exist.
        new File(cfg.dataDir(), "replica_id").delete();

        // TODO(trustin): Provide a way to restart/reload the replicator
        //                so that we can recover from ZooKeeper maintenance automatically.
        return new ZooKeeperCommandExecutor(
                zkCfg, cfg.dataDir(),
                new StandaloneCommandExecutor(pm, repositoryWorker, sessionManager,
                        /* onTakeLeadership */ null, /* onReleaseLeadership */ null),
                meterRegistry, pm, config().writeQuotaPerRepository(), onTakeLeadership, onReleaseLeadership);
    }
    /**
     * Registers the Thrift endpoint at {@code /cd/thrift/v1}. Decorator order matters:
     * each decorate() call wraps the previous service, so the last-added decorator
     * runs first on an incoming request.
     */
    private void configureThriftService(ServerBuilder sb, ProjectManager pm, CommandExecutor executor,
                                        WatchService watchService, MetadataService mds) {
        final CentralDogmaServiceImpl service =
                new CentralDogmaServiceImpl(pm, executor, watchService, mds);

        HttpService thriftService =
                ThriftCallService.of(service)
                                 .decorate(CentralDogmaTimeoutScheduler::new)
                                 .decorate(CentralDogmaExceptionTranslator::new)
                                 .decorate(THttpService.newDecorator());

        // Either require a CSRF token or merely log clients that send none.
        if (cfg.isCsrfTokenRequiredForThrift()) {
            thriftService = thriftService.decorate(AuthService.newDecorator(new CsrfTokenAuthorizer()));
        } else {
            thriftService = thriftService.decorate(TokenlessClientLogger::new);
        }

        // Enable content compression for API responses.
        thriftService = thriftService.decorate(contentEncodingDecorator());

        sb.service("/cd/thrift/v1", thriftService);
    }
    /**
     * Registers the HTTP REST API (v0 and v1), the authentication endpoints and the
     * static web app. The shared {@code decorator} injects the {@link MetadataService}
     * and enforces token/session authentication on every annotated service.
     */
    private void configureHttpApi(ServerBuilder sb,
                                  ProjectManager pm, CommandExecutor executor,
                                  WatchService watchService, MetadataService mds,
                                  @Nullable AuthProvider authProvider,
                                  @Nullable SessionManager sessionManager) {
        Function<? super HttpService, ? extends HttpService> decorator;

        if (authProvider != null) {
            // Probe endpoint the web app uses to detect that auth is enabled.
            sb.service("/security_enabled", new AbstractHttpService() {
                @Override
                protected HttpResponse doGet(ServiceRequestContext ctx, HttpRequest req) {
                    return HttpResponse.of(HttpStatus.OK);
                }
            });

            final AuthConfig authCfg = cfg.authConfig();
            assert authCfg != null : "authCfg";
            assert sessionManager != null : "sessionManager";
            // Application tokens are tried before session tokens.
            final Authorizer<HttpRequest> tokenAuthorizer =
                    new ApplicationTokenAuthorizer(mds::findTokenBySecret)
                            .orElse(new SessionTokenAuthorizer(sessionManager,
                                                               authCfg.administrators()));
            decorator = MetadataServiceInjector
                    .newDecorator(mds)
                    .andThen(AuthService.builder()
                                        .add(tokenAuthorizer)
                                        .onFailure(new CentralDogmaAuthFailureHandler())
                                        .newDecorator());
        } else {
            // Without auth, only a CSRF token is required.
            decorator = MetadataServiceInjector
                    .newDecorator(mds)
                    .andThen(AuthService.newDecorator(new CsrfTokenAuthorizer()));
        }

        final SafeProjectManager safePm = new SafeProjectManager(pm);

        final HttpApiRequestConverter v1RequestConverter = new HttpApiRequestConverter(safePm);
        final HttpApiResponseConverter v1ResponseConverter = new HttpApiResponseConverter();

        // Enable content compression for API responses.
        decorator = decorator.andThen(contentEncodingDecorator());

        sb.annotatedService(API_V1_PATH_PREFIX,
                            new AdministrativeService(safePm, executor), decorator,
                            v1RequestConverter, v1ResponseConverter);
        sb.annotatedService(API_V1_PATH_PREFIX,
                            new ProjectServiceV1(safePm, executor, mds), decorator,
                            v1RequestConverter, v1ResponseConverter);
        sb.annotatedService(API_V1_PATH_PREFIX,
                            new RepositoryServiceV1(safePm, executor, mds), decorator,
                            v1RequestConverter, v1ResponseConverter);
        sb.annotatedService()
          .pathPrefix(API_V1_PATH_PREFIX)
          // Watch requests (signalled by If-None-Match) are reported under a
          // separate service name so their long-poll latency does not skew the
          // regular content-service metrics.
          .defaultServiceNaming(new ServiceNaming() {
              private final String serviceName = ContentServiceV1.class.getName();
              private final String watchServiceName =
                      serviceName.replace("ContentServiceV1", "WatchContentServiceV1");

              @Override
              public String serviceName(ServiceRequestContext ctx) {
                  if (ctx.request().headers().contains(HttpHeaderNames.IF_NONE_MATCH)) {
                      return watchServiceName;
                  }
                  return serviceName;
              }
          })
          .decorator(decorator)
          .requestConverters(v1RequestConverter)
          .responseConverters(v1ResponseConverter)
          .build(new ContentServiceV1(safePm, executor, watchService));

        if (authProvider != null) {
            final AuthConfig authCfg = cfg.authConfig();
            assert authCfg != null : "authCfg";
            sb.annotatedService(API_V1_PATH_PREFIX,
                                new MetadataApiService(mds, authCfg.loginNameNormalizer()),
                                decorator, v1RequestConverter, v1ResponseConverter);
            sb.annotatedService(API_V1_PATH_PREFIX, new TokenService(pm, executor, mds),
                                decorator, v1RequestConverter, v1ResponseConverter);

            // authentication services:
            Optional.ofNullable(authProvider.loginApiService())
                    .ifPresent(login -> LOGIN_API_ROUTES.forEach(mapping -> sb.service(mapping, login)));

            // Provide logout API by default.
            final HttpService logout =
                    Optional.ofNullable(authProvider.logoutApiService())
                            .orElseGet(() -> new DefaultLogoutService(executor));
            for (Route route : LOGOUT_API_ROUTES) {
                sb.service(route, decorator.apply(logout));
            }

            authProvider.moreServices().forEach(sb::service);
        }

        if (cfg.isWebAppEnabled()) {
            final RestfulJsonResponseConverter httpApiV0Converter = new RestfulJsonResponseConverter();

            // TODO(hyangtack): Simplify this if https://github.com/line/armeria/issues/582 is resolved.
            sb.annotatedService(API_V0_PATH_PREFIX, new UserService(safePm, executor),
                                decorator, httpApiV0Converter)
              .annotatedService(API_V0_PATH_PREFIX, new RepositoryService(safePm, executor),
                                decorator, httpApiV0Converter);

            if (authProvider != null) {
                // Will redirect to /web/auth/login by default.
                sb.service(LOGIN_PATH, authProvider.webLoginService());
                // Will redirect to /web/auth/logout by default.
                sb.service(LOGOUT_PATH, authProvider.webLogoutService());

                sb.serviceUnder(BUILTIN_WEB_BASE_PATH, new OrElseDefaultHttpFileService(
                        FileService.builder(CentralDogma.class.getClassLoader(), "auth-webapp")
                                   .cacheControl(ServerCacheControl.REVALIDATED)
                                   .build(),
                        "/index.html"));
            }
            sb.serviceUnder("/",
                            FileService.builder(CentralDogma.class.getClassLoader(), "webapp")
                                       .cacheControl(ServerCacheControl.REVALIDATED)
                                       .build());
        }
    }
private static Function<? super HttpService, EncodingService> contentEncodingDecorator() {
return delegate -> EncodingService
.builder()
.encodableContentTypes(contentType -> {
if ("application".equals(contentType.type())) {
final String subtype = contentType.subtype();
switch (subtype) {
case "json":
case "xml":
case "x-thrift":
return true;
default:
return subtype.endsWith("+json") ||
subtype.endsWith("+xml") ||
subtype.startsWith("vnd.apache.thrift.");
}
}
return false;
})
.build(delegate);
}
private void configureMetrics(ServerBuilder sb, PrometheusMeterRegistry registry) {
sb.meterRegistry(registry);
sb.service(METRICS_PATH, new PrometheusExpositionService(registry.getPrometheusRegistry()));
sb.decorator(MetricCollectingService.newDecorator(MeterIdPrefixFunction.ofDefault("api")));
// Bind system metrics.
new FileDescriptorMetrics().bindTo(registry);
new ProcessorMetrics().bindTo(registry);
new ClassLoaderMetrics().bindTo(registry);
new UptimeMetrics().bindTo(registry);
new DiskSpaceMetrics(cfg.dataDir()).bindTo(registry);
new JvmGcMetrics().bindTo(registry);
new JvmMemoryMetrics().bindTo(registry);
new JvmThreadMetrics().bindTo(registry);
// Bind global thread pool metrics.
ExecutorServiceMetrics.monitor(registry, ForkJoinPool.commonPool(), "commonPool");
}
private void doStop() {
if (server == null) {
return;
}
final Server server = this.server;
final CommandExecutor executor = this.executor;
final ProjectManager pm = this.pm;
final ExecutorService repositoryWorker = this.repositoryWorker;
final ExecutorService purgeWorker = this.purgeWorker;
final SessionManager sessionManager = this.sessionManager;
this.server = null;
this.executor = null;
this.pm = null;
this.repositoryWorker = null;
this.sessionManager = null;
if (meterRegistry != null) {
meterRegistry.close();
meterRegistry = null;
}
logger.info("Stopping the Central Dogma ..");
if (!doStop(server, executor, pm, repositoryWorker, purgeWorker, sessionManager)) {
logger.warn("Stopped the Central Dogma with failure.");
} else {
logger.info("Stopped the Central Dogma successfully.");
}
}
    /**
     * Stops each subsystem independently, in reverse dependency order where possible.
     * A failure in one subsystem is logged and recorded but never prevents the
     * remaining subsystems from being stopped.
     *
     * @return {@code true} if every subsystem stopped cleanly
     */
    private static boolean doStop(
            @Nullable Server server, @Nullable CommandExecutor executor,
            @Nullable ProjectManager pm,
            @Nullable ExecutorService repositoryWorker, @Nullable ExecutorService purgeWorker,
            @Nullable SessionManager sessionManager) {

        boolean success = true;
        try {
            if (sessionManager != null) {
                logger.info("Stopping the session manager ..");
                sessionManager.close();
                logger.info("Stopped the session manager.");
            }
        } catch (Throwable t) {
            success = false;
            logger.warn("Failed to stop the session manager:", t);
        }

        try {
            if (pm != null) {
                logger.info("Stopping the project manager ..");
                pm.close(ShuttingDownException::new);
                logger.info("Stopped the project manager.");
            }
        } catch (Throwable t) {
            success = false;
            logger.warn("Failed to stop the project manager:", t);
        }

        try {
            if (executor != null) {
                logger.info("Stopping the command executor ..");
                executor.stop();
                logger.info("Stopped the command executor.");
            }
        } catch (Throwable t) {
            success = false;
            logger.warn("Failed to stop the command executor:", t);
        }

        // Shuts a worker pool down, retrying shutdownNow() until it terminates.
        // An interrupt received while waiting is remembered and re-asserted on the
        // current thread only after the pool has fully terminated.
        final BiFunction<ExecutorService, String, Boolean> stopWorker = (worker, name) -> {
            try {
                if (worker != null && !worker.isTerminated()) {
                    logger.info("Stopping the {} worker ..", name);
                    boolean interruptLater = false;
                    while (!worker.isTerminated()) {
                        worker.shutdownNow();
                        try {
                            worker.awaitTermination(1, TimeUnit.SECONDS);
                        } catch (InterruptedException e) {
                            // Interrupt later.
                            interruptLater = true;
                        }
                    }
                    logger.info("Stopped the {} worker.", name);
                    if (interruptLater) {
                        Thread.currentThread().interrupt();
                    }
                }
                return true;
            } catch (Throwable t) {
                logger.warn("Failed to stop the " + name + " worker:", t);
                return false;
            }
        };
        if (!stopWorker.apply(repositoryWorker, "repository")) {
            success = false;
        }
        if (!stopWorker.apply(purgeWorker, "purge")) {
            success = false;
        }

        try {
            if (server != null) {
                logger.info("Stopping the RPC server ..");
                server.stop().join();
                logger.info("Stopped the RPC server.");
            }
        } catch (Throwable t) {
            success = false;
            logger.warn("Failed to stop the RPC server:", t);
        }

        return success;
    }
    /**
     * Glue between {@link StartStopSupport} and the outer {@link CentralDogma}'s
     * {@code doStart()}/{@code doStop()}: also starts the all-replica plugins after a
     * successful start and stops them before shutdown.
     */
    private final class CentralDogmaStartStop extends StartStopSupport<Void, Void, Void, Void> {

        @Nullable
        private final PluginGroup pluginsForAllReplicas;

        CentralDogmaStartStop(@Nullable PluginGroup pluginsForAllReplicas) {
            super(GlobalEventExecutor.INSTANCE);
            this.pluginsForAllReplicas = pluginsForAllReplicas;
        }

        @Override
        protected CompletionStage<Void> doStart(@Nullable Void unused) throws Exception {
            return execute("startup", () -> {
                try {
                    CentralDogma.this.doStart();
                    // Plugins start only when doStart() published all three fields,
                    // i.e. the startup fully succeeded.
                    if (pluginsForAllReplicas != null) {
                        final ProjectManager pm = CentralDogma.this.pm;
                        final CommandExecutor executor = CentralDogma.this.executor;
                        final MeterRegistry meterRegistry = CentralDogma.this.meterRegistry;
                        if (pm != null && executor != null && meterRegistry != null) {
                            pluginsForAllReplicas.start(cfg, pm, executor, meterRegistry, purgeWorker).join();
                        }
                    }
                } catch (Exception e) {
                    Exceptions.throwUnsafely(e);
                }
            });
        }

        @Override
        protected CompletionStage<Void> doStop(@Nullable Void unused) throws Exception {
            return execute("shutdown", () -> {
                // Stop the plugins first, while the subsystems they use are alive.
                if (pluginsForAllReplicas != null) {
                    final ProjectManager pm = CentralDogma.this.pm;
                    final CommandExecutor executor = CentralDogma.this.executor;
                    final MeterRegistry meterRegistry = CentralDogma.this.meterRegistry;
                    if (pm != null && executor != null && meterRegistry != null) {
                        pluginsForAllReplicas.stop(cfg, pm, executor, meterRegistry, purgeWorker).join();
                    }
                }
                CentralDogma.this.doStop();
            });
        }

        // Runs the blocking start/stop task on a dedicated thread so the event
        // executor is never blocked, completing the returned future either way.
        private CompletionStage<Void> execute(String mode, Runnable task) {
            final CompletableFuture<Void> future = new CompletableFuture<>();
            final Thread thread = new Thread(() -> {
                try {
                    task.run();
                    future.complete(null);
                } catch (Throwable cause) {
                    future.completeExceptionally(cause);
                }
            }, "dogma-" + mode + "-0x" + Long.toHexString(CentralDogma.this.hashCode() & 0xFFFFFFFFL));
            thread.start();
            return future;
        }
    }
}
| |
// This file was generated by Mendix Business Modeler.
//
// WARNING: Code you write here will be lost the next time you deploy the project.
package amazons3connector.proxies;
import com.mendix.core.Core;
import com.mendix.core.CoreException;
import com.mendix.systemwideinterfaces.core.IContext;
import com.mendix.systemwideinterfaces.core.IMendixIdentifier;
import com.mendix.systemwideinterfaces.core.IMendixObject;
/**
* Represents the file metadata as stored in S3
*/
public class S3SummaryObject
{
    // Generated Mendix proxy (see the file header): any manual change here is
    // overwritten on the next deployment.
    private final IMendixObject s3SummaryObjectMendixObject;

    private final IContext context;

    /**
     * Internal name of this entity
     */
    public static final String entityName = "AmazonS3Connector.S3SummaryObject";

    /**
     * Enum describing members of this entity
     */
    public enum MemberNames
    {
        Key("Key"),
        FileName("FileName"),
        FolderPath("FolderPath"),
        S3Object_S3Bucket("AmazonS3Connector.S3Object_S3Bucket");

        // Meta model name of the member; returned by toString() so enum values
        // can be passed directly to IMendixObject.getValue()/setValue().
        private String metaName;

        MemberNames(String s)
        {
            metaName = s;
        }

        @Override
        public String toString()
        {
            return metaName;
        }
    }

    public S3SummaryObject(IContext context)
    {
        this(context, Core.instantiate(context, "AmazonS3Connector.S3SummaryObject"));
    }

    protected S3SummaryObject(IContext context, IMendixObject s3SummaryObjectMendixObject)
    {
        if (s3SummaryObjectMendixObject == null)
            throw new IllegalArgumentException("The given object cannot be null.");
        if (!Core.isSubClassOf("AmazonS3Connector.S3SummaryObject", s3SummaryObjectMendixObject.getType()))
            throw new IllegalArgumentException("The given object is not a AmazonS3Connector.S3SummaryObject");

        this.s3SummaryObjectMendixObject = s3SummaryObjectMendixObject;
        this.context = context;
    }

    /**
     * @deprecated Use 'S3SummaryObject.load(IContext, IMendixIdentifier)' instead.
     */
    @Deprecated
    public static amazons3connector.proxies.S3SummaryObject initialize(IContext context, IMendixIdentifier mendixIdentifier) throws CoreException
    {
        return amazons3connector.proxies.S3SummaryObject.load(context, mendixIdentifier);
    }

    /**
     * Initialize a proxy using context (recommended). This context will be used for security checking when the get- and set-methods without context parameters are called.
     * The get- and set-methods with context parameter should be used when for instance sudo access is necessary (IContext.getSudoContext() can be used to obtain sudo access).
     */
    public static amazons3connector.proxies.S3SummaryObject initialize(IContext context, IMendixObject mendixObject)
    {
        return new amazons3connector.proxies.S3SummaryObject(context, mendixObject);
    }

    /**
     * Retrieves the object with the given identifier and wraps it in a proxy.
     */
    public static amazons3connector.proxies.S3SummaryObject load(IContext context, IMendixIdentifier mendixIdentifier) throws CoreException
    {
        IMendixObject mendixObject = Core.retrieveId(context, mendixIdentifier);
        return amazons3connector.proxies.S3SummaryObject.initialize(context, mendixObject);
    }

    /**
     * Commit the changes made on this proxy object.
     */
    public final void commit() throws CoreException
    {
        Core.commit(context, getMendixObject());
    }

    /**
     * Commit the changes made on this proxy object using the specified context.
     */
    public final void commit(IContext context) throws CoreException
    {
        Core.commit(context, getMendixObject());
    }

    /**
     * Delete the object.
     */
    public final void delete()
    {
        Core.delete(context, getMendixObject());
    }

    /**
     * Delete the object using the specified context.
     */
    public final void delete(IContext context)
    {
        Core.delete(context, getMendixObject());
    }

    /**
     * @return value of Key
     */
    public final String getKey()
    {
        return getKey(getContext());
    }

    /**
     * @param context
     * @return value of Key
     */
    public final String getKey(IContext context)
    {
        return (String) getMendixObject().getValue(context, MemberNames.Key.toString());
    }

    /**
     * Set value of Key
     * @param key
     */
    public final void setKey(String key)
    {
        setKey(getContext(), key);
    }

    /**
     * Set value of Key
     * @param context
     * @param key
     */
    public final void setKey(IContext context, String key)
    {
        getMendixObject().setValue(context, MemberNames.Key.toString(), key);
    }

    /**
     * @return value of FileName
     */
    public final String getFileName()
    {
        return getFileName(getContext());
    }

    /**
     * @param context
     * @return value of FileName
     */
    public final String getFileName(IContext context)
    {
        return (String) getMendixObject().getValue(context, MemberNames.FileName.toString());
    }

    /**
     * Set value of FileName
     * @param filename
     */
    public final void setFileName(String filename)
    {
        setFileName(getContext(), filename);
    }

    /**
     * Set value of FileName
     * @param context
     * @param filename
     */
    public final void setFileName(IContext context, String filename)
    {
        getMendixObject().setValue(context, MemberNames.FileName.toString(), filename);
    }

    /**
     * @return value of FolderPath
     */
    public final String getFolderPath()
    {
        return getFolderPath(getContext());
    }

    /**
     * @param context
     * @return value of FolderPath
     */
    public final String getFolderPath(IContext context)
    {
        return (String) getMendixObject().getValue(context, MemberNames.FolderPath.toString());
    }

    /**
     * Set value of FolderPath
     * @param folderpath
     */
    public final void setFolderPath(String folderpath)
    {
        setFolderPath(getContext(), folderpath);
    }

    /**
     * Set value of FolderPath
     * @param context
     * @param folderpath
     */
    public final void setFolderPath(IContext context, String folderpath)
    {
        getMendixObject().setValue(context, MemberNames.FolderPath.toString(), folderpath);
    }

    /**
     * @return value of S3Object_S3Bucket
     */
    public final amazons3connector.proxies.S3Bucket getS3Object_S3Bucket() throws CoreException
    {
        return getS3Object_S3Bucket(getContext());
    }

    /**
     * @param context
     * @return value of S3Object_S3Bucket, or null when the association is unset
     */
    public final amazons3connector.proxies.S3Bucket getS3Object_S3Bucket(IContext context) throws CoreException
    {
        amazons3connector.proxies.S3Bucket result = null;
        IMendixIdentifier identifier = getMendixObject().getValue(context, MemberNames.S3Object_S3Bucket.toString());
        if (identifier != null)
            result = amazons3connector.proxies.S3Bucket.load(context, identifier);
        return result;
    }

    /**
     * Set value of S3Object_S3Bucket
     * @param s3object_s3bucket
     */
    public final void setS3Object_S3Bucket(amazons3connector.proxies.S3Bucket s3object_s3bucket)
    {
        setS3Object_S3Bucket(getContext(), s3object_s3bucket);
    }

    /**
     * Set value of S3Object_S3Bucket
     * @param context
     * @param s3object_s3bucket the associated bucket; null clears the association
     */
    public final void setS3Object_S3Bucket(IContext context, amazons3connector.proxies.S3Bucket s3object_s3bucket)
    {
        if (s3object_s3bucket == null)
            getMendixObject().setValue(context, MemberNames.S3Object_S3Bucket.toString(), null);
        else
            getMendixObject().setValue(context, MemberNames.S3Object_S3Bucket.toString(), s3object_s3bucket.getMendixObject().getId());
    }

    /**
     * @return the IMendixObject instance of this proxy for use in the Core interface.
     */
    public final IMendixObject getMendixObject()
    {
        return s3SummaryObjectMendixObject;
    }

    /**
     * @return the IContext instance of this proxy, or null if no IContext instance was specified at initialization.
     */
    public final IContext getContext()
    {
        return context;
    }

    // Equality is delegated to the wrapped IMendixObject; two proxies are equal
    // iff they wrap the same underlying object.
    @Override
    public boolean equals(Object obj)
    {
        if (obj == this)
            return true;

        if (obj != null && getClass().equals(obj.getClass()))
        {
            final amazons3connector.proxies.S3SummaryObject that = (amazons3connector.proxies.S3SummaryObject) obj;
            return getMendixObject().equals(that.getMendixObject());
        }
        return false;
    }

    @Override
    public int hashCode()
    {
        return getMendixObject().hashCode();
    }

    /**
     * @return String name of this class
     */
    public static String getType()
    {
        return "AmazonS3Connector.S3SummaryObject";
    }

    /**
     * @return String GUID from this object, format: ID_0000000000
     * @deprecated Use getMendixObject().getId().toLong() to get a unique identifier for this object.
     */
    @Deprecated
    public String getGUID()
    {
        return "ID_" + getMendixObject().getId().toLong();
    }
}
| |
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.shell;
import android.annotation.SuppressLint;
import android.content.Context;
import android.content.res.Configuration;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputMethodManager;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;
import android.widget.TextView.OnEditorActionListener;
import org.chromium.base.ApiCompatibilityUtils;
import org.chromium.base.CommandLine;
import org.chromium.chrome.browser.EmptyTabObserver;
import org.chromium.chrome.browser.Tab;
import org.chromium.chrome.browser.TabObserver;
import org.chromium.chrome.browser.UrlUtilities;
import org.chromium.chrome.browser.appmenu.AppMenuButtonHelper;
import org.chromium.chrome.browser.appmenu.AppMenuHandler;
import org.chromium.chrome.browser.widget.SmoothProgressBar;
import org.chromium.chrome.shell.omnibox.SuggestionPopup;
import org.chromium.content.common.ContentSwitches;
/**
* A Toolbar {@link View} that shows the URL and navigation buttons.
*/
public class ChromeShellToolbar extends LinearLayout {
    // How long a finished load keeps showing 100% before the bar is reset.
    private static final long COMPLETED_PROGRESS_TIMEOUT_MS = 200;

    // Resets the progress bar to zero after the completed-progress timeout.
    private final Runnable mClearProgressRunnable = new Runnable() {
        @Override
        public void run() {
            mProgressBar.setProgress(0);
        }
    };

    // Pushes the latest progress value to the bar and keeps the stop/reload
    // button in sync with the loading state; once loading finished, schedules
    // the bar to be cleared after COMPLETED_PROGRESS_TIMEOUT_MS.
    private final Runnable mUpdateProgressRunnable = new Runnable() {
        @Override
        public void run() {
            mProgressBar.setProgress(mProgress);
            if (mLoading) {
                mStopReloadButton.setImageResource(
                        R.drawable.btn_close);
            } else {
                mStopReloadButton.setImageResource(R.drawable.btn_toolbar_reload);
                ApiCompatibilityUtils.postOnAnimationDelayed(ChromeShellToolbar.this,
                        mClearProgressRunnable, COMPLETED_PROGRESS_TIMEOUT_MS);
            }
        }
    };

    private EditText mUrlTextView;
    private SmoothProgressBar mProgressBar;

    // Currently shown tab; null until showTab() is first called.
    private ChromeShellTab mTab;
    private final TabObserver mTabObserver;

    private AppMenuHandler mMenuHandler;
    private AppMenuButtonHelper mAppMenuButtonHelper;

    private TabManager mTabManager;
    private SuggestionPopup mSuggestionPopup;

    private ImageButton mStopReloadButton;
    private ImageButton mAddButton;
    private int mProgress = 0;
    private boolean mLoading = true;
    // NOTE(review): mFocus is not referenced in this visible range — presumably
    // used by code further down the file; confirm before removing.
    private boolean mFocus = false;
/**
* @param context The Context the view is running in.
* @param attrs The attributes of the XML tag that is inflating the view.
*/
public ChromeShellToolbar(Context context, AttributeSet attrs) {
super(context, attrs);
// When running performance benchmark, we don't want to observe the tab
// invalidation which would interfere with browser's processing content
// frame. See crbug.com/394976.
if (CommandLine.getInstance().hasSwitch(
ContentSwitches.RUNNING_PERFORMANCE_BENCHMARK)) {
mTabObserver = new EmptyTabObserver();
} else {
mTabObserver = new TabObserverImpl();
}
}
/**
* The toolbar will visually represent the state of {@code tab}.
* @param tab The Tab that should be represented.
*/
public void showTab(ChromeShellTab tab) {
if (mTab != null) mTab.removeObserver(mTabObserver);
mTab = tab;
if (mTab != null) {
mTab.addObserver(mTabObserver);
mUrlTextView.setText(mTab.getWebContents().getUrl());
}
}
/**
* Set the TabManager responsible for activating the tab switcher.
* @param tabManager The active TabManager.
*/
public void setTabManager(TabManager tabManager) {
mTabManager = tabManager;
}
private void onUpdateUrl(String url) {
mUrlTextView.setText(url);
}
private void onLoadProgressChanged(int progress) {
removeCallbacks(mClearProgressRunnable);
removeCallbacks(mUpdateProgressRunnable);
mProgress = progress;
mLoading = progress != 100;
ApiCompatibilityUtils.postOnAnimation(this, mUpdateProgressRunnable);
}
/**
* Closes the suggestion popup.
*/
public void hideSuggestions() {
if (mSuggestionPopup != null) mSuggestionPopup.hideSuggestions();
}
@Override
protected void onFinishInflate() {
super.onFinishInflate();
mProgressBar = (SmoothProgressBar) findViewById(R.id.progress);
initializeUrlField();
initializeTabSwitcherButton();
initializeMenuButton();
initializeStopReloadButton();
initializeAddButton();
}
void setMenuHandler(AppMenuHandler menuHandler) {
mMenuHandler = menuHandler;
mAppMenuButtonHelper = new AppMenuButtonHelper(mMenuHandler);
}
private void initializeUrlField() {
mUrlTextView = (EditText) findViewById(R.id.url);
mUrlTextView.setOnEditorActionListener(new OnEditorActionListener() {
@Override
public boolean onEditorAction(TextView v, int actionId, KeyEvent event) {
if ((actionId != EditorInfo.IME_ACTION_GO) && (event == null
|| event.getKeyCode() != KeyEvent.KEYCODE_ENTER
|| event.getAction() != KeyEvent.ACTION_DOWN)) {
return false;
}
// This will set |mTab| by calling showTab().
// TODO(aurimas): Factor out initial tab creation to the activity level.
Tab tab = mTabManager.openUrl(
UrlUtilities.fixupUrl(mUrlTextView.getText().toString()));
mUrlTextView.clearFocus();
setKeyboardVisibilityForUrl(false);
tab.getView().requestFocus();
return true;
}
});
mUrlTextView.setOnFocusChangeListener(new OnFocusChangeListener() {
@Override
public void onFocusChange(View v, boolean hasFocus) {
setKeyboardVisibilityForUrl(hasFocus);
mFocus = hasFocus;
updateToolbarState();
if (!hasFocus && mTab != null) {
mUrlTextView.setText(mTab.getWebContents().getUrl());
mSuggestionPopup.dismissPopup();
}
}
});
mUrlTextView.setOnKeyListener(new OnKeyListener() {
@Override
public boolean onKey(View v, int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_BACK) {
mUrlTextView.clearFocus();
if (mTab != null) {
mTab.getView().requestFocus();
}
return true;
}
return false;
}
});
mSuggestionPopup = new SuggestionPopup(getContext(), mUrlTextView, this);
mUrlTextView.addTextChangedListener(mSuggestionPopup);
}
private void initializeTabSwitcherButton() {
ImageButton tabSwitcherButton = (ImageButton) findViewById(R.id.tab_switcher);
tabSwitcherButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mTabManager != null) mTabManager.toggleTabSwitcher();
}
});
}
private void initializeMenuButton() {
ImageButton menuButton = (ImageButton) findViewById(R.id.menu_button);
menuButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View view) {
if (mMenuHandler != null) mMenuHandler.showAppMenu(view, false, false);
}
});
menuButton.setOnTouchListener(new OnTouchListener() {
@SuppressLint("ClickableViewAccessibility")
@Override
public boolean onTouch(View view, MotionEvent event) {
return mAppMenuButtonHelper != null && mAppMenuButtonHelper.onTouch(view, event);
}
});
}
private void initializeStopReloadButton() {
mStopReloadButton = (ImageButton) findViewById(R.id.stop_reload_button);
mStopReloadButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
if (mTab == null) return;
if (mLoading) {
mTab.getWebContents().stop();
} else {
mTab.getWebContents().getNavigationController().reload(true);
}
}
});
}
private void initializeAddButton() {
mAddButton = (ImageButton) findViewById(R.id.add_button);
mAddButton.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mTabManager.createNewTab();
}
});
}
/**
* Shows or hides the add button, the stop/reload button and the URL bar.
*/
public void updateToolbarState() {
boolean tabSwitcherState = mTabManager.isTabSwitcherVisible();
mAddButton.setVisibility(tabSwitcherState ? VISIBLE : GONE);
mStopReloadButton.setVisibility(tabSwitcherState || mFocus ? GONE : VISIBLE);
mUrlTextView.setVisibility(tabSwitcherState ? INVISIBLE : VISIBLE);
}
/**
* @return Current tab that is shown by ChromeShell.
*/
public ChromeShellTab getCurrentTab() {
return mTab;
}
/**
* Change the visibility of the software keyboard.
* @param visible Whether the keyboard should be shown or hidden.
*/
public void setKeyboardVisibilityForUrl(boolean visible) {
InputMethodManager imm = (InputMethodManager) getContext().getSystemService(
Context.INPUT_METHOD_SERVICE);
if (visible) {
imm.showSoftInput(mUrlTextView, InputMethodManager.SHOW_IMPLICIT);
} else {
imm.hideSoftInputFromWindow(mUrlTextView.getWindowToken(), 0);
}
}
@Override
protected void onConfigurationChanged(Configuration newConfig) {
super.onConfigurationChanged(newConfig);
if (mMenuHandler != null) mMenuHandler.hideAppMenu();
}
private class TabObserverImpl extends EmptyTabObserver {
@Override
public void onLoadProgressChanged(Tab tab, int progress) {
if (tab == mTab) ChromeShellToolbar.this.onLoadProgressChanged(progress);
}
@Override
public void onUpdateUrl(Tab tab, String url) {
if (tab == mTab) ChromeShellToolbar.this.onUpdateUrl(url);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.timelineservice.reader;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.HttpURLConnection;
import java.net.URI;
import java.net.URL;
import java.util.Set;
import javax.ws.rs.core.MediaType;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.JettyUtils;
import org.apache.hadoop.yarn.api.records.timeline.TimelineAbout;
import org.apache.hadoop.yarn.api.records.timeline.TimelineHealth;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntity;
import org.apache.hadoop.yarn.api.records.timelineservice.TimelineEntityType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.timelineservice.storage.FileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TestFileSystemTimelineReaderImpl;
import org.apache.hadoop.yarn.server.timelineservice.storage.TimelineReader;
import org.apache.hadoop.yarn.webapp.YarnJacksonJaxbJsonProvider;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import com.sun.jersey.api.client.Client;
import com.sun.jersey.api.client.ClientResponse;
import com.sun.jersey.api.client.ClientResponse.Status;
import com.sun.jersey.api.client.GenericType;
import com.sun.jersey.api.client.config.ClientConfig;
import com.sun.jersey.api.client.config.DefaultClientConfig;
import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
public class TestTimelineReaderWebServices {

  /** Root directory for the file-system based timeline storage used by these tests. */
  private static final String ROOT_DIR = new File("target",
      TestTimelineReaderWebServices.class.getSimpleName()).getAbsolutePath();

  private int serverPort;
  private TimelineReaderServer server;

  @BeforeClass
  public static void setup() throws Exception {
    TestFileSystemTimelineReaderImpl.initializeDataDirectory(ROOT_DIR);
  }

  @AfterClass
  public static void tearDown() throws Exception {
    FileUtils.deleteDirectory(new File(ROOT_DIR));
  }

  /** Starts a fresh TimelineReaderServer on an ephemeral port before each test. */
  @Before
  public void init() throws Exception {
    try {
      Configuration config = new YarnConfiguration();
      config.setBoolean(YarnConfiguration.TIMELINE_SERVICE_ENABLED, true);
      config.setFloat(YarnConfiguration.TIMELINE_SERVICE_VERSION, 2.0f);
      config.set(YarnConfiguration.TIMELINE_SERVICE_READER_WEBAPP_ADDRESS,
          "localhost:0");
      config.set(YarnConfiguration.RM_CLUSTER_ID, "cluster1");
      config.setClass(YarnConfiguration.TIMELINE_SERVICE_READER_CLASS,
          FileSystemTimelineReaderImpl.class, TimelineReader.class);
      config.set(FileSystemTimelineReaderImpl.TIMELINE_SERVICE_STORAGE_DIR_ROOT,
          ROOT_DIR);
      server = new TimelineReaderServer();
      server.init(config);
      server.start();
      serverPort = server.getWebServerPort();
    } catch (Exception e) {
      // Surface the cause instead of swallowing it; a bare failure message
      // makes startup problems very hard to diagnose.
      Assert.fail("Web server failed to start: " + e);
    }
  }

  @After
  public void stop() throws Exception {
    if (server != null) {
      server.stop();
      server = null;
    }
  }

  /**
   * Builds a minimal entity carrying only an identifier. TimelineEntity
   * equality is identifier-based, so this is sufficient for contains() checks.
   */
  private static TimelineEntity newEntity(String type, String id) {
    TimelineEntity entity = new TimelineEntity();
    entity.setIdentifier(new TimelineEntity.Identifier(type, id));
    return entity;
  }

  /**
   * Issues a GET for {@code uri} and asserts the response has
   * {@code expectedStatus}.
   */
  private static void verifyHttpResponse(Client client, URI uri,
      Status expectedStatus) {
    ClientResponse resp =
        client.resource(uri).accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    assertNotNull(resp);
    // JUnit's assertEquals takes (expected, actual); keeping that order makes
    // failure messages report the values correctly.
    assertEquals(expectedStatus.getStatusCode(),
        resp.getStatusInfo().getStatusCode());
  }

  /** Creates a Jersey client configured with the YARN JSON provider. */
  private static Client createClient() {
    ClientConfig cfg = new DefaultClientConfig();
    cfg.getClasses().add(YarnJacksonJaxbJsonProvider.class);
    return new Client(new URLConnectionClientHandler(
        new DummyURLConnectionFactory()), cfg);
  }

  /**
   * Issues a GET for {@code uri} and returns the response.
   * @throws IOException if the response is missing or the status is not 200 OK.
   */
  private static ClientResponse getResponse(Client client, URI uri)
      throws Exception {
    ClientResponse resp =
        client.resource(uri).accept(MediaType.APPLICATION_JSON)
        .type(MediaType.APPLICATION_JSON).get(ClientResponse.class);
    if (resp == null ||
        resp.getStatusInfo().getStatusCode() !=
            ClientResponse.Status.OK.getStatusCode()) {
      String msg = "";
      if (resp != null) {
        msg = String.valueOf(resp.getStatusInfo().getStatusCode());
      }
      throw new IOException("Incorrect response from timeline reader. " +
          "Status=" + msg);
    }
    return resp;
  }

  /** Plain URLConnection factory that unwraps UndeclaredThrowableException. */
  private static class DummyURLConnectionFactory
      implements HttpURLConnectionFactory {
    @Override
    public HttpURLConnection getHttpURLConnection(final URL url)
        throws IOException {
      try {
        return (HttpURLConnection)url.openConnection();
      } catch (UndeclaredThrowableException e) {
        throw new IOException(e.getCause());
      }
    }
  }

  @Test
  public void testAbout() throws Exception {
    URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/");
    Client client = createClient();
    try {
      ClientResponse resp = getResponse(client, uri);
      TimelineAbout about = resp.getEntity(TimelineAbout.class);
      Assert.assertNotNull(about);
      Assert.assertEquals("Timeline Reader API", about.getAbout());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntityDefaultView() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app/id_1");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entity = resp.getEntity(TimelineEntity.class);
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entity);
      assertEquals("id_1", entity.getId());
      assertEquals("app", entity.getType());
      assertEquals((Long)1425016502000L, entity.getCreatedTime());
      // Default view i.e. when no fields are specified, entity contains only
      // entity id, entity type and created time.
      assertEquals(0, entity.getConfigs().size());
      assertEquals(0, entity.getMetrics().size());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntityWithUserAndFlowInfo() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
          "userid=user1&flowname=flow1&flowrunid=1");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entity = resp.getEntity(TimelineEntity.class);
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entity);
      assertEquals("id_1", entity.getId());
      assertEquals("app", entity.getType());
      assertEquals((Long)1425016502000L, entity.getCreatedTime());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntityCustomFields() throws Exception {
    Client client = createClient();
    try {
      // Fields are case insensitive.
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
          "fields=CONFIGS,Metrics,info");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entity = resp.getEntity(TimelineEntity.class);
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entity);
      assertEquals("id_1", entity.getId());
      assertEquals("app", entity.getType());
      assertEquals(3, entity.getConfigs().size());
      assertEquals(3, entity.getMetrics().size());
      assertTrue("UID should be present",
          entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY));
      // Includes UID.
      assertEquals(3, entity.getInfo().size());
      // No events will be returned as events are not part of fields.
      assertEquals(0, entity.getEvents().size());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntityAllFields() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app/id_1?" +
          "fields=ALL");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entity = resp.getEntity(TimelineEntity.class);
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entity);
      assertEquals("id_1", entity.getId());
      assertEquals("app", entity.getType());
      assertEquals(3, entity.getConfigs().size());
      assertEquals(3, entity.getMetrics().size());
      assertTrue("UID should be present",
          entity.getInfo().containsKey(TimelineReaderUtils.UID_KEY));
      // Includes UID.
      assertEquals(3, entity.getInfo().size());
      assertEquals(2, entity.getEvents().size());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntityNotPresent() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app/id_10");
      verifyHttpResponse(client, uri, Status.NOT_FOUND);
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testQueryWithoutCluster() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/apps/app1/entities/app/id_1");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entity = resp.getEntity(TimelineEntity.class);
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entity);
      assertEquals("id_1", entity.getId());
      assertEquals("app", entity.getType());

      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/apps/app1/entities/app");
      resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(4, entities.size());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntities() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(4, entities.size());
      assertTrue("Entities id_1, id_2, id_3 and id_4 should have been" +
          " present in response",
          entities.contains(newEntity("app", "id_1")) &&
          entities.contains(newEntity("app", "id_2")) &&
          entities.contains(newEntity("app", "id_3")) &&
          entities.contains(newEntity("app", "id_4")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesWithLimit() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?limit=2");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(2, entities.size());
      // Entities returned are based on most recent created time.
      assertTrue("Entities with id_1 and id_4 should have been present " +
          "in response based on entity created time.",
          entities.contains(newEntity("app", "id_1")) &&
          entities.contains(newEntity("app", "id_4")));

      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app?limit=3");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      // Even though 2 entities out of 4 have same created time, one entity
      // is left out due to limit
      assertEquals(3, entities.size());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesBasedOnCreatedTime() throws Exception {
    Client client = createClient();
    try {
      // Window covering only id_4's created time.
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?" +
          "createdtimestart=1425016502030&createdtimeend=1425016502060");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_4 should have been present in response.",
          entities.contains(newEntity("app", "id_4")));

      // Upper bound only: excludes id_4.
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app?createdtimeend" +
          "=1425016502010");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(3, entities.size());
      assertFalse("Entity with id_4 should not have been present in response.",
          entities.contains(newEntity("app", "id_4")));

      // Lower bound only: includes only id_4.
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app?createdtimestart=" +
          "1425016502010");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_4 should have been present in response.",
          entities.contains(newEntity("app", "id_4")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesByRelations() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?relatesto=" +
          "flow:flow1");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_1 should have been present in response.",
          entities.contains(newEntity("app", "id_1")));

      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
          "type1:tid1_2,type2:tid2_1%60");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_1 should have been present in response.",
          entities.contains(newEntity("app", "id_1")));

      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app?isrelatedto=" +
          "type1:tid1_1:tid1_2,type2:tid2_1%60");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_1 should have been present in response.",
          entities.contains(newEntity("app", "id_1")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesByConfigFilters() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?" +
          "conffilters=config_1%20eq%20123%20AND%20config_3%20eq%20abc");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_3 should have been present in response.",
          entities.contains(newEntity("app", "id_3")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesByInfoFilters() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?" +
          "infofilters=info2%20eq%203.5");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_3 should have been present in response.",
          entities.contains(newEntity("app", "id_3")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesByMetricFilters() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?" +
          "metricfilters=metric3%20ge%200");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(2, entities.size());
      assertTrue("Entities with id_1 and id_2 should have been present" +
          " in response.",
          entities.contains(newEntity("app", "id_1")) &&
          entities.contains(newEntity("app", "id_2")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesByEventFilters() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?" +
          "eventfilters=event_2,event_4");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(1, entities.size());
      assertTrue("Entity with id_3 should have been present in response.",
          entities.contains(newEntity("app", "id_3")));
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetEntitiesNoMatch() throws Exception {
    Client client = createClient();
    try {
      // Combination of filters that no stored entity satisfies.
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?" +
          "metricfilters=metric7%20ge%200&isrelatedto=type1:tid1_1:tid1_2,"+
          "type2:tid2_1%60&relatesto=flow:flow1&eventfilters=event_2,event_4" +
          "&infofilters=info2%20eq%203.5&createdtimestart=1425016502030&" +
          "createdtimeend=1425016502060");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>(){});
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      assertEquals(0, entities.size());
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testInvalidValuesHandling() throws Exception {
    Client client = createClient();
    try {
      // Non-numeric flowrunid and limit values must yield 400 Bad Request.
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/" +
          "timeline/clusters/cluster1/apps/app1/entities/app?flowrunid=a23b");
      verifyHttpResponse(client, uri, Status.BAD_REQUEST);

      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app/id_1?flowrunid=2ab15");
      verifyHttpResponse(client, uri, Status.BAD_REQUEST);

      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/timeline/" +
          "clusters/cluster1/apps/app1/entities/app?limit=#$561av");
      verifyHttpResponse(client, uri, Status.BAD_REQUEST);
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetAppAttempts() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/"
          + "entities/YARN_APPLICATION_ATTEMPT");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>() {
          });
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      int totalEntities = entities.size();
      assertEquals(2, totalEntities);
      assertTrue(
          "Entity with app-attempt-1 should have been present in response.",
          entities.contains(
              newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
                  "app-attempt-1")));
      assertTrue(
          "Entity with app-attempt-2 should have been present in response.",
          entities.contains(
              newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
                  "app-attempt-2")));

      // The dedicated /appattempts endpoint must return the same set.
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/appattempts");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
      });
      assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
      assertNotNull(entities);
      int retrievedEntity = entities.size();
      assertEquals(2, retrievedEntity);
      assertTrue(
          "Entity with app-attempt-1 should have been present in response.",
          entities.contains(
              newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
                  "app-attempt-1")));
      assertTrue(
          "Entity with app-attempt-2 should have been present in response.",
          entities.contains(
              newEntity(TimelineEntityType.YARN_APPLICATION_ATTEMPT.toString(),
                  "app-attempt-2")));

      assertEquals(totalEntities, retrievedEntity);
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetAppAttempt() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/entities/"
          + "YARN_APPLICATION_ATTEMPT/app-attempt-1");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entities1 =
          resp.getEntity(new GenericType<TimelineEntity>() {
          });
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities1);

      // The dedicated /appattempts/<id> endpoint must return the same entity.
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/appattempts/app-attempt-1");
      resp = getResponse(client, uri);
      TimelineEntity entities2 =
          resp.getEntity(new GenericType<TimelineEntity>() {
          });
      assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
      assertNotNull(entities2);

      assertEquals(entities1, entities2);
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetContainers() throws Exception {
    Client client = createClient();
    try {
      // total 3 containers in a application.
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/entities/YARN_CONTAINER");
      ClientResponse resp = getResponse(client, uri);
      Set<TimelineEntity> entities =
          resp.getEntity(new GenericType<Set<TimelineEntity>>() {
          });
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities);
      int totalEntities = entities.size();
      assertEquals(3, totalEntities);
      assertTrue(
          "Entity with container_1_1 should have been present in response.",
          entities.contains(newEntity(
              TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")));
      assertTrue(
          "Entity with container_2_1 should have been present in response.",
          entities.contains(newEntity(
              TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")));
      assertTrue(
          "Entity with container_2_2 should have been present in response.",
          entities.contains(newEntity(
              TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")));

      // for app-attempt1 1 container has run
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/"
          + "appattempts/app-attempt-1/containers");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
      });
      assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
      assertNotNull(entities);
      int retrievedEntity = entities.size();
      assertEquals(1, retrievedEntity);
      assertTrue(
          "Entity with container_1_1 should have been present in response.",
          entities.contains(newEntity(
              TimelineEntityType.YARN_CONTAINER.toString(), "container_1_1")));

      // for app-attempt2 2 containers has run
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/"
          + "appattempts/app-attempt-2/containers");
      resp = getResponse(client, uri);
      entities = resp.getEntity(new GenericType<Set<TimelineEntity>>() {
      });
      assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
      assertNotNull(entities);
      retrievedEntity += entities.size();
      assertEquals(2, entities.size());
      assertTrue(
          "Entity with container_2_1 should have been present in response.",
          entities.contains(newEntity(
              TimelineEntityType.YARN_CONTAINER.toString(), "container_2_1")));
      assertTrue(
          "Entity with container_2_2 should have been present in response.",
          entities.contains(newEntity(
              TimelineEntityType.YARN_CONTAINER.toString(), "container_2_2")));

      // Per-attempt containers must add up to the application total.
      assertEquals(totalEntities, retrievedEntity);
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testGetContainer() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/"
          + "entities/YARN_CONTAINER/container_2_2");
      ClientResponse resp = getResponse(client, uri);
      TimelineEntity entities1 =
          resp.getEntity(new GenericType<TimelineEntity>() {
          });
      assertEquals(MediaType.APPLICATION_JSON + "; " + JettyUtils.UTF_8,
          resp.getType().toString());
      assertNotNull(entities1);

      // The dedicated /containers/<id> endpoint must return the same entity.
      uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/clusters/cluster1/apps/app1/containers/container_2_2");
      resp = getResponse(client, uri);
      TimelineEntity entities2 =
          resp.getEntity(new GenericType<TimelineEntity>() {
          });
      assertEquals(MediaType.APPLICATION_JSON_TYPE, resp.getType());
      assertNotNull(entities2);

      assertEquals(entities1, entities2);
    } finally {
      client.destroy();
    }
  }

  @Test
  public void testHealthCheck() throws Exception {
    Client client = createClient();
    try {
      URI uri = URI.create("http://localhost:" + serverPort + "/ws/v2/"
          + "timeline/health");
      ClientResponse resp = getResponse(client, uri);
      TimelineHealth timelineHealth =
          resp.getEntity(new GenericType<TimelineHealth>() {
          });
      assertEquals(200, resp.getStatus());
      assertEquals(TimelineHealth.TimelineHealthStatus.RUNNING,
          timelineHealth.getHealthStatus());
    } finally {
      client.destroy();
    }
  }
}
| |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.content.browser;
import android.app.Activity;
import android.content.Context;
import android.content.res.Configuration;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.os.Build;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.View;
import android.view.accessibility.AccessibilityEvent;
import android.view.accessibility.AccessibilityNodeInfo;
import android.view.inputmethod.EditorInfo;
import android.view.inputmethod.InputConnection;
import android.widget.FrameLayout;
import com.google.common.annotations.VisibleForTesting;
import org.chromium.content.common.TraceEvent;
import org.chromium.ui.WindowAndroid;
/**
* The containing view for {@link ContentViewCore} that exists in the Android UI hierarchy and
* exposes the various {@link View} functionality to it.
*
* TODO(joth): Remove any methods overrides from this class that were added for WebView
* compatibility.
*/
public class ContentView extends FrameLayout
        implements ContentViewCore.InternalAccessDelegate, PageInfo {
    // Owns the native web contents and implements all content behavior; this
    // view only adapts Android view-system callbacks onto it.
    private final ContentViewCore mContentViewCore;
    // Pixel offsets applied to incoming touch/hover events so they line up
    // with content that has moved relative to this view; see
    // setCurrentMotionEventOffsets().
    private float mCurrentTouchOffsetX;
    private float mCurrentTouchOffsetY;
    /**
     * Creates an instance of a ContentView.
     * @param context The Context the view is running in, through which it can
     *                access the current theme, resources, etc.
     * @param nativeWebContents A pointer to the native web contents.
     * @param windowAndroid An instance of the WindowAndroid.
     * @return A ContentView instance.
     */
    public static ContentView newInstance(Context context, int nativeWebContents,
            WindowAndroid windowAndroid) {
        return newInstance(context, nativeWebContents, windowAndroid, null,
                android.R.attr.webViewStyle);
    }
    /**
     * Creates an instance of a ContentView.
     * @param context The Context the view is running in, through which it can
     *                access the current theme, resources, etc.
     * @param nativeWebContents A pointer to the native web contents.
     * @param windowAndroid An instance of the WindowAndroid.
     * @param attrs The attributes of the XML tag that is inflating the view.
     * @return A ContentView instance.
     */
    public static ContentView newInstance(Context context, int nativeWebContents,
            WindowAndroid windowAndroid, AttributeSet attrs) {
        // TODO(klobag): use the WebViewStyle as the default style for now. It enables scrollbar.
        // When ContentView is moved to framework, we can define its own style in the res.
        return newInstance(context, nativeWebContents, windowAndroid, attrs,
                android.R.attr.webViewStyle);
    }
    /**
     * Creates an instance of a ContentView.
     * @param context The Context the view is running in, through which it can
     *                access the current theme, resources, etc.
     * @param nativeWebContents A pointer to the native web contents.
     * @param windowAndroid An instance of the WindowAndroid.
     * @param attrs The attributes of the XML tag that is inflating the view.
     * @param defStyle The default style to apply to this view.
     * @return A ContentView instance.
     */
    public static ContentView newInstance(Context context, int nativeWebContents,
            WindowAndroid windowAndroid, AttributeSet attrs, int defStyle) {
        // Pre-Jelly Bean devices get the base class; JB+ gets the subclass
        // with newer input/accessibility support.
        if (Build.VERSION.SDK_INT < Build.VERSION_CODES.JELLY_BEAN) {
            return new ContentView(context, nativeWebContents, windowAndroid, attrs, defStyle);
        } else {
            return new JellyBeanContentView(context, nativeWebContents, windowAndroid, attrs,
                    defStyle);
        }
    }
    protected ContentView(Context context, int nativeWebContents, WindowAndroid windowAndroid,
            AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        // Overlay scrollbars are rendered by the content layer, so disable the
        // view-system ones to avoid drawing them twice.
        if (getScrollBarStyle() == View.SCROLLBARS_INSIDE_OVERLAY) {
            setHorizontalScrollBarEnabled(false);
            setVerticalScrollBarEnabled(false);
        }
        setFocusable(true);
        setFocusableInTouchMode(true);
        mContentViewCore = new ContentViewCore(context);
        // On JB+ input events are delivered in sync with vsync.
        mContentViewCore.initialize(this, this, nativeWebContents, windowAndroid,
                Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN ?
                        ContentViewCore.INPUT_EVENTS_DELIVERED_AT_VSYNC :
                        ContentViewCore.INPUT_EVENTS_DELIVERED_IMMEDIATELY);
    }
    // PageInfo implementation.
    @Override
    public String getUrl() {
        return mContentViewCore.getUrl();
    }
    @Override
    public String getTitle() {
        return mContentViewCore.getTitle();
    }
    @Override
    public boolean isReadyForSnapshot() {
        // Only snapshot a live, fully-initialized view.
        return !isCrashed() && isReady();
    }
    @Override
    public Bitmap getBitmap() {
        return getBitmap(getWidth(), getHeight());
    }
    @Override
    public Bitmap getBitmap(int width, int height) {
        return mContentViewCore.getBitmap(width, height);
    }
    @Override
    public int getBackgroundColor() {
        return mContentViewCore.getBackgroundColor();
    }
    @Override
    public View getView() {
        return this;
    }
    /**
     * @return The core component of the ContentView that handles JNI communication.  Should only be
     *         used for passing to native.
     */
    public ContentViewCore getContentViewCore() {
        return mContentViewCore;
    }
    /**
     * @return The cache of scales and positions used to convert coordinates from/to CSS.
     */
    public RenderCoordinates getRenderCoordinates() {
        return mContentViewCore.getRenderCoordinates();
    }
    /**
     * Returns true if the given Activity has hardware acceleration enabled
     * in its manifest, or in its foreground window.
     *
     * TODO(husky): Remove when ContentViewCore.initialize() is refactored (see TODO there)
     * TODO(dtrainor) This is still used by other classes.  Make sure to pull some version of this
     * out before removing it.
     */
    public static boolean hasHardwareAcceleration(Activity activity) {
        return ContentViewCore.hasHardwareAcceleration(activity);
    }
    /**
     * Destroy the internal state of the WebView. This method may only be called
     * after the WebView has been removed from the view system. No other methods
     * may be called on this WebView after this method has been called.
     */
    public void destroy() {
        mContentViewCore.destroy();
    }
    /**
     * Returns true initially, false after destroy() has been called.
     * It is illegal to call any other public method after destroy().
     */
    public boolean isAlive() {
        return mContentViewCore.isAlive();
    }
    /**
     * For internal use. Throws IllegalStateException if mNativeContentView is 0.
     * Use this to ensure we get a useful Java stack trace, rather than a native
     * crash dump, from use-after-destroy bugs in Java code.
     */
    void checkIsAlive() throws IllegalStateException {
        mContentViewCore.checkIsAlive();
    }
    public void setContentViewClient(ContentViewClient client) {
        mContentViewCore.setContentViewClient(client);
    }
    @VisibleForTesting
    public ContentViewClient getContentViewClient() {
        return mContentViewCore.getContentViewClient();
    }
    /**
     * Load url without fixing up the url string. Consumers of ContentView are responsible for
     * ensuring the URL passed in is properly formatted (i.e. the scheme has been added if left
     * off during user input).
     *
     * @param params Parameters for this load.
     */
    public void loadUrl(LoadUrlParams params) {
        mContentViewCore.loadUrl(params);
    }
    /**
     * Stops loading the current web contents.
     */
    public void stopLoading() {
        mContentViewCore.stopLoading();
    }
    /**
     * @return Whether the current WebContents has a previous navigation entry.
     */
    public boolean canGoBack() {
        return mContentViewCore.canGoBack();
    }
    /**
     * @return Whether the current WebContents has a navigation entry after the current one.
     */
    public boolean canGoForward() {
        return mContentViewCore.canGoForward();
    }
    /**
     * @param offset The offset into the navigation history.
     * @return Whether we can move in history by given offset
     */
    public boolean canGoToOffset(int offset) {
        return mContentViewCore.canGoToOffset(offset);
    }
    /**
     * Navigates to the specified offset from the "current entry". Does nothing if the offset is out
     * of bounds.
     * @param offset The offset into the navigation history.
     */
    public void goToOffset(int offset) {
        mContentViewCore.goToOffset(offset);
    }
    /**
     * Goes to the navigation entry before the current one.
     */
    public void goBack() {
        mContentViewCore.goBack();
    }
    /**
     * Goes to the navigation entry following the current one.
     */
    public void goForward() {
        mContentViewCore.goForward();
    }
    /**
     * Reload the current page.
     */
    public void reload() {
        mContentViewCore.reload();
    }
    /**
     * Clears the WebView's page history in both the backwards and forwards
     * directions.
     */
    public void clearHistory() {
        mContentViewCore.clearHistory();
    }
    /**
     * Start profiling the update speed. You must call {@link #stopFpsProfiling}
     * to stop profiling.
     */
    @VisibleForTesting
    public void startFpsProfiling() {
        // TODO(nileshagrawal): Implement this.
    }
    /**
     * Stop profiling the update speed.
     */
    @VisibleForTesting
    public float stopFpsProfiling() {
        // TODO(nileshagrawal): Implement this.
        return 0.0f;
    }
    /**
     * Fling the ContentView from the current position.
     * @param x Fling touch starting position
     * @param y Fling touch starting position
     * @param velocityX Initial velocity of the fling (X) measured in pixels per second.
     * @param velocityY Initial velocity of the fling (Y) measured in pixels per second.
     */
    @VisibleForTesting
    public void fling(long timeMs, int x, int y, int velocityX, int velocityY) {
        mContentViewCore.getContentViewGestureHandler().fling(timeMs, x, y, velocityX, velocityY);
    }
    /**
     * Start pinch zoom. You must call {@link #pinchEnd} to stop.
     */
    @VisibleForTesting
    public void pinchBegin(long timeMs, int x, int y) {
        mContentViewCore.getContentViewGestureHandler().pinchBegin(timeMs, x, y);
    }
    /**
     * Stop pinch zoom.
     */
    @VisibleForTesting
    public void pinchEnd(long timeMs) {
        mContentViewCore.getContentViewGestureHandler().pinchEnd(timeMs);
    }
    // Tells the gesture handler to suppress single-tap gestures.
    void setIgnoreSingleTap(boolean value) {
        mContentViewCore.getContentViewGestureHandler().setIgnoreSingleTap(value);
    }
    /** @see ContentViewGestureHandler#setIgnoreRemainingTouchEvents */
    public void setIgnoreRemainingTouchEvents() {
        mContentViewCore.getContentViewGestureHandler().setIgnoreRemainingTouchEvents();
    }
    /**
     * Modify the ContentView magnification level. The effect of calling this
     * method is exactly as after "pinch zoom".
     *
     * @param timeMs The event time in milliseconds.
     * @param delta The ratio of the new magnification level over the current
     *            magnification level.
     * @param anchorX The magnification anchor (X) in the current view
     *            coordinate.
     * @param anchorY The magnification anchor (Y) in the current view
     *            coordinate.
     */
    @VisibleForTesting
    public void pinchBy(long timeMs, int anchorX, int anchorY, float delta) {
        mContentViewCore.getContentViewGestureHandler().pinchBy(timeMs, anchorX, anchorY, delta);
    }
    /**
     * Injects the passed JavaScript code in the current page and evaluates it.
     *
     * @throws IllegalStateException If the ContentView has been destroyed.
     */
    public void evaluateJavaScript(String script) throws IllegalStateException {
        mContentViewCore.evaluateJavaScript(script, null);
    }
    /**
     * This method should be called when the containing activity is paused.
     **/
    public void onActivityPause() {
        mContentViewCore.onActivityPause();
    }
    /**
     * This method should be called when the containing activity is resumed.
     **/
    public void onActivityResume() {
        mContentViewCore.onActivityResume();
    }
    /**
     * To be called when the ContentView is shown.
     **/
    public void onShow() {
        mContentViewCore.onShow();
    }
    /**
     * To be called when the ContentView is hidden.
     **/
    public void onHide() {
        mContentViewCore.onHide();
    }
    /**
     * Return the ContentSettings object used to retrieve the settings for this
     * ContentView.
     * @return A ContentSettings object that can be used to retrieve this ContentView's
     *         settings.
     */
    public ContentSettings getContentSettings() {
        return mContentViewCore.getContentSettings();
    }
    /**
     * Hides the select action bar.
     */
    public void hideSelectActionBar() {
        mContentViewCore.hideSelectActionBar();
    }
    // FrameLayout overrides.
    // Needed by ContentViewCore.InternalAccessDelegate
    @Override
    public boolean drawChild(Canvas canvas, View child, long drawingTime) {
        return super.drawChild(canvas, child, drawingTime);
    }
    @Override
    protected void onSizeChanged(int w, int h, int ow, int oh) {
        TraceEvent.begin();
        super.onSizeChanged(w, h, ow, oh);
        mContentViewCore.onSizeChanged(w, h, ow, oh);
        TraceEvent.end();
    }
    @Override
    public InputConnection onCreateInputConnection(EditorInfo outAttrs) {
        return mContentViewCore.onCreateInputConnection(outAttrs);
    }
    @Override
    public boolean onCheckIsTextEditor() {
        return mContentViewCore.onCheckIsTextEditor();
    }
    @Override
    protected void onFocusChanged(boolean gainFocus, int direction, Rect previouslyFocusedRect) {
        TraceEvent.begin();
        super.onFocusChanged(gainFocus, direction, previouslyFocusedRect);
        mContentViewCore.onFocusChanged(gainFocus);
        TraceEvent.end();
    }
    @Override
    public void onWindowFocusChanged(boolean hasWindowFocus) {
        super.onWindowFocusChanged(hasWindowFocus);
        mContentViewCore.onWindowFocusChanged(hasWindowFocus);
    }
    @Override
    public boolean onKeyUp(int keyCode, KeyEvent event) {
        return mContentViewCore.onKeyUp(keyCode, event);
    }
    @Override
    public boolean dispatchKeyEventPreIme(KeyEvent event) {
        return mContentViewCore.dispatchKeyEventPreIme(event);
    }
    @Override
    public boolean dispatchKeyEvent(KeyEvent event) {
        // Only forward key events to the content when this view owns focus;
        // otherwise let the view hierarchy handle them normally.
        if (isFocused()) {
            return mContentViewCore.dispatchKeyEvent(event);
        } else {
            return super.dispatchKeyEvent(event);
        }
    }
    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // Apply the current touch offset, forward, then recycle the copy.
        MotionEvent offset = createOffsetMotionEvent(event);
        boolean consumed = mContentViewCore.onTouchEvent(offset);
        offset.recycle();
        return consumed;
    }
    /**
     * Mouse move events are sent on hover enter, hover move and hover exit.
     * They are sent on hover exit because sometimes it acts as both a hover
     * move and hover exit.
     */
    @Override
    public boolean onHoverEvent(MotionEvent event) {
        MotionEvent offset = createOffsetMotionEvent(event);
        boolean consumed = mContentViewCore.onHoverEvent(offset);
        offset.recycle();
        return consumed;
    }
    @Override
    public boolean onGenericMotionEvent(MotionEvent event) {
        return mContentViewCore.onGenericMotionEvent(event);
    }
    @Override
    public boolean performLongClick() {
        // Long-press handling is done by the content layer, not the view system.
        return false;
    }
    /**
     * Sets the current amount to offset incoming touch events by.  This is used to handle content
     * moving and not lining up properly with the android input system.
     * @param dx The X offset in pixels to shift touch events.
     * @param dy The Y offset in pixels to shift touch events.
     */
    public void setCurrentMotionEventOffsets(float dx, float dy) {
        mCurrentTouchOffsetX = dx;
        mCurrentTouchOffsetY = dy;
    }
    // Returns a copy of src shifted by the current touch offsets; the caller
    // must recycle the returned event.
    private MotionEvent createOffsetMotionEvent(MotionEvent src) {
        MotionEvent dst = MotionEvent.obtain(src);
        dst.offsetLocation(mCurrentTouchOffsetX, mCurrentTouchOffsetY);
        return dst;
    }
    @Override
    protected void onConfigurationChanged(Configuration newConfig) {
        // NOTE(review): deliberately does not call super here; super is
        // reachable through super_onConfigurationChanged() below — presumably
        // invoked by ContentViewCore via the InternalAccessDelegate. Confirm
        // before changing.
        mContentViewCore.onConfigurationChanged(newConfig);
    }
    /**
     * Currently the ContentView scrolling happens in the native side. In
     * the Java view system, it is always pinned at (0, 0). scrollBy() and scrollTo()
     * are overridden, so that View's mScrollX and mScrollY will be unchanged at
     * (0, 0). This is critical for drawing ContentView correctly.
     */
    @Override
    public void scrollBy(int x, int y) {
        mContentViewCore.scrollBy(x, y);
    }
    @Override
    public void scrollTo(int x, int y) {
        mContentViewCore.scrollTo(x, y);
    }
    @Override
    protected int computeHorizontalScrollExtent() {
        // TODO (dtrainor): Need to expose scroll events properly to public.  Either make getScroll*
        // work or expose computeHorizontalScrollOffset()/computeVerticalScrollOffset as public.
        return mContentViewCore.computeHorizontalScrollExtent();
    }
    @Override
    protected int computeHorizontalScrollOffset() {
        return mContentViewCore.computeHorizontalScrollOffset();
    }
    @Override
    protected int computeHorizontalScrollRange() {
        return mContentViewCore.computeHorizontalScrollRange();
    }
    @Override
    protected int computeVerticalScrollExtent() {
        return mContentViewCore.computeVerticalScrollExtent();
    }
    @Override
    protected int computeVerticalScrollOffset() {
        return mContentViewCore.computeVerticalScrollOffset();
    }
    @Override
    protected int computeVerticalScrollRange() {
        return mContentViewCore.computeVerticalScrollRange();
    }
    // End FrameLayout overrides.
    @Override
    public boolean awakenScrollBars(int startDelay, boolean invalidate) {
        return mContentViewCore.awakenScrollBars(startDelay, invalidate);
    }
    @Override
    public boolean awakenScrollBars() {
        return super.awakenScrollBars();
    }
    // X coordinate (view pixels) of the last single-tap gesture.
    public int getSingleTapX()  {
        return mContentViewCore.getContentViewGestureHandler().getSingleTapX();
    }
    // Y coordinate (view pixels) of the last single-tap gesture.
    public int getSingleTapY()  {
        return mContentViewCore.getContentViewGestureHandler().getSingleTapY();
    }
    @Override
    public void onInitializeAccessibilityNodeInfo(AccessibilityNodeInfo info) {
        super.onInitializeAccessibilityNodeInfo(info);
        mContentViewCore.onInitializeAccessibilityNodeInfo(info);
    }
    /**
     * Fills in scrolling values for AccessibilityEvents.
     * @param event Event being fired.
     */
    @Override
    public void onInitializeAccessibilityEvent(AccessibilityEvent event) {
        super.onInitializeAccessibilityEvent(event);
        mContentViewCore.onInitializeAccessibilityEvent(event);
    }
    @Override
    protected void onAttachedToWindow() {
        super.onAttachedToWindow();
        mContentViewCore.onAttachedToWindow();
    }
    @Override
    protected void onDetachedFromWindow() {
        super.onDetachedFromWindow();
        mContentViewCore.onDetachedFromWindow();
    }
    @Override
    protected void onVisibilityChanged(View changedView, int visibility) {
        super.onVisibilityChanged(changedView, visibility);
        mContentViewCore.onVisibilityChanged(changedView, visibility);
    }
    /**
     * Register the delegate to be used when content can not be handled by
     * the rendering engine, and should be downloaded instead. This will replace
     * the current delegate.
     * @param delegate An implementation of ContentViewDownloadDelegate.
     */
    public void setDownloadDelegate(ContentViewDownloadDelegate delegate) {
        mContentViewCore.setDownloadDelegate(delegate);
    }
    // Called by DownloadController.
    ContentViewDownloadDelegate getDownloadDelegate() {
        return mContentViewCore.getDownloadDelegate();
    }
    public boolean getUseDesktopUserAgent() {
        return mContentViewCore.getUseDesktopUserAgent();
    }
    /**
     * Set whether or not we're using a desktop user agent for the currently loaded page.
     * @param override If true, use a desktop user agent.  Use a mobile one otherwise.
     * @param reloadOnChange Reload the page if the UA has changed.
     */
    public void setUseDesktopUserAgent(boolean override, boolean reloadOnChange) {
        mContentViewCore.setUseDesktopUserAgent(override, reloadOnChange);
    }
    /**
     * @return Whether the native ContentView has crashed.
     */
    public boolean isCrashed() {
        return mContentViewCore.isCrashed();
    }
    /**
     * @return Whether a reload happens when this ContentView is activated.
     */
    public boolean needsReload() {
        return mContentViewCore.needsReload();
    }
    /**
     * Checks whether the WebView can be zoomed in.
     *
     * @return True if the WebView can be zoomed in.
     */
    // This method uses the term 'zoom' for legacy reasons, but relates
    // to what chrome calls the 'page scale factor'.
    public boolean canZoomIn() {
        return mContentViewCore.canZoomIn();
    }
    /**
     * Checks whether the WebView can be zoomed out.
     *
     * @return True if the WebView can be zoomed out.
     */
    // This method uses the term 'zoom' for legacy reasons, but relates
    // to what chrome calls the 'page scale factor'.
    public boolean canZoomOut() {
        return mContentViewCore.canZoomOut();
    }
    /**
     * Zooms in the WebView by 25% (or less if that would result in zooming in
     * more than possible).
     *
     * @return True if there was a zoom change, false otherwise.
     */
    // This method uses the term 'zoom' for legacy reasons, but relates
    // to what chrome calls the 'page scale factor'.
    public boolean zoomIn() {
        return mContentViewCore.zoomIn();
    }
    /**
     * Zooms out the WebView by 20% (or less if that would result in zooming out
     * more than possible).
     *
     * @return True if there was a zoom change, false otherwise.
     */
    // This method uses the term 'zoom' for legacy reasons, but relates
    // to what chrome calls the 'page scale factor'.
    public boolean zoomOut() {
        return mContentViewCore.zoomOut();
    }
    /**
     * Resets the zoom factor of the WebView.
     *
     * @return True if there was a zoom change, false otherwise.
     */
    // This method uses the term 'zoom' for legacy reasons, but relates
    // to what chrome calls the 'page scale factor'.
    public boolean zoomReset() {
        return mContentViewCore.zoomReset();
    }
    /**
     * Return the current scale of the WebView
     * @return The current scale.
     */
    public float getScale() {
        return mContentViewCore.getScale();
    }
    /**
     * If the view is ready to draw contents to the screen. In hardware mode,
     * the initialization of the surface texture may not occur until after the
     * view has been added to the layout. This method will return {@code true}
     * once the texture is actually ready.
     */
    public boolean isReady() {
        return mContentViewCore.isReady();
    }
    /**
     * Returns whether or not accessibility injection is being used.
     */
    public boolean isInjectingAccessibilityScript() {
        return mContentViewCore.isInjectingAccessibilityScript();
    }
    /**
     * Enable or disable accessibility features.
     */
    public void setAccessibilityState(boolean state) {
        mContentViewCore.setAccessibilityState(state);
    }
    /**
     * Stop any TTS notifications that are currently going on.
     */
    public void stopCurrentAccessibilityNotifications() {
        mContentViewCore.stopCurrentAccessibilityNotifications();
    }
    /**
     * Inform WebKit that Fullscreen mode has been exited by the user.
     */
    public void exitFullscreen() {
        mContentViewCore.exitFullscreen();
    }
    /**
     * Return content scroll y.
     *
     * @return The vertical scroll position in pixels.
     */
    public int getContentScrollY() {
        return mContentViewCore.computeVerticalScrollOffset();
    }
    /**
     * Return content height.
     *
     * @return The height of the content in pixels.
     */
    public int getContentHeight() {
        return mContentViewCore.computeVerticalScrollRange();
    }
    ///////////////////////////////////////////////////////////////////////////////////////////////
    //              Start Implementation of ContentViewCore.InternalAccessDelegate               //
    ///////////////////////////////////////////////////////////////////////////////////////////////
    // These super_* methods give ContentViewCore direct access to the
    // FrameLayout base-class behavior that the overrides above intercept.
    @Override
    public boolean super_onKeyUp(int keyCode, KeyEvent event) {
        return super.onKeyUp(keyCode, event);
    }
    @Override
    public boolean super_dispatchKeyEventPreIme(KeyEvent event) {
        return super.dispatchKeyEventPreIme(event);
    }
    @Override
    public boolean super_dispatchKeyEvent(KeyEvent event) {
        return super.dispatchKeyEvent(event);
    }
    @Override
    public boolean super_onGenericMotionEvent(MotionEvent event) {
        return super.onGenericMotionEvent(event);
    }
    @Override
    public void super_onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
    }
    @Override
    public boolean super_awakenScrollBars(int startDelay, boolean invalidate) {
        return super.awakenScrollBars(startDelay, invalidate);
    }
    ///////////////////////////////////////////////////////////////////////////////////////////////
    //               End Implementation of ContentViewCore.InternalAccessDelegate                //
    ///////////////////////////////////////////////////////////////////////////////////////////////
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package discounty.com.google.zxing.client.result;
import discounty.com.google.zxing.Result;
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
/**
* <p>Abstract class representing the result of decoding a barcode, as more than
* a String -- as some type of structured data. This might be a subclass which represents
* a URL, or an e-mail address. {@link #parseResult(Result)} will turn a raw
* decoded string into the most appropriate type of structured representation.</p>
*
* <p>Thanks to Jeff Griffin for proposing rewrite of these classes that relies less
* on exception-based mechanisms during parsing.</p>
*
* @author Sean Owen
*/
public abstract class ResultParser {
private static final ResultParser[] PARSERS = {
new BookmarkDoCoMoResultParser(),
new AddressBookDoCoMoResultParser(),
new EmailDoCoMoResultParser(),
new AddressBookAUResultParser(),
new VCardResultParser(),
new BizcardResultParser(),
new VEventResultParser(),
new EmailAddressResultParser(),
new SMTPResultParser(),
new TelResultParser(),
new SMSMMSResultParser(),
new SMSTOMMSTOResultParser(),
new GeoResultParser(),
new WifiResultParser(),
new URLTOResultParser(),
new URIResultParser(),
new ISBNResultParser(),
new ProductResultParser(),
new ExpandedProductResultParser(),
new VINResultParser(),
};
private static final Pattern DIGITS = Pattern.compile("\\d+");
private static final Pattern AMPERSAND = Pattern.compile("&");
private static final Pattern EQUALS = Pattern.compile("=");
private static final String BYTE_ORDER_MARK = "\ufeff";
/**
* Attempts to parse the raw {@link Result}'s contents as a particular type
* of information (email, URL, etc.) and return a {@link ParsedResult} encapsulating
* the result of parsing.
*
* @param theResult the raw {@link Result} to parse
* @return {@link ParsedResult} encapsulating the parsing result
*/
public abstract ParsedResult parse(Result theResult);
protected static String getMassagedText(Result result) {
String text = result.getText();
if (text.startsWith(BYTE_ORDER_MARK)) {
text = text.substring(1);
}
return text;
}
public static ParsedResult parseResult(Result theResult) {
for (ResultParser parser : PARSERS) {
ParsedResult result = parser.parse(theResult);
if (result != null) {
return result;
}
}
return new TextParsedResult(theResult.getText(), null);
}
protected static void maybeAppend(String value, StringBuilder result) {
if (value != null) {
result.append('\n');
result.append(value);
}
}
protected static void maybeAppend(String[] value, StringBuilder result) {
if (value != null) {
for (String s : value) {
result.append('\n');
result.append(s);
}
}
}
protected static String[] maybeWrap(String value) {
return value == null ? null : new String[] { value };
}
protected static String unescapeBackslash(String escaped) {
int backslash = escaped.indexOf('\\');
if (backslash < 0) {
return escaped;
}
int max = escaped.length();
StringBuilder unescaped = new StringBuilder(max - 1);
unescaped.append(escaped.toCharArray(), 0, backslash);
boolean nextIsEscaped = false;
for (int i = backslash; i < max; i++) {
char c = escaped.charAt(i);
if (nextIsEscaped || c != '\\') {
unescaped.append(c);
nextIsEscaped = false;
} else {
nextIsEscaped = true;
}
}
return unescaped.toString();
}
protected static int parseHexDigit(char c) {
if (c >= '0' && c <= '9') {
return c - '0';
}
if (c >= 'a' && c <= 'f') {
return 10 + (c - 'a');
}
if (c >= 'A' && c <= 'F') {
return 10 + (c - 'A');
}
return -1;
}
protected static boolean isStringOfDigits(CharSequence value, int length) {
return value != null && length > 0 && length == value.length() && DIGITS.matcher(value).matches();
}
protected static boolean isSubstringOfDigits(CharSequence value, int offset, int length) {
if (value == null || length <= 0) {
return false;
}
int max = offset + length;
return value.length() >= max && DIGITS.matcher(value.subSequence(offset, max)).matches();
}
static Map<String,String> parseNameValuePairs(String uri) {
int paramStart = uri.indexOf('?');
if (paramStart < 0) {
return null;
}
Map<String,String> result = new HashMap<>(3);
for (String keyValue : AMPERSAND.split(uri.substring(paramStart + 1))) {
appendKeyValue(keyValue, result);
}
return result;
}
private static void appendKeyValue(CharSequence keyValue, Map<String,String> result) {
String[] keyValueTokens = EQUALS.split(keyValue, 2);
if (keyValueTokens.length == 2) {
String key = keyValueTokens[0];
String value = keyValueTokens[1];
try {
value = urlDecode(value);
result.put(key, value);
} catch (IllegalArgumentException iae) {
// continue; invalid data such as an escape like %0t
}
}
}
static String urlDecode(String encoded) {
try {
return URLDecoder.decode(encoded, "UTF-8");
} catch (UnsupportedEncodingException uee) {
throw new IllegalStateException(uee); // can't happen
}
}
static String[] matchPrefixedField(String prefix, String rawText, char endChar, boolean trim) {
List<String> matches = null;
int i = 0;
int max = rawText.length();
while (i < max) {
i = rawText.indexOf(prefix, i);
if (i < 0) {
break;
}
i += prefix.length(); // Skip past this prefix we found to start
int start = i; // Found the start of a match here
boolean more = true;
while (more) {
i = rawText.indexOf(endChar, i);
if (i < 0) {
// No terminating end character? uh, done. Set i such that loop terminates and break
i = rawText.length();
more = false;
} else if (countPrecedingBackslashes(rawText, i) % 2 != 0) {
// semicolon was escaped (odd count of preceding backslashes) so continue
i++;
} else {
// found a match
if (matches == null) {
matches = new ArrayList<>(3); // lazy init
}
String element = unescapeBackslash(rawText.substring(start, i));
if (trim) {
element = element.trim();
}
if (!element.isEmpty()) {
matches.add(element);
}
i++;
more = false;
}
}
}
if (matches == null || matches.isEmpty()) {
return null;
}
return matches.toArray(new String[matches.size()]);
}
private static int countPrecedingBackslashes(CharSequence s, int pos) {
int count = 0;
for (int i = pos - 1; i >= 0; i--) {
if (s.charAt(i) == '\\') {
count++;
} else {
break;
}
}
return count;
}
/**
 * Convenience wrapper around {@link #matchPrefixedField} returning only the
 * first match, or {@code null} when there is none.
 */
static String matchSinglePrefixedField(String prefix, String rawText, char endChar, boolean trim) {
  String[] all = matchPrefixedField(prefix, rawText, endChar, trim);
  if (all == null) {
    return null;
  }
  return all[0];
}
}
| |
/*
* Copyright (c) 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* COPYRIGHT AND PERMISSION NOTICE
*
* Copyright (C) 1991-2012 Unicode, Inc. All rights reserved. Distributed under
* the Terms of Use in http://www.unicode.org/copyright.html.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of the Unicode data files and any associated documentation (the "Data
* Files") or Unicode software and any associated documentation (the
* "Software") to deal in the Data Files or Software without restriction,
* including without limitation the rights to use, copy, modify, merge,
* publish, distribute, and/or sell copies of the Data Files or Software, and
* to permit persons to whom the Data Files or Software are furnished to do so,
* provided that (a) the above copyright notice(s) and this permission notice
* appear with all copies of the Data Files or Software, (b) both the above
* copyright notice(s) and this permission notice appear in associated
* documentation, and (c) there is clear notice in each modified Data File or
* in the Software as well as in the documentation associated with the Data
* File(s) or Software that the data or software has been modified.
*
* THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
* KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF
* THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS
* INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR
* CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
* DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER
* TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
* OF THE DATA FILES OR SOFTWARE.
*
* Except as contained in this notice, the name of a copyright holder shall not
* be used in advertising or otherwise to promote the sale, use or other
* dealings in these Data Files or Software without prior written authorization
* of the copyright holder.
*/
// Note: this file has been generated by a tool.
package sun.text.resources.ru;
import sun.util.resources.OpenListResourceBundle;
/**
 * Supplementary java.time localization resources for the Russian ("ru") locale:
 * quarter names, calendar display names, date/time field labels, era names, and
 * date patterns for the Buddhist, Islamic, Japanese and ROC chronologies.
 *
 * <p>NOTE: as the file header states, this class is generated by a tool from
 * locale data. Do not hand-edit the string data below; non-ASCII (Cyrillic)
 * text is kept in backslash-u escaped form exactly as the generator emits it.
 */
public class JavaTimeSupplementary_ru extends OpenListResourceBundle {
    @Override
    protected final Object[][] getContents() {
        // Each entry is { resource key, value }; a value is either a single
        // String or a String[] indexed by quarter/month/era number.
        return new Object[][] {
            { "QuarterAbbreviations",
                new String[] {
                    "1-\u0439 \u043a\u0432.",
                    "2-\u0439 \u043a\u0432.",
                    "3-\u0439 \u043a\u0432.",
                    "4-\u0439 \u043a\u0432.",
                }
            },
            { "QuarterNames",
                new String[] {
                    "1-\u0439 \u043a\u0432\u0430\u0440\u0442\u0430\u043b",
                    "2-\u0439 \u043a\u0432\u0430\u0440\u0442\u0430\u043b",
                    "3-\u0439 \u043a\u0432\u0430\u0440\u0442\u0430\u043b",
                    "4-\u0439 \u043a\u0432\u0430\u0440\u0442\u0430\u043b",
                }
            },
            { "QuarterNarrows",
                new String[] {
                    "1",
                    "2",
                    "3",
                    "4",
                }
            },
            { "calendarname.buddhist",
                "\u0411\u0443\u0434\u0434\u0438\u0439\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.gregorian",
                "\u0413\u0440\u0438\u0433\u043e\u0440\u0438\u0430\u043d\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.gregory",
                "\u0413\u0440\u0438\u0433\u043e\u0440\u0438\u0430\u043d\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.islamic",
                "\u0418\u0441\u043b\u0430\u043c\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.islamic-civil",
                "\u0418\u0441\u043b\u0430\u043c\u0441\u043a\u0438\u0439 \u0433\u0440\u0430\u0436\u0434\u0430\u043d\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.islamicc",
                "\u0418\u0441\u043b\u0430\u043c\u0441\u043a\u0438\u0439 \u0433\u0440\u0430\u0436\u0434\u0430\u043d\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.japanese",
                "\u042f\u043f\u043e\u043d\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "calendarname.roc",
                "\u041a\u0438\u0442\u0430\u0439\u0441\u043a\u0438\u0439 \u043a\u0430\u043b\u0435\u043d\u0434\u0430\u0440\u044c" },
            { "field.dayperiod",
                "\u0414\u041f/\u041f\u041f" },
            { "field.era",
                "\u042d\u0440\u0430" },
            { "field.hour",
                "\u0427\u0430\u0441" },
            { "field.minute",
                "\u041c\u0438\u043d\u0443\u0442\u0430" },
            { "field.month",
                "\u041c\u0435\u0441\u044f\u0446" },
            { "field.second",
                "\u0421\u0435\u043a\u0443\u043d\u0434\u0430" },
            { "field.week",
                "\u041d\u0435\u0434\u0435\u043b\u044f" },
            { "field.weekday",
                "\u0414\u0435\u043d\u044c \u043d\u0435\u0434\u0435\u043b\u0438" },
            { "field.year",
                "\u0413\u043e\u0434" },
            { "field.zone",
                "\u0427\u0430\u0441\u043e\u0432\u043e\u0439 \u043f\u043e\u044f\u0441" },
            { "islamic.DatePatterns",
                new String[] {
                    "EEEE, d MMMM y\u00a0'\u0433'. GGGG",
                    "d MMMM y\u00a0'\u0433'. GGGG",
                    "dd.MM.yyyy GGGG",
                    "dd.MM.yy GGGG",
                }
            },
            { "islamic.MonthAbbreviations",
                new String[] {
                    "\u041c\u0443\u0445\u0430\u0440\u0440\u0430\u043c",
                    "\u0421\u0430\u0444\u0430\u0440",
                    "\u0420\u0430\u0431\u0438-\u0443\u043b\u044c-\u0430\u0432\u0432\u0430\u043b\u044c",
                    "\u0420\u0430\u0431\u0438-\u0443\u043b\u044c-\u0430\u0445\u0438\u0440",
                    "\u0414\u0436\u0443\u043c\u0430\u0434-\u0443\u043b\u044c-\u0430\u0432\u0432\u0430\u043b\u044c",
                    "\u0414\u0436\u0443\u043c\u0430\u0434-\u0443\u043b\u044c-\u0430\u0445\u0438\u0440",
                    "\u0420\u0430\u0434\u0436\u0430\u0431",
                    "\u0428\u0430\u0430\u0431\u0430\u043d",
                    "\u0420\u0430\u043c\u0430\u0434\u0430\u043d",
                    "\u0428\u0430\u0432\u0432\u0430\u043b\u044c",
                    "\u0417\u0443\u043b\u044c-\u041a\u0430\u0430\u0434\u0430",
                    "\u0417\u0443\u043b\u044c-\u0425\u0438\u0434\u0436\u0436\u0430",
                    "",
                }
            },
            { "islamic.MonthNames",
                new String[] {
                    "\u041c\u0443\u0445\u0430\u0440\u0440\u0430\u043c",
                    "\u0421\u0430\u0444\u0430\u0440",
                    "\u0420\u0430\u0431\u0438-\u0443\u043b\u044c-\u0430\u0432\u0432\u0430\u043b\u044c",
                    "\u0420\u0430\u0431\u0438-\u0443\u043b\u044c-\u0430\u0445\u0438\u0440",
                    "\u0414\u0436\u0443\u043c\u0430\u0434-\u0443\u043b\u044c-\u0430\u0432\u0432\u0430\u043b\u044c",
                    "\u0414\u0436\u0443\u043c\u0430\u0434-\u0443\u043b\u044c-\u0430\u0445\u0438\u0440",
                    "\u0420\u0430\u0434\u0436\u0430\u0431",
                    "\u0428\u0430\u0430\u0431\u0430\u043d",
                    "\u0420\u0430\u043c\u0430\u0434\u0430\u043d",
                    "\u0428\u0430\u0432\u0432\u0430\u043b\u044c",
                    "\u0417\u0443\u043b\u044c-\u041a\u0430\u0430\u0434\u0430",
                    "\u0417\u0443\u043b\u044c-\u0425\u0438\u0434\u0436\u0436\u0430",
                    "",
                }
            },
            { "islamic.MonthNarrows",
                new String[] {
                    "1",
                    "2",
                    "3",
                    "4",
                    "5",
                    "6",
                    "7",
                    "8",
                    "9",
                    "10",
                    "11",
                    "12",
                    "",
                }
            },
            { "java.time.buddhist.DatePatterns",
                new String[] {
                    "EEEE, d MMMM y\u00a0'\u0433'. G",
                    "d MMMM y\u00a0'\u0433'. G",
                    "dd.MM.yyyy G",
                    "dd.MM.yy G",
                }
            },
            { "java.time.islamic.DatePatterns",
                new String[] {
                    "EEEE, d MMMM y\u00a0'\u0433'. G",
                    "d MMMM y\u00a0'\u0433'. G",
                    "dd.MM.yyyy G",
                    "dd.MM.yy G",
                }
            },
            { "java.time.japanese.DatePatterns",
                new String[] {
                    "EEEE, d MMMM y\u00a0'\u0433'. G",
                    "d MMMM y\u00a0'\u0433'. G",
                    "dd.MM.yyyy G",
                    "dd.MM.yy G",
                }
            },
            { "java.time.japanese.long.Eras",
                new String[] {
                    "\u043d.\u044d.",
                    "\u042d\u043f\u043e\u0445\u0430 \u041c\u044d\u0439\u0434\u0437\u0438",
                    "\u042d\u043f\u043e\u0445\u0430 \u0422\u0430\u0439\u0441\u044c\u043e",
                    "\u0421\u044c\u043e\u0432\u0430",
                    "\u042d\u043f\u043e\u0445\u0430 \u0425\u044d\u0439\u0441\u044d\u0439",
                }
            },
            { "java.time.japanese.short.Eras",
                new String[] {
                    "\u043d.\u044d.",
                    "\u042d\u043f\u043e\u0445\u0430 \u041c\u044d\u0439\u0434\u0437\u0438",
                    "\u042d\u043f\u043e\u0445\u0430 \u0422\u0430\u0439\u0441\u044c\u043e",
                    "\u0421\u044c\u043e\u0432\u0430",
                    "\u042d\u043f\u043e\u0445\u0430 \u0425\u044d\u0439\u0441\u044d\u0439",
                }
            },
            { "java.time.long.Eras",
                new String[] {
                    "\u0434\u043e \u043d.\u044d.",
                    "\u043d.\u044d.",
                }
            },
            { "java.time.roc.DatePatterns",
                new String[] {
                    "EEEE, d MMMM y\u00a0'\u0433'. G",
                    "d MMMM y\u00a0'\u0433'. G",
                    "dd.MM.yyyy G",
                    "dd.MM.yy G",
                }
            },
            { "java.time.short.Eras",
                new String[] {
                    "\u0434\u043e \u043d.\u044d.",
                    "\u043d.\u044d.",
                }
            },
            { "roc.DatePatterns",
                new String[] {
                    "EEEE, d MMMM y\u00a0'\u0433'. GGGG",
                    "d MMMM y\u00a0'\u0433'. GGGG",
                    "dd.MM.yyyy GGGG",
                    "dd.MM.yy GGGG",
                }
            },
        };
    }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.lang.reflect.UndeclaredThrowableException;
import java.net.SocketTimeoutException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.PreemptiveFastFailException;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ExceptionUtil;
import org.apache.hadoop.ipc.RemoteException;
import com.google.protobuf.ServiceException;
/**
 * Runs an rpc'ing {@link RetryingCallable}. Sets into rpc client
 * threadlocal outstanding timeouts as so we don't persist too much.
 * Dynamic rather than static so can set the generic appropriately.
 *
 * This object has a state. It should not be used in parallel by different threads.
 * Reusing it is possible however, even between multiple threads. However, the user will
 * have to manage the synchronization on its side: there is no synchronization inside the class.
 */
@InterfaceAudience.Private
public class RpcRetryingCallerImpl<T> implements RpcRetryingCaller<T> {
  public static final Log LOG = LogFactory.getLog(RpcRetryingCallerImpl.class);

  /** When we started making calls. */
  private long globalStartTime;
  /** How many retries are allowed before we start to log. */
  private final int startLogErrorsCnt;

  /** Base pause (ms) between retries; scaled per attempt by the callable's sleep policy. */
  private final long pause;
  /** Total number of attempts before giving up. */
  private final int retries;
  /** Set by {@link #cancel()}; also used as the monitor object for the inter-retry sleep. */
  private final AtomicBoolean cancelled = new AtomicBoolean(false);
  private final RetryingCallerInterceptor interceptor;
  private final RetryingCallerInterceptorContext context;

  public RpcRetryingCallerImpl(long pause, int retries, int startLogErrorsCnt) {
    this(pause, retries, RetryingCallerInterceptorFactory.NO_OP_INTERCEPTOR, startLogErrorsCnt);
  }

  public RpcRetryingCallerImpl(long pause, int retries,
      RetryingCallerInterceptor interceptor, int startLogErrorsCnt) {
    this.pause = pause;
    this.retries = retries;
    this.interceptor = interceptor;
    context = interceptor.createEmptyContext();
    this.startLogErrorsCnt = startLogErrorsCnt;
  }

  /**
   * @param callTimeout overall timeout in ms; non-positive means "no timeout"
   * @return ms left before the timeout expires, clamped to a minimum of 1 —
   *   0 means "no timeout" downstream, which is not the intent here
   */
  private int getRemainingTime(int callTimeout) {
    if (callTimeout <= 0) {
      return 0;
    } else {
      if (callTimeout == Integer.MAX_VALUE) return Integer.MAX_VALUE;
      int remainingTime = (int) (callTimeout -
          (EnvironmentEdgeManager.currentTime() - this.globalStartTime));
      if (remainingTime < 1) {
        // If there is no time left, we're trying anyway. It's too late.
        // 0 means no timeout, and it's not the intent here. So we secure both cases by
        // resetting to the minimum.
        remainingTime = 1;
      }
      return remainingTime;
    }
  }

  @Override
  public void cancel(){
    cancelled.set(true);
    // Wake any thread sleeping between retries in callWithRetries().
    synchronized (cancelled){
      cancelled.notifyAll();
    }
  }

  /**
   * Retries {@code callable} until it succeeds, the retry budget is exhausted
   * ({@link RetriesExhaustedException}), the planned sleep would exceed
   * {@code callTimeout} ({@link SocketTimeoutException}), or the caller cancels
   * (returns {@code null}).
   */
  @Override
  public T callWithRetries(RetryingCallable<T> callable, int callTimeout)
      throws IOException, RuntimeException {
    List<RetriesExhaustedException.ThrowableWithExtraContext> exceptions =
        new ArrayList<RetriesExhaustedException.ThrowableWithExtraContext>();
    this.globalStartTime = EnvironmentEdgeManager.currentTime();
    context.clear();
    for (int tries = 0;; tries++) {
      long expectedSleep;
      try {
        callable.prepare(tries != 0); // if called with false, check table status on ZK
        interceptor.intercept(context.prepare(callable, tries));
        return callable.call(getRemainingTime(callTimeout));
      } catch (PreemptiveFastFailException e) {
        // The server is in fast-fail mode: do not retry, propagate immediately.
        throw e;
      } catch (Throwable t) {
        ExceptionUtil.rethrowIfInterrupt(t);
        if (tries > startLogErrorsCnt) {
          LOG.info("Call exception, tries=" + tries + ", retries=" + retries + ", started=" +
              (EnvironmentEdgeManager.currentTime() - this.globalStartTime) + " ms ago, "
              + "cancelled=" + cancelled.get() + ", msg="
              + callable.getExceptionMessageAdditionalDetail());
        }
        // translateException throws exception when should not retry: i.e. when request is bad.
        interceptor.handleFailure(context, t);
        t = translateException(t);
        callable.throwable(t, retries != 1);
        RetriesExhaustedException.ThrowableWithExtraContext qt =
            new RetriesExhaustedException.ThrowableWithExtraContext(t,
                EnvironmentEdgeManager.currentTime(), toString());
        exceptions.add(qt);
        if (tries >= retries - 1) {
          throw new RetriesExhaustedException(tries, exceptions);
        }
        // If the server is dead, we need to wait a little before retrying, to give
        // a chance to the regions to be reassigned.
        // tries hasn't been bumped up yet so we use "tries + 1" to get right pause time
        expectedSleep = callable.sleep(pause, tries + 1);

        // If, after the planned sleep, there won't be enough time left, we stop now.
        long duration = singleCallDuration(expectedSleep);
        if (duration > callTimeout) {
          String msg = "callTimeout=" + callTimeout + ", callDuration=" + duration +
              ": " + callable.getExceptionMessageAdditionalDetail();
          throw (SocketTimeoutException)(new SocketTimeoutException(msg).initCause(t));
        }
      } finally {
        interceptor.updateFailureInfo(context);
      }
      // Sleep before the next attempt; cancel() interrupts the wait via notifyAll().
      try {
        if (expectedSleep > 0) {
          synchronized (cancelled) {
            if (cancelled.get()) return null;
            cancelled.wait(expectedSleep);
          }
        }
        if (cancelled.get()) return null;
      } catch (InterruptedException e) {
        throw new InterruptedIOException("Interrupted after " + tries + " tries on " + retries);
      }
    }
  }

  /**
   * @return how long this call will have taken once the planned sleep completes:
   *   elapsed time since {@code globalStartTime} plus {@code expectedSleep}
   */
  private long singleCallDuration(final long expectedSleep) {
    return (EnvironmentEdgeManager.currentTime() - this.globalStartTime) + expectedSleep;
  }

  @Override
  public T callWithoutRetries(RetryingCallable<T> callable, int callTimeout)
      throws IOException, RuntimeException {
    // The code of this method should be shared with withRetries.
    this.globalStartTime = EnvironmentEdgeManager.currentTime();
    try {
      callable.prepare(false);
      return callable.call(callTimeout);
    } catch (Throwable t) {
      Throwable t2 = translateException(t);
      ExceptionUtil.rethrowIfInterrupt(t2);
      // It would be nice to clear the location cache here.
      if (t2 instanceof IOException) {
        throw (IOException)t2;
      } else {
        throw new RuntimeException(t2);
      }
    }
  }

  /**
   * Get the good or the remote exception if any, throws the DoNotRetryIOException.
   * @param t the throwable to analyze
   * @return the translated exception, if it's not a DoNotRetryIOException
   * @throws DoNotRetryIOException - if we find it, we throw it instead of translating.
   */
  static Throwable translateException(Throwable t) throws DoNotRetryIOException {
    if (t instanceof UndeclaredThrowableException) {
      if (t.getCause() != null) {
        t = t.getCause();
      }
    }
    if (t instanceof RemoteException) {
      t = ((RemoteException)t).unwrapRemoteException();
    }
    if (t instanceof LinkageError) {
      throw new DoNotRetryIOException(t);
    }
    if (t instanceof ServiceException) {
      ServiceException se = (ServiceException)t;
      Throwable cause = se.getCause();
      // instanceof is false for null, so no separate null check is needed.
      if (cause instanceof DoNotRetryIOException) {
        throw (DoNotRetryIOException)cause;
      }
      // Don't let ServiceException out; its rpc specific.
      // The cause could itself be a RemoteException, so translate it as well.
      // BUG FIX: the previous code called translateException(cause) but discarded
      // the result, so a nested RemoteException was never actually unwrapped.
      t = translateException(cause);
    } else if (t instanceof DoNotRetryIOException) {
      throw (DoNotRetryIOException)t;
    }
    return t;
  }

  @Override
  public String toString() {
    return "RpcRetryingCaller{" + "globalStartTime=" + globalStartTime +
        ", pause=" + pause + ", retries=" + retries + '}';
  }
}
| |
package com.breakersoft.plow.dao.pgsql;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.springframework.jdbc.core.RowCallbackHandler;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.object.BatchSqlUpdate;
import org.springframework.stereotype.Repository;
import com.breakersoft.plow.Defaults;
import com.breakersoft.plow.FrameRange;
import com.breakersoft.plow.Job;
import com.breakersoft.plow.Layer;
import com.breakersoft.plow.Task;
import com.breakersoft.plow.TaskE;
import com.breakersoft.plow.dao.AbstractDao;
import com.breakersoft.plow.dao.TaskDao;
import com.breakersoft.plow.thrift.TaskFilterT;
import com.breakersoft.plow.thrift.TaskSpecT;
import com.breakersoft.plow.thrift.TaskState;
import com.breakersoft.plow.util.JdbcUtils;
import com.breakersoft.plow.util.PlowUtils;
import com.breakersoft.plow.util.UUIDGen;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
/**
 * PostgreSQL implementation of {@link TaskDao}: CRUD and batch operations on
 * rows of plow.task, plus state transitions and filtered queries.
 */
@Repository
public class TaskDoaImpl extends AbstractDao implements TaskDao {

    private static final Logger logger =
            org.slf4j.LoggerFactory.getLogger(TaskDoaImpl.class);

    /** Maps a row of the GET projection (pk_task, pk_layer, pk_job, str_name) onto a Task. */
    public static final RowMapper<Task> MAPPER = new RowMapper<Task>() {
        @Override
        public Task mapRow(ResultSet rs, int rowNum)
                throws SQLException {
            TaskE task = new TaskE();
            task.setTaskId(UUID.fromString(rs.getString(1)));
            task.setLayerId(UUID.fromString(rs.getString(2)));
            task.setJobId((UUID) rs.getObject(3));
            task.setName(rs.getString(4));
            return task;
        }
    };

    /** Shared SELECT projection; callers append their own WHERE clause. */
    private static final String GET =
        "SELECT " +
            "task.pk_task,"+
            "task.pk_layer, " +
            "task.pk_job, " +
            "task.str_name " +
        "FROM " +
            "plow.task INNER JOIN plow.layer ON layer.pk_layer = task.pk_layer ";

    @Override
    public Task get(UUID id) {
        return jdbc.queryForObject(
                GET + "WHERE task.pk_task=?",
                MAPPER, id);
    }

    @Override
    public Task get(Layer layer, int number) {
        return jdbc.queryForObject(
                GET + "WHERE layer.pk_layer=? AND task.int_number=?",
                MAPPER, layer.getLayerId(), number);
    }

    /**
     * Resolves a task by UUID if {@code identifer} parses as one, otherwise by
     * name within the given job.
     */
    @Override
    public Task getByNameOrId(Job job, String identifer) {
        try {
            return get(UUID.fromString(identifer));
        } catch (IllegalArgumentException e) {
            // Not a UUID; fall back to a name lookup scoped to the job.
            return jdbc.queryForObject(
                    GET + "WHERE task.pk_job=? AND task.str_name=?",
                    MAPPER, job.getJobId(), identifer);
        }
    }

    private static final String INSERT =
            JdbcUtils.Insert("plow.task",
                    "pk_task",
                    "pk_layer",
                    "pk_job",
                    "str_name",
                    "int_number",
                    "int_task_order",
                    "int_layer_order",
                    "int_ram_min");

    @Override
    public Task create(Layer layer, String name, int number, int taskOrder, int layerOrder, int minRam) {
        final UUID id = UUIDGen.random();
        jdbc.update(INSERT, id, layer.getLayerId(), layer.getJobId(), name,
                number, taskOrder, layerOrder, minRam);
        TaskE task = new TaskE();
        task.setTaskId(id);
        task.setLayerId(layer.getLayerId());
        task.setJobId(layer.getJobId());
        task.setName(name);
        return task;
    }

    /** SQL types matching the column order of INSERT, used for batch updates. */
    private static final int[] BATCH_TYPES = new int[] {
        Types.OTHER,   // pk_task (uuid)
        Types.OTHER,   // pk_layer (uuid)
        Types.OTHER,   // pk_job (uuid)
        Types.VARCHAR, // str_name
        Types.INTEGER, // int_number
        Types.INTEGER, // int_task_order
        Types.INTEGER, // int_layer_order
        Types.INTEGER  // int_ram_min
    };

    /**
     * Batch-inserts one task per chunk of the layer's frame range; task names
     * are "%04d-<layerName>" keyed by the chunk's first frame number.
     */
    @Override
    public void batchCreate(Layer layer, FrameRange frameRange, int layerOrder, int minRam) {
        final int size = frameRange.frameSet.size();
        final BatchSqlUpdate update = new BatchSqlUpdate(
                jdbc.getDataSource(), INSERT, BATCH_TYPES);
        update.setBatchSize(Defaults.JDBC_DEFAULT_BATCH_SIZE);

        int frameOrderCounter = 0;
        // Step by chunkSize: one task covers a chunk of frames.
        for (int i=0; i<size; i=i+frameRange.chunkSize) {
            int number = frameRange.frameSet.get(i);
            update.update(UUIDGen.random(),
                    layer.getLayerId(),
                    layer.getJobId(),
                    String.format("%04d-%s", number, layer.getName()),
                    number,
                    frameOrderCounter,
                    layerOrder,
                    minRam);
            frameOrderCounter++;
        }
        update.flush();
    }

    /**
     * Batch-inserts explicitly named tasks; every name must be alphanumeric
     * (validated via PlowUtils) and int_number is always 0 for these.
     */
    @Override
    public void batchCreate(Layer layer, List<TaskSpecT> tasks, int layerOrder, int minRam) {
        final BatchSqlUpdate update = new BatchSqlUpdate(
                jdbc.getDataSource(), INSERT, BATCH_TYPES);
        update.setBatchSize(Defaults.JDBC_DEFAULT_BATCH_SIZE);

        int taskOrderCounter = 0;
        for (TaskSpecT task: tasks) {
            PlowUtils.alpahNumCheck(task.getName(), "The task name must be alpha numeric:");
            update.update(UUIDGen.random(),
                    layer.getLayerId(),
                    layer.getJobId(),
                    task.getName(),
                    0,
                    taskOrderCounter,
                    layerOrder,
                    minRam);
            taskOrderCounter++;
        }
        update.flush();
    }

    /** @return frame number -> task id for every task in the layer. */
    @Override
    public Map<Integer, UUID> buildTaskCache(Layer layer, int size) {
        final Map<Integer, UUID> result = Maps.newHashMapWithExpectedSize(size);
        jdbc.query("SELECT int_number, pk_task FROM plow.task WHERE task.pk_layer=?", new RowCallbackHandler() {
            @Override
            public void processRow(ResultSet rs) throws SQLException {
                result.put(rs.getInt(1), (UUID)rs.getObject(2));
            }
        }, layer.getLayerId());
        return result;
    }

    /**
     * Compare-and-set state transition: only succeeds if the row is still in
     * {@code currentState}.
     */
    @Override
    public boolean updateState(Task task, TaskState currentState, TaskState newState) {
        return jdbc.update("UPDATE plow.task SET int_state=?, " +
                "time_updated = txTimeMillis() WHERE pk_task=? AND int_state=?",
                newState.ordinal(), task.getTaskId(), currentState.ordinal()) == 1;
    }

    @Override
    public void clearLastLogLine(Task task) {
        jdbc.update("UPDATE plow.task_ping SET str_last_log_line=? WHERE pk_task=?", "", task.getTaskId());
    }

    /** Unconditionally sets the state and clears the reserved flag. */
    @Override
    public boolean setTaskState(Task task, TaskState newState) {
        return jdbc.update("UPDATE plow.task SET int_state=?, bool_reserved='f', " +
                "time_updated = txTimeMillis() WHERE task.pk_task=?",
                newState.ordinal(), task.getTaskId()) == 1;
    }

    /** Sets the state and clears the reserved flag only if still in {@code oldState}. */
    @Override
    public boolean setTaskState(Task task, TaskState newState, TaskState oldState) {
        return jdbc.update("UPDATE plow.task SET int_state=?, bool_reserved='f', " +
                "time_updated = txTimeMillis() WHERE task.pk_task=? AND int_state=?",
                newState.ordinal(), task.getTaskId(), oldState.ordinal()) == 1;
    }

    /**
     * Builds and runs a filtered task query. At least one of job, layer, task,
     * or node IDs must be supplied; results are ordered by int_task_order and
     * paged via the filter's limit/offset.
     */
    @Override
    public List<Task> getTasks(TaskFilterT filter) {

        final StringBuilder sb = new StringBuilder(512);
        sb.append(GET);

        final List<String> where = Lists.newArrayList();
        final List<Object> values = Lists.newArrayList();

        // At least one ID-based predicate must be present to bound the query.
        boolean idsIsSet = false;

        if (PlowUtils.isValid(filter.nodeIds)) {
            sb.append("INNER JOIN plow.proc ON proc.pk_task = task.pk_task ");
            where.add(JdbcUtils.In("proc.pk_node", filter.nodeIds.size(), "uuid"));
            values.addAll(filter.nodeIds);
            idsIsSet = true;
        }

        if (PlowUtils.isValid(filter.jobId)) {
            idsIsSet = true;
            where.add("task.pk_job = ?::uuid");
            values.add(filter.jobId);
        }

        if (filter.getLastUpdateTime() > 0) {
            where.add("task.time_updated >= ?");
            values.add(filter.getLastUpdateTime());
        }

        if (PlowUtils.isValid(filter.states)) {
            where.add(JdbcUtils.In("task.int_state", filter.states.size()));
            for (TaskState state: filter.states) {
                values.add(state.ordinal());
            }
        }

        if (PlowUtils.isValid(filter.layerIds)) {
            idsIsSet = true;
            where.add(JdbcUtils.In("task.pk_layer", filter.layerIds.size(), "uuid"));
            values.addAll(filter.layerIds);
        }

        if (PlowUtils.isValid(filter.taskIds)) {
            idsIsSet = true;
            where.add(JdbcUtils.In("task.pk_task", filter.taskIds.size(), "uuid"));
            values.addAll(filter.taskIds);
        }

        if (!idsIsSet) {
            throw new RuntimeException("A job ID, layer IDs, task IDs, or node IDs must be set.");
        }

        sb.append(" WHERE ");
        sb.append(StringUtils.join(where, " AND "));
        sb.append(" ORDER BY task.int_task_order ASC ");

        int limit = 1000;
        int offset = 0;

        if (filter.isSetLimit()) {
            if (filter.limit > 0 && filter.limit < Defaults.TASK_MAX_LIMIT) {
                limit = filter.limit;
            }
        }

        if (filter.isSetOffset()) {
            // BUG FIX: previously tested "offset > 0" (the local variable, which is
            // always 0 here), so a caller-supplied offset was silently ignored.
            if (filter.offset > 0) {
                offset = filter.offset;
            }
        }

        sb.append(JdbcUtils.limitOffset(limit, offset));

        final String q = sb.toString();
        logger.info(q);
        return jdbc.query(q, MAPPER, values.toArray());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.operator.scalar;
import com.google.common.collect.ImmutableList;
import io.airlift.slice.DynamicSliceOutput;
import io.airlift.slice.SliceOutput;
import io.prestosql.metadata.Metadata;
import io.prestosql.operator.DriverYieldSignal;
import io.prestosql.operator.project.PageProcessor;
import io.prestosql.spi.Page;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.type.ArrayType;
import io.prestosql.spi.type.Type;
import io.prestosql.sql.gen.ExpressionCompiler;
import io.prestosql.sql.gen.PageFunctionCompiler;
import io.prestosql.sql.relational.CallExpression;
import io.prestosql.sql.relational.RowExpression;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;
import org.openjdk.jmh.runner.options.VerboseMode;
import org.openjdk.jmh.runner.options.WarmupMode;
import org.testng.annotations.Test;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.ThreadLocalRandom;
import java.util.concurrent.TimeUnit;
import static io.prestosql.memory.context.AggregatedMemoryContext.newSimpleAggregatedMemoryContext;
import static io.prestosql.metadata.MetadataManager.createTestMetadataManager;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.spi.type.DoubleType.DOUBLE;
import static io.prestosql.spi.type.VarcharType.VARCHAR;
import static io.prestosql.sql.relational.Expressions.field;
import static io.prestosql.testing.TestingConnectorSession.SESSION;
import static io.prestosql.type.JsonType.JSON;
import static java.nio.charset.StandardCharsets.UTF_8;
@SuppressWarnings("MethodMayBeStatic")
@State(Scope.Thread)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
@Fork(10)
@BenchmarkMode(Mode.AverageTime)
public class BenchmarkJsonToArrayCast
{
    private static final int POSITION_COUNT = 100_000;
    private static final int ARRAY_SIZE = 20;

    /** Runs the compiled JSON-to-array cast over one page and materializes the output. */
    @Benchmark
    @OperationsPerInvocation(POSITION_COUNT)
    public List<Optional<Page>> benchmark(BenchmarkData data)
    {
        return ImmutableList.copyOf(
                data.getPageProcessor().process(
                        SESSION,
                        new DriverYieldSignal(),
                        newSimpleAggregatedMemoryContext().newLocalMemoryContext(PageProcessor.class.getSimpleName()),
                        data.getPage()));
    }

    @SuppressWarnings("FieldMayBeFinal")
    @State(Scope.Thread)
    public static class BenchmarkData
    {
        @Param({"BIGINT", "DOUBLE", "VARCHAR"})
        private String valueTypeName = "BIGINT";

        private Page page;
        private PageProcessor pageProcessor;

        /**
         * Compiles a page processor projecting CAST(col0 AS ARRAY(valueType))
         * and builds an input page of random JSON array literals.
         */
        @Setup
        public void setup()
        {
            Type valueType;
            switch (valueTypeName) {
                case "BIGINT":
                    valueType = BIGINT;
                    break;
                case "DOUBLE":
                    valueType = DOUBLE;
                    break;
                case "VARCHAR":
                    valueType = VARCHAR;
                    break;
                default:
                    throw new UnsupportedOperationException();
            }

            Metadata metadata = createTestMetadataManager();
            ArrayType arrayType = new ArrayType(valueType);
            List<RowExpression> projections = ImmutableList.of(new CallExpression(
                    metadata.getCoercion(JSON, arrayType),
                    arrayType,
                    ImmutableList.of(field(0, JSON))));

            pageProcessor = new ExpressionCompiler(metadata, new PageFunctionCompiler(metadata, 0))
                    .compilePageProcessor(Optional.empty(), projections)
                    .get();

            page = new Page(createChannel(POSITION_COUNT, ARRAY_SIZE, valueType));
        }

        /** Builds a JSON block where every position is an array literal of {@code mapSize} random values. */
        private static Block createChannel(int positionCount, int mapSize, Type elementType)
        {
            BlockBuilder builder = JSON.createBlockBuilder(null, positionCount);
            for (int position = 0; position < positionCount; position++) {
                SliceOutput out = new DynamicSliceOutput(20 * mapSize);
                out.appendByte('[');
                for (int i = 0; i < mapSize; i++) {
                    if (i > 0) {
                        out.appendByte(',');
                    }
                    out.appendBytes(generateRandomJsonValue(elementType).getBytes(UTF_8));
                }
                out.appendByte(']');
                JSON.writeSlice(builder, out.slice());
            }
            return builder.build();
        }

        /** Renders one random JSON scalar literal for the given element type. */
        private static String generateRandomJsonValue(Type valueType)
        {
            if (valueType == BIGINT) {
                return Long.toString(ThreadLocalRandom.current().nextLong());
            }
            if (valueType == DOUBLE) {
                return Double.toString(ThreadLocalRandom.current().nextDouble());
            }
            if (valueType == VARCHAR) {
                String characters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
                int length = ThreadLocalRandom.current().nextInt(10) + 1;
                StringBuilder quoted = new StringBuilder(length + 2);
                quoted.append('"');
                for (int i = 0; i < length; i++) {
                    int index = ThreadLocalRandom.current().nextInt(characters.length());
                    quoted.append(characters.charAt(index));
                }
                quoted.append('"');
                return quoted.toString();
            }
            throw new UnsupportedOperationException();
        }

        public PageProcessor getPageProcessor()
        {
            return pageProcessor;
        }

        public Page getPage()
        {
            return page;
        }
    }

    @Test
    public void verify()
    {
        BenchmarkData benchmarkData = new BenchmarkData();
        benchmarkData.setup();
        new BenchmarkJsonToArrayCast().benchmark(benchmarkData);
    }

    public static void main(String[] args)
            throws Exception
    {
        // assure the benchmarks are valid before running
        BenchmarkData benchmarkData = new BenchmarkData();
        benchmarkData.setup();
        new BenchmarkJsonToArrayCast().benchmark(benchmarkData);

        Options options = new OptionsBuilder()
                .verbosity(VerboseMode.NORMAL)
                .include(".*" + BenchmarkJsonToArrayCast.class.getSimpleName() + ".*")
                .warmupMode(WarmupMode.BULK_INDI)
                .build();
        new Runner(options).run();
    }
}
| |
/* ###
* IP: GHIDRA
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.bin.format.pe.cli.streams;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import ghidra.app.util.bin.BinaryReader;
import ghidra.app.util.bin.format.pe.NTHeader;
import ghidra.app.util.bin.format.pe.PeUtils;
import ghidra.app.util.bin.format.pe.cli.CliStreamHeader;
import ghidra.app.util.bin.format.pe.cli.tables.*;
import ghidra.app.util.importer.MessageLog;
import ghidra.program.model.address.Address;
import ghidra.program.model.data.*;
import ghidra.program.model.listing.BookmarkType;
import ghidra.program.model.listing.Program;
import ghidra.util.Msg;
import ghidra.util.exception.DuplicateNameException;
import ghidra.util.task.TaskMonitor;
/**
* The Metadata stream is giant and complicated. It is made up of {@link CliAbstractTable}s.
*
* @see CliTypeTable
*/
public class CliStreamMetadata extends CliAbstractStream {

	private byte majorVersion;
	private byte minorVersion;
	private byte heapSizes;
	private long valid;
	private long sorted;

	// Row count for each table type present in the stream
	private HashMap<CliTypeTable, Integer> rows = new HashMap<>();

	// Parsed tables, in the order in which they appear in the stream
	private ArrayList<CliAbstractTable> tables = new ArrayList<>();

	private CliStreamGuid guidStream;
	private CliStreamUserStrings userStringsStream;
	private CliStreamStrings stringsStream;
	private CliStreamBlob blobStream;

	/**
	 * Gets the name of this stream.
	 *
	 * @return The name of this stream.
	 */
	public static String getName() {
		return "#~";
	}

	/**
	 * Creates a new Metadata stream.
	 *
	 * @param header The stream header associated with this stream.
	 * @param guidStream The GUID stream.
	 * @param userStringsStream The user strings stream.
	 * @param stringsStream The strings stream.
	 * @param blobStream The blob stream.
	 * @param fileOffset The file offset where this stream starts.
	 * @param rva The relative virtual address where this stream starts.
	 * @param reader A reader that is set to the start of the stream.
	 * @throws IOException if there is a problem reading the stream.
	 */
	public CliStreamMetadata(CliStreamHeader header, CliStreamGuid guidStream,
			CliStreamUserStrings userStringsStream, CliStreamStrings stringsStream,
			CliStreamBlob blobStream, long fileOffset, int rva, BinaryReader reader)
			throws IOException {
		super(header, fileOffset, rva, reader);
		// NOTE: 'rows' and 'tables' are initialized at declaration; re-initializing
		// them here (as before) was redundant.
		this.guidStream = guidStream;
		this.userStringsStream = userStringsStream;
		this.stringsStream = stringsStream;
		this.blobStream = blobStream;
	}

	@Override
	public boolean parse() throws IOException {
		reader.setPointerIndex(offset);

		reader.readNextInt(); // reserved
		majorVersion = reader.readNextByte();
		minorVersion = reader.readNextByte();
		heapSizes = reader.readNextByte();
		reader.readNextByte(); // reserved
		valid = reader.readNextLong();
		sorted = reader.readNextLong();

		// Next is an array of n 4-byte unsigned integers indicating the number of rows for each present table
		for (int i = 0; i < 64; i++) {
			if ((valid & (1L << i)) != 0) {
				CliTypeTable tableType = CliTypeTable.fromId(i);
				if (tableType != null) {
					rows.put(tableType, reader.readNextInt());
				}
				else {
					Msg.warn(this, "CLI metadata table with id " + i + " is not supported");
				}
			}
		}

		// Now the tables follow directly after
		for (int i = 0; i < 64; i++) {
			if ((valid & (1L << i)) != 0) {
				CliTypeTable tableType = CliTypeTable.fromId(i);
				if (tableType != null) {
					long origIndex = reader.getPointerIndex();
					CliAbstractTable table = createTableObject(tableType);
					if (table == null) {
						// We don't know the size of an unsupported table, so we cannot
						// skip past it to find the next one.  Previously this was an NPE.
						Msg.warn(this, "Stopping CLI metadata table parsing at unsupported table type " +
							tableType);
						break;
					}
					tables.add(table);
					reader.setPointerIndex(origIndex + table.toDataType().getLength());
				}
			}
		}

		return true;
	}

	/**
	 * Gets the GUID stream.
	 *
	 * @return The GUID stream.  Could be null if one doesn't exist.
	 */
	public CliStreamGuid getGuidStream() {
		return guidStream;
	}

	/**
	 * Gets the user strings stream.
	 *
	 * @return The user strings stream.  Could be null if one doesn't exist.
	 */
	public CliStreamUserStrings getUserStringsStream() {
		return userStringsStream;
	}

	/**
	 * Gets the strings stream.
	 *
	 * @return The strings stream.  Could be null if one doesn't exist.
	 */
	public CliStreamStrings getStringsStream() {
		return stringsStream;
	}

	/**
	 * Gets the blob stream.
	 *
	 * @return The blob stream.  Could be null if one doesn't exist.
	 */
	public CliStreamBlob getBlobStream() {
		return blobStream;
	}

	/**
	 * Creates a new {@link CliAbstractTable} from the table at the current reader index
	 * with the given table type.
	 *
	 * @param tableType The type of table to create.
	 * @return A new table with the given type.  Could be null if we don't support the table type.
	 * @throws IOException if there was an issue reading the new table.
	 */
	private CliAbstractTable createTableObject(CliTypeTable tableType) throws IOException {
		switch (tableType) {
			case Module:
				return new CliTableModule(reader, this, tableType);
			case TypeRef:
				return new CliTableTypeRef(reader, this, tableType);
			case TypeDef:
				return new CliTableTypeDef(reader, this, tableType);
			case Field:
				return new CliTableField(reader, this, tableType);
			case MethodDef:
				return new CliTableMethodDef(reader, this, tableType);
			case Param:
				return new CliTableParam(reader, this, tableType);
			case InterfaceImpl:
				return new CliTableInterfaceImpl(reader, this, tableType);
			case MemberRef:
				return new CliTableMemberRef(reader, this, tableType);
			case Constant:
				return new CliTableConstant(reader, this, tableType);
			case CustomAttribute:
				return new CliTableCustomAttribute(reader, this, tableType);
			case FieldMarshal:
				return new CliTableFieldMarshall(reader, this, tableType);
			case DeclSecurity:
				return new CliTableDeclSecurity(reader, this, tableType);
			case ClassLayout:
				return new CliTableClassLayout(reader, this, tableType);
			case FieldLayout:
				return new CliTableFieldLayout(reader, this, tableType);
			case StandAloneSig:
				return new CliTableStandAloneSig(reader, this, tableType);
			case EventMap:
				return new CliTableEventMap(reader, this, tableType);
			case Event:
				return new CliTableEvent(reader, this, tableType);
			case PropertyMap:
				return new CliTablePropertyMap(reader, this, tableType);
			case Property:
				return new CliTableProperty(reader, this, tableType);
			case MethodSemantics:
				return new CliTableMethodSemantics(reader, this, tableType);
			case MethodImpl:
				return new CliTableMethodImpl(reader, this, tableType);
			case ModuleRef:
				return new CliTableModuleRef(reader, this, tableType);
			case TypeSpec:
				return new CliTableTypeSpec(reader, this, tableType);
			case ImplMap:
				return new CliTableImplMap(reader, this, tableType);
			case FieldRVA:
				return new CliTableFieldRVA(reader, this, tableType);
			case Assembly:
				return new CliTableAssembly(reader, this, tableType);
			case AssemblyProcessor:
				return new CliTableAssemblyProcessor(reader, this, tableType);
			case AssemblyOS:
				return new CliTableAssemblyOS(reader, this, tableType);
			case AssemblyRef:
				return new CliTableAssemblyRef(reader, this, tableType);
			case AssemblyRefProcessor:
				return new CliTableAssemblyRefProcessor(reader, this, tableType);
			case AssemblyRefOS:
				return new CliTableAssemblyRefOS(reader, this, tableType);
			case File:
				return new CliTableFile(reader, this, tableType);
			case ExportedType:
				return new CliTableExportedType(reader, this, tableType);
			case ManifestResource:
				return new CliTableManifestResource(reader, this, tableType);
			case NestedClass:
				return new CliTableNestedClass(reader, this, tableType);
			case GenericParam:
				return new CliTableGenericParam(reader, this, tableType);
			case MethodSpec:
				return new CliTableMethodSpec(reader, this, tableType);
			case GenericParamConstraint:
				return new CliTableGenericParamConstraint(reader, this, tableType);
			default:
				Msg.warn(this,
					"Parsing table type \"" + tableType.toString() + "\" is not supported.");
				return null;
		}
	}

	/**
	 * Gets the major version.
	 *
	 * @return The major version, treating the on-disk byte as unsigned.
	 */
	public short getMajorVersion() {
		// Mask so unsigned byte values > 127 don't come back negative
		return (short) (majorVersion & 0xff);
	}

	/**
	 * Gets the minor version.
	 *
	 * @return The minor version, treating the on-disk byte as unsigned.
	 */
	public short getMinorVersion() {
		// Mask so unsigned byte values > 127 don't come back negative
		return (short) (minorVersion & 0xff);
	}

	/**
	 * Gets the sorted field.
	 *
	 * @return The sorted field.
	 */
	public long getSorted() {
		return sorted;
	}

	/**
	 * Gets the valid field.
	 *
	 * @return The valid field.
	 */
	public long getValid() {
		return valid;
	}

	/**
	 * Gets the table with the provided table type from the metadata stream.
	 *
	 * @param tableType The type of table to get.  May be null.
	 * @return The table with the provided table type.  Could be null if it doesn't exist.
	 */
	public CliAbstractTable getTable(CliTypeTable tableType) {
		// Make sure it is present (a null type can arrive via getTable(int))
		if (tableType == null || !isTablePresent(tableType)) {
			return null;
		}

		// Get the already-created table
		int tableIndex = getPresentTableIndex(tableType);
		if (tableIndex < tables.size()) {
			CliAbstractTable tableObj = tables.get(tableIndex);
			if (tableObj.getTableType() == tableType) {
				return tableObj;
			}
		}

		return null;
	}

	/**
	 * Gets the table with the provided table type id from the metadata stream.
	 *
	 * @param tableId The id of the table type to get.
	 * @return The table with the provided table id.  Could be null if it doesn't exist.
	 */
	public CliAbstractTable getTable(int tableId) {
		return getTable(CliTypeTable.fromId(tableId));
	}

	/**
	 * Gets the number of rows in the table with the given table type.
	 *
	 * @param tableType The type of table to get the number of rows of.
	 * @return The number of rows in the table with the given table type.  Could be 0 if
	 *   the table of the given type was not found.
	 */
	public int getNumberRowsForTable(CliTypeTable tableType) {
		Integer ret = rows.get(tableType);
		return (ret != null) ? ret : 0;
	}

	/**
	 * Gets the data type of the index into the string stream.  Will be either
	 * {@link DWordDataType} or {@link WordDataType}.
	 *
	 * @return The data type of the index into the string stream.
	 */
	public DataType getStringIndexDataType() {
		return ((heapSizes & 0x01) != 0) ? DWordDataType.dataType : WordDataType.dataType;
	}

	/**
	 * Gets the data type of the index into the GUID stream.  Will be either
	 * {@link DWordDataType} or {@link WordDataType}.
	 *
	 * @return The data type of the index into the GUID stream.
	 */
	public DataType getGuidIndexDataType() {
		return ((heapSizes & 0x02) != 0) ? DWordDataType.dataType : WordDataType.dataType;
	}

	/**
	 * Gets the data type of the index into the Blob stream.  Will be either
	 * {@link DWordDataType} or {@link WordDataType}.
	 *
	 * @return The data type of the index into the blob stream.
	 */
	public DataType getBlobIndexDataType() {
		return ((heapSizes & 0x04) != 0) ? DWordDataType.dataType : WordDataType.dataType;
	}

	/**
	 * Gets the data type of the index into a metadata table.  Will be either
	 * {@link DWordDataType} or {@link WordDataType}.
	 *
	 * @param table The table that the index points into.
	 * @return The data type of the index into the given table.
	 */
	public DataType getTableIndexDataType(CliTypeTable table) {
		// A word-sized index can only address up to 2^16 - 1 rows
		return (getNumberRowsForTable(table) >= (1 << 16)) ? DWordDataType.dataType
				: WordDataType.dataType;
	}

	@Override
	public void markup(Program program, boolean isBinary, TaskMonitor monitor, MessageLog log,
			NTHeader ntHeader) throws DuplicateNameException, IOException {
		super.markup(program, isBinary, monitor, log, ntHeader);
		for (CliAbstractTable table : tables) {
			try {
				Address addr = PeUtils.getMarkupAddress(program, isBinary, ntHeader,
					rva + getTableOffset(table.getTableType()));
				program.getBookmarkManager()
						.setBookmark(addr, BookmarkType.INFO, "CLI Table", table.toString());
				table.markup(program, isBinary, monitor, log, ntHeader);
			}
			catch (Exception e) {
				// Keep marking up the remaining tables even if one fails
				Msg.error(this, "Failed to markup " + table + ": " + e.getMessage());
			}
		}
	}

	@Override
	public DataType toDataType() {
		Structure struct = new StructureDataType(new CategoryPath(PATH), header.getName(), 0);
		struct.add(DWORD, "Reserved", "Always 0");
		struct.add(BYTE, "MajorVersion", null);
		struct.add(BYTE, "MinorVersion", null);
		struct.add(BYTE, "HeapSizes", "Bit vector for heap sizes");
		struct.add(BYTE, "Reserved", "Always 1");
		struct.add(QWORD, "Valid", "Bit vector of present tables");
		struct.add(QWORD, "Sorted", "Bit vector of sorted tables");
		struct.add(new ArrayDataType(DWORD, Long.bitCount(valid), DWORD.getLength()), "Rows",
			"# of rows for each corresponding present table");
		for (CliAbstractTable table : tables) {
			struct.add(table.toDataType(), table.toString(),
				"CLI Metadata Table: " + table.toString());
		}
		return struct;
	}

	private boolean isTablePresent(CliTypeTable tableType) {
		return ((valid & (1L << tableType.id())) != 0);
	}

	private int getTableOffset(CliTypeTable table) {
		StructureDataType struct = (StructureDataType) this.toDataType();
		int structOffset = 8; // Struct offset (0-indexed) of first metadata table
		structOffset += getPresentTableIndex(table);
		return struct.getComponent(structOffset).getOffset();
	}

	private int getPresentTableIndex(CliTypeTable table) {
		int tableId = table.id();
		long mask = valid & ((1L << tableId) - 1); // mask tables that come after this one. Start with all present tables, 0 out any that are after tableId.
		int tablesBefore = Long.bitCount(mask);
		return tablesBefore;
	}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.serveraction.upgrades;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import java.lang.reflect.Field;
import java.util.HashMap;
import java.util.Map;
import org.apache.ambari.server.actionmanager.ExecutionCommandWrapper;
import org.apache.ambari.server.actionmanager.HostRoleCommand;
import org.apache.ambari.server.agent.CommandReport;
import org.apache.ambari.server.agent.ExecutionCommand;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Config;
import org.easymock.EasyMock;
import org.junit.Before;
import org.junit.Test;
import com.google.inject.Injector;
/**
* Test FixYarnWebServiceUrl logic
*/
public class FixYarnWebServiceUrlTest {
    private Injector injector;
    private Clusters clusters;
    private Cluster cluster;
    private Field clustersField;
    private static final String SOURCE_CONFIG_TYPE = "yarn-site";
    private static final String YARN_TIMELINE_WEBAPP_HTTPADDRESS = "yarn.timeline-service.webapp.address";
    private static final String YARN_TIMELINE_WEBAPP_HTTPSADDRESS = "yarn.timeline-service.webapp.https.address";
    private static final String YARN_LOGSERVER_WEBSERVICE_URL = "yarn.log.server.web-service.url";
    private static final String YARN_HTTP_POLICY = "yarn.http.policy";

    @Before
    public void setup() throws Exception {
        injector = EasyMock.createMock(Injector.class);
        clusters = EasyMock.createMock(Clusters.class);
        cluster = EasyMock.createMock(Cluster.class);
        clustersField = FixYarnWebServiceUrl.class.getDeclaredField("clusters");
        clustersField.setAccessible(true);

        expect(clusters.getCluster((String) anyObject())).andReturn(cluster).anyTimes();
        expect(injector.getInstance(Clusters.class)).andReturn(clusters).atLeastOnce();
        replay(injector, clusters);
    }

    /**
     * Wires a yarn-site config mock around the given properties, replays all mocks,
     * and executes the FixYarnWebServiceUrl action against cluster "c1".
     *
     * @param yarnSiteProperties the yarn-site properties backing the mocked config
     * @return the command report produced by the action
     */
    private CommandReport executeAction(Map<String, String> yarnSiteProperties) throws Exception {
        Config yarnSiteConfig = EasyMock.createNiceMock(Config.class);
        expect(yarnSiteConfig.getType()).andReturn(SOURCE_CONFIG_TYPE).anyTimes();
        expect(yarnSiteConfig.getProperties()).andReturn(yarnSiteProperties).anyTimes();
        expect(cluster.getDesiredConfigByType(SOURCE_CONFIG_TYPE)).andReturn(yarnSiteConfig).atLeastOnce();

        Map<String, String> commandParams = new HashMap<>();
        commandParams.put("clusterName", "c1");

        ExecutionCommand executionCommand = new ExecutionCommand();
        executionCommand.setCommandParams(commandParams);
        executionCommand.setClusterName("c1");

        HostRoleCommand hrc = EasyMock.createMock(HostRoleCommand.class);
        expect(hrc.getRequestId()).andReturn(1L).anyTimes();
        expect(hrc.getStageId()).andReturn(2L).anyTimes();
        expect(hrc.getExecutionCommandWrapper()).andReturn(new ExecutionCommandWrapper(executionCommand)).anyTimes();
        replay(cluster, hrc, yarnSiteConfig);

        FixYarnWebServiceUrl action = new FixYarnWebServiceUrl();
        clustersField.set(action, clusters);
        action.setExecutionCommand(executionCommand);
        action.setHostRoleCommand(hrc);
        return action.execute(null);
    }

    /** Fetches the desired yarn-site properties for cluster "c1" after the action ran. */
    private Map<String, String> desiredYarnSiteProperties() throws Exception {
        Cluster c = clusters.getCluster("c1");
        Config desiredYarnSiteConfig = c.getDesiredConfigByType(SOURCE_CONFIG_TYPE);
        return desiredYarnSiteConfig.getProperties();
    }

    /** Asserts that all four yarn-site keys this action touches are still present. */
    private static void assertAllKeysPresent(Map<String, String> yarnSiteConfigMap) {
        assertTrue(yarnSiteConfigMap.containsKey(YARN_LOGSERVER_WEBSERVICE_URL));
        assertTrue(yarnSiteConfigMap.containsKey(YARN_HTTP_POLICY));
        assertTrue(yarnSiteConfigMap.containsKey(YARN_TIMELINE_WEBAPP_HTTPADDRESS));
        assertTrue(yarnSiteConfigMap.containsKey(YARN_TIMELINE_WEBAPP_HTTPSADDRESS));
    }

    /**
     * Test when http policy is set to HTTP_ONLY
     * @throws Exception
     */
    @Test
    public void testHttpOnly() throws Exception {
        Map<String, String> mockProperties = new HashMap<String, String>() {{
            put(YARN_TIMELINE_WEBAPP_HTTPADDRESS, "c6401.ambari.apache.org:8188");
            put(YARN_TIMELINE_WEBAPP_HTTPSADDRESS, "c6401.ambari.apache.org:8190");
            put(YARN_HTTP_POLICY, "HTTP_ONLY");
            put(YARN_LOGSERVER_WEBSERVICE_URL, "http://localhost:8188/ws/v1/applicationhistory");
        }};

        CommandReport report = executeAction(mockProperties);
        assertNotNull(report);

        Map<String, String> yarnSiteConfigMap = desiredYarnSiteProperties();
        assertAllKeysPresent(yarnSiteConfigMap);

        // The host/port must be rewritten from the http address
        String yarnLogServerWebServiceUrl = yarnSiteConfigMap.get(YARN_LOGSERVER_WEBSERVICE_URL);
        assertEquals("http://c6401.ambari.apache.org:8188/ws/v1/applicationhistory", yarnLogServerWebServiceUrl);
    }

    /**
     * Test when http policy is set to HTTPS_ONLY
     * @throws Exception
     */
    @Test
    public void testHttpsOnly() throws Exception {
        Map<String, String> mockProperties = new HashMap<String, String>() {{
            put(YARN_TIMELINE_WEBAPP_HTTPADDRESS, "c6401.ambari.apache.org:8188");
            put(YARN_TIMELINE_WEBAPP_HTTPSADDRESS, "c6401.ambari.apache.org:8190");
            put(YARN_HTTP_POLICY, "HTTPS_ONLY");
            put(YARN_LOGSERVER_WEBSERVICE_URL, "http://localhost:8188/ws/v1/applicationhistory");
        }};

        CommandReport report = executeAction(mockProperties);
        assertNotNull(report);

        Map<String, String> yarnSiteConfigMap = desiredYarnSiteProperties();
        assertAllKeysPresent(yarnSiteConfigMap);

        // The scheme and host/port must be rewritten from the https address
        String yarnLogServerWebServiceUrl = yarnSiteConfigMap.get(YARN_LOGSERVER_WEBSERVICE_URL);
        assertEquals("https://c6401.ambari.apache.org:8190/ws/v1/applicationhistory", yarnLogServerWebServiceUrl);
    }

    /**
     * Test when http policy is set to incorrect value
     * @throws Exception
     */
    @Test
    public void testIncorrectValue() throws Exception {
        Map<String, String> mockProperties = new HashMap<String, String>() {{
            put(YARN_TIMELINE_WEBAPP_HTTPADDRESS, "c6401.ambari.apache.org:8188");
            put(YARN_TIMELINE_WEBAPP_HTTPSADDRESS, "c6401.ambari.apache.org:8190");
            put(YARN_HTTP_POLICY, "abc");
            put(YARN_LOGSERVER_WEBSERVICE_URL, "http://localhost:8188/ws/v1/applicationhistory");
        }};

        CommandReport report = executeAction(mockProperties);
        assertNotNull(report);

        Map<String, String> yarnSiteConfigMap = desiredYarnSiteProperties();
        assertAllKeysPresent(yarnSiteConfigMap);

        // The URL must be left untouched and the report must flag the invalid policy
        String yarnLogServerWebServiceUrl = yarnSiteConfigMap.get(YARN_LOGSERVER_WEBSERVICE_URL);
        assertEquals("http://localhost:8188/ws/v1/applicationhistory", yarnLogServerWebServiceUrl);
        assertEquals(SOURCE_CONFIG_TYPE + "/" + YARN_HTTP_POLICY + " property contains an invalid value. It should be from [HTTP_ONLY,HTTPS_ONLY]", report.getStdOut());
    }

    /**
     * Test when some values are null
     * @throws Exception
     */
    @Test
    public void testNullValues() throws Exception {
        Map<String, String> mockProperties = new HashMap<String, String>() {{
            put(YARN_TIMELINE_WEBAPP_HTTPADDRESS, null);
            put(YARN_TIMELINE_WEBAPP_HTTPSADDRESS, "c6401.ambari.apache.org:8190");
            put(YARN_HTTP_POLICY, null);
            put(YARN_LOGSERVER_WEBSERVICE_URL, "http://localhost:8188/ws/v1/applicationhistory");
        }};

        CommandReport report = executeAction(mockProperties);
        assertNotNull(report);

        Map<String, String> yarnSiteConfigMap = desiredYarnSiteProperties();
        // Replace the null http address so containsKey checks below run against a non-null value
        yarnSiteConfigMap.put(YARN_TIMELINE_WEBAPP_HTTPADDRESS, "");
        assertAllKeysPresent(yarnSiteConfigMap);

        // The URL must be left untouched and the report must flag the null policy
        String yarnLogServerWebServiceUrl = yarnSiteConfigMap.get(YARN_LOGSERVER_WEBSERVICE_URL);
        assertEquals("http://localhost:8188/ws/v1/applicationhistory", yarnLogServerWebServiceUrl);
        assertEquals(SOURCE_CONFIG_TYPE + "/" + YARN_HTTP_POLICY + " property is null", report.getStdOut());
    }
}
| |
/*
Licensed to Diennea S.r.l. under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Diennea S.r.l. licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package herddb.index;
import herddb.core.PostCheckpointAction;
import herddb.log.LogSequenceNumber;
import herddb.model.InvalidNullValueForKeyException;
import herddb.model.StatementEvaluationContext;
import herddb.model.StatementExecutionException;
import herddb.model.TableContext;
import herddb.sql.SQLRecordKeyFunction;
import herddb.storage.DataStorageManagerException;
import herddb.utils.BooleanHolder;
import herddb.utils.Bytes;
import java.util.AbstractMap;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Predicate;
import java.util.stream.Stream;
/**
* Implementation of KeyToPageIndex which uses any ConcurrentMap
*
* @author enrico.olivelli
*/
public class ConcurrentMapKeyToPageIndex implements KeyToPageIndex {

    private final ConcurrentMap<Bytes, Long> map;

    private final AtomicLong usedMemory = new AtomicLong();

    // Estimated fixed per-entry bookkeeping cost in bytes (map entry object, boxed
    // Long page id, references and object headers), charged on top of the key length.
    // NOTE: previous comment claimed 24 bytes, but the constant is 40.
    private static final long ENTRY_OVERHEAD = 8 + 8 + 8 + 8 + 8;

    public ConcurrentMapKeyToPageIndex(ConcurrentMap<Bytes, Long> map) {
        this.map = map;
        // Account for any entries already present in the supplied map
        this.map.keySet().forEach(this::keyAdded);
    }

    public ConcurrentMap<Bytes, Long> getMap() {
        return map;
    }

    @Override
    public long size() {
        return map.size();
    }

    @Override
    public void put(Bytes key, Long currentPage) {
        Long res = map.put(key, currentPage);
        if (res == null) {
            // Only a brand-new key changes the memory footprint
            keyAdded(key);
        }
    }

    @Override
    public boolean put(Bytes key, Long newPage, Long expectedPage) {
        if (expectedPage == null) {
            final Long opage = map.putIfAbsent(key, newPage);
            if (opage == null) {
                keyAdded(key);
                return true;
            }
            return false;
        } else {
            /*
             * We need to keep track if the update was really done. Reading computeIfPresent result won't
             * suffice, it can be equal to newPage even if no replacement was done (the map contained already
             * newPage mapping and expectedPage was different)
             */
            BooleanHolder holder = new BooleanHolder(false);
            map.computeIfPresent(key, (skey, spage) -> {
                if (spage.equals(expectedPage)) {
                    holder.value = true;
                    return newPage;
                }
                return spage;
            });
            /*
             * BUGFIX: a successful replacement updates an existing key, so it must NOT
             * call keyAdded() here. Doing so (as before) inflated usedMemory on every
             * page relocation even though no new entry was added.
             */
            return holder.value;
        }
    }

    /** Charges the memory estimate for a newly added key. */
    private void keyAdded(Bytes key) {
        usedMemory.addAndGet(key.getLength() + ENTRY_OVERHEAD);
    }

    /** Releases the memory estimate for a removed key. */
    private void keyRemoved(Bytes key) {
        usedMemory.addAndGet(-key.getLength() - ENTRY_OVERHEAD);
    }

    @Override
    public boolean containsKey(Bytes key) {
        return map.containsKey(key);
    }

    @Override
    public Long get(Bytes key) {
        return map.get(key);
    }

    @Override
    public Long remove(Bytes key) {
        Long res = map.remove(key);
        if (res != null) {
            keyRemoved(key);
        }
        return res;
    }

    @Override
    public boolean isSortedAscending(int[] pkTypes) {
        return false;
    }

    @Override
    public Stream<Map.Entry<Bytes, Long>> scanner(IndexOperation operation, StatementEvaluationContext context, TableContext tableContext, herddb.core.AbstractIndexManager index) throws DataStorageManagerException {
        if (operation instanceof PrimaryIndexSeek) {
            // Point lookup: resolve the key expression, then probe the map directly
            PrimaryIndexSeek seek = (PrimaryIndexSeek) operation;
            byte[] seekValue;
            try {
                seekValue = seek.value.computeNewValue(null, context, tableContext);
            } catch (InvalidNullValueForKeyException nullKey) {
                seekValue = null;
            }
            if (seekValue == null) {
                return Stream.empty();
            }
            Bytes key = Bytes.from_array(seekValue);
            Long pageId = map.get(key);
            if (pageId == null) {
                return Stream.empty();
            }
            return Stream.of(new AbstractMap.SimpleImmutableEntry<>(key, pageId));
        }

        // Remember that the IndexOperation can return more records
        // every predicate (WHEREs...) will always be evaluated anyway on every record, in order to guarantee correctness
        if (index != null) {
            return index.recordSetScanner(operation, context, tableContext, this);
        }

        if (operation == null) {
            // Full scan
            Stream<Map.Entry<Bytes, Long>> baseStream = map.entrySet().stream();
            return baseStream;
        } else if (operation instanceof PrimaryIndexPrefixScan) {
            PrimaryIndexPrefixScan scan = (PrimaryIndexPrefixScan) operation;
            byte[] prefix;
            try {
                prefix = scan.value.computeNewValue(null, context, tableContext);
            } catch (InvalidNullValueForKeyException err) {
                return Stream.empty();
            } catch (StatementExecutionException err) {
                throw new RuntimeException(err);
            }
            Predicate<Map.Entry<Bytes, Long>> predicate = (Map.Entry<Bytes, Long> t) -> {
                Bytes fullrecordKey = t.getKey();
                return fullrecordKey.startsWith(prefix.length, prefix);
            };
            Stream<Map.Entry<Bytes, Long>> baseStream = map.entrySet().stream();
            return baseStream.filter(predicate);
        } else if (operation instanceof PrimaryIndexRangeScan) {
            // Resolve optional min/max bounds, then filter the full scan accordingly
            Bytes refminvalue;
            PrimaryIndexRangeScan sis = (PrimaryIndexRangeScan) operation;
            SQLRecordKeyFunction minKey = sis.minValue;
            if (minKey != null) {
                refminvalue = Bytes.from_nullable_array(minKey.computeNewValue(null, context, tableContext));
            } else {
                refminvalue = null;
            }

            Bytes refmaxvalue;
            SQLRecordKeyFunction maxKey = sis.maxValue;
            if (maxKey != null) {
                refmaxvalue = Bytes.from_nullable_array(maxKey.computeNewValue(null, context, tableContext));
            } else {
                refmaxvalue = null;
            }
            Predicate<Map.Entry<Bytes, Long>> predicate;
            if (refminvalue != null && refmaxvalue == null) {
                predicate = (Map.Entry<Bytes, Long> entry) -> {
                    Bytes datum = entry.getKey();
                    return datum.compareTo(refminvalue) >= 0;
                };
            } else if (refminvalue == null && refmaxvalue != null) {
                predicate = (Map.Entry<Bytes, Long> entry) -> {
                    Bytes datum = entry.getKey();
                    return datum.compareTo(refmaxvalue) <= 0;
                };
            } else if (refminvalue != null && refmaxvalue != null) {
                predicate = (Map.Entry<Bytes, Long> entry) -> {
                    Bytes datum = entry.getKey();
                    return datum.compareTo(refmaxvalue) <= 0
                            && datum.compareTo(refminvalue) >= 0;
                };
            } else {
                predicate = (Map.Entry<Bytes, Long> entry) -> {
                    return true;
                };
            }
            Stream<Map.Entry<Bytes, Long>> baseStream = map.entrySet().stream();
            return baseStream.filter(predicate);
        } else {
            throw new DataStorageManagerException("operation " + operation + " not implemented on " + this.getClass());
        }
    }

    @Override
    public void close() {
        map.clear();
        usedMemory.set(0);
    }

    @Override
    public void truncate() {
        map.clear();
        usedMemory.set(0);
    }

    @Override
    public void dropData() {
        truncate();
    }

    @Override
    public long getUsedMemory() {
        return usedMemory.get();
    }

    @Override
    public boolean requireLoadAtStartup() {
        /* Require a full table scan at startup */
        return true;
    }

    @Override
    public List<PostCheckpointAction> checkpoint(LogSequenceNumber sequenceNumber, boolean pin) throws DataStorageManagerException {
        /* No checkpoint, isn't persisted */
        return Collections.emptyList();
    }

    @Override
    public void unpinCheckpoint(LogSequenceNumber sequenceNumber) throws DataStorageManagerException {
        /* No checkpoint, isn't persisted */
    }

    @Override
    public void start(LogSequenceNumber sequenceNumber, boolean created) throws DataStorageManagerException {
        /* No work needed, this implementation require a full table scan at startup instead */
    }
}
| |
/*
* DialogEditor.java
*
*
* Copyright 2006-2015 James F. Bowring and www.Earth-Time.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.earthtime.UPb_Redux.dialogs;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Toolkit;
import java.awt.event.ActionEvent;
import java.awt.event.FocusEvent;
import java.awt.event.FocusListener;
import java.math.BigDecimal;
import javax.swing.AbstractAction;
import javax.swing.JDialog;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.event.UndoableEditEvent;
import javax.swing.event.UndoableEditListener;
import javax.swing.text.AttributeSet;
import javax.swing.text.BadLocationException;
import javax.swing.text.JTextComponent;
import javax.swing.text.PlainDocument;
import javax.swing.undo.CannotRedoException;
import javax.swing.undo.CannotUndoException;
import javax.swing.undo.UndoManager;
import org.earthtime.UPb_Redux.ReduxConstants;
import org.earthtime.UPb_Redux.utilities.BrowserControl;
/**
 * Abstract base class for modal dialog editors.
 *
 * <p>Supplies screen-centered sizing helpers plus a family of undo-capable
 * {@link PlainDocument} subclasses that restrict text-field input to doubles,
 * integers, or {@link BigDecimal} values (including scientific notation while
 * it is being typed).
 *
 * @author James F. Bowring
 */
public abstract class DialogEditor extends JDialog {

    /**
     * Tracks whether a dialog of this family is currently open.
     */
    public static boolean amOpen = false;

    // Shared font for scientific value entry fields.
    private final static Font myScienceFont = new Font("Calibri", Font.PLAIN, 13);

    /**
     * Creates new form DialogEditor.
     *
     * @param parent the owning frame
     * @param modal  whether the dialog blocks input to other windows
     */
    public DialogEditor(java.awt.Frame parent, boolean modal) {
        super(parent, modal);
        JDialog.setDefaultLookAndFeelDecorated(true);
        initComponents();
    }

    /**
     * Sizes the dialog and centers it on the screen.  Adds 25 pixels of
     * height on non-Mac platforms to compensate for window decorations.
     *
     * @param preferredWidth  preferred width in pixels
     * @param preferredHeight preferred height in pixels
     */
    protected void setSizeAndCenter(int preferredWidth, int preferredHeight) {
        preferredHeight += (BrowserControl.isMacOS() ? 0 : 25);
        super.setSize(preferredWidth, preferredHeight);
        super.setPreferredSize(new Dimension(preferredWidth, preferredHeight));
        // Center the frame on the screen.
        Toolkit toolkit = Toolkit.getDefaultToolkit();
        Dimension screenSize = toolkit.getScreenSize();
        int x = (screenSize.width - getWidth()) / 2;
        int y = (screenSize.height - getHeight()) / 2;
        setLocation(x, y);
    }

    /**
     * A {@link PlainDocument} wired with undo/redo support (ctrl-Z / ctrl-Y)
     * and focus-driven caret handling for its text component.
     */
    public static class UnDoAbleDocument extends PlainDocument {

        /**
         * Undo history for this document; public for legacy callers.
         */
        public final UndoManager undo = new UndoManager();
        private JTextComponent textComp;

        /**
         * Builds an undo-able document and applies the editable styling.
         *
         * @param textComp the component backed by this document
         * @param editable whether the component is user-editable
         */
        public UnDoAbleDocument(JTextComponent textComp, boolean editable) {
            this(textComp);
            textComp.setEditable(editable);
            if (editable) {
                textComp.setBackground(ReduxConstants.myEditingWhiteColor);
            } else {
                textComp.setBackground(ReduxConstants.myNotEditingGreyColor);
            }
            textComp.addFocusListener(new SelectTextFocusListener());
        }

        /**
         * Builds an undo-able document bound to the given component.
         *
         * @param textComp the component backed by this document
         */
        public UnDoAbleDocument(JTextComponent textComp) {
            super();
            this.textComp = textComp;
            // http://javaalmanac.com/egs/javax.swing.undo/UndoText.html
            // Record every edit in the undo history.
            addUndoableEditListener(new UndoableEditListener() {
                @Override
                public void undoableEditHappened(UndoableEditEvent evt) {
                    undo.addEdit(evt.getEdit());
                }
            });
            // Create an undo action and add it to the text component.
            textComp.getActionMap().put("Undo",
                    new AbstractAction("Undo") {
                        @Override
                        public void actionPerformed(ActionEvent evt) {
                            try {
                                if (undo.canUndo()) {
                                    undo.undo();
                                }
                            } catch (CannotUndoException e) {
                                // nothing to undo - ignore
                            }
                        }
                    });
            // Bind the undo action to ctrl-Z.
            textComp.getInputMap().put(KeyStroke.getKeyStroke("control Z"), "Undo");
            // Create a redo action and add it to the text component.
            textComp.getActionMap().put("Redo",
                    new AbstractAction("Redo") {
                        @Override
                        public void actionPerformed(ActionEvent evt) {
                            try {
                                if (undo.canRedo()) {
                                    undo.redo();
                                }
                            } catch (CannotRedoException e) {
                                // nothing to redo - ignore
                            }
                        }
                    });
            // Bind the redo action to ctrl-Y.
            textComp.getInputMap().put(KeyStroke.getKeyStroke("control Y"), "Redo");
        }

        /**
         * Computes the document text that would result from inserting
         * {@code string} at {@code offset}, without modifying the document.
         * Shared by the numeric subclasses to validate input before insert.
         *
         * @param offset insertion offset
         * @param string text to insert
         * @return the prospective full document content
         * @throws BadLocationException if the offset is invalid
         */
        protected String proposedContent(int offset, String string)
                throws BadLocationException {
            int length = getLength();
            if (length == 0) {
                return string;
            }
            StringBuilder currentBuffer = new StringBuilder(getText(0, length));
            currentBuffer.insert(offset, string);
            return currentBuffer.toString();
        }

        /**
         * Undoes the last two edits, reverting an automated fill of the field.
         */
        public void unFill() {
            try {
                if (undo.canUndo()) {
                    undo.undo();
                    undo.undo();
                }
            } catch (CannotUndoException e) {
                // nothing to undo - ignore
            }
        }

        /**
         * @return the textComp
         */
        public JTextComponent getTextComp() {
            return textComp;
        }

        /**
         * @param textComp the textComp to set
         */
        public void setTextComp(JTextComponent textComp) {
            this.textComp = textComp;
        }
    }

    // http://java.sun.com/developer/JDCTechTips/2001/tt1120.html
    /**
     * Document that accepts only parseable double values, optionally capped
     * at a positive maximum value.
     */
    public static class DoubleDocument extends UnDoAbleDocument {

        // Maximum permitted value; 0.0 disables the cap.
        double maxValue;
        private JTextField textF;

        /**
         * @param textF the editable field backed by this document
         */
        public DoubleDocument(JTextField textF) {
            this(textF, true);
        }

        /**
         * @param textF    the field backed by this document
         * @param editable whether the field is user-editable
         */
        public DoubleDocument(JTextField textF, boolean editable) {
            super(textF, editable);
            // Bug fix: earlier versions left textF null on this path.
            this.textF = textF;
            textF.setFont(myScienceFont);
            maxValue = 0.0;
        }

        // may 2010 to handle trapping maximum dates
        /**
         * @param textF    the field backed by this document
         * @param maxValue largest accepted value; values above it are clamped
         * @param editable whether the field is user-editable
         */
        public DoubleDocument(JTextField textF, double maxValue, boolean editable) {
            super(textF, editable);
            this.textF = textF;
            textF.setFont(myScienceFont);
            this.maxValue = maxValue;
        }

        /**
         * Inserts text only if the resulting content parses as a double;
         * beeps and rejects the insert otherwise.  When a positive
         * {@code maxValue} is set, larger values are replaced by the maximum.
         *
         * @param offset     insertion offset
         * @param string     text to insert
         * @param attributes insert attributes
         * @throws BadLocationException if the offset is invalid
         */
        @Override
        public void insertString(int offset,
                String string, AttributeSet attributes)
                throws BadLocationException {
            if (string != null) {
                String newValue = proposedContent(offset, string);
                try {
                    double tempVal = Double.parseDouble(newValue);
                    // jan 2015: restored original positive-cap behavior
                    if ((maxValue > 0.0) && (tempVal > maxValue)) {
                        // Clamp: replace the entire field content with the maximum.
                        string = Double.toString(maxValue);
                        offset = 0;
                        textF.setText("");
                    }
                    super.insertString(offset, string,
                            attributes);
                } catch (NumberFormatException ex) {
                    Toolkit.getDefaultToolkit().beep();
                }
            }
        }
    }

    /**
     * Document that accepts only parseable integer values.
     */
    public static class IntegerDocument extends UnDoAbleDocument {

        /**
         * @param textF the editable field backed by this document
         */
        public IntegerDocument(JTextField textF) {
            super(textF, true);
            textF.setFont(myScienceFont);
        }

        /**
         * @param textF    the field backed by this document
         * @param editable whether the field is user-editable
         */
        public IntegerDocument(JTextField textF, boolean editable) {
            super(textF, editable);
            textF.setFont(myScienceFont);
        }

        /**
         * Inserts text only if the resulting content parses as an int;
         * beeps and rejects the insert otherwise.
         *
         * @param offset     insertion offset
         * @param string     text to insert
         * @param attributes insert attributes
         * @throws BadLocationException if the offset is invalid
         */
        @Override
        public void insertString(int offset,
                String string, AttributeSet attributes)
                throws BadLocationException {
            if (string != null) {
                String newValue = proposedContent(offset, string);
                try {
                    // Bug fix: a lone sign is allowed so a negative number can
                    // be typed at all (mirrors BigDecimalDocument's behavior).
                    if (!newValue.equals("-") && !newValue.equals("+")) {
                        Integer.parseInt(newValue);
                    }
                    super.insertString(offset, string,
                            attributes);
                } catch (NumberFormatException ex) {
                    Toolkit.getDefaultToolkit().beep();
                }
            }
        }
    }

    /**
     * Document that accepts only text parseable as {@link BigDecimal},
     * tolerating partial input (lone sign, decimal point, or an exponent in
     * progress) while the user is typing.
     */
    public static class BigDecimalDocument extends UnDoAbleDocument {

        boolean editable = false;

        /**
         * @param textF the editable field backed by this document
         */
        public BigDecimalDocument(JTextField textF) {
            this(textF, true);
        }

        /**
         * @param textF    the field backed by this document
         * @param editable whether the field is user-editable
         */
        public BigDecimalDocument(JTextField textF, boolean editable) {
            super(textF, editable);
            this.editable = editable;
            textF.setFont(myScienceFont);
            textF.setCaretPosition(0);
        }

        /**
         * @return whether this document's field is user-editable
         */
        public boolean isEditable() {
            return editable;
        }

        /**
         * Inserts text only if the resulting content is (or is on its way to
         * becoming) a valid {@link BigDecimal}; beeps and rejects otherwise.
         *
         * @param offset     insertion offset
         * @param string     text to insert
         * @param attributes insert attributes
         * @throws BadLocationException if the offset is invalid
         */
        @Override
        public void insertString(int offset,
                String string, AttributeSet attributes)
                throws BadLocationException {
            if (string != null) {
                // Normalize the exponent marker so "e" and "E" are both accepted.
                string = string.toUpperCase();
                String newValue = proposedContent(offset, string);
                try {
                    if ((newValue.startsWith(".") //
                            || newValue.startsWith("-")//
                            || newValue.startsWith("+"))) {
                        // A lone sign or decimal point is allowed while typing;
                        // anything longer must already parse.
                        if (newValue.length() > 1) {
                            new BigDecimal(newValue, ReduxConstants.mathContext15);
                        }
                    } else if (newValue.indexOf("E") >= 0) {
                        // Exponent in progress ("1E", "1E-") is allowed; once at
                        // least two characters follow the "E", it must parse.
                        // (The old "E-"/"E+" checks were subsumed by this test.)
                        if ((newValue.length() - newValue.indexOf("E")) > 2) {
                            new BigDecimal(newValue, ReduxConstants.mathContext15);
                        }
                    } else {
                        new BigDecimal(newValue, ReduxConstants.mathContext15);
                    }
                    super.insertString(offset, string,
                            attributes);
                } catch (NumberFormatException ex) {
                    Toolkit.getDefaultToolkit().beep();
                }
            }
        }
    }

    /**
     * Focus listener that keeps the caret at the start of a field and restores
     * a zero default when a numeric field is left empty.
     */
    public static class SelectTextFocusListener implements FocusListener {

        /**
         * Creates a new instance of SelectTextFocusListener.
         */
        public SelectTextFocusListener() {
        }

        /**
         * Scrolls the field back to its start when it gains focus.
         *
         * @param focusEvent the focus event
         */
        @Override
        public void focusGained(FocusEvent focusEvent) {
            ((JTextComponent) focusEvent.getSource()).setCaretPosition(0);
        }

        /**
         * Restores a default value if a numeric field was blanked out.
         *
         * @param focusEvent the focus event
         */
        @Override
        public void focusLost(FocusEvent focusEvent) {
            // revised nov 2010: handle accidentally blanked-out number items
            JTextComponent temp = ((JTextComponent) focusEvent.getSource());
            if (temp.getText().length() == 0) {
                if ((temp.getDocument() instanceof DoubleDocument)//
                        ||//
                        (temp.getDocument() instanceof BigDecimalDocument)) {
                    temp.setText("0.0");
                } else if ((temp.getDocument() instanceof IntegerDocument)) {
                    temp.setText("0");
                }
            }
            temp.setCaretPosition(0);
        }
    }

    /**
     * Sizes the dialog (with a 25-pixel decoration allowance on Windows) and
     * centers it on the screen.
     *
     * @param width  desired width in pixels
     * @param height desired height in pixels
     */
    @Override
    public void setSize(int width, int height) {
        if (BrowserControl.isMacOS()) {
            super.setSize(width, height);
        } else if (BrowserControl.isWindowsPlatform()) {
            super.setSize(width, height + 25);
        } else {
            // Bug fix: other platforms (e.g. Linux) previously fell through
            // without the dialog ever being sized at all.
            super.setSize(width, height);
        }
        // Center the frame on the screen.
        Toolkit toolkit = Toolkit.getDefaultToolkit();
        Dimension screenSize = toolkit.getScreenSize();
        int x = (screenSize.width - getWidth()) / 2;
        int y = (screenSize.height - getHeight()) / 2;
        setLocation(x, y);
    }

    /**
     * Hides and disposes this dialog.
     */
    public void close() {
        setVisible(false);
        dispose();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    // <editor-fold defaultstate="collapsed" desc=" Generated Code ">//GEN-BEGIN:initComponents
    private void initComponents () {
        setDefaultCloseOperation (javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        org.jdesktop.layout.GroupLayout layout = new org.jdesktop.layout.GroupLayout (getContentPane ());
        getContentPane ().setLayout (layout);
        layout.setHorizontalGroup (
            layout.createParallelGroup (org.jdesktop.layout.GroupLayout.LEADING)
            .add (0, 400, Short.MAX_VALUE)
        );
        layout.setVerticalGroup (
            layout.createParallelGroup (org.jdesktop.layout.GroupLayout.LEADING)
            .add (0, 300, Short.MAX_VALUE)
        );
        pack ();
    }
    // </editor-fold>//GEN-END:initComponents
    // Variables declaration - do not modify//GEN-BEGIN:variables
    // End of variables declaration//GEN-END:variables
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/enums/conversion_value_rule_set_status.proto
package com.google.ads.googleads.v10.enums;
/**
* <pre>
* Container for enum describing possible statuses of a conversion value rule
* set.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum}
*/
// NOTE(review): protoc-generated code — regenerate from the .proto instead of
// hand-editing; comments below are annotations only, the code is untouched.
public final class ConversionValueRuleSetStatusEnum extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum)
ConversionValueRuleSetStatusEnumOrBuilder {
private static final long serialVersionUID = 0L;
// Use ConversionValueRuleSetStatusEnum.newBuilder() to construct.
private ConversionValueRuleSetStatusEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ConversionValueRuleSetStatusEnum() {
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ConversionValueRuleSetStatusEnum();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
// Wire-format parsing constructor: this message declares no fields, so the
// loop only collects unknown fields until end-of-message (tag 0).
private ConversionValueRuleSetStatusEnum(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(
input, unknownFields, extensionRegistry, tag)) {
done = true;
}
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusProto.internal_static_google_ads_googleads_v10_enums_ConversionValueRuleSetStatusEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusProto.internal_static_google_ads_googleads_v10_enums_ConversionValueRuleSetStatusEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.class, com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.Builder.class);
}
/**
* <pre>
* Possible statuses of a conversion value rule set.
* </pre>
*
* Protobuf enum {@code google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.ConversionValueRuleSetStatus}
*/
public enum ConversionValueRuleSetStatus
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
UNSPECIFIED(0),
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
UNKNOWN(1),
/**
* <pre>
* Conversion Value Rule Set is enabled and can be applied.
* </pre>
*
* <code>ENABLED = 2;</code>
*/
ENABLED(2),
/**
* <pre>
* Conversion Value Rule Set is permanently deleted and can't be applied.
* </pre>
*
* <code>REMOVED = 3;</code>
*/
REMOVED(3),
/**
* <pre>
* Conversion Value Rule Set is paused and won't be applied. It can be
* enabled again.
* </pre>
*
* <code>PAUSED = 4;</code>
*/
PAUSED(4),
UNRECOGNIZED(-1),
;
/**
* <pre>
* Not specified.
* </pre>
*
* <code>UNSPECIFIED = 0;</code>
*/
public static final int UNSPECIFIED_VALUE = 0;
/**
* <pre>
* Used for return value only. Represents value unknown in this version.
* </pre>
*
* <code>UNKNOWN = 1;</code>
*/
public static final int UNKNOWN_VALUE = 1;
/**
* <pre>
* Conversion Value Rule Set is enabled and can be applied.
* </pre>
*
* <code>ENABLED = 2;</code>
*/
public static final int ENABLED_VALUE = 2;
/**
* <pre>
* Conversion Value Rule Set is permanently deleted and can't be applied.
* </pre>
*
* <code>REMOVED = 3;</code>
*/
public static final int REMOVED_VALUE = 3;
/**
* <pre>
* Conversion Value Rule Set is paused and won't be applied. It can be
* enabled again.
* </pre>
*
* <code>PAUSED = 4;</code>
*/
public static final int PAUSED_VALUE = 4;
// Wire value of this constant; UNRECOGNIZED carries no number by contract.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static ConversionValueRuleSetStatus valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static ConversionValueRuleSetStatus forNumber(int value) {
switch (value) {
case 0: return UNSPECIFIED;
case 1: return UNKNOWN;
case 2: return ENABLED;
case 3: return REMOVED;
case 4: return PAUSED;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<ConversionValueRuleSetStatus>
internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<
ConversionValueRuleSetStatus> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<ConversionValueRuleSetStatus>() {
public ConversionValueRuleSetStatus findValueByNumber(int number) {
return ConversionValueRuleSetStatus.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.getDescriptor().getEnumTypes().get(0);
}
private static final ConversionValueRuleSetStatus[] VALUES = values();
public static ConversionValueRuleSetStatus valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private ConversionValueRuleSetStatus(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.ConversionValueRuleSetStatus)
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
size += unknownFields.getSerializedSize();
memoizedSize = size;
return size;
}
// Equality and hashing depend only on unknown fields: this wrapper message
// declares no fields of its own.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum)) {
return super.equals(obj);
}
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum other = (com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum) obj;
if (!unknownFields.equals(other.unknownFields)) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard protobuf parse entry points over the supported input kinds.
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Container for enum describing possible statuses of a conversion value rule
* set.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum)
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnumOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusProto.internal_static_google_ads_googleads_v10_enums_ConversionValueRuleSetStatusEnum_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusProto.internal_static_google_ads_googleads_v10_enums_ConversionValueRuleSetStatusEnum_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.class, com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.Builder.class);
}
// Construct using com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
}
}
@java.lang.Override
public Builder clear() {
super.clear();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusProto.internal_static_google_ads_googleads_v10_enums_ConversionValueRuleSetStatusEnum_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum getDefaultInstanceForType() {
return com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum build() {
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum buildPartial() {
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum result = new com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum(this);
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum) {
return mergeFrom((com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum other) {
if (other == com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum.getDefaultInstance()) return this;
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum)
private static final com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum();
}
public static com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Singleton parser delegating to the wire-format parsing constructor above.
private static final com.google.protobuf.Parser<ConversionValueRuleSetStatusEnum>
PARSER = new com.google.protobuf.AbstractParser<ConversionValueRuleSetStatusEnum>() {
@java.lang.Override
public ConversionValueRuleSetStatusEnum parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new ConversionValueRuleSetStatusEnum(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<ConversionValueRuleSetStatusEnum> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ConversionValueRuleSetStatusEnum> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v10.enums.ConversionValueRuleSetStatusEnum getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.zen;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.cluster.ClusterChangedEvent;
import org.elasticsearch.cluster.ClusterModule;
import org.elasticsearch.cluster.ClusterName;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ClusterStateListener;
import org.elasticsearch.cluster.Diff;
import org.elasticsearch.cluster.block.ClusterBlocks;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.discovery.Discovery;
import org.elasticsearch.discovery.DiscoverySettings;
import org.elasticsearch.node.Node;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;
import org.elasticsearch.test.transport.MockTransportService;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.BytesTransportRequest;
import org.elasticsearch.transport.TransportChannel;
import org.elasticsearch.transport.TransportConnectionListener;
import org.elasticsearch.transport.TransportResponse;
import org.elasticsearch.transport.TransportResponseOptions;
import org.elasticsearch.transport.TransportService;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.emptyIterable;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
@TestLogging("org.elasticsearch.discovery.zen.publish:TRACE")
public class PublishClusterStateActionTests extends ESTestCase {
private static final ClusterName CLUSTER_NAME = ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY);
protected ThreadPool threadPool;
protected Map<String, MockNode> nodes = new HashMap<>();
/**
 * Test double for a cluster node: bundles a {@link DiscoveryNode}, its mock transport
 * service, its publish action and a local copy of the cluster state. Incoming publishes
 * flow through a real {@link PendingClusterStatesQueue}, mirroring how ZenDiscovery
 * queues, commits and applies states.
 */
public static class MockNode implements PublishClusterStateAction.IncomingClusterStateListener {
    public final DiscoveryNode discoveryNode;
    public final MockTransportService service;
    public MockPublishAction action;
    // Optional hook invoked after a committed state has been applied; may be null.
    public final ClusterStateListener listener;
    private final PendingClusterStatesQueue pendingStatesQueue;
    // The node's currently applied cluster state; written from transport threads, hence volatile.
    public volatile ClusterState clusterState;
    private final Logger logger;

    public MockNode(DiscoveryNode discoveryNode, MockTransportService service,
                    @Nullable ClusterStateListener listener, Logger logger) {
        this.discoveryNode = discoveryNode;
        this.service = service;
        this.listener = listener;
        this.logger = logger;
        // Start from a single-node cluster state in which this node is the local node.
        this.clusterState = ClusterState.builder(CLUSTER_NAME).nodes(DiscoveryNodes.builder()
            .add(discoveryNode).localNodeId(discoveryNode.getId()).build()).build();
        this.pendingStatesQueue = new PendingClusterStatesQueue(logger, 25);
    }

    /** Marks this node as master in its own cluster state; returns {@code this} for chaining. */
    public MockNode setAsMaster() {
        this.clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .masterNodeId(discoveryNode.getId())).build();
        return this;
    }

    /** Clears the master id, e.g. to simulate the master stepping down. */
    public MockNode resetMasterId() {
        this.clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(clusterState.nodes())
            .masterNodeId(null)).build();
        return this;
    }

    public void connectTo(DiscoveryNode node) {
        service.connectToNode(node);
    }

    @Override
    public void onIncomingClusterState(ClusterState incomingState) {
        // Validate exactly as ZenDiscovery would, then park the state until it is committed.
        ZenDiscovery.validateIncomingState(logger, incomingState, clusterState);
        pendingStatesQueue.addPending(incomingState);
    }

    // NOTE(review): this appears to implement IncomingClusterStateListener but lacks @Override — confirm and add.
    public void onClusterStateCommitted(String stateUUID, ActionListener<Void> processedListener) {
        // Mark the pending state as committed, wiring the processed-callbacks to the supplied listener.
        final ClusterState state = pendingStatesQueue.markAsCommitted(stateUUID,
            new PendingClusterStatesQueue.StateProcessedListener() {
                @Override
                public void onNewClusterStateProcessed() {
                    processedListener.onResponse(null);
                }
                @Override
                public void onNewClusterStateFailed(Exception e) {
                    processedListener.onFailure(e);
                }
            });
        if (state != null) {
            ClusterState newClusterState = pendingStatesQueue.getNextClusterStateToProcess();
            logger.debug("[{}] received version [{}], uuid [{}]",
                discoveryNode.getName(), newClusterState.version(), newClusterState.stateUUID());
            if (listener != null) {
                ClusterChangedEvent event = new ClusterChangedEvent("", newClusterState, clusterState);
                listener.clusterChanged(event);
            }
            // Only replace the local state when there is no known master or the new state supersedes it.
            if (clusterState.nodes().getMasterNode() == null || newClusterState.supersedes(clusterState)) {
                clusterState = newClusterState;
            }
            pendingStatesQueue.markAsProcessed(newClusterState);
        }
    }

    public DiscoveryNodes nodes() {
        return clusterState.nodes();
    }
}
/** Creates a mock node with empty settings and no cluster-state listener. */
public MockNode createMockNode(final String name) throws Exception {
    final Settings defaults = Settings.EMPTY;
    return createMockNode(name, defaults, null);
}
/** Creates a mock node, delegating to the static variant with this test's thread pool, logger and node registry. */
public MockNode createMockNode(String name, final Settings basSettings, @Nullable ClusterStateListener listener) throws Exception {
    return createMockNode(
        name, basSettings, listener,
        threadPool, logger, nodes);
}
/**
 * Creates a {@link MockNode}, wires it to every node already in {@code nodes} (in both
 * directions), waits for all connections to be established, and registers it under
 * {@code name}. Fails if a node with the same name already exists.
 */
public static MockNode createMockNode(String name, final Settings basSettings, @Nullable ClusterStateListener listener,
                                      ThreadPool threadPool, Logger logger, Map<String, MockNode> nodes) throws Exception {
    final Settings settings = Settings.builder()
        .put("name", name)
        // Trace everything except the "NOTHING" action so transport activity shows in logs.
        .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "").put(
            TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
        .put(basSettings)
        .build();
    MockTransportService service = buildTransportService(settings, threadPool);
    DiscoveryNode discoveryNode = service.getLocalDiscoNode();
    MockNode node = new MockNode(discoveryNode, service, listener, logger);
    node.action = buildPublishClusterStateAction(settings, service, node);
    // Two connection events per existing node: existing -> new and new -> existing.
    final CountDownLatch latch = new CountDownLatch(nodes.size() * 2);
    TransportConnectionListener waitForConnection = new TransportConnectionListener() {
        @Override
        public void onNodeConnected(DiscoveryNode node) {
            latch.countDown();
        }
        @Override
        public void onNodeDisconnected(DiscoveryNode node) {
            fail("disconnect should not be called " + node);
        }
    };
    node.service.addConnectionListener(waitForConnection);
    for (MockNode curNode : nodes.values()) {
        curNode.service.addConnectionListener(waitForConnection);
        curNode.connectTo(node.discoveryNode);
        node.connectTo(curNode.discoveryNode);
    }
    assertThat("failed to wait for all nodes to connect", latch.await(5, TimeUnit.SECONDS), equalTo(true));
    // Drop the temporary listeners so later disconnects (e.g. during teardown) don't fail the test.
    for (MockNode curNode : nodes.values()) {
        curNode.service.removeConnectionListener(waitForConnection);
    }
    node.service.removeConnectionListener(waitForConnection);
    if (nodes.put(name, node) != null) {
        fail("Node with the name " + name + " already exist");
    }
    return node;
}
/** Returns the transport service of the node registered under {@code name}, or null if unknown. */
public MockTransportService service(String name) {
    final MockNode found = nodes.get(name);
    return found == null ? null : found.service;
}
/** Returns the publish action of the node registered under {@code name}, or null if unknown. */
public PublishClusterStateAction action(String name) {
    final MockNode found = nodes.get(name);
    return found == null ? null : found.action;
}
@Override
@Before
public void setUp() throws Exception {
    super.setUp();
    // One shared thread pool for all mock nodes created by a test; terminated in tearDown().
    threadPool = new TestThreadPool(getClass().getName());
}
@Override
@After
public void tearDown() throws Exception {
    super.tearDown();
    // Close every node's transport service before shutting down the shared thread pool.
    for (MockNode curNode : nodes.values()) {
        curNode.service.close();
    }
    terminate(threadPool);
}
/** Builds a started mock transport service that is already accepting incoming requests. */
private static MockTransportService buildTransportService(Settings settings, ThreadPool threadPool) {
    final MockTransportService service =
        MockTransportService.createNewService(settings, Version.CURRENT, threadPool, null);
    service.start();
    service.acceptIncomingRequests();
    return service;
}
/** Builds a {@link MockPublishAction} with discovery settings and writeable registry derived from {@code settings}. */
private static MockPublishAction buildPublishClusterStateAction(
    Settings settings,
    MockTransportService transportService,
    PublishClusterStateAction.IncomingClusterStateListener listener
) {
    final ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
    final DiscoverySettings discoverySettings = new DiscoverySettings(settings, clusterSettings);
    return new MockPublishAction(
        settings,
        transportService,
        new NamedWriteableRegistry(ClusterModule.getNamedWriteables()),
        listener,
        discoverySettings);
}
/**
 * End-to-end publishing scenario: the first publish to a node is a full state, subsequent
 * ones are diffs, a skipped version forces a full state again, and a new master always
 * publishes a full state.
 */
public void testSimpleClusterStatePublishing() throws Exception {
    MockNode nodeA = createMockNode("nodeA").setAsMaster();
    MockNode nodeB = createMockNode("nodeB");
    // Initial cluster state
    ClusterState clusterState = nodeA.clusterState;
    // cluster state update - add nodeB
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(clusterState.nodes()).add(nodeB.discoveryNode).build();
    ClusterState previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    // first publish to nodeB must be a full state, not a diff
    assertSameStateFromFull(nodeB.clusterState, clusterState);
    // cluster state update - add block
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder()
        .addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromDiff(nodeB.clusterState, clusterState);
    assertThat(nodeB.clusterState.blocks().global().size(), equalTo(1));
    // cluster state update - remove block
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.EMPTY_CLUSTER_BLOCK).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromDiff(nodeB.clusterState, clusterState);
    assertTrue(nodeB.clusterState.wasReadFromDiff());
    // Adding new node - this node should get full cluster state while nodeB should still be getting diffs
    MockNode nodeC = createMockNode("nodeC");
    // cluster state update 3 - register node C
    previousClusterState = clusterState;
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes).add(nodeC.discoveryNode).build();
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromDiff(nodeB.clusterState, clusterState);
    // First state
    assertSameStateFromFull(nodeC.clusterState, clusterState);
    // cluster state update 4 - update settings
    previousClusterState = clusterState;
    MetaData metaData = MetaData.builder(clusterState.metaData())
        .transientSettings(Settings.builder().put("foo", "bar").build()).build();
    clusterState = ClusterState.builder(clusterState).metaData(metaData).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromDiff(nodeB.clusterState, clusterState);
    assertThat(nodeB.clusterState.blocks().global().size(), equalTo(0));
    assertSameStateFromDiff(nodeC.clusterState, clusterState);
    assertThat(nodeC.clusterState.blocks().global().size(), equalTo(0));
    // cluster state update - skipping one version change - should request full cluster state
    previousClusterState = ClusterState.builder(clusterState).incrementVersion().build();
    clusterState = ClusterState.builder(clusterState).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromFull(nodeB.clusterState, clusterState);
    assertSameStateFromFull(nodeC.clusterState, clusterState);
    assertFalse(nodeC.clusterState.wasReadFromDiff());
    // node A steps down from being master
    nodeA.resetMasterId();
    nodeB.resetMasterId();
    nodeC.resetMasterId();
    // node B becomes the master and sends a version of the cluster state that goes back
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes)
        .add(nodeA.discoveryNode)
        .add(nodeB.discoveryNode)
        .add(nodeC.discoveryNode)
        .masterNodeId(nodeB.discoveryNode.getId())
        .localNodeId(nodeB.discoveryNode.getId())
        .build();
    previousClusterState = ClusterState.builder(new ClusterName("test")).nodes(discoveryNodes).build();
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    publishStateAndWait(nodeB.action, clusterState, previousClusterState);
    // a fresh master must publish full states, never diffs
    assertSameStateFromFull(nodeA.clusterState, clusterState);
    assertSameStateFromFull(nodeC.clusterState, clusterState);
}
/**
 * A node present in the previous cluster state but never published to must still
 * receive a full state first; only the second publish may be a diff.
 */
public void testUnexpectedDiffPublishing() throws Exception {
    MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, event -> {
        fail("Shouldn't send cluster state to myself");
    }).setAsMaster();
    MockNode nodeB = createMockNode("nodeB");
    // Initial cluster state with both states - the second node still shouldn't
    // get diff even though it's present in the previous cluster state
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).add(nodeB.discoveryNode).build();
    ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build();
    ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromFull(nodeB.clusterState, clusterState);
    // cluster state update - add block
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder()
        .addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromDiff(nodeB.clusterState, clusterState);
}
/**
 * With {@code discovery.zen.publish_diff.enable} set to false, every publish must
 * ship a full cluster state — the receiving listener asserts no state was read from a diff.
 */
public void testDisablingDiffPublishing() throws Exception {
    Settings noDiffPublishingSettings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), false).build();
    // The master must never deliver a state to itself.
    MockNode nodeA = createMockNode("nodeA", noDiffPublishingSettings,
        event -> fail("Shouldn't send cluster state to myself"));
    // Every state nodeB applies has to be a full state, never a diff.
    MockNode nodeB = createMockNode("nodeB", noDiffPublishingSettings,
        event -> assertFalse(event.state().wasReadFromDiff()));
    // Initial cluster state: nodeA alone, acting as local node and master.
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
        .add(nodeA.discoveryNode).localNodeId(nodeA.discoveryNode.getId()).masterNodeId(nodeA.discoveryNode.getId()).build();
    ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build();
    // First update: join nodeB to the cluster.
    discoveryNodes = DiscoveryNodes.builder(discoveryNodes).add(nodeB.discoveryNode).build();
    ClusterState previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).nodes(discoveryNodes).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    // Second update: add a global block — would normally be published as a diff.
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder()
        .addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
}
/**
 * Publishes many consecutive cluster states without waiting for each publish to complete
 * (so publishes overlap and may time out), then verifies every node converged on the
 * final state with consistent metadata.
 */
public void testSimultaneousClusterStatePublishing() throws Exception {
    int numberOfNodes = randomIntBetween(2, 10);
    int numberOfIterations = scaledRandomIntBetween(5, 50);
    // randomly exercise both the diff and the full-state publishing paths
    Settings settings = Settings.builder().put(DiscoverySettings.PUBLISH_DIFF_ENABLE_SETTING.getKey(), randomBoolean()).build();
    MockNode master = createMockNode("node0", settings, new ClusterStateListener() {
        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            assertProperMetaDataForVersion(event.state().metaData(), event.state().version());
        }
    }).setAsMaster();
    DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder(master.nodes());
    for (int i = 1; i < numberOfNodes; i++) {
        final String name = "node" + i;
        final MockNode node = createMockNode(name, settings, new ClusterStateListener() {
            @Override
            public void clusterChanged(ClusterChangedEvent event) {
                // every applied state must carry the metadata matching its version
                assertProperMetaDataForVersion(event.state().metaData(), event.state().version());
            }
        });
        discoveryNodesBuilder.add(node.discoveryNode);
    }
    AssertingAckListener[] listeners = new AssertingAckListener[numberOfIterations];
    DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
    MetaData metaData = MetaData.EMPTY_META_DATA;
    ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).build();
    ClusterState previousState;
    // fire all publishes back-to-back without awaiting the previous one
    for (int i = 0; i < numberOfIterations; i++) {
        previousState = clusterState;
        metaData = buildMetaDataForVersion(metaData, i + 1);
        clusterState = ClusterState.builder(clusterState).incrementVersion().metaData(metaData).nodes(discoveryNodes).build();
        listeners[i] = publishState(master.action, clusterState, previousState);
    }
    for (int i = 0; i < numberOfIterations; i++) {
        listeners[i].await(1, TimeUnit.SECONDS);
    }
    // set the master cs
    master.clusterState = clusterState;
    for (MockNode node : nodes.values()) {
        assertSameState(node.clusterState, clusterState);
        assertThat(node.clusterState.nodes().getLocalNode(), equalTo(node.discoveryNode));
    }
}
/**
 * A failure while serializing a cluster-state diff must abort the publish with
 * {@link Discovery.FailedToCommitClusterStateException} rather than being swallowed.
 */
public void testSerializationFailureDuringDiffPublishing() throws Exception {
    MockNode nodeA = createMockNode("nodeA", Settings.EMPTY, new ClusterStateListener() {
        @Override
        public void clusterChanged(ClusterChangedEvent event) {
            fail("Shouldn't send cluster state to myself");
        }
    }).setAsMaster();
    MockNode nodeB = createMockNode("nodeB");
    // Initial cluster state with both states - the second node still shouldn't get
    // diff even though it's present in the previous cluster state
    DiscoveryNodes discoveryNodes = DiscoveryNodes.builder(nodeA.nodes()).add(nodeB.discoveryNode).build();
    ClusterState previousClusterState = ClusterState.builder(CLUSTER_NAME).nodes(discoveryNodes).build();
    ClusterState clusterState = ClusterState.builder(previousClusterState).incrementVersion().build();
    publishStateAndWait(nodeA.action, clusterState, previousClusterState);
    assertSameStateFromFull(nodeB.clusterState, clusterState);
    // cluster state update - add block
    previousClusterState = clusterState;
    clusterState = ClusterState.builder(clusterState).blocks(ClusterBlocks.builder()
        .addGlobalBlock(MetaData.CLUSTER_READ_ONLY_BLOCK)).incrementVersion().build();
    // wrap the state so that its diff() yields a Diff whose serialization always throws
    ClusterState unserializableClusterState = new ClusterState(clusterState.version(), clusterState.stateUUID(), clusterState) {
        @Override
        public Diff<ClusterState> diff(ClusterState previousState) {
            return new Diff<ClusterState>() {
                @Override
                public ClusterState apply(ClusterState part) {
                    fail("this diff shouldn't be applied");
                    return part;
                }
                @Override
                public void writeTo(StreamOutput out) throws IOException {
                    throw new IOException("Simulated failure of diff serialization");
                }
            };
        }
    };
    try {
        publishStateAndWait(nodeA.action, unserializableClusterState, previousClusterState);
        fail("cluster state published despite of diff errors");
    } catch (Discovery.FailedToCommitClusterStateException e) {
        assertThat(e.getCause(), notNullValue());
        assertThat(e.getCause().getMessage(), containsString("failed to serialize"));
    }
}
/**
 * Publishing with a {@code min_master_nodes} requirement higher than the number of
 * available master-eligible nodes must fail to commit. Data nodes do not count.
 */
public void testFailToPublishWithLessThanMinMasterNodes() throws Exception {
    final int masterNodes = randomIntBetween(1, 10);
    MockNode master = createMockNode("master");
    DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder().add(master.discoveryNode);
    for (int i = 1; i < masterNodes; i++) {
        discoveryNodesBuilder.add(createMockNode("node" + i).discoveryNode);
    }
    final int dataNodes = randomIntBetween(0, 5);
    final Settings dataSettings = Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build();
    for (int i = 0; i < dataNodes; i++) {
        discoveryNodesBuilder.add(createMockNode("data_" + i, dataSettings, null).discoveryNode);
    }
    discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId());
    DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
    MetaData metaData = MetaData.EMPTY_META_DATA;
    ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build();
    ClusterState previousState = master.clusterState;
    try {
        // require strictly more masters than exist so the commit can never be reached
        publishState(master.action, clusterState, previousState, masterNodes + randomIntBetween(1, 5));
        fail("cluster state publishing didn't fail despite of not having enough nodes");
    } catch (Discovery.FailedToCommitClusterStateException expected) {
        logger.debug("failed to publish as expected", expected);
    }
}
/**
 * Mixes well-behaved, erroring and timing-out master nodes, then checks that the commit
 * phase succeeds, fails, or times out exactly as the chosen {@code min_master_nodes}
 * predicts. Data nodes get random failure modes since they never count towards commit.
 */
public void testPublishingWithSendingErrors() throws Exception {
    int goodNodes = randomIntBetween(2, 5);
    int errorNodes = randomIntBetween(1, 5);
    int timeOutNodes = randomBoolean() ? 0 : randomIntBetween(1, 5); // adding timeout nodes will force timeout errors
    final int numberOfMasterNodes = goodNodes + errorNodes + timeOutNodes + 1; // master
    final boolean expectingToCommit = randomBoolean();
    Settings.Builder settings = Settings.builder();
    // make sure we have a reasonable timeout if we expect to timeout, o.w. one that will make the test "hang"
    settings.put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), expectingToCommit == false && timeOutNodes > 0 ? "100ms" : "1h")
        .put(DiscoverySettings.PUBLISH_TIMEOUT_SETTING.getKey(), "5ms"); // test is about committing
    MockNode master = createMockNode("master", settings.build(), null);
    // randomize things a bit
    // BUG FIX: this used to be an int[]; Collections.shuffle(Arrays.asList(int[])) sees a
    // single-element List<int[]> and shuffles nothing. Integer[] yields a proper List<Integer>
    // view so the node types actually get shuffled.
    Integer[] nodeTypes = new Integer[goodNodes + errorNodes + timeOutNodes];
    for (int i = 0; i < goodNodes; i++) {
        nodeTypes[i] = 0;
    }
    for (int i = goodNodes; i < goodNodes + errorNodes; i++) {
        nodeTypes[i] = 1;
    }
    for (int i = goodNodes + errorNodes; i < nodeTypes.length; i++) {
        nodeTypes[i] = 2;
    }
    Collections.shuffle(Arrays.asList(nodeTypes), random());
    DiscoveryNodes.Builder discoveryNodesBuilder = DiscoveryNodes.builder().add(master.discoveryNode);
    for (int i = 0; i < nodeTypes.length; i++) {
        final MockNode mockNode = createMockNode("node" + i);
        discoveryNodesBuilder.add(mockNode.discoveryNode);
        switch (nodeTypes[i]) {
            case 1:
                mockNode.action.errorOnSend.set(true);
                break;
            case 2:
                mockNode.action.timeoutOnSend.set(true);
                break;
        }
    }
    final int dataNodes = randomIntBetween(0, 3); // data nodes don't matter
    for (int i = 0; i < dataNodes; i++) {
        final MockNode mockNode = createMockNode("data_" + i,
            Settings.builder().put(Node.NODE_MASTER_SETTING.getKey(), false).build(), null);
        discoveryNodesBuilder.add(mockNode.discoveryNode);
        if (randomBoolean()) {
            // we really don't care - just chaos monkey
            mockNode.action.errorOnCommit.set(randomBoolean());
            mockNode.action.errorOnSend.set(randomBoolean());
            mockNode.action.timeoutOnCommit.set(randomBoolean());
            mockNode.action.timeoutOnSend.set(randomBoolean());
        }
    }
    final int minMasterNodes;
    final String expectedBehavior;
    if (expectingToCommit) {
        minMasterNodes = randomIntBetween(0, goodNodes + 1); // count master
        expectedBehavior = "succeed";
    } else {
        minMasterNodes = randomIntBetween(goodNodes + 2, numberOfMasterNodes); // +2 because of master
        expectedBehavior = timeOutNodes > 0 ? "timeout" : "fail";
    }
    logger.info("--> expecting commit to {}. good nodes [{}], errors [{}], timeouts [{}]. min_master_nodes [{}]",
        expectedBehavior, goodNodes + 1, errorNodes, timeOutNodes, minMasterNodes);
    discoveryNodesBuilder.localNodeId(master.discoveryNode.getId()).masterNodeId(master.discoveryNode.getId());
    DiscoveryNodes discoveryNodes = discoveryNodesBuilder.build();
    MetaData metaData = MetaData.EMPTY_META_DATA;
    ClusterState clusterState = ClusterState.builder(CLUSTER_NAME).metaData(metaData).nodes(discoveryNodes).build();
    ClusterState previousState = master.clusterState;
    try {
        publishState(master.action, clusterState, previousState, minMasterNodes);
        if (expectingToCommit == false) {
            fail("cluster state publishing didn't fail despite of not have enough nodes");
        }
    } catch (Discovery.FailedToCommitClusterStateException exception) {
        logger.debug("failed to publish as expected", exception);
        if (expectingToCommit) {
            throw exception;
        }
        assertThat(exception.getMessage(), containsString(timeOutNodes > 0 ? "timed out" : "failed"));
    }
}
/**
 * Sends commit requests in random order and verifies that a commit for a state older
 * than the highest version already committed is rejected with IllegalStateException,
 * while newer ones are accepted — so the node ends up holding the latest state.
 */
public void testOutOfOrderCommitMessages() throws Throwable {
    MockNode node = createMockNode("node").setAsMaster();
    final CapturingTransportChannel channel = new CapturingTransportChannel();
    List<ClusterState> states = new ArrayList<>();
    final int numOfStates = scaledRandomIntBetween(3, 25);
    for (int i = 1; i <= numOfStates; i++) {
        states.add(ClusterState.builder(node.clusterState).version(i).stateUUID(ClusterState.UNKNOWN_UUID).build());
    }
    final ClusterState finalState = states.get(numOfStates - 1);
    logger.info("--> publishing states");
    // deliver every state as a full serialized publish; all should be accepted
    for (ClusterState state : states) {
        node.action.handleIncomingClusterStateRequest(
            new BytesTransportRequest(PublishClusterStateAction.serializeFullClusterState(state, Version.CURRENT), Version.CURRENT),
            channel);
        assertThat(channel.response.get(), equalTo((TransportResponse) TransportResponse.Empty.INSTANCE));
        assertThat(channel.error.get(), nullValue());
        channel.clear();
    }
    logger.info("--> committing states");
    long largestVersionSeen = Long.MIN_VALUE;
    Randomness.shuffle(states);
    for (ClusterState state : states) {
        node.action.handleCommitRequest(new PublishClusterStateAction.CommitClusterStateRequest(state.stateUUID()), channel);
        if (largestVersionSeen < state.getVersion()) {
            assertThat(channel.response.get(), equalTo((TransportResponse) TransportResponse.Empty.INSTANCE));
            if (channel.error.get() != null) {
                throw channel.error.get();
            }
            largestVersionSeen = state.getVersion();
        } else {
            // older cluster states will be rejected
            assertNotNull(channel.error.get());
            assertThat(channel.error.get(), instanceOf(IllegalStateException.class));
        }
        channel.clear();
    }
    //now check the last state held
    assertSameState(node.clusterState, finalState);
}
/**
 * Tests that cluster is committed or times out. It should never be the case that we fail
 * an update due to a commit timeout, but it ends up being committed anyway
 */
public void testTimeoutOrCommit() throws Exception {
    Settings settings = Settings.builder()
        // short but so we will sometime commit sometime timeout
        .put(DiscoverySettings.COMMIT_TIMEOUT_SETTING.getKey(), "1ms").build();
    MockNode master = createMockNode("master", settings, null);
    MockNode node = createMockNode("node", settings, null);
    ClusterState state = ClusterState.builder(master.clusterState)
        .nodes(DiscoveryNodes.builder(master.clusterState.nodes())
            .add(node.discoveryNode).masterNodeId(master.discoveryNode.getId())).build();
    // repeat to give both the commit and the timeout race a chance to occur
    for (int i = 0; i < 10; i++) {
        state = ClusterState.builder(state).incrementVersion().build();
        logger.debug("--> publishing version [{}], UUID [{}]", state.version(), state.stateUUID());
        boolean success;
        try {
            publishState(master.action, state, master.clusterState, 2).await(1, TimeUnit.HOURS);
            success = true;
        } catch (Discovery.FailedToCommitClusterStateException OK) {
            success = false;
        }
        logger.debug("--> publishing [{}], verifying...", success ? "succeeded" : "failed");
        // invariant: a publish reported as failed must not have been applied on the other node
        if (success) {
            assertSameState(node.clusterState, state);
        } else {
            assertThat(node.clusterState.stateUUID(), not(equalTo(state.stateUUID())));
        }
    }
}
/**
 * Derives new metadata from {@code metaData} for the given version: adds an index named
 * "test&lt;version&gt;" with that many shards and records the version in a transient setting.
 */
private MetaData buildMetaDataForVersion(MetaData metaData, long version) {
    final String indexName = "test" + version;
    final IndexMetaData indexForVersion = IndexMetaData.builder(indexName)
        .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
        .numberOfShards((int) version).numberOfReplicas(0).build();
    final ImmutableOpenMap.Builder<String, IndexMetaData> indices = ImmutableOpenMap.builder(metaData.indices());
    indices.put(indexName, indexForVersion);
    return MetaData.builder(metaData)
        .transientSettings(Settings.builder().put("test", version).build())
        .indices(indices.build())
        .build();
}
/**
 * Asserts the metadata matches what {@code buildMetaDataForVersion} produces for
 * {@code version}: indices "test1".."test&lt;version&gt;" exist with matching shard counts,
 * the next index does not, and the transient "test" setting equals the version.
 */
private void assertProperMetaDataForVersion(MetaData metaData, long version) {
    for (long v = 1; v <= version; v++) {
        final IndexMetaData index = metaData.index("test" + v);
        assertThat(index, notNullValue());
        assertThat(index.getNumberOfShards(), equalTo((int) v));
    }
    assertThat(metaData.index("test" + (version + 1)), nullValue());
    assertThat(metaData.transientSettings().get("test"), equalTo(Long.toString(version)));
}
/** Publishes the state change and blocks up to one second for every node to ack without error. */
public void publishStateAndWait(PublishClusterStateAction action, ClusterState state,
                                ClusterState previousState) throws InterruptedException {
    final AssertingAckListener ackListener = publishState(action, state, previousState);
    ackListener.await(1, TimeUnit.SECONDS);
}
/** Publishes with a randomized min_master_nodes, from "disabled" (-1) up to the number of master-eligible nodes. */
public AssertingAckListener publishState(PublishClusterStateAction action, ClusterState state,
                                         ClusterState previousState) throws InterruptedException {
    final int masterEligible = state.nodes().getMasterNodes().size();
    return publishState(action, state, previousState, randomIntBetween(-1, masterEligible));
}
/** Publishes the state change and returns a listener expecting an ack from every node except the publisher. */
public AssertingAckListener publishState(PublishClusterStateAction action, ClusterState state,
                                         ClusterState previousState, int minMasterNodes) throws InterruptedException {
    // every node in the state acks except the publishing node itself
    final AssertingAckListener ackListener = new AssertingAckListener(state.nodes().getSize() - 1);
    action.publish(new ClusterChangedEvent("test update", state, previousState), minMasterNodes, ackListener);
    return ackListener;
}
/**
 * Ack listener that records per-node errors and lets tests await all acks; awaiting
 * also asserts that no publish timeout occurred and (via {@link #await}) that no node
 * reported an error.
 */
public static class AssertingAckListener implements Discovery.AckListener {
    private final List<Tuple<DiscoveryNode, Throwable>> errors = new CopyOnWriteArrayList<>();
    private final AtomicBoolean timeoutOccurred = new AtomicBoolean();
    private final CountDownLatch countDown;
    public AssertingAckListener(int nodeCount) {
        countDown = new CountDownLatch(nodeCount);
    }
    @Override
    public void onNodeAck(DiscoveryNode node, @Nullable Exception e) {
        if (e != null) {
            errors.add(new Tuple<>(node, e));
        }
        countDown.countDown();
    }
    @Override
    public void onTimeout() {
        timeoutOccurred.set(true);
        // Fast forward the counter - no reason to wait here
        long currentCount = countDown.getCount();
        for (long i = 0; i < currentCount; i++) {
            countDown.countDown();
        }
    }
    /** Waits for all acks and asserts that none carried an error. */
    public void await(long timeout, TimeUnit unit) throws InterruptedException {
        assertThat(awaitErrors(timeout, unit), emptyIterable());
    }
    /** Waits for all acks and returns the collected errors (empty if all nodes acked cleanly). */
    public List<Tuple<DiscoveryNode, Throwable>> awaitErrors(long timeout, TimeUnit unit) throws InterruptedException {
        // NOTE(review): the boolean result of await() is ignored — if the wait elapses without
        // onTimeout() firing, the error list may be incomplete; consider asserting the result.
        countDown.await(timeout, unit);
        assertFalse(timeoutOccurred.get());
        return errors;
    }
}
/** Asserts both states share the same state UUID and version (proxy for identity of the published state). */
void assertSameState(ClusterState actual, ClusterState expected) {
    assertThat(actual, notNullValue());
    final String context =
        "\n--> actual ClusterState: " + actual + "\n" + "--> expected ClusterState:" + expected;
    assertThat("unequal UUIDs" + context, actual.stateUUID(), equalTo(expected.stateUUID()));
    assertThat("unequal versions" + context, actual.version(), equalTo(expected.version()));
}
/** Asserts the states match and that {@code actual} was applied as a diff. */
void assertSameStateFromDiff(ClusterState actual, ClusterState expected) {
    assertSameState(actual, expected);
    assertTrue(actual.wasReadFromDiff());
}
/** Asserts the states match and that {@code actual} was applied as a full state, not a diff. */
void assertSameStateFromFull(ClusterState actual, ClusterState expected) {
    assertSameState(actual, expected);
    assertFalse(actual.wasReadFromDiff());
}
/**
 * Publish action whose send/commit handlers can be made to fail (throw) or time out
 * (silently drop the request) via atomic flags, to simulate misbehaving nodes.
 */
public static class MockPublishAction extends PublishClusterStateAction {
    AtomicBoolean timeoutOnSend = new AtomicBoolean();
    AtomicBoolean errorOnSend = new AtomicBoolean();
    AtomicBoolean timeoutOnCommit = new AtomicBoolean();
    AtomicBoolean errorOnCommit = new AtomicBoolean();
    public MockPublishAction(Settings settings, TransportService transportService, NamedWriteableRegistry namedWriteableRegistry,
                             IncomingClusterStateListener listener, DiscoverySettings discoverySettings) {
        super(settings, transportService, namedWriteableRegistry, listener, discoverySettings);
    }
    @Override
    protected void handleIncomingClusterStateRequest(BytesTransportRequest request, TransportChannel channel) throws IOException {
        if (errorOnSend.get()) {
            throw new ElasticsearchException("forced error on incoming cluster state");
        }
        // returning without responding simulates a node that never answers (timeout)
        if (timeoutOnSend.get()) {
            return;
        }
        super.handleIncomingClusterStateRequest(request, channel);
    }
    @Override
    protected void handleCommitRequest(PublishClusterStateAction.CommitClusterStateRequest request, TransportChannel channel) {
        if (errorOnCommit.get()) {
            throw new ElasticsearchException("forced error on incoming commit");
        }
        // returning without responding simulates a commit that never completes (timeout)
        if (timeoutOnCommit.get()) {
            return;
        }
        super.handleCommitRequest(request, channel);
    }
}
/**
 * Transport channel that captures the last response or error instead of sending
 * anything over the wire, so tests can inspect what a handler produced.
 */
static class CapturingTransportChannel implements TransportChannel {
    AtomicReference<TransportResponse> response = new AtomicReference<>();
    AtomicReference<Throwable> error = new AtomicReference<>();

    /** Forgets any previously captured response/error so the channel can be reused. */
    public void clear() {
        response.set(null);
        error.set(null);
    }

    @Override
    public String action() {
        return "_noop_";
    }

    @Override
    public String getProfileName() {
        return "_noop_";
    }

    @Override
    public void sendResponse(TransportResponse response) throws IOException {
        this.response.set(response);
        // a channel must deliver either a response or an error, never both
        assertThat(error.get(), nullValue());
    }

    @Override
    public void sendResponse(TransportResponse response, TransportResponseOptions options) throws IOException {
        // options are irrelevant when only capturing; delegate to the plain overload
        sendResponse(response);
    }

    @Override
    public void sendResponse(Exception exception) throws IOException {
        this.error.set(exception);
        assertThat(response.get(), nullValue());
    }

    @Override
    public long getRequestId() {
        return 0;
    }

    @Override
    public String getChannelType() {
        return "capturing";
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager.webapp.dao;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlRootElement;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.server.api.records.OpportunisticContainersStatus;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
import com.google.common.annotations.VisibleForTesting;
/**
 * JAXB DAO describing one cluster node for the ResourceManager web services.
 * Field access is used for XML/JSON binding, so field names are part of the
 * wire format and must not be renamed.
 */
@XmlRootElement(name = "node")
@XmlAccessorType(XmlAccessType.FIELD)
public class NodeInfo {
  protected String rack;
  protected NodeState state;
  private String id;
  protected String nodeHostName;
  protected String nodeHTTPAddress;
  private long lastHealthUpdate;
  protected String version;
  protected String healthReport;
  protected int numContainers;
  protected long usedMemoryMB;
  protected long availMemoryMB;
  protected long usedVirtualCores;
  protected long availableVirtualCores;
  private int numRunningOpportContainers;
  private long usedMemoryOpportGB;
  private long usedVirtualCoresOpport;
  private int numQueuedContainers;
  protected ArrayList<String> nodeLabels = new ArrayList<String>();
  private AllocationTagsInfo allocationTags;
  protected ResourceUtilizationInfo resourceUtilization;
  protected ResourceInfo usedResource;
  protected ResourceInfo availableResource;
  protected NodeAttributesInfo nodeAttributesInfo;

  public NodeInfo() {
  } // JAXB needs this

  /**
   * Builds the report from the RM's view of the node plus, when available,
   * the scheduler's per-node resource report.
   *
   * @param ni the RM-side node record
   * @param sched scheduler queried for the node's resource report
   */
  public NodeInfo(RMNode ni, ResourceScheduler sched) {
    NodeId id = ni.getNodeID();
    SchedulerNodeReport report = sched.getNodeReport(id);
    // The scheduler may not know this node yet (e.g. just registered); zeros then.
    this.numContainers = 0;
    this.usedMemoryMB = 0;
    this.availMemoryMB = 0;
    if (report != null) {
      this.numContainers = report.getNumContainers();
      this.usedMemoryMB = report.getUsedResource().getMemorySize();
      this.availMemoryMB = report.getAvailableResource().getMemorySize();
      this.usedVirtualCores = report.getUsedResource().getVirtualCores();
      this.availableVirtualCores =
          report.getAvailableResource().getVirtualCores();
      this.usedResource = new ResourceInfo(report.getUsedResource());
      this.availableResource = new ResourceInfo(report.getAvailableResource());
    }
    this.id = id.toString();
    this.rack = ni.getRackName();
    this.nodeHostName = ni.getHostName();
    this.state = ni.getState();
    this.nodeHTTPAddress = ni.getHttpAddress();
    this.lastHealthUpdate = ni.getLastHealthReportTime();
    // String.valueOf keeps a null health report rendering as "null" (existing wire behavior)
    this.healthReport = String.valueOf(ni.getHealthReport());
    this.version = ni.getNodeManagerVersion();
    // Status of opportunistic containers.
    this.numRunningOpportContainers = 0;
    this.usedMemoryOpportGB = 0;
    this.usedVirtualCoresOpport = 0;
    this.numQueuedContainers = 0;
    OpportunisticContainersStatus opportStatus =
        ni.getOpportunisticContainersStatus();
    if (opportStatus != null) {
      this.numRunningOpportContainers =
          opportStatus.getRunningOpportContainers();
      this.usedMemoryOpportGB = opportStatus.getOpportMemoryUsed();
      this.usedVirtualCoresOpport = opportStatus.getOpportCoresUsed();
      this.numQueuedContainers = opportStatus.getQueuedOpportContainers();
    }
    // add labels (sorted for a stable response)
    Set<String> labelSet = ni.getNodeLabels();
    if (labelSet != null) {
      nodeLabels.addAll(labelSet);
      Collections.sort(nodeLabels);
    }
    // add attributes; guard against null like labels and allocation tags above
    Set<NodeAttribute> attrs = ni.getAllNodeAttributes();
    nodeAttributesInfo = new NodeAttributesInfo();
    if (attrs != null) {
      for (NodeAttribute attribute : attrs) {
        NodeAttributeInfo info = new NodeAttributeInfo(attribute);
        this.nodeAttributesInfo.addNodeAttributeInfo(info);
      }
    }
    // add allocation tags
    allocationTags = new AllocationTagsInfo();
    Map<String, Long> allocationTagsInfo = ni.getAllocationTagsWithCount();
    if (allocationTagsInfo != null) {
      allocationTagsInfo.forEach((tag, count) ->
          allocationTags.addAllocationTag(new AllocationTagInfo(tag, count)));
    }
    // update node and containers resource utilization
    this.resourceUtilization = new ResourceUtilizationInfo(ni);
  }

  public String getRack() {
    return this.rack;
  }

  public String getState() {
    return String.valueOf(this.state);
  }

  public String getNodeId() {
    return this.id;
  }

  public String getNodeHTTPAddress() {
    return this.nodeHTTPAddress;
  }

  public void setNodeHTTPAddress(String nodeHTTPAddress) {
    this.nodeHTTPAddress = nodeHTTPAddress;
  }

  public long getLastHealthUpdate() {
    return this.lastHealthUpdate;
  }

  public String getVersion() {
    return this.version;
  }

  public String getHealthReport() {
    return this.healthReport;
  }

  public int getNumContainers() {
    return this.numContainers;
  }

  public long getUsedMemory() {
    return this.usedMemoryMB;
  }

  public long getAvailableMemory() {
    return this.availMemoryMB;
  }

  public long getUsedVirtualCores() {
    return this.usedVirtualCores;
  }

  public long getAvailableVirtualCores() {
    return this.availableVirtualCores;
  }

  public int getNumRunningOpportContainers() {
    return numRunningOpportContainers;
  }

  public long getUsedMemoryOpportGB() {
    return usedMemoryOpportGB;
  }

  public long getUsedVirtualCoresOpport() {
    return usedVirtualCoresOpport;
  }

  public int getNumQueuedContainers() {
    return numQueuedContainers;
  }

  public ArrayList<String> getNodeLabels() {
    return this.nodeLabels;
  }

  public ResourceInfo getUsedResource() {
    return usedResource;
  }

  public void setUsedResource(ResourceInfo used) {
    this.usedResource = used;
  }

  public ResourceInfo getAvailableResource() {
    return availableResource;
  }

  public void setAvailableResource(ResourceInfo avail) {
    this.availableResource = avail;
  }

  public ResourceUtilizationInfo getResourceUtilization() {
    return this.resourceUtilization;
  }

  public String getAllocationTagsSummary() {
    return this.allocationTags == null ? "" :
        this.allocationTags.toString();
  }

  @VisibleForTesting
  public void setId(String id) {
    this.id = id;
  }

  @VisibleForTesting
  public void setLastHealthUpdate(long lastHealthUpdate) {
    this.lastHealthUpdate = lastHealthUpdate;
  }
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.fileTemplates.impl;
import com.intellij.icons.AllIcons;
import com.intellij.ide.IdeBundle;
import com.intellij.ide.fileTemplates.*;
import com.intellij.ide.util.PropertiesComponent;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.actionSystem.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.components.ComponentsPackage;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.extensions.Extensions;
import com.intellij.openapi.options.Configurable;
import com.intellij.openapi.options.ConfigurationException;
import com.intellij.openapi.options.SearchableConfigurable;
import com.intellij.openapi.options.ShowSettingsUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.Splitter;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.Disposer;
import com.intellij.ui.IdeBorderFactory;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.TabbedPaneWrapper;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Function;
import com.intellij.util.PlatformIcons;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.ui.JBUI;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import java.awt.*;
import java.net.URL;
import java.text.MessageFormat;
import java.util.*;
import java.util.List;
import static com.intellij.ide.fileTemplates.FileTemplateManager.*;
/*
 * @author MYakovlev
 * Date: Jul 26, 2002
 * Time: 12:44:56 PM
 */
public class AllFileTemplatesConfigurable implements SearchableConfigurable, Configurable.NoMargin, Configurable.NoScroll {
private static final Logger LOG = Logger.getInstance("#com.intellij.ide.fileTemplates.impl.AllFileTemplatesConfigurable");
private static final String TEMPLATES_TITLE = IdeBundle.message("tab.filetemplates.templates");
private static final String INCLUDES_TITLE = IdeBundle.message("tab.filetemplates.includes");
private static final String CODE_TITLE = IdeBundle.message("tab.filetemplates.code");
private static final String OTHER_TITLE = IdeBundle.message("tab.filetemplates.j2ee");
private final Project myProject;
private final FileTemplateManager myManager;
private JPanel myMainPanel;
private FileTemplateTab myCurrentTab;
private FileTemplateTab myTemplatesList;
private FileTemplateTab myIncludesList;
private FileTemplateTab myCodeTemplatesList;
@Nullable
private FileTemplateTab myOtherTemplatesList;
private JComponent myToolBar;
private TabbedPaneWrapper myTabbedPane;
private FileTemplateConfigurable myEditor;
private boolean myModified = false;
private JComponent myEditorComponent;
private JPanel myLeftPanel;
private FileTemplateTab[] myTabs;
private Disposable myUIDisposable;
private final Set<String> myInternalTemplateNames;
private FileTemplatesScheme myScheme;
private final Map<FileTemplatesScheme, Map<String, FileTemplate[]>> myChangesCache =
new HashMap<FileTemplatesScheme, Map<String, FileTemplate[]>>();
private static final String CURRENT_TAB = "FileTemplates.CurrentTab";
private static final String SELECTED_TEMPLATE = "FileTemplates.SelectedTemplate";
public AllFileTemplatesConfigurable(Project project) {
myProject = project;
myManager = getInstance(project);
myScheme = myManager.getCurrentScheme();
myInternalTemplateNames = ContainerUtil.map2Set(myManager.getInternalTemplates(), new Function<FileTemplate, String>() {
@Override
public String fun(FileTemplate template) {
return template.getName();
}
});
}
private void onRemove() {
myCurrentTab.removeSelected();
myModified = true;
}
private void onAdd() {
String ext = "java";
final FileTemplateDefaultExtension[] defaultExtensions = Extensions.getExtensions(FileTemplateDefaultExtension.EP_NAME);
if (defaultExtensions.length > 0) {
ext = defaultExtensions[0].value;
}
createTemplate(IdeBundle.message("template.unnamed"), ext, "");
}
private FileTemplate createTemplate(final @NotNull String prefName, final @NotNull String extension, final @NotNull String content) {
final FileTemplate[] templates = myCurrentTab.getTemplates();
final FileTemplate newTemplate = FileTemplateUtil.createTemplate(prefName, extension, content, templates);
myCurrentTab.addTemplate(newTemplate);
myModified = true;
myCurrentTab.selectTemplate(newTemplate);
fireListChanged();
myEditor.focusToNameField();
return newTemplate;
}
private void onClone() {
try {
myEditor.apply();
}
catch (ConfigurationException ignore) {
}
final FileTemplate selected = myCurrentTab.getSelectedTemplate();
if (selected == null) {
return;
}
final FileTemplate[] templates = myCurrentTab.getTemplates();
final Set<String> names = new HashSet<String>();
for (FileTemplate template : templates) {
names.add(template.getName());
}
@SuppressWarnings({"UnresolvedPropertyKey"})
final String nameTemplate = IdeBundle.message("template.copy.N.of.T");
String name = MessageFormat.format(nameTemplate, "", selected.getName());
int i = 0;
while (names.contains(name)) {
name = MessageFormat.format(nameTemplate, ++i + " ", selected.getName());
}
final FileTemplate newTemplate = new CustomFileTemplate(name, selected.getExtension());
newTemplate.setText(selected.getText());
newTemplate.setReformatCode(selected.isReformatCode());
myCurrentTab.addTemplate(newTemplate);
myModified = true;
myCurrentTab.selectTemplate(newTemplate);
fireListChanged();
}
@Override
public String getDisplayName() {
return IdeBundle.message("title.file.templates");
}
@Override
public String getHelpTopic() {
int index = myTabbedPane.getSelectedIndex();
switch (index) {
case 0:
return "fileTemplates.templates";
case 1:
return "fileTemplates.includes";
case 2:
return "fileTemplates.code";
case 3:
return "fileTemplates.j2ee";
default:
throw new IllegalStateException("wrong index: " + index);
}
}
@Override
public JComponent createComponent() {
myUIDisposable = Disposer.newDisposable();
myTemplatesList = new FileTemplateTabAsList(TEMPLATES_TITLE) {
@Override
public void onTemplateSelected() {
onListSelectionChanged();
}
};
myIncludesList = new FileTemplateTabAsList(INCLUDES_TITLE) {
@Override
public void onTemplateSelected() {
onListSelectionChanged();
}
};
myCodeTemplatesList = new FileTemplateTabAsList(CODE_TITLE) {
@Override
public void onTemplateSelected() {
onListSelectionChanged();
}
};
myCurrentTab = myTemplatesList;
final List<FileTemplateTab> allTabs = new ArrayList<FileTemplateTab>(Arrays.asList(myTemplatesList, myIncludesList, myCodeTemplatesList));
final Set<FileTemplateGroupDescriptorFactory> factories = new THashSet<FileTemplateGroupDescriptorFactory>();
factories.addAll(ComponentsPackage.getComponents(ApplicationManager.getApplication(), FileTemplateGroupDescriptorFactory.class));
ContainerUtil.addAll(factories, Extensions.getExtensions(FileTemplateGroupDescriptorFactory.EXTENSION_POINT_NAME));
if (!factories.isEmpty()) {
myOtherTemplatesList = new FileTemplateTabAsTree(OTHER_TITLE) {
@Override
public void onTemplateSelected() {
onListSelectionChanged();
}
@Override
protected FileTemplateNode initModel() {
SortedSet<FileTemplateGroupDescriptor> categories =
new TreeSet<FileTemplateGroupDescriptor>(new Comparator<FileTemplateGroupDescriptor>() {
@Override
public int compare(FileTemplateGroupDescriptor o1, FileTemplateGroupDescriptor o2) {
return o1.getTitle().compareTo(o2.getTitle());
}
});
for (FileTemplateGroupDescriptorFactory templateGroupFactory : factories) {
ContainerUtil.addIfNotNull(templateGroupFactory.getFileTemplatesDescriptor(), categories);
}
//noinspection HardCodedStringLiteral
return new FileTemplateNode("ROOT", null,
ContainerUtil.map2List(categories, new Function<FileTemplateGroupDescriptor, FileTemplateNode>() {
@Override
public FileTemplateNode fun(FileTemplateGroupDescriptor s) {
return new FileTemplateNode(s);
}
}));
}
};
allTabs.add(myOtherTemplatesList);
}
myEditor = new FileTemplateConfigurable(myProject);
myEditor.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
onEditorChanged();
}
});
myEditorComponent = myEditor.createComponent();
myEditorComponent.setBorder(JBUI.Borders.empty(10, 0, 10, 10));
myTabs = allTabs.toArray(new FileTemplateTab[allTabs.size()]);
myTabbedPane = new TabbedPaneWrapper(myUIDisposable);
myTabbedPane.setTabLayoutPolicy(JTabbedPane.SCROLL_TAB_LAYOUT);
myLeftPanel = new JPanel(new CardLayout());
myLeftPanel.setBorder(JBUI.Borders.empty(10, 10, 10, 0));
for (FileTemplateTab tab : myTabs) {
myLeftPanel.add(ScrollPaneFactory.createScrollPane(tab.getComponent()), tab.getTitle());
JPanel fakePanel = new JPanel();
fakePanel.setPreferredSize(new Dimension(0, 0));
myTabbedPane.addTab(tab.getTitle(), fakePanel);
}
myTabbedPane.addChangeListener(new ChangeListener() {
@Override
public void stateChanged(ChangeEvent e) {
onTabChanged();
}
});
DefaultActionGroup group = new DefaultActionGroup();
AnAction removeAction = new AnAction(IdeBundle.message("action.remove.template"), null, AllIcons.General.Remove) {
@Override
public void actionPerformed(AnActionEvent e) {
onRemove();
}
@Override
public void update(AnActionEvent e) {
super.update(e);
FileTemplate selectedItem = myCurrentTab.getSelectedTemplate();
e.getPresentation().setEnabled(selectedItem != null && !isInternalTemplate(selectedItem.getName(), myCurrentTab.getTitle()));
}
};
AnAction addAction = new AnAction(IdeBundle.message("action.create.template"), null, AllIcons.General.Add) {
@Override
public void actionPerformed(AnActionEvent e) {
onAdd();
}
@Override
public void update(AnActionEvent e) {
super.update(e);
e.getPresentation().setEnabled(!(myCurrentTab == myCodeTemplatesList || myCurrentTab == myOtherTemplatesList));
}
};
AnAction cloneAction = new AnAction(IdeBundle.message("action.copy.template"), null, PlatformIcons.COPY_ICON) {
@Override
public void actionPerformed(AnActionEvent e) {
onClone();
}
@Override
public void update(AnActionEvent e) {
super.update(e);
e.getPresentation().setEnabled(myCurrentTab != myCodeTemplatesList
&& myCurrentTab != myOtherTemplatesList
&& myCurrentTab.getSelectedTemplate() != null);
}
};
AnAction resetAction = new AnAction(IdeBundle.message("action.reset.to.default"), null, AllIcons.Actions.Reset) {
@Override
public void actionPerformed(AnActionEvent e) {
onReset();
}
@Override
public void update(AnActionEvent e) {
super.update(e);
final FileTemplate selectedItem = myCurrentTab.getSelectedTemplate();
e.getPresentation().setEnabled(selectedItem instanceof BundledFileTemplate && !selectedItem.isDefault());
}
};
group.add(addAction);
group.add(removeAction);
group.add(cloneAction);
group.add(resetAction);
addAction.registerCustomShortcutSet(CommonShortcuts.INSERT, myCurrentTab.getComponent());
removeAction.registerCustomShortcutSet(CommonShortcuts.getDelete(),
myCurrentTab.getComponent());
myToolBar = ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, group, true).getComponent();
myToolBar.setBorder(IdeBorderFactory.createEmptyBorder());
JPanel toolbarPanel = new JPanel(new BorderLayout());
toolbarPanel.add(myToolBar, BorderLayout.WEST);
JComponent schemaComponent =
ActionManager.getInstance().createActionToolbar(ActionPlaces.UNKNOWN, new DefaultCompactActionGroup(new ChangeSchemaCombo(this)), true)
.getComponent();
JPanel schemaPanel = new JPanel(new BorderLayout());
schemaPanel.add(schemaComponent, BorderLayout.EAST);
schemaPanel.add(new JLabel("Schema:"), BorderLayout.WEST);
toolbarPanel.add(schemaPanel, BorderLayout.EAST);
JPanel centerPanel = new JPanel(new BorderLayout());
centerPanel.add(myTabbedPane.getComponent(), BorderLayout.NORTH);
Splitter splitter = new Splitter(false, 0.3f);
splitter.setDividerWidth(JBUI.scale(10));
splitter.setFirstComponent(myLeftPanel);
splitter.setSecondComponent(myEditorComponent);
centerPanel.add(splitter, BorderLayout.CENTER);
myMainPanel = new JPanel(new BorderLayout());
myMainPanel.add(toolbarPanel, BorderLayout.NORTH);
myMainPanel.add(centerPanel, BorderLayout.CENTER);
final PropertiesComponent propertiesComponent = PropertiesComponent.getInstance();
final String tabName = propertiesComponent.getValue(CURRENT_TAB);
if (selectTab(tabName)) {
//final String selectedTemplateName = propertiesComponent.getValue(SELECTED_TEMPLATE);
//for (FileTemplate template : myCurrentTab.getTemplates()) {
// if (Comparing.strEqual(template.getName(), selectedTemplateName)) {
// myCurrentTab.selectTemplate(template);
// break;
// }
//}
}
return myMainPanel;
}
private void onReset() {
FileTemplate selected = myCurrentTab.getSelectedTemplate();
if (selected instanceof BundledFileTemplate) {
if (Messages.showOkCancelDialog(IdeBundle.message("prompt.reset.to.original.template"),
IdeBundle.message("title.reset.template"), Messages.getQuestionIcon()) !=
Messages.OK) {
return;
}
((BundledFileTemplate)selected).revertToDefaults();
myEditor.reset();
myModified = true;
}
}
private void onEditorChanged() {
fireListChanged();
}
private void onTabChanged() {
applyEditor(myCurrentTab.getSelectedTemplate());
final int selectedIndex = myTabbedPane.getSelectedIndex();
if (0 <= selectedIndex && selectedIndex < myTabs.length) {
myCurrentTab = myTabs[selectedIndex];
}
((CardLayout)myLeftPanel.getLayout()).show(myLeftPanel, myCurrentTab.getTitle());
onListSelectionChanged();
}
private void onListSelectionChanged() {
FileTemplate selectedValue = myCurrentTab.getSelectedTemplate();
FileTemplate prevTemplate = myEditor == null ? null : myEditor.getTemplate();
if (prevTemplate != selectedValue) {
LOG.assertTrue(myEditor != null, "selected:" + selectedValue + "; prev:" + prevTemplate);
//selection has changed
if (Arrays.asList(myCurrentTab.getTemplates()).contains(prevTemplate) && !applyEditor(prevTemplate)) {
return;
}
if (selectedValue == null) {
myEditor.setTemplate(null, FileTemplateManagerImpl.getInstanceImpl(myProject).getDefaultTemplateDescription());
myEditorComponent.repaint();
}
else {
selectTemplate(selectedValue);
}
}
}
private boolean applyEditor(FileTemplate prevTemplate) {
if (myEditor.isModified()) {
try {
myModified = true;
myEditor.apply();
fireListChanged();
}
catch (ConfigurationException e) {
if (Arrays.asList(myCurrentTab.getTemplates()).contains(prevTemplate)) {
myCurrentTab.selectTemplate(prevTemplate);
}
Messages.showErrorDialog(myMainPanel, e.getMessage(), IdeBundle.message("title.cannot.save.current.template"));
return false;
}
}
return true;
}
private void selectTemplate(FileTemplate template) {
URL defDesc = null;
if (myCurrentTab == myTemplatesList) {
defDesc = FileTemplateManagerImpl.getInstanceImpl(myProject).getDefaultTemplateDescription();
}
else if (myCurrentTab == myIncludesList) {
defDesc = FileTemplateManagerImpl.getInstanceImpl(myProject).getDefaultIncludeDescription();
}
if (myEditor.getTemplate() != template) {
myEditor.setTemplate(template, defDesc);
final boolean isInternal = template != null && isInternalTemplate(template.getName(), myCurrentTab.getTitle());
myEditor.setShowInternalMessage(isInternal ? " " : null);
myEditor.setShowAdjustCheckBox(myTemplatesList == myCurrentTab);
}
}
// internal template could not be removed and should be rendered bold
public static boolean isInternalTemplate(String templateName, String templateTabTitle) {
if (templateName == null) {
return false;
}
if (Comparing.strEqual(templateTabTitle, TEMPLATES_TITLE)) {
return isInternalTemplateName(templateName);
}
if (Comparing.strEqual(templateTabTitle, CODE_TITLE)) {
return true;
}
if (Comparing.strEqual(templateTabTitle, OTHER_TITLE)) {
return true;
}
if (Comparing.strEqual(templateTabTitle, INCLUDES_TITLE)) {
return Comparing.strEqual(templateName, FILE_HEADER_TEMPLATE_NAME);
}
return false;
}
private static boolean isInternalTemplateName(final String templateName) {
for(InternalTemplateBean bean: Extensions.getExtensions(InternalTemplateBean.EP_NAME)) {
if (Comparing.strEqual(templateName, bean.name)) {
return true;
}
}
return false;
}
private void initLists() {
FileTemplatesScheme scheme = myManager.getCurrentScheme();
myManager.setCurrentScheme(myScheme);
myTemplatesList.init(getTemplates(DEFAULT_TEMPLATES_CATEGORY));
myIncludesList.init(getTemplates(INCLUDES_TEMPLATES_CATEGORY));
myCodeTemplatesList.init(getTemplates(CODE_TEMPLATES_CATEGORY));
if (myOtherTemplatesList != null) {
myOtherTemplatesList.init(getTemplates(J2EE_TEMPLATES_CATEGORY));
}
myManager.setCurrentScheme(scheme);
}
private FileTemplate[] getTemplates(String category) {
Map<String, FileTemplate[]> templates = myChangesCache.get(myScheme);
if (templates == null) {
return myManager.getTemplates(category);
}
else {
return templates.get(category);
}
}
@Override
public boolean isModified() {
return myScheme != myManager.getCurrentScheme() || !myChangesCache.isEmpty() || isSchemeModified();
}
private boolean isSchemeModified() {
return myModified || myEditor != null && myEditor.isModified();
}
private void checkCanApply(FileTemplateTab list) throws ConfigurationException {
final FileTemplate[] templates = myCurrentTab.getTemplates();
final List<String> allNames = new ArrayList<String>();
FileTemplate itemWithError = null;
boolean errorInName = true;
String errorString = null;
for (FileTemplate template : templates) {
final String currName = template.getName();
final String currExt = template.getExtension();
if (currName.length() == 0) {
itemWithError = template;
errorString = IdeBundle.message("error.please.specify.template.name");
break;
}
if (allNames.contains(currName)) {
itemWithError = template;
errorString = "Template with name \'" + currName + "\' already exists. Please specify a different template name";
break;
}
if (currExt.length() == 0) {
itemWithError = template;
errorString = IdeBundle.message("error.please.specify.template.extension");
errorInName = false;
break;
}
allNames.add(currName);
}
if (itemWithError != null) {
final boolean _errorInName = errorInName;
myTabbedPane.setSelectedIndex(Arrays.asList(myTabs).indexOf(list));
selectTemplate(itemWithError);
list.selectTemplate(itemWithError);
ApplicationManager.getApplication().invokeLater(new Runnable() {
@Override
public void run() {
if (_errorInName) {
myEditor.focusToNameField();
}
else {
myEditor.focusToExtensionField();
}
}
});
throw new ConfigurationException(errorString);
}
}
private void fireListChanged() {
if (myCurrentTab != null) {
myCurrentTab.fireDataChanged();
}
if (myMainPanel != null) {
myMainPanel.revalidate();
}
}
@Override
public void apply() throws ConfigurationException {
if (myEditor != null && myEditor.isModified()) {
myModified = true;
myEditor.apply();
}
for (FileTemplateTab list : myTabs) {
checkCanApply(list);
}
updateCache();
for (Map.Entry<FileTemplatesScheme, Map<String, FileTemplate[]>> entry : myChangesCache.entrySet()) {
myManager.setCurrentScheme(entry.getKey());
myManager.setTemplates(DEFAULT_TEMPLATES_CATEGORY, Arrays.asList(entry.getValue().get(DEFAULT_TEMPLATES_CATEGORY)));
myManager.setTemplates(INTERNAL_TEMPLATES_CATEGORY, Arrays.asList(entry.getValue().get(INTERNAL_TEMPLATES_CATEGORY)));
myManager.setTemplates(INCLUDES_TEMPLATES_CATEGORY, Arrays.asList(entry.getValue().get(INCLUDES_TEMPLATES_CATEGORY)));
myManager.setTemplates(CODE_TEMPLATES_CATEGORY, Arrays.asList(entry.getValue().get(CODE_TEMPLATES_CATEGORY)));
myManager.setTemplates(J2EE_TEMPLATES_CATEGORY, Arrays.asList(entry.getValue().get(J2EE_TEMPLATES_CATEGORY)));
}
myChangesCache.clear();
myManager.setCurrentScheme(myScheme);
if (myEditor != null) {
myModified = false;
fireListChanged();
}
}
public void selectTemplatesTab() {
selectTab(TEMPLATES_TITLE);
}
private boolean selectTab(String tabName) {
int idx = 0;
for (FileTemplateTab tab : myTabs) {
if (Comparing.strEqual(tab.getTitle(), tabName)) {
myCurrentTab = tab;
myTabbedPane.setSelectedIndex(idx);
return true;
}
idx++;
}
return false;
}
@Override
public void reset() {
myEditor.reset();
changeScheme(myManager.getCurrentScheme());
myChangesCache.clear();
myModified = false;
}
@Override
public void disposeUIResources() {
if (myCurrentTab != null) {
final PropertiesComponent propertiesComponent = PropertiesComponent.getInstance();
propertiesComponent.setValue(CURRENT_TAB, myCurrentTab.getTitle());
final FileTemplate template = myCurrentTab.getSelectedTemplate();
if (template != null) {
propertiesComponent.setValue(SELECTED_TEMPLATE, template.getName());
}
}
if (myEditor != null) {
myEditor.disposeUIResources();
myEditor = null;
myEditorComponent = null;
}
myMainPanel = null;
if (myUIDisposable != null) {
Disposer.dispose(myUIDisposable);
myUIDisposable = null;
}
myTabbedPane = null;
myToolBar = null;
myTabs = null;
myCurrentTab = null;
myTemplatesList = null;
myCodeTemplatesList = null;
myIncludesList = null;
myOtherTemplatesList = null;
}
public FileTemplate createNewTemplate(@NotNull String preferredName, @NotNull String extension, @NotNull String text) {
return createTemplate(preferredName, extension, text);
}
@Override
@NotNull
public String getId() {
return "fileTemplates";
}
@Override
@Nullable
public Runnable enableSearch(String option) {
return null;
}
public static void editCodeTemplate(@NotNull final String templateId, Project project) {
final ShowSettingsUtil util = ShowSettingsUtil.getInstance();
final AllFileTemplatesConfigurable configurable = new AllFileTemplatesConfigurable(project);
util.editConfigurable(project, configurable, new Runnable() {
@Override
public void run() {
configurable.myTabbedPane.setSelectedIndex(ArrayUtil.indexOf(configurable.myTabs, configurable.myCodeTemplatesList));
for (FileTemplate template : configurable.myCodeTemplatesList.getTemplates()) {
if (Comparing.equal(templateId, template.getName())) {
configurable.myCodeTemplatesList.selectTemplate(template);
break;
}
}
}
});
}
public void changeScheme(FileTemplatesScheme scheme) {
if (myEditor != null && myEditor.isModified()) {
myModified = true;
try {
myEditor.apply();
}
catch (ConfigurationException e) {
Messages.showErrorDialog(myEditorComponent, e.getMessage(), e.getTitle());
return;
}
}
updateCache();
myScheme = scheme;
initLists();
}
/**
 * Snapshots the currently edited templates of the active scheme into
 * {@code myChangesCache}, keyed by scheme, so that unsaved modifications
 * survive a scheme switch. Does nothing when the scheme has no unsaved
 * modifications or a snapshot for it already exists.
 */
@SuppressWarnings("ToArrayCallWithZeroLengthArrayArgument")
private void updateCache() {
  if (isSchemeModified()) {
    if (!myChangesCache.containsKey(myScheme)) {
      Map<String, FileTemplate[]> templates = new HashMap<String, FileTemplate[]>();
      FileTemplate[] allTemplates = myTemplatesList.getTemplates();
      // The main templates tab mixes regular and internal templates; split
      // them into separate cache categories by template name.
      templates.put(DEFAULT_TEMPLATES_CATEGORY, ContainerUtil.filter(allTemplates, new Condition<FileTemplate>() {
        @Override
        public boolean value(FileTemplate template) {
          return !myInternalTemplateNames.contains(template.getName());
        }
      }).toArray(FileTemplate.EMPTY_ARRAY));
      templates.put(INTERNAL_TEMPLATES_CATEGORY, ContainerUtil.filter(allTemplates, new Condition<FileTemplate>() {
        @Override
        public boolean value(FileTemplate template) {
          return myInternalTemplateNames.contains(template.getName());
        }
      }).toArray(FileTemplate.EMPTY_ARRAY));
      templates.put(INCLUDES_TEMPLATES_CATEGORY, myIncludesList.getTemplates());
      templates.put(CODE_TEMPLATES_CATEGORY, myCodeTemplatesList.getTemplates());
      // The "Other" tab may be absent when no J2EE templates are installed.
      templates.put(J2EE_TEMPLATES_CATEGORY, myOtherTemplatesList == null ? FileTemplate.EMPTY_ARRAY : myOtherTemplatesList.getTemplates());
      myChangesCache.put(myScheme, templates);
    }
  }
}
/** Returns the file-template manager this configurable operates on. */
public FileTemplateManager getManager() {
  return myManager;
}
/** Returns the currently active file-templates scheme. */
public FileTemplatesScheme getCurrentScheme() {
  return myScheme;
}
/** Exposes the template editor for tests only. */
@TestOnly
FileTemplateConfigurable getEditor() {
  return myEditor;
}
/** Exposes the tab array for tests only. */
@TestOnly
FileTemplateTab[] getTabs() {
  return myTabs;
}
}
| |
/*
* Licensed to Metamarkets Group Inc. (Metamarkets) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. Metamarkets licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package io.druid.client;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import com.google.common.collect.Ordering;
import com.google.inject.Inject;
import com.metamx.common.logger.Logger;
import com.metamx.emitter.service.ServiceEmitter;
import com.metamx.http.client.HttpClient;
import io.druid.client.selector.QueryableDruidServer;
import io.druid.client.selector.ServerSelector;
import io.druid.client.selector.TierSelectorStrategy;
import io.druid.concurrent.Execs;
import io.druid.guice.annotations.Client;
import io.druid.guice.annotations.Smile;
import io.druid.query.DataSource;
import io.druid.query.QueryRunner;
import io.druid.query.QueryToolChestWarehouse;
import io.druid.query.QueryWatcher;
import io.druid.server.coordination.DruidServerMetadata;
import io.druid.timeline.DataSegment;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.PartitionChunk;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ExecutorService;
/**
*/
public class BrokerServerView implements TimelineServerView
{
private static final Logger log = new Logger(BrokerServerView.class);
private final Object lock = new Object();
private final ConcurrentMap<String, QueryableDruidServer> clients;
private final Map<String, ServerSelector> selectors;
private final Map<String, VersionedIntervalTimeline<String, ServerSelector>> timelines;
private final QueryToolChestWarehouse warehouse;
private final QueryWatcher queryWatcher;
private final ObjectMapper smileMapper;
private final HttpClient httpClient;
private final ServerInventoryView baseView;
private final TierSelectorStrategy tierSelectorStrategy;
private final ServiceEmitter emitter;
private volatile boolean initialized = false;
@Inject
public BrokerServerView(
QueryToolChestWarehouse warehouse,
QueryWatcher queryWatcher,
@Smile ObjectMapper smileMapper,
@Client HttpClient httpClient,
ServerInventoryView baseView,
TierSelectorStrategy tierSelectorStrategy,
ServiceEmitter emitter
)
{
this.warehouse = warehouse;
this.queryWatcher = queryWatcher;
this.smileMapper = smileMapper;
this.httpClient = httpClient;
this.baseView = baseView;
this.tierSelectorStrategy = tierSelectorStrategy;
this.emitter = emitter;
this.clients = Maps.newConcurrentMap();
this.selectors = Maps.newHashMap();
this.timelines = Maps.newHashMap();
ExecutorService exec = Execs.singleThreaded("BrokerServerView-%s");
baseView.registerSegmentCallback(
exec,
new ServerView.SegmentCallback()
{
@Override
public ServerView.CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment)
{
serverAddedSegment(server, segment);
return ServerView.CallbackAction.CONTINUE;
}
@Override
public ServerView.CallbackAction segmentRemoved(final DruidServerMetadata server, DataSegment segment)
{
serverRemovedSegment(server, segment);
return ServerView.CallbackAction.CONTINUE;
}
@Override
public CallbackAction segmentViewInitialized()
{
initialized = true;
return ServerView.CallbackAction.CONTINUE;
}
}
);
baseView.registerServerCallback(
exec,
new ServerView.ServerCallback()
{
@Override
public ServerView.CallbackAction serverRemoved(DruidServer server)
{
removeServer(server);
return ServerView.CallbackAction.CONTINUE;
}
}
);
}
public boolean isInitialized()
{
return initialized;
}
public void clear()
{
synchronized (lock) {
final Iterator<String> clientsIter = clients.keySet().iterator();
while (clientsIter.hasNext()) {
clientsIter.remove();
}
timelines.clear();
final Iterator<ServerSelector> selectorsIter = selectors.values().iterator();
while (selectorsIter.hasNext()) {
final ServerSelector selector = selectorsIter.next();
selectorsIter.remove();
while (!selector.isEmpty()) {
final QueryableDruidServer pick = selector.pick();
selector.removeServer(pick);
}
}
}
}
private QueryableDruidServer addServer(DruidServer server)
{
QueryableDruidServer retVal = new QueryableDruidServer(server, makeDirectClient(server));
QueryableDruidServer exists = clients.put(server.getName(), retVal);
if (exists != null) {
log.warn("QueryRunner for server[%s] already existed!? Well it's getting replaced", server);
}
return retVal;
}
private DirectDruidClient makeDirectClient(DruidServer server)
{
return new DirectDruidClient(warehouse, queryWatcher, smileMapper, httpClient, server.getHost(), emitter);
}
private QueryableDruidServer removeServer(DruidServer server)
{
for (DataSegment segment : server.getSegments().values()) {
serverRemovedSegment(server.getMetadata(), segment);
}
return clients.remove(server.getName());
}
private void serverAddedSegment(final DruidServerMetadata server, final DataSegment segment)
{
String segmentId = segment.getIdentifier();
synchronized (lock) {
log.debug("Adding segment[%s] for server[%s]", segment, server);
ServerSelector selector = selectors.get(segmentId);
if (selector == null) {
selector = new ServerSelector(segment, tierSelectorStrategy);
VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.get(segment.getDataSource());
if (timeline == null) {
timeline = new VersionedIntervalTimeline<>(Ordering.natural());
timelines.put(segment.getDataSource(), timeline);
}
timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector));
selectors.put(segmentId, selector);
}
QueryableDruidServer queryableDruidServer = clients.get(server.getName());
if (queryableDruidServer == null) {
queryableDruidServer = addServer(baseView.getInventoryValue(server.getName()));
}
selector.addServerAndUpdateSegment(queryableDruidServer, segment);
}
}
private void serverRemovedSegment(DruidServerMetadata server, DataSegment segment)
{
String segmentId = segment.getIdentifier();
final ServerSelector selector;
synchronized (lock) {
log.debug("Removing segment[%s] from server[%s].", segmentId, server);
selector = selectors.get(segmentId);
if (selector == null) {
log.warn("Told to remove non-existant segment[%s]", segmentId);
return;
}
QueryableDruidServer queryableDruidServer = clients.get(server.getName());
if (!selector.removeServer(queryableDruidServer)) {
log.warn(
"Asked to disassociate non-existant association between server[%s] and segment[%s]",
server,
segmentId
);
}
if (selector.isEmpty()) {
VersionedIntervalTimeline<String, ServerSelector> timeline = timelines.get(segment.getDataSource());
selectors.remove(segmentId);
final PartitionChunk<ServerSelector> removedPartition = timeline.remove(
segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(selector)
);
if (removedPartition == null) {
log.warn(
"Asked to remove timeline entry[interval: %s, version: %s] that doesn't exist",
segment.getInterval(),
segment.getVersion()
);
}
}
}
}
@Override
public VersionedIntervalTimeline<String, ServerSelector> getTimeline(DataSource dataSource)
{
String table = Iterables.getOnlyElement(dataSource.getNames());
synchronized (lock) {
return timelines.get(table);
}
}
@Override
public <T> QueryRunner<T> getQueryRunner(DruidServer server)
{
synchronized (lock) {
QueryableDruidServer queryableDruidServer = clients.get(server.getName());
if (queryableDruidServer == null) {
log.error("WTF?! No QueryableDruidServer found for %s", server.getName());
return null;
}
return queryableDruidServer.getClient();
}
}
@Override
public void registerServerCallback(Executor exec, ServerCallback callback)
{
baseView.registerServerCallback(exec, callback);
}
@Override
public void registerSegmentCallback(Executor exec, SegmentCallback callback)
{
baseView.registerSegmentCallback(exec, callback);
}
}
| |
/**
* Copyright Pravega Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.pravega.client.admin.impl;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import io.pravega.client.ClientConfig;
import io.pravega.client.admin.KeyValueTableInfo;
import io.pravega.client.admin.StreamInfo;
import io.pravega.client.admin.StreamManager;
import io.pravega.client.connection.impl.ConnectionPool;
import io.pravega.client.connection.impl.ConnectionPoolImpl;
import io.pravega.client.connection.impl.SocketConnectionFactoryImpl;
import io.pravega.client.control.impl.Controller;
import io.pravega.client.control.impl.ControllerFailureException;
import io.pravega.client.control.impl.ControllerImpl;
import io.pravega.client.control.impl.ControllerImplConfig;
import io.pravega.client.stream.DeleteScopeFailedException;
import io.pravega.client.stream.InvalidStreamException;
import io.pravega.client.stream.ReaderGroupNotFoundException;
import io.pravega.client.stream.Stream;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.StreamCut;
import io.pravega.client.stream.impl.StreamCutImpl;
import io.pravega.common.Exceptions;
import io.pravega.common.concurrent.Futures;
import io.pravega.common.function.Callbacks;
import io.pravega.common.util.AsyncIterator;
import io.pravega.shared.NameUtils;
import lombok.extern.slf4j.Slf4j;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import lombok.Getter;
import lombok.AccessLevel;
import static io.pravega.shared.NameUtils.READER_GROUP_STREAM_PREFIX;
/**
 * A stream manager. Used to bootstrap the client.
 * <p>
 * Thin facade over the {@link Controller} client: each operation validates
 * scope/stream names where applicable, issues the corresponding controller
 * call, and blocks on the resulting future.
 */
@Slf4j
public class StreamManagerImpl implements StreamManager {
    private final Controller controller;
    @VisibleForTesting
    @Getter(AccessLevel.PACKAGE)
    private final ConnectionPool connectionPool;
    // Computes head/tail StreamCuts for getStreamInfo.
    private final StreamCutHelper streamCutHelper;
    /** Creates a manager whose controller config is derived from the client config. */
    public StreamManagerImpl(ClientConfig clientConfig) {
        this(clientConfig, ControllerImplConfig.builder().clientConfig(clientConfig).build());
    }
    @VisibleForTesting
    public StreamManagerImpl(ClientConfig clientConfig, ControllerImplConfig controllerConfig) {
        this(controllerConfig, new ConnectionPoolImpl(clientConfig, new SocketConnectionFactoryImpl(clientConfig)));
    }
    private StreamManagerImpl(ControllerImplConfig controllerConfig, ConnectionPool connectionPool) {
        this(new ControllerImpl(controllerConfig, connectionPool.getInternalExecutor()), connectionPool);
    }
    /** Creates a manager over an existing controller client and connection pool. */
    @VisibleForTesting
    public StreamManagerImpl(Controller controller, ConnectionPool connectionPool) {
        this.connectionPool = connectionPool;
        this.controller = controller;
        this.streamCutHelper = new StreamCutHelper(controller, connectionPool);
    }
    /** Creates a stream; blocks until the controller responds. */
    @Override
    public boolean createStream(String scopeName, String streamName, StreamConfiguration config) {
        NameUtils.validateUserStreamName(streamName);
        NameUtils.validateUserScopeName(scopeName);
        log.info("Creating scope/stream: {}/{} with configuration: {}", scopeName, streamName, config);
        // Only scaling policy, retention policy and tags of the supplied
        // configuration are forwarded to the controller here.
        return Futures.getThrowingException(controller.createStream(scopeName, streamName, StreamConfiguration.builder()
                .scalingPolicy(config.getScalingPolicy())
                .retentionPolicy(config.getRetentionPolicy())
                .tags(config.getTags())
                .build()));
    }
    /** Updates an existing stream's configuration; blocks until completion. */
    @Override
    public boolean updateStream(String scopeName, String streamName, StreamConfiguration config) {
        NameUtils.validateUserStreamName(streamName);
        NameUtils.validateUserScopeName(scopeName);
        log.info("Updating scope/stream: {}/{} with configuration: {}", scopeName, streamName, config);
        return Futures.getThrowingException(controller.updateStream(scopeName, streamName,
                StreamConfiguration.builder()
                        .scalingPolicy(config.getScalingPolicy())
                        .retentionPolicy(config.getRetentionPolicy())
                        .tags(config.getTags())
                        .build()));
    }
    /** Truncates a stream at the given stream cut; blocks until completion. */
    @Override
    public boolean truncateStream(String scopeName, String streamName, StreamCut streamCut) {
        NameUtils.validateUserStreamName(streamName);
        NameUtils.validateUserScopeName(scopeName);
        Preconditions.checkNotNull(streamCut);
        log.info("Truncating scope/stream: {}/{} with stream cut: {}", scopeName, streamName, streamCut);
        return Futures.getThrowingException(controller.truncateStream(scopeName, streamName, streamCut));
    }
    /** Seals a stream (no further writes); blocks until completion. */
    @Override
    public boolean sealStream(String scopeName, String streamName) {
        NameUtils.validateUserStreamName(streamName);
        NameUtils.validateUserScopeName(scopeName);
        log.info("Sealing scope/stream: {}/{}", scopeName, streamName);
        return Futures.getThrowingException(controller.sealStream(scopeName, streamName));
    }
    /** Deletes a stream; blocks until completion. */
    @Override
    public boolean deleteStream(String scopeName, String streamName) {
        NameUtils.validateUserStreamName(streamName);
        NameUtils.validateUserScopeName(scopeName);
        log.info("Deleting scope/stream: {}/{}", scopeName, streamName);
        return Futures.getThrowingException(controller.deleteStream(scopeName, streamName));
    }
    /** Lists all scopes, lazily fetched from the controller. */
    @Override
    public Iterator<String> listScopes() {
        log.info("Listing scopes");
        AsyncIterator<String> asyncIterator = controller.listScopes();
        return asyncIterator.asIterator();
    }
    /** Creates a scope; blocks until completion. */
    @Override
    public boolean createScope(String scopeName) {
        NameUtils.validateUserScopeName(scopeName);
        log.info("Creating scope: {}", scopeName);
        return Futures.getThrowingException(controller.createScope(scopeName));
    }
    /** Checks whether a scope exists; blocks until the controller responds. */
    @Override
    public boolean checkScopeExists(String scopeName) {
        log.info("Checking if scope {} exists", scopeName);
        return Futures.getThrowingException(controller.checkScopeExists(scopeName));
    }
    /** Lists streams in a scope, lazily fetched from the controller. */
    @Override
    public Iterator<Stream> listStreams(String scopeName) {
        NameUtils.validateUserScopeName(scopeName);
        log.info("Listing streams in scope: {}", scopeName);
        AsyncIterator<Stream> asyncIterator = controller.listStreams(scopeName);
        return asyncIterator.asIterator();
    }
    /** Lists streams in a scope carrying the given tag. */
    @Override
    public Iterator<Stream> listStreams(String scopeName, String tagName) {
        NameUtils.validateUserScopeName(scopeName);
        log.info("Listing streams in scope: {} which has tag: {}", scopeName, tagName);
        AsyncIterator<Stream> asyncIterator = controller.listStreamsForTag(scopeName, tagName);
        return asyncIterator.asIterator();
    }
    /** Returns the tags attached to a stream's configuration. */
    @Override
    public Collection<String> getStreamTags(String scopeName, String streamName) {
        NameUtils.validateUserScopeName(scopeName);
        NameUtils.validateUserStreamName(streamName);
        log.info("Fetching tags associated with stream: {}/{}", scopeName, streamName);
        return Futures.getThrowingException(controller.getStreamConfiguration(scopeName, streamName)
                                                      .thenApply(StreamConfiguration::getTags));
    }
    /** Checks whether a stream exists in a scope. */
    @Override
    public boolean checkStreamExists(String scopeName, String streamName) {
        log.info("Checking if stream {} exists in scope {}", streamName, scopeName);
        return Futures.getThrowingException(controller.checkStreamExists(scopeName, streamName));
    }
    /** Recursively deletes a scope and everything in it (server-side). */
    @Override
    public boolean deleteScopeRecursive(String scopeName) {
        NameUtils.validateUserScopeName(scopeName);
        log.info("Deleting scope recursively: {}", scopeName);
        return Futures.getThrowingException(controller.deleteScopeRecursive(scopeName));
    }
    /** Deletes an (empty) scope; blocks until completion. */
    @Override
    public boolean deleteScope(String scopeName) {
        NameUtils.validateUserScopeName(scopeName);
        log.info("Deleting scope: {}", scopeName);
        return Futures.getThrowingException(controller.deleteScope(scopeName));
    }
    /**
     * A new API is created hence this is going to be deprecated.
     *
     * @deprecated As of Pravega release 0.11.0, replaced by {@link #deleteScopeRecursive(String)}.
     */
    @Override
    @Deprecated
    public boolean deleteScope(String scopeName, boolean forceDelete) throws DeleteScopeFailedException {
        NameUtils.validateUserScopeName(scopeName);
        if (forceDelete) {
            log.info("Deleting scope recursively: {}", scopeName);
            // Client-side recursive delete: seal+delete every stream, delete
            // every key-value table and reader group, then delete the scope.
            List<String> readerGroupList = new ArrayList<>();
            Iterator<Stream> iterator = listStreams(scopeName);
            while (iterator.hasNext()) {
                Stream stream = iterator.next();
                if (stream.getStreamName().startsWith(READER_GROUP_STREAM_PREFIX)) {
                    // Remember reader-group-backing streams so the groups
                    // themselves can be deleted after their streams.
                    readerGroupList.add(stream.getStreamName().substring(
                            READER_GROUP_STREAM_PREFIX.length()));
                }
                try {
                    Futures.getThrowingException(Futures.exceptionallyExpecting(controller.sealStream(stream.getScope(), stream.getStreamName()),
                            e -> {
                                Throwable unwrap = Exceptions.unwrap(e);
                                // If the stream was removed by another request while we attempted to seal it, we could get InvalidStreamException.
                                // ignore failures if the stream doesn't exist or we are unable to seal it.
                                return unwrap instanceof InvalidStreamException || unwrap instanceof ControllerFailureException;
                            }, false).thenCompose(sealed -> controller.deleteStream(stream.getScope(), stream.getStreamName())));
                } catch (Exception e) {
                    String message = String.format("Failed to seal and delete stream %s", stream.getStreamName());
                    throw new DeleteScopeFailedException(message, e);
                }
            }
            Iterator<KeyValueTableInfo> kvtIterator = controller.listKeyValueTables(scopeName).asIterator();
            while (kvtIterator.hasNext()) {
                KeyValueTableInfo kvt = kvtIterator.next();
                try {
                    Futures.getThrowingException(controller.deleteKeyValueTable(scopeName, kvt.getKeyValueTableName()));
                } catch (Exception e) {
                    String message = String.format("Failed to delete key-value table %s", kvt.getKeyValueTableName());
                    throw new DeleteScopeFailedException(message, e);
                }
            }
            for (String groupName: readerGroupList) {
                try {
                    Futures.getThrowingException(controller.getReaderGroupConfig(scopeName, groupName)
                            .thenCompose(conf -> controller.deleteReaderGroup(scopeName, groupName,
                                    conf.getReaderGroupId())));
                } catch (Exception e) {
                    // A group already gone is fine; its stream was deleted above.
                    if (Exceptions.unwrap(e) instanceof ReaderGroupNotFoundException) {
                        continue;
                    }
                    String message = String.format("Failed to delete reader group %s", groupName);
                    throw new DeleteScopeFailedException(message, e);
                }
            }
        }
        return Futures.getThrowingException(controller.deleteScope(scopeName));
    }
    /** Returns stream metadata (configuration, head/tail cuts, sealed flag). */
    @Override
    public StreamInfo getStreamInfo(String scopeName, String streamName) {
        NameUtils.validateUserStreamName(streamName);
        NameUtils.validateUserScopeName(scopeName);
        log.info("Fetching StreamInfo for scope/stream: {}/{}", scopeName, streamName);
        return Futures.getThrowingException(getStreamInfo(Stream.of(scopeName, streamName)));
    }
    /**
     * Fetch the {@link StreamInfo} for a given stream.
     *
     * @param stream The Stream.
     * @return A future representing {@link StreamInfo}.
     */
    private CompletableFuture<StreamInfo> getStreamInfo(final Stream stream) {
        // Fetch the stream configuration which includes the tags associated with the stream.
        CompletableFuture<StreamConfiguration> streamConfiguration = controller.getStreamConfiguration(stream.getScope(), stream.getStreamName());
        // Fetch the stream cut representing the current TAIL and current HEAD of the stream.
        CompletableFuture<StreamCut> currentTailStreamCut = streamCutHelper.fetchTailStreamCut(stream);
        CompletableFuture<StreamCut> currentHeadStreamCut = streamCutHelper.fetchHeadStreamCut(stream);
        return CompletableFuture.allOf(streamConfiguration, currentHeadStreamCut, currentTailStreamCut)
                                .thenApply(v -> {
                                    // An empty tail-cut position set indicates a sealed stream.
                                    boolean isSealed = ((StreamCutImpl) currentTailStreamCut.join()).getPositions().isEmpty();
                                    return new StreamInfo(stream.getScope(), stream.getStreamName(), streamConfiguration.join(),
                                            currentTailStreamCut.join(), currentHeadStreamCut.join(), isSealed);
                                });
    }
    /** Closes the controller client and the connection pool. */
    @Override
    public void close() {
        if (this.controller != null) {
            Callbacks.invokeSafely(this.controller::close, ex -> log.error("Unable to close Controller client.", ex));
        }
        if (this.connectionPool != null) {
            this.connectionPool.close();
        }
    }
}
| |
/*
* #%L
* This file is part of a universal JDBC Connection factory.
* %%
* Copyright (C) 2014 - 2016 Michael Beiter <michael@beiter.org>
* %%
* All rights reserved.
* .
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the copyright holder nor the names of the
* contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
* .
* .
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
* #L%
*/
package org.beiter.michael.db;
import org.beiter.michael.db.propsbuilder.MapBasedConnPropsBuilder;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.sql.Connection;
import java.sql.SQLException;
import static org.hamcrest.Matchers.*;
import static org.junit.Assert.assertThat;
public class ConnectionFactoryDirectTest {
private static final Logger LOG = LoggerFactory.getLogger(ConnectionFactory.class);
private static final String DRIVER = H2Server.DRIVER;
private static final String URL = H2Server.URL;
private static final String USER = H2Server.USER;
private static final String PASSWORD = H2Server.PASSWORD;
private static final int POOL_MAX_CONNECTIONS = 2;
/**
* Start the in-memory database server
*
* @throws SQLException When the startup fails
*/
@BeforeClass
public static void startDbServer()
throws SQLException {
H2Server.start();
}
/**
* Stops the in-memory database server
*/
@AfterClass
public static void stopDbServer() {
H2Server.stop();
}
/**
* Initialize the database with a default database schema + values
*
* @throws SQLException When the initialization fails
*/
@Before
public void initDatabase()
throws SQLException {
H2Server.init();
ConnectionFactory.reset();
}
/**
* Test that the direct factory method does not accept a null driver
*
* @throws NullPointerException When a null driver is provided
*/
@Test(expected = NullPointerException.class)
public void directConstructorNullDriverTest() {
// Using the properties builder instead of
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver(null);
connProps.setUrl(URL);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
try {
ConnectionFactory.getConnection(connProps);
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory method does not accept a null url
*
* @throws NullPointerException When a null connection spec is provided
*/
@Test(expected = NullPointerException.class)
public void directConstructorNullConnSpecTest() {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver(DRIVER);
connProps.setUrl(null);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
try {
ConnectionFactory.getConnection(connProps);
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory method does not accept a null pool spec
*
* @throws NullPointerException When a null connection pool spec is provided
*/
@Test(expected = NullPointerException.class)
public void directConstructorNullconnPropsTest() {
ConnectionProperties connProps = null;
try {
ConnectionFactory.getConnection(connProps);
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory method does not accept an empty driver
*
* @throws IllegalArgumentException When an empty / blank driver is provided
*/
@Test(expected = IllegalArgumentException.class)
public void directConstructorEmptyDriverTest() {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver("");
connProps.setUrl(URL);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
try {
ConnectionFactory.getConnection(connProps);
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory method does not accept an empty url
*
* @throws IllegalArgumentException When an empty / blank url is provided
*/
@Test(expected = IllegalArgumentException.class)
public void directConstructorEmptyUrlTest() {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver(DRIVER);
connProps.setUrl("");
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
try {
ConnectionFactory.getConnection(connProps);
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory throws an exception when an illegal (i.e. non-existing) driver is provided
*
* @throws FactoryException When a driver is provided that does not exist in the class path
*/
@Test(expected = FactoryException.class)
public void directConstructorIllegalDriverTest()
throws FactoryException {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver("IllegalDriver");
connProps.setUrl(URL);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
ConnectionFactory.getConnection(connProps);
}
/**
* Test that the direct factory method returns a connection
*/
@Test
public void directConstructorConnectionTest() {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver(DRIVER);
connProps.setUrl(URL);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
try {
Connection con = ConnectionFactory.getConnection(connProps);
String error = "The DB connection is null";
assertThat(error, con, notNullValue());
con.close();
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
} catch (SQLException e) {
AssertionError ae = new AssertionError("Error closing the connection");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory method returns two different connections
* if called multiple times with the same pool properties
*/
@Test
public void directConstructorMultipleConnectionTest() {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver(DRIVER);
connProps.setUrl(URL);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
connProps.setMaxTotal(POOL_MAX_CONNECTIONS);
connProps.setMaxWaitMillis(0); // fail with an exception if no connections are available in the pool
try {
Connection con1 = ConnectionFactory.getConnection(connProps);
Connection con2 = ConnectionFactory.getConnection(connProps);
String error = "The DB connection 1 is null";
assertThat(error, con1, notNullValue());
error = "The DB connection 2 is null";
assertThat(error, con2, notNullValue());
error = "The DB connection 2 is same instance as DB connection 1";
assertThat(error, con2, is(not(sameInstance(con1))));
con1.close();
con2.close();
} catch (FactoryException e) {
AssertionError ae = new AssertionError("Factory error");
ae.initCause(e);
throw ae;
} catch (SQLException e) {
AssertionError ae = new AssertionError("Error closing the connection");
ae.initCause(e);
throw ae;
}
}
/**
* Test that the direct factory method does not return more connections than are available in the pool
* <p>
* Note that this test is prone to resource leaks und certain conditions, which result in the borrowed connections
* not being properly returned to the pool. This is still okay for the unit tests, because the in-memory DB server
* used for the tests is destroyed after the tests are complete.
*
* @throws FactoryException When the instantiation of the connections does not work (expected)
* @throws SQLException When the connections cannot be closed
*/
@Test(expected = FactoryException.class)
public void directConstructorMultipleConnectionExhaustedPoolTest()
throws FactoryException, SQLException {
ConnectionProperties connProps = MapBasedConnPropsBuilder.buildDefault();
connProps.setDriver(DRIVER);
connProps.setUrl(URL);
connProps.setUsername(USER);
connProps.setPassword(PASSWORD);
connProps.setMaxTotal(POOL_MAX_CONNECTIONS);
connProps.setMaxWaitMillis(0); // fail with an exception if no connections are available in the pool
Connection con1 = null;
Connection con2 = null;
Connection con3 = null;
try {
con1 = ConnectionFactory.getConnection(connProps);
con2 = ConnectionFactory.getConnection(connProps);
String error = "The DB connection 1 is null";
assertThat(error, con1, notNullValue());
error = "The DB connection 2 is null";
assertThat(error, con2, notNullValue());
error = "The DB connection 2 is same instance as DB connection 1";
assertThat(error, con2, is(not(sameInstance(con1))));
// the pool supports only 2 connections (see JNDI_MAX_CONNECTIONS)
// borrowing a third connection will result in a FactoryException because the pool is exhausted
con3 = ConnectionFactory.getConnection(connProps);
} finally {
if (con1 != null) {
LOG.debug("closing connection 'con1'");
con1.close();
LOG.debug("'con1' has been closed");
}
if (con2 != null) {
LOG.debug("closing connection 'con2'");
con2.close();
LOG.debug("'con2' has been closed");
}
if (con3 != null) {
LOG.debug("closing connection 'con3'");
con3.close();
LOG.debug("'con3' has been closed");
}
}
}
}
| |
// Copyright (C) 2015 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.acceptance.api.change;
import static com.google.common.truth.Truth.assertThat;
import static com.google.gerrit.extensions.client.SubmitType.CHERRY_PICK;
import static com.google.gerrit.extensions.client.SubmitType.FAST_FORWARD_ONLY;
import static com.google.gerrit.extensions.client.SubmitType.MERGE_ALWAYS;
import static com.google.gerrit.extensions.client.SubmitType.MERGE_IF_NECESSARY;
import static com.google.gerrit.extensions.client.SubmitType.REBASE_IF_NECESSARY;
import static org.junit.Assert.fail;
import com.google.common.collect.ImmutableList;
import com.google.gerrit.acceptance.AbstractDaemonTest;
import com.google.gerrit.acceptance.NoHttpd;
import com.google.gerrit.acceptance.PushOneCommit;
import com.google.gerrit.extensions.api.changes.ReviewInput;
import com.google.gerrit.extensions.api.projects.BranchInput;
import com.google.gerrit.extensions.client.SubmitType;
import com.google.gerrit.extensions.common.TestSubmitRuleInput;
import com.google.gerrit.extensions.restapi.ResourceConflictException;
import com.google.gerrit.extensions.restapi.RestApiException;
import com.google.gerrit.reviewdb.client.Change;
import com.google.gerrit.reviewdb.client.RefNames;
import com.google.gerrit.server.git.MetaDataUpdate;
import com.google.gerrit.server.git.VersionedMetaData;
import com.google.gerrit.testutil.ConfigSuite;
import org.eclipse.jgit.api.Git;
import org.eclipse.jgit.errors.ConfigInvalidException;
import org.eclipse.jgit.lib.CommitBuilder;
import org.eclipse.jgit.lib.Config;
import org.eclipse.jgit.lib.ObjectId;
import org.eclipse.jgit.lib.Repository;
import org.eclipse.jgit.revwalk.RevCommit;
import org.junit.Before;
import org.junit.Test;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
@NoHttpd
/**
 * Acceptance tests for per-project Prolog submit-type rules (rules.pl in
 * refs/meta/config). Verifies that a custom submit_type/1 predicate changes the
 * submit type reported by the API, is honored at submit time, and that mixing
 * submit types within one submitted batch on a single branch is rejected.
 */
public class SubmitTypeRuleIT extends AbstractDaemonTest {
    @ConfigSuite.Default
    public static Config submitWholeTopicEnabled() {
        return submitWholeTopicEnabledConfig();
    }

    /**
     * Thin VersionedMetaData wrapper around the project's rules.pl. On save the
     * candidate rule is first validated by running it against a real change via
     * the testSubmitType API; an invalid rule aborts the commit.
     */
    private class RulesPl extends VersionedMetaData {
        private static final String FILENAME = "rules.pl";
        private String rule;

        @Override
        protected String getRefName() {
            return RefNames.REFS_CONFIG;
        }

        @Override
        protected void onLoad() throws IOException, ConfigInvalidException {
            rule = readUTF8(FILENAME);
        }

        @Override
        protected boolean onSave(CommitBuilder commit)
                throws IOException, ConfigInvalidException {
            // Dry-run the rule against the fixture change before persisting it.
            TestSubmitRuleInput in = new TestSubmitRuleInput();
            in.rule = rule;
            try {
                gApi.changes().id(testChangeId.get()).current().testSubmitType(in);
            } catch (RestApiException e) {
                throw new ConfigInvalidException("Invalid submit type rule", e);
            }
            saveUTF8(FILENAME, rule);
            return true;
        }
    }

    // Monotonic counter so each pushed commit touches a distinct file.
    private AtomicInteger fileCounter;
    // Change used by RulesPl.onSave to validate candidate rules.
    private Change.Id testChangeId;

    @Before
    public void setUp() throws Exception {
        fileCounter = new AtomicInteger();
        // A dedicated "test" branch hosts the validation change so rule checks
        // don't interfere with changes created on master by the tests themselves.
        gApi.projects().name(project.get()).branch("test")
            .create(new BranchInput());
        testChangeId = createChange("test", "test change").getChange().getId();
    }

    /** Installs {@code rule} as the project's rules.pl (validated on save). */
    private void setRulesPl(String rule) throws Exception {
        try (MetaDataUpdate md = metaDataUpdateFactory.create(project)) {
            RulesPl r = new RulesPl();
            r.load(md);
            r.rule = rule;
            r.commit(md);
        }
    }

    // Rule that picks the submit type from a keyword in the commit subject,
    // falling back to the project's default submit type when none matches.
    private static final String SUBMIT_TYPE_FROM_SUBJECT =
        "submit_type(fast_forward_only) :-"
        + "gerrit:commit_message(M),"
        + "regex_matches('.*FAST_FORWARD_ONLY.*', M),"
        + "!.\n"
        + "submit_type(merge_if_necessary) :-"
        + "gerrit:commit_message(M),"
        + "regex_matches('.*MERGE_IF_NECESSARY.*', M),"
        + "!.\n"
        + "submit_type(rebase_if_necessary) :-"
        + "gerrit:commit_message(M),"
        + "regex_matches('.*REBASE_IF_NECESSARY.*', M),"
        + "!.\n"
        + "submit_type(merge_always) :-"
        + "gerrit:commit_message(M),"
        + "regex_matches('.*MERGE_ALWAYS.*', M),"
        + "!.\n"
        + "submit_type(cherry_pick) :-"
        + "gerrit:commit_message(M),"
        + "regex_matches('.*CHERRY_PICK.*', M),"
        + "!.\n"
        + "submit_type(T) :- gerrit:project_default_submit_type(T).";

    /** Pushes a new change to refs/for/{dest} with the given subject. */
    private PushOneCommit.Result createChange(String dest, String subject)
            throws Exception {
        PushOneCommit push = pushFactory.create(db, admin.getIdent(), testRepo,
            subject, "file" + fileCounter.incrementAndGet(),
            PushOneCommit.FILE_CONTENT);
        PushOneCommit.Result r = push.to("refs/for/" + dest);
        r.assertOkStatus();
        return r;
    }

    @Test
    public void unconditionalCherryPick() throws Exception {
        PushOneCommit.Result r = createChange();
        assertSubmitType(MERGE_IF_NECESSARY, r.getChangeId());
        setRulesPl("submit_type(cherry_pick).");
        assertSubmitType(CHERRY_PICK, r.getChangeId());
    }

    @Test
    public void submitTypeFromSubject() throws Exception {
        PushOneCommit.Result r1 = createChange("master", "Default 1");
        PushOneCommit.Result r2 = createChange("master", "FAST_FORWARD_ONLY 2");
        PushOneCommit.Result r3 = createChange("master", "MERGE_IF_NECESSARY 3");
        PushOneCommit.Result r4 = createChange("master", "REBASE_IF_NECESSARY 4");
        PushOneCommit.Result r5 = createChange("master", "MERGE_ALWAYS 5");
        PushOneCommit.Result r6 = createChange("master", "CHERRY_PICK 6");
        // Before the rule is installed every change uses the project default.
        assertSubmitType(MERGE_IF_NECESSARY, r1.getChangeId());
        assertSubmitType(MERGE_IF_NECESSARY, r2.getChangeId());
        assertSubmitType(MERGE_IF_NECESSARY, r3.getChangeId());
        assertSubmitType(MERGE_IF_NECESSARY, r4.getChangeId());
        assertSubmitType(MERGE_IF_NECESSARY, r5.getChangeId());
        assertSubmitType(MERGE_IF_NECESSARY, r6.getChangeId());
        setRulesPl(SUBMIT_TYPE_FROM_SUBJECT);
        // After installation each change's subject keyword drives its type.
        assertSubmitType(MERGE_IF_NECESSARY, r1.getChangeId());
        assertSubmitType(FAST_FORWARD_ONLY, r2.getChangeId());
        assertSubmitType(MERGE_IF_NECESSARY, r3.getChangeId());
        assertSubmitType(REBASE_IF_NECESSARY, r4.getChangeId());
        assertSubmitType(MERGE_ALWAYS, r5.getChangeId());
        assertSubmitType(CHERRY_PICK, r6.getChangeId());
    }

    @Test
    public void submitTypeIsUsedForSubmit() throws Exception {
        setRulesPl(SUBMIT_TYPE_FROM_SUBJECT);
        PushOneCommit.Result r = createChange("master", "CHERRY_PICK 1");
        gApi.changes().id(r.getChangeId()).current().review(ReviewInput.approve());
        gApi.changes().id(r.getChangeId()).current().submit();
        List<RevCommit> log = log("master", 1);
        // Cherry-pick submit rewrites the commit: same message/Change-Id, new SHA-1.
        assertThat(log.get(0).getShortMessage()).isEqualTo("CHERRY_PICK 1");
        assertThat(log.get(0).name()).isNotEqualTo(r.getCommit().name());
        assertThat(log.get(0).getFullMessage())
            .contains("Change-Id: " + r.getChangeId());
        assertThat(log.get(0).getFullMessage()).contains("Reviewed-on: ");
    }

    @Test
    public void mixingSubmitTypesAcrossBranchesSucceeds() throws Exception {
        setRulesPl(SUBMIT_TYPE_FROM_SUBJECT);
        PushOneCommit.Result r1 = createChange("master", "MERGE_IF_NECESSARY 1");
        RevCommit initialCommit = r1.getCommit().getParent(0);
        BranchInput bin = new BranchInput();
        bin.revision = initialCommit.name();
        gApi.projects().name(project.get()).branch("branch").create(bin);
        testRepo.reset(initialCommit);
        PushOneCommit.Result r2 = createChange("branch", "MERGE_ALWAYS 1");
        // Same topic pulls both changes into one submission batch.
        gApi.changes().id(r1.getChangeId()).topic(name("topic"));
        gApi.changes().id(r1.getChangeId()).current().review(ReviewInput.approve());
        gApi.changes().id(r2.getChangeId()).topic(name("topic"));
        gApi.changes().id(r2.getChangeId()).current().review(ReviewInput.approve());
        gApi.changes().id(r2.getChangeId()).current().submit();
        // master: fast-forward/merge-if-necessary lands r1's commit as-is.
        assertThat(log("master", 1).get(0).name()).isEqualTo(r1.getCommit().name());
        // branch: merge-always produces a merge commit with r2 as second parent.
        List<RevCommit> branchLog = log("branch", 1);
        assertThat(branchLog.get(0).getParents()).hasLength(2);
        assertThat(branchLog.get(0).getParent(1).name())
            .isEqualTo(r2.getCommit().name());
    }

    @Test
    public void mixingSubmitTypesOnOneBranchFails() throws Exception {
        setRulesPl(SUBMIT_TYPE_FROM_SUBJECT);
        PushOneCommit.Result r1 = createChange("master", "CHERRY_PICK 1");
        PushOneCommit.Result r2 = createChange("master", "MERGE_IF_NECESSARY 2");
        gApi.changes().id(r1.getChangeId()).current().review(ReviewInput.approve());
        gApi.changes().id(r2.getChangeId()).current().review(ReviewInput.approve());
        try {
            gApi.changes().id(r2.getChangeId()).current().submit();
            fail("Expected ResourceConflictException");
        } catch (ResourceConflictException e) {
            assertThat(e).hasMessage(
                "Failed to submit 2 changes due to the following problems:\n"
                + "Change " + r1.getChange().getId() + ": Change has submit type "
                + "CHERRY_PICK, but previously chose submit type MERGE_IF_NECESSARY "
                + "from change " + r2.getChange().getId() + " in the same batch");
        }
    }

    /** Returns the newest {@code n} commits reachable from {@code commitish}. */
    private List<RevCommit> log(String commitish, int n) throws Exception {
        try (Repository repo = repoManager.openRepository(project);
             Git git = new Git(repo)) {
            ObjectId id = repo.resolve(commitish);
            assertThat(id).isNotNull();
            return ImmutableList.copyOf(git.log().add(id).setMaxCount(n).call());
        }
    }

    private void assertSubmitType(SubmitType expected, String id)
            throws Exception {
        assertThat(gApi.changes().id(id).current().submitType())
            .isEqualTo(expected);
    }
}
| |
/*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ide.ui.laf.darcula.ui;
import com.intellij.ide.ui.laf.darcula.DarculaUIUtil;
import com.intellij.openapi.ui.GraphicsConfig;
import com.intellij.openapi.util.IconLoader;
import com.intellij.ui.Gray;
import com.intellij.util.ui.JBInsets;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.basic.BasicTextFieldUI;
import javax.swing.text.JTextComponent;
import java.awt.*;
import java.awt.event.*;
/**
* @author Konstantin Bulenkov
*/
/**
 * Darcula look-and-feel UI delegate for JTextField. Paints a rounded "search"
 * variant (client property "JTextField.variant" == "search") with a magnifier
 * icon, an optional history popup, and a clear ("x") icon; non-search fields
 * are painted via DarculaTextBorder-aware background fills.
 *
 * @author Konstantin Bulenkov
 */
public class DarculaTextFieldUI extends BasicTextFieldUI {
    private static final Icon SEARCH_ICON = IconLoader.findIcon("/com/intellij/ide/ui/laf/darcula/icons/search.png", DarculaTextFieldUI.class, true);
    private static final Icon SEARCH_WITH_HISTORY_ICON = IconLoader.findIcon("/com/intellij/ide/ui/laf/darcula/icons/searchWithHistory.png", DarculaTextFieldUI.class, true);
    private static final Icon CLEAR_ICON = IconLoader.findIcon("/com/intellij/ide/ui/laf/darcula/icons/clear.png", DarculaTextFieldUI.class, true);

    // Clickable regions inside a search field: the magnifier (popup) and the "x" (clear).
    private enum SearchAction {POPUP, CLEAR}

    private final JTextField myTextField;
    protected JLabel myClearIcon;
    protected JLabel myRecentIcon;

    public DarculaTextFieldUI(JTextField textField) {
        myTextField = textField;
    }

    @SuppressWarnings("MethodOverridesStaticMethodOfSuperclass")
    public static ComponentUI createUI(final JComponent c) {
        final DarculaTextFieldUI ui = new DarculaTextFieldUI((JTextField)c);
        // Repaint on focus changes so the focus ring / clear icon appear and disappear.
        c.addFocusListener(new FocusAdapter() {
            @Override
            public void focusGained(FocusEvent e) {
                c.repaint();
            }
            @Override
            public void focusLost(FocusEvent e) {
                c.repaint();
            }
        });
        // Hand cursor over the icon hot-spots of a search field, text cursor elsewhere.
        c.addMouseMotionListener(new MouseMotionAdapter() {
            @Override
            public void mouseMoved(MouseEvent e) {
                if (ui.getComponent() != null && isSearchField(c)) {
                    if (ui.getActionUnder(e) != null) {
                        c.setCursor(Cursor.getPredefinedCursor(Cursor.HAND_CURSOR));
                    } else {
                        c.setCursor(Cursor.getPredefinedCursor(Cursor.TEXT_CURSOR));
                    }
                }
            }
        });
        // Clicking the magnifier shows the history popup; clicking the "x" clears the text.
        c.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent e) {
                if (isSearchField(c)) {
                    final SearchAction action = ui.getActionUnder(e);
                    if (action != null) {
                        switch (action) {
                            case POPUP:
                                ui.showSearchPopup();
                                break;
                            case CLEAR:
                                ((JTextField)c).setText("");
                                break;
                        }
                        e.consume();
                    }
                }
            }
        });
        return ui;
    }

    /** Shows the popup registered under "JTextField.Search.FindPopup", anchored below the search icon. */
    protected void showSearchPopup() {
        final Object value = getComponent().getClientProperty("JTextField.Search.FindPopup");
        if (value instanceof JPopupMenu) {
            final JPopupMenu popup = (JPopupMenu)value;
            popup.show(getComponent(), getSearchIconCoord().x, getComponent().getHeight());
        }
    }

    /**
     * Hit-tests the mouse position against the two icon hot-spots. Each icon's
     * top-left corner is shifted by +8/+8 to its center (icons are 16x16), and a
     * click within an 8px radius of that center counts as a hit. CLEAR wins when
     * both circles would match.
     */
    private SearchAction getActionUnder(MouseEvent e) {
        final Point cPoint = getClearIconCoord();
        final Point sPoint = getSearchIconCoord();
        cPoint.x+=8;
        cPoint.y+=8;
        sPoint.x+=8;
        sPoint.y+=8;
        final Point ePoint = e.getPoint();
        return cPoint.distance(ePoint) <= 8 ? SearchAction.CLEAR : sPoint.distance(ePoint) <= 8 ? SearchAction.POPUP : null;
    }

    /**
     * Rectangle used for the rounded search-field background and icon layout.
     * The offsets are hand-tuned to the Darcula icon artwork; height is forced
     * even so the round rect renders symmetrically.
     * NOTE(review): x is derived from i.right rather than i.left — looks
     * suspicious for asymmetric insets; confirm against upstream before changing.
     */
    protected Rectangle getDrawingRect() {
        final JTextComponent c = getComponent();
        final JBInsets i = JBInsets.create(c.getInsets());
        final int x = i.right - 4 - 16;
        final int y = i.top - 3;
        final int w = c.getWidth() - i.width() + 16*2 +7*2 - 5;
        int h = c.getBounds().height - i.height() + 4*2 - 3;
        if (h%2==1) h++;
        return new Rectangle(x, y, w, h);
    }

    /** Top-left corner of the 16x16 search icon, vertically centered in the drawing rect. */
    protected Point getSearchIconCoord() {
        final Rectangle r = getDrawingRect();
        return new Point(r.x + 3, r.y + (r.height - 16) / 2 + 1);
    }

    /** Top-left corner of the 16x16 clear icon, right-aligned in the drawing rect. */
    protected Point getClearIconCoord() {
        final Rectangle r = getDrawingRect();
        return new Point(r.x + r.width - 16 - 1, r.y + (r.height - 16) / 2);
    }

    @Override
    protected void paintBackground(Graphics graphics) {
        Graphics2D g = (Graphics2D)graphics;
        final JTextComponent c = getComponent();
        final Container parent = c.getParent();
        // Fill with the parent's background first so rounded corners blend in.
        if (parent != null) {
            g.setColor(parent.getBackground());
            g.fillRect(0, 0, c.getWidth(), c.getHeight());
        }
        final GraphicsConfig config = new GraphicsConfig(g);
        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        g.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
        final Border border = c.getBorder();
        if (isSearchField(c)) {
            // Rounded pill background; focus ring replaces the plain outline when focused.
            g.setColor(c.getBackground());
            final Rectangle r = getDrawingRect();
            int radius = r.height-1;
            g.fillRoundRect(r.x, r.y, r.width, r.height-1, radius, radius);
            g.setColor(c.isEnabled() ? Gray._100 : new Color(0x535353));
            if (c.hasFocus()) {
                DarculaUIUtil.paintSearchFocusRing(g, r);
            } else {
                g.drawRoundRect(r.x, r.y, r.width, r.height-1, radius, radius);
            }
            // Magnifier icon: the variant with a dropdown arrow when a popup is installed.
            Point p = getSearchIconCoord();
            Icon searchIcon = getComponent().getClientProperty("JTextField.Search.FindPopup") instanceof JPopupMenu ? SEARCH_WITH_HISTORY_ICON : SEARCH_ICON;
            searchIcon.paintIcon(null, g, p.x, p.y);
            // Clear icon only while focused and non-empty.
            if (getComponent().hasFocus() && getComponent().getText().length() > 0) {
                p = getClearIconCoord();
                CLEAR_ICON.paintIcon(null, g, p.x, p.y);
            }
        } else if (border instanceof DarculaTextBorder) {
            if (c.isEnabled() && c.isEditable()) {
                g.setColor(c.getBackground());
            }
            final int width = c.getWidth();
            final int height = c.getHeight();
            final Insets i = border.getBorderInsets(c);
            // Focused fields get slightly rounded corners; unfocused are square.
            if (c.hasFocus()) {
                g.fillRoundRect(i.left - 5, i.top - 2, width - i.right - i.left + 10, height - i.top - i.bottom + 6, 5, 5);
            } else {
                g.fillRect(i.left - 5, i.top - 2, width - i.right - i.left + 12, height - i.top - i.bottom + 6);
            }
        } else {
            super.paintBackground(g);
        }
        config.restore();
    }

    @Override
    protected void paintSafely(Graphics g) {
        // Always repaint the background before the basic UI paints text/caret.
        paintBackground(g);
        super.paintSafely(g);
    }

    /** True when the field opted into the "search" variant via client property. */
    public static boolean isSearchField(Component c) {
        return c instanceof JTextField && "search".equals(((JTextField)c).getClientProperty("JTextField.variant"));
    }

    /** True for a search field that also has a history popup installed. */
    public static boolean isSearchFieldWithHistoryPopup(Component c) {
        return isSearchField(c) && ((JTextField)c).getClientProperty("JTextField.Search.FindPopup") instanceof JPopupMenu;
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.pinterest.secor.common;
import com.google.api.client.repackaged.com.google.common.base.Strings;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.TimeZone;
/**
* One-stop shop for Secor configuration options.
*
* @author Pawel Garbacki (pawel@pinterest.com)
*/
/**
 * One-stop shop for Secor configuration options.
 *
 * Values are backed by an Apache Commons {@link PropertiesConfiguration}. Most
 * accessors are thin typed wrappers around a single property key; accessors
 * without a default fail with a descriptive RuntimeException when the key is
 * missing (see {@link #checkProperty(String)}).
 *
 * @author Pawel Garbacki (pawel@pinterest.com)
 */
public class SecorConfig {
    private final PropertiesConfiguration mProperties;

    // One lazily-built config per thread: the file named by the "config" system
    // property is loaded first, then overlaid with all system properties so that
    // -D flags override file values.
    private static final ThreadLocal<SecorConfig> mSecorConfig = new ThreadLocal<SecorConfig>() {
        @Override
        protected SecorConfig initialValue() {
            // Load the default configuration file first
            Properties systemProperties = System.getProperties();
            String configProperty = systemProperties.getProperty("config");
            PropertiesConfiguration properties;
            try {
                properties = new PropertiesConfiguration(configProperty);
            } catch (ConfigurationException e) {
                // Preserve the cause so the underlying parse/IO error is not lost.
                throw new RuntimeException("Error loading configuration from " + configProperty, e);
            }
            for (final Map.Entry<Object, Object> entry : systemProperties.entrySet()) {
                properties.setProperty(entry.getKey().toString(), entry.getValue());
            }
            return new SecorConfig(properties);
        }
    };

    /**
     * Returns the per-thread SecorConfig singleton, loading it on first use.
     *
     * @throws ConfigurationException declared for API compatibility; load
     *         failures currently surface as RuntimeException from initialValue()
     */
    public static SecorConfig load() throws ConfigurationException {
        return mSecorConfig.get();
    }

    /**
     * Exposed for testability
     *
     * @param properties the backing configuration; not copied
     */
    public SecorConfig(PropertiesConfiguration properties) {
        mProperties = properties;
    }

    // ---- Kafka broker / consumer settings ----

    public String getKafkaSeedBrokerHost() {
        return getString("kafka.seed.broker.host");
    }

    public int getKafkaSeedBrokerPort() {
        return getInt("kafka.seed.broker.port");
    }

    public String getKafkaZookeeperPath() {
        return getString("kafka.zookeeper.path");
    }

    /** Comma-joined zookeeper.quorum entries, e.g. "host1:2181,host2:2181". */
    public String getZookeeperQuorum() {
        return StringUtils.join(getStringArray("zookeeper.quorum"), ',');
    }

    public int getConsumerTimeoutMs() {
        return getInt("kafka.consumer.timeout.ms");
    }

    public String getConsumerAutoOffsetReset() {
        return getString("kafka.consumer.auto.offset.reset");
    }

    public String getPartitionAssignmentStrategy() {
        return getString("kafka.partition.assignment.strategy");
    }

    public String getRebalanceMaxRetries() {
        return getString("kafka.rebalance.max.retries");
    }

    public String getRebalanceBackoffMs() {
        return getString("kafka.rebalance.backoff.ms");
    }

    public String getFetchMessageMaxBytes() {
        return getString("kafka.fetch.message.max.bytes");
    }

    public String getSocketReceiveBufferBytes() {
        return getString("kafka.socket.receive.buffer.bytes");
    }

    public String getFetchMinBytes() {
        return getString("kafka.fetch.min.bytes");
    }

    public String getFetchWaitMaxMs() {
        return getString("kafka.fetch.wait.max.ms");
    }

    public String getDualCommitEnabled() {
        return getString("kafka.dual.commit.enabled");
    }

    public String getOffsetsStorage() {
        return getString("kafka.offsets.storage");
    }

    // ---- Core Secor behavior ----

    public int getGeneration() {
        return getInt("secor.generation");
    }

    public int getConsumerThreads() {
        return getInt("secor.consumer.threads");
    }

    public long getMaxFileSizeBytes() {
        return getLong("secor.max.file.size.bytes");
    }

    public long getMaxFileAgeSeconds() {
        return getLong("secor.max.file.age.seconds");
    }

    public boolean getFileAgeYoungest() {
        return getBoolean("secor.file.age.youngest");
    }

    public long getOffsetsPerPartition() {
        return getLong("secor.offsets.per.partition");
    }

    public int getMessagesPerSecond() {
        return getInt("secor.messages.per.second");
    }

    // ---- Storage backends: S3 / Swift / GS / Azure ----

    public String getS3FileSystem() { return getString("secor.s3.filesystem"); }

    /** Whether each topic gets its own Swift container. */
    public boolean getSeparateContainersForTopics() {
        // equalsIgnoreCase avoids the locale-sensitive String.toLowerCase()
        // (e.g. "TRUE".toLowerCase() != "true" under the Turkish locale).
        return getString("secor.swift.containers.for.each.topic").equalsIgnoreCase("true");
    }

    public String getSwiftContainer() {
        return getString("secor.swift.container");
    }

    public String getSwiftPath() {
        return getString("secor.swift.path");
    }

    public String getS3Bucket() {
        return getString("secor.s3.bucket");
    }

    public String getS3Path() {
        return getString("secor.s3.path");
    }

    public String getS3AlternativePath() {
        return getString("secor.s3.alternative.path");
    }

    public String getS3AlterPathDate() {
        return getString("secor.s3.alter.path.date");
    }

    /** Full upload prefix, e.g. "s3n://bucket/path". */
    public String getS3Prefix() {
        return getS3FileSystem() + "://" + getS3Bucket() + "/" + getS3Path();
    }

    public String getLocalPath() {
        return getString("secor.local.path");
    }

    // ---- Topic selection ----

    public String getKafkaTopicFilter() {
        return getString("secor.kafka.topic_filter");
    }

    public String getKafkaTopicBlacklist() {
        return getString("secor.kafka.topic_blacklist");
    }

    public String getKafkaTopicUploadAtMinuteMarkFilter() { return getString("secor.kafka.upload_at_minute_mark.topic_filter");}

    public int getUploadMinuteMark(){ return getInt("secor.upload.minute_mark");}

    public String getKafkaGroup() {
        return getString("secor.kafka.group");
    }

    public int getZookeeperSessionTimeoutMs() {
        return getInt("zookeeper.session.timeout.ms");
    }

    public int getZookeeperSyncTimeMs() {
        return getInt("zookeeper.sync.time.ms");
    }

    // ---- Pluggable class names ----

    public String getMessageParserClass() {
        return getString("secor.message.parser.class");
    }

    public String getUploaderClass() {
        return getString("secor.upload.class", "com.pinterest.secor.uploader.Uploader");
    }

    public String getUploadManagerClass() {
        return getString("secor.upload.manager.class");
    }

    public String getMessageTransformerClass(){
        return getString("secor.message.transformer.class");
    }

    public int getTopicPartitionForgetSeconds() {
        return getInt("secor.topic_partition.forget.seconds");
    }

    public int getLocalLogDeleteAgeHours() {
        return getInt("secor.local.log.delete.age.hours");
    }

    public String getFileExtension() {
        return getString("secor.file.extension");
    }

    public int getOstrichPort() {
        return getInt("ostrich.port");
    }

    // ---- Cloud credentials ----

    public String getCloudService() {
        return getString("cloud.service");
    }

    public String getAwsAccessKey() {
        return getString("aws.access.key");
    }

    public String getAwsSecretKey() {
        return getString("aws.secret.key");
    }

    public String getAwsEndpoint() {
        return getString("aws.endpoint");
    }

    public String getAwsRole() {
        return getString("aws.role");
    }

    public boolean getAwsProxyEnabled(){
        return getBoolean("aws.proxy.isEnabled");
    }

    public String getAwsProxyHttpHost() {
        return getString("aws.proxy.http.host");
    }

    public int getAwsProxyHttpPort() {
        return getInt("aws.proxy.http.port");
    }

    public String getAwsRegion() {
        return getString("aws.region");
    }

    public String getAwsSseType() {
        return getString("aws.sse.type");
    }

    public String getAwsSseKmsKey() {
        return getString("aws.sse.kms.key");
    }

    public String getAwsSseCustomerKey() {
        return getString("aws.sse.customer.key");
    }

    public String getSwiftTenant() {
        return getString("swift.tenant");
    }

    public String getSwiftUsername() {
        return getString("swift.username");
    }

    public String getSwiftPassword() {
        return getString("swift.password");
    }

    public String getSwiftAuthUrl() {
        return getString("swift.auth.url");
    }

    public String getSwiftPublic() {
        return getString("swift.public");
    }

    public String getSwiftPort() {
        return getString("swift.port");
    }

    public String getSwiftGetAuth() {
        return getString("swift.use.get.auth");
    }

    public String getSwiftApiKey() {
        return getString("swift.api.key");
    }

    public String getQuboleApiToken() {
        return getString("qubole.api.token");
    }

    // ---- Monitoring / stats ----

    public String getTsdbHostport() {
        return getString("tsdb.hostport");
    }

    public String getStatsDHostPort() {
        return getString("statsd.hostport");
    }

    public boolean getStatsDPrefixWithConsumerGroup(){
        return getBoolean("statsd.prefixWithConsumerGroup");
    }

    public String getMonitoringBlacklistTopics() {
        return getString("monitoring.blacklist.topics");
    }

    public String getMonitoringPrefix() {
        return getString("monitoring.prefix");
    }

    public long getMonitoringIntervalSeconds() {
        return getLong("monitoring.interval.seconds");
    }

    // ---- Message timestamp extraction ----

    public String getMessageTimestampName() {
        return getString("message.timestamp.name");
    }

    public String getMessageTimestampNameSeparator() {
        return getString("message.timestamp.name.separator");
    }

    public int getMessageTimestampId() {
        return getInt("message.timestamp.id");
    }

    public String getMessageTimestampType() {
        return getString("message.timestamp.type");
    }

    public String getMessageTimestampInputPattern() {
        return getString("message.timestamp.input.pattern");
    }

    public boolean isMessageTimestampRequired() {
        return mProperties.getBoolean("message.timestamp.required");
    }

    public int getFinalizerLookbackPeriods() {
        return getInt("secor.finalizer.lookback.periods", 10);
    }

    // ---- Hive / Qubole ----

    public String getHivePrefix() {
        return getString("secor.hive.prefix");
    }

    /** Per-topic Hive table override; null when no override is configured. */
    public String getHiveTableName(String topic) {
        String key = "secor.hive.table.name." + topic;
        return mProperties.getString(key, null);
    }

    public boolean getQuboleEnabled() {
        return getBoolean("secor.enable.qubole");
    }

    public long getQuboleTimeoutMs() {
        return getLong("secor.qubole.timeout.ms");
    }

    public String getCompressionCodec() {
        return getString("secor.compression.codec");
    }

    public int getMaxMessageSizeBytes() {
        return getInt("secor.max.message.size.bytes");
    }

    public String getFileReaderWriterFactory() {
        return getString("secor.file.reader.writer.factory");
    }

    public String getPerfTestTopicPrefix() {
        return getString("secor.kafka.perf_topic_prefix");
    }

    public String getZookeeperPath() {
        return getString("secor.zookeeper.path");
    }

    public String getGsCredentialsPath() {
        return getString("secor.gs.credentials.path");
    }

    public String getGsBucket() {
        return getString("secor.gs.bucket");
    }

    public String getGsPath() {
        return getString("secor.gs.path");
    }

    public int getGsConnectTimeoutInMs() {
        return getInt("secor.gs.connect.timeout.ms", 3 * 60000);
    }

    public int getGsReadTimeoutInMs() {
        return getInt("secor.gs.read.timeout.ms", 3 * 60000);
    }

    public boolean getGsDirectUpload() {
        return getBoolean("secor.gs.upload.direct");
    }

    public int getFinalizerDelaySeconds() {
        return getInt("partitioner.finalizer.delay.seconds");
    }

    public boolean getS3MD5HashPrefix() {
        return getBoolean("secor.s3.prefix.md5hash");
    }

    public String getAzureEndpointsProtocol() { return getString("secor.azure.endpoints.protocol"); }

    public String getAzureAccountName() { return getString("secor.azure.account.name"); }

    public String getAzureAccountKey() { return getString("secor.azure.account.key"); }

    public String getAzureContainer() { return getString("secor.azure.container.name"); }

    public String getAzurePath() { return getString("secor.azure.path"); }

    /**
     * Maps topic name -> protobuf message class, taken from keys of the form
     * "secor.protobuf.message.class.&lt;topic&gt;".
     */
    public Map<String, String> getProtobufMessageClassPerTopic() {
        String prefix = "secor.protobuf.message.class";
        Iterator<String> keys = mProperties.getKeys(prefix);
        Map<String, String> protobufClasses = new HashMap<String, String>();
        while (keys.hasNext()) {
            String key = keys.next();
            String className = mProperties.getString(key);
            // Strip "prefix." to recover the topic name.
            protobufClasses.put(key.substring(prefix.length() + 1), className);
        }
        return protobufClasses;
    }

    /** Parser timezone; defaults to UTC when unset or blank. */
    public TimeZone getTimeZone() {
        String timezone = getString("secor.parser.timezone");
        return Strings.isNullOrEmpty(timezone) ? TimeZone.getTimeZone("UTC") : TimeZone.getTimeZone(timezone);
    }

    // ---- Typed accessors over the backing properties ----

    public boolean getBoolean(String name, boolean defaultValue) {
        return mProperties.getBoolean(name, defaultValue);
    }

    public boolean getBoolean(String name) {
        return mProperties.getBoolean(name);
    }

    /** Fails with a descriptive error when a required key is absent. */
    public void checkProperty(String name) {
        if (!mProperties.containsKey(name)) {
            throw new RuntimeException("Failed to find required configuration option '" +
                                       name + "'.");
        }
    }

    public String getString(String name) {
        checkProperty(name);
        return mProperties.getString(name);
    }

    public String getString(String name, String defaultValue) {
        return mProperties.getString(name, defaultValue);
    }

    public int getInt(String name) {
        checkProperty(name);
        return mProperties.getInt(name);
    }

    public int getInt(String name, int defaultValue) {
        return mProperties.getInt(name, defaultValue);
    }

    public long getLong(String name) {
        // Consistent with getString/getInt: missing keys fail with a clear message.
        checkProperty(name);
        return mProperties.getLong(name);
    }

    public String[] getStringArray(String name) {
        return mProperties.getStringArray(name);
    }
}
| |
/*
* Copyright (C) 2014 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.example.android.sunshine.app.data;
import android.annotation.TargetApi;
import android.content.ContentProvider;
import android.content.ContentValues;
import android.content.UriMatcher;
import android.database.Cursor;
import android.database.sqlite.SQLiteDatabase;
import android.database.sqlite.SQLiteQueryBuilder;
import android.net.Uri;
import android.util.Log;
public class WeatherProvider extends ContentProvider {
    // The URI Matcher used by this content provider.
    private static final UriMatcher sUriMatcher = buildUriMatcher();
    private static final String TAG =WeatherProvider.class.getSimpleName() ;
    private WeatherDbHelper mOpenHelper;

    // Match codes returned by sUriMatcher for the supported URI shapes.
    static final int WEATHER = 100;
    static final int WEATHER_WITH_LOCATION = 101;
    static final int WEATHER_WITH_LOCATION_AND_DATE = 102;
    static final int LOCATION = 300;

    // Shared, pre-built query over the weather/location join. SQLiteQueryBuilder
    // is stateless at query time, so a single static instance is safe to reuse.
    private static final SQLiteQueryBuilder sWeatherByLocationSettingQueryBuilder;

    static{
        sWeatherByLocationSettingQueryBuilder = new SQLiteQueryBuilder();

        //This is an inner join which looks like
        //weather INNER JOIN location ON weather.location_id = location._id
        sWeatherByLocationSettingQueryBuilder.setTables(
                WeatherContract.WeatherEntry.TABLE_NAME + " INNER JOIN " +
                        WeatherContract.LocationEntry.TABLE_NAME +
                        " ON " + WeatherContract.WeatherEntry.TABLE_NAME +
                        "." + WeatherContract.WeatherEntry.COLUMN_LOC_KEY +
                        " = " + WeatherContract.LocationEntry.TABLE_NAME +
                        "." + WeatherContract.LocationEntry._ID);
    }

    //location.location_setting = ?
    private static final String sLocationSettingSelection =
            WeatherContract.LocationEntry.TABLE_NAME+
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? ";

    //location.location_setting = ? AND date >= ?
    private static final String sLocationSettingWithStartDateSelection =
            WeatherContract.LocationEntry.TABLE_NAME+
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherContract.WeatherEntry.COLUMN_DATE + " >= ? ";

    //location.location_setting = ? AND date = ?
    private static final String sLocationSettingAndDaySelection =
            WeatherContract.LocationEntry.TABLE_NAME +
                    "." + WeatherContract.LocationEntry.COLUMN_LOCATION_SETTING + " = ? AND " +
                    WeatherContract.WeatherEntry.COLUMN_DATE + " = ? ";
private Cursor getWeatherByLocationSetting(Uri uri, String[] projection, String sortOrder) {
String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
long startDate = WeatherContract.WeatherEntry.getStartDateFromUri(uri);
String[] selectionArgs;
String selection;
if (startDate == 0) {
selection = sLocationSettingSelection;
selectionArgs = new String[]{locationSetting};
} else {
selectionArgs = new String[]{locationSetting, Long.toString(startDate)};
selection = sLocationSettingWithStartDateSelection;
}
return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
}
private Cursor getWeatherByLocationSettingAndDate(
Uri uri, String[] projection, String sortOrder) {
String locationSetting = WeatherContract.WeatherEntry.getLocationSettingFromUri(uri);
long date = WeatherContract.WeatherEntry.getDateFromUri(uri);
return sWeatherByLocationSettingQueryBuilder.query(mOpenHelper.getReadableDatabase(),
projection,
sLocationSettingAndDaySelection,
new String[]{locationSetting, Long.toString(date)},
null,
null,
sortOrder
);
}
/*
Students: Here is where you need to create the UriMatcher. This UriMatcher will
match each URI to the WEATHER, WEATHER_WITH_LOCATION, WEATHER_WITH_LOCATION_AND_DATE,
and LOCATION integer constants defined above. You can test this by uncommenting the
testUriMatcher test within TestUriMatcher.
*/
static UriMatcher buildUriMatcher() {
// I know what you're thinking. Why create a UriMatcher when you can use regular
// expressions instead? Because you're not crazy, that's why.
// All paths added to the UriMatcher have a corresponding code to return when a match is
// found. The code passed into the constructor represents the code to return for the root
// URI. It's common to use NO_MATCH as the code for this case.
final UriMatcher matcher = new UriMatcher(UriMatcher.NO_MATCH);
final String authority = WeatherContract.CONTENT_AUTHORITY;
// For each type of URI you want to add, create a corresponding code.
matcher.addURI(authority, WeatherContract.PATH_WEATHER, WEATHER);
matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*", WEATHER_WITH_LOCATION);
matcher.addURI(authority, WeatherContract.PATH_WEATHER + "/*/#", WEATHER_WITH_LOCATION_AND_DATE);
matcher.addURI(authority, WeatherContract.PATH_LOCATION, LOCATION);
return matcher;
}
/*
Students: We've coded this for you. We just create a new WeatherDbHelper for later use
here.
*/
    /** Creates the database helper used by all CRUD methods; always reports success. */
    @Override
    public boolean onCreate() {
        mOpenHelper = new WeatherDbHelper(getContext());
        return true;
    }
/*
Students: Here's where you'll code the getType function that uses the UriMatcher. You can
test this by uncommenting testGetType in TestProvider.
*/
@Override
public String getType(Uri uri) {
// Use the Uri Matcher to determine what kind of URI this is.
final int match = sUriMatcher.match(uri);
switch (match) {
// Student: Uncomment and fill out these two cases
case WEATHER_WITH_LOCATION_AND_DATE:
return WeatherContract.WeatherEntry.CONTENT_ITEM_TYPE;
case WEATHER_WITH_LOCATION:
return WeatherContract.WeatherEntry.CONTENT_TYPE;
case WEATHER:
return WeatherContract.WeatherEntry.CONTENT_TYPE;
case LOCATION:
return WeatherContract.LocationEntry.CONTENT_TYPE;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
}
@Override
public Cursor query(Uri uri, String[] projection, String selection, String[] selectionArgs,
String sortOrder) {
// Here's the switch statement that, given a URI, will determine what kind of request it is,
// and query the database accordingly.
Cursor retCursor;
switch (sUriMatcher.match(uri)) {
// "weather/*/*"
case WEATHER_WITH_LOCATION_AND_DATE:
{
retCursor = getWeatherByLocationSettingAndDate(uri, projection, sortOrder);
if(retCursor !=null && retCursor.getCount()>0){
retCursor.moveToFirst();
Log.d(TAG,"inside withdate");
double maxt=retCursor.getDouble(retCursor.getColumnIndexOrThrow(WeatherContract.WeatherEntry.COLUMN_MAX_TEMP));
Log.d(TAG,"the maxt is "+ maxt);
}
break;
}
// "weather/*"
case WEATHER_WITH_LOCATION: {
retCursor = getWeatherByLocationSetting(uri, projection, sortOrder);
break;
}
// "weather"
case WEATHER: {
retCursor = mOpenHelper.getReadableDatabase().query(
WeatherContract.WeatherEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
break;
}
// "location"
case LOCATION: {
retCursor = mOpenHelper.getReadableDatabase().query(
WeatherContract.LocationEntry.TABLE_NAME,
projection,
selection,
selectionArgs,
null,
null,
sortOrder
);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
retCursor.setNotificationUri(getContext().getContentResolver(), uri);
return retCursor;
}
/*
Student: Add the ability to insert Locations to the implementation of this function.
*/
@Override
public Uri insert(Uri uri, ContentValues values) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
Uri returnUri;
switch (match) {
case WEATHER: {
normalizeDate(values);
long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, values);
if ( _id > 0 )
returnUri = WeatherContract.WeatherEntry.buildWeatherUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
break;
}
case LOCATION: {
long _id = db.insert(WeatherContract.LocationEntry.TABLE_NAME, null, values);
if ( _id > 0 )
returnUri = WeatherContract.LocationEntry.buildLocationUri(_id);
else
throw new android.database.SQLException("Failed to insert row into " + uri);
break;
}
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
getContext().getContentResolver().notifyChange(uri, null);
return returnUri;
}
@Override
public int delete(Uri uri, String selection, String[] selectionArgs) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
int rowsDeleted;
// this makes delete all rows return the number of rows deleted
if ( null == selection ) selection = "1";
switch (match) {
case WEATHER:
rowsDeleted = db.delete(
WeatherContract.WeatherEntry.TABLE_NAME, selection, selectionArgs);
break;
case LOCATION:
rowsDeleted = db.delete(
WeatherContract.LocationEntry.TABLE_NAME, selection, selectionArgs);
break;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
// Because a null deletes all rows
if (rowsDeleted != 0) {
getContext().getContentResolver().notifyChange(uri, null);
}
return rowsDeleted;
}
    /**
     * Rewrites the date column of {@code values} (if present) into the
     * normalized day-granular form used throughout the weather table, so that
     * queries comparing dates match regardless of the original time-of-day.
     */
    private void normalizeDate(ContentValues values) {
        // normalize the date value
        if (values.containsKey(WeatherContract.WeatherEntry.COLUMN_DATE)) {
            long dateValue = values.getAsLong(WeatherContract.WeatherEntry.COLUMN_DATE);
            values.put(WeatherContract.WeatherEntry.COLUMN_DATE, WeatherContract.normalizeDate(dateValue));
        }
    }
@Override
public int update(
Uri uri, ContentValues values, String selection, String[] selectionArgs) {
final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
final int match = sUriMatcher.match(uri);
int rowsUpdated;
switch (match) {
case WEATHER:
normalizeDate(values);
rowsUpdated = db.update(WeatherContract.WeatherEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
case LOCATION:
rowsUpdated = db.update(WeatherContract.LocationEntry.TABLE_NAME, values, selection,
selectionArgs);
break;
default:
throw new UnsupportedOperationException("Unknown uri: " + uri);
}
if (rowsUpdated != 0) {
getContext().getContentResolver().notifyChange(uri, null);
}
return rowsUpdated;
}
    /**
     * Inserts many weather rows inside a single transaction for speed; any other
     * URI falls back to the default one-row-at-a-time implementation.
     *
     * @return the number of rows successfully inserted (rows whose insert
     *         returned -1 are skipped but do not abort the batch)
     */
    @Override
    public int bulkInsert(Uri uri, ContentValues[] values) {
        final SQLiteDatabase db = mOpenHelper.getWritableDatabase();
        final int match = sUriMatcher.match(uri);
        switch (match) {
            case WEATHER:
                // One transaction for the whole batch: a single commit instead of
                // one per row.
                db.beginTransaction();
                int returnCount = 0;
                try {
                    for (ContentValues value : values) {
                        normalizeDate(value);
                        long _id = db.insert(WeatherContract.WeatherEntry.TABLE_NAME, null, value);
                        if (_id != -1) {
                            returnCount++;
                        }
                    }
                    // Mark success so endTransaction() commits rather than rolls back.
                    db.setTransactionSuccessful();
                } finally {
                    db.endTransaction();
                }
                getContext().getContentResolver().notifyChange(uri, null);
                return returnCount;
            default:
                return super.bulkInsert(uri, values);
        }
    }
    // You do not need to call this method. This is a method specifically to assist the testing
    // framework in running smoothly. You can read more at:
    // http://developer.android.com/reference/android/content/ContentProvider.html#shutdown()
    @Override
    @TargetApi(11)
    public void shutdown() {
        // Release the SQLite connection before the provider goes away.
        mOpenHelper.close();
        super.shutdown();
    }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.android.toolchain.AndroidPlatformTarget;
import com.facebook.buck.core.build.buildable.context.BuildableContext;
import com.facebook.buck.core.build.context.BuildContext;
import com.facebook.buck.core.build.execution.context.IsolatedExecutionContext;
import com.facebook.buck.core.build.execution.context.StepExecutionContext;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.filesystems.RelPath;
import com.facebook.buck.io.filesystem.BuildCellRelativePath;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.impl.ProjectFilesystemUtils;
import com.facebook.buck.step.AbstractExecutionStep;
import com.facebook.buck.step.Step;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.step.StepExecutionResults;
import com.facebook.buck.step.fs.MakeCleanDirectoryStep;
import com.facebook.buck.step.isolatedsteps.common.TouchStep;
import com.facebook.buck.step.isolatedsteps.shell.IsolatedShellStep;
import com.facebook.buck.util.zip.CustomZipOutputStream;
import com.facebook.buck.util.zip.ZipOutputStreams;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.zip.ZipEntry;
/**
 * Shell step that runs ProGuard as a standalone executable jar over a set of input jars,
 * producing obfuscated/shrunk output jars plus the usual meta artifacts (mapping.txt,
 * seeds.txt, usage.txt, configuration.txt).
 *
 * <p>Fix: {@link #createEmptyZip(Path)} now opens its output stream with
 * try-with-resources so the stream cannot leak if {@code putNextEntry} throws.
 */
public final class ProGuardObfuscateStep extends IsolatedShellStep {

  public static final int DEFAULT_OPTIMIZATION_PASSES = 1;

  /** Which ProGuard configuration from the Android SDK to include, if any. */
  enum SdkProguardType {
    DEFAULT,
    OPTIMIZED,
    NONE,
  }

  private final AndroidPlatformTarget androidPlatformTarget;
  private final ImmutableList<String> javaRuntimeLauncher;
  private final ProjectFilesystem filesystem;
  private final Map<Path, Path> inputAndOutputEntries;
  private final Path pathToProGuardCommandLineArgsFile;
  private final boolean skipProguard;
  private final Optional<Path> proguardJarOverride;
  private final String proguardMaxHeapSize;
  private final Optional<List<String>> proguardJvmArgs;
  private final Optional<String> proguardAgentPath;

  /**
   * Create steps that write out ProGuard's command line arguments to a text file and then run
   * ProGuard using those arguments. We write the arguments to a file to avoid blowing out exec()'s
   * ARG_MAX limit.
   *
   * @param steps Where to append the generated steps.
   */
  public static void create(
      AndroidPlatformTarget androidPlatformTarget,
      ImmutableList<String> javaRuntimeLauncher,
      ProjectFilesystem filesystem,
      Optional<Path> proguardJarOverride,
      String proguardMaxHeapSize,
      Optional<String> proguardAgentPath,
      Set<Path> customProguardConfigs,
      SdkProguardType sdkProguardConfig,
      int optimizationPasses,
      Optional<List<String>> proguardJvmArgs,
      Map<Path, Path> inputAndOutputEntries,
      ImmutableSet<Path> additionalLibraryJarsForProguard,
      Path proguardDirectory,
      BuildableContext buildableContext,
      BuildContext buildContext,
      boolean skipProguard,
      ImmutableList.Builder<Step> steps,
      boolean withDownwardApi) {
    steps.addAll(
        MakeCleanDirectoryStep.of(
            BuildCellRelativePath.fromCellRelativePath(
                buildContext.getBuildCellRootPath(), filesystem, proguardDirectory)));

    Path pathToProGuardCommandLineArgsFile = proguardDirectory.resolve("command-line.txt");

    CommandLineHelperStep commandLineHelperStep =
        new CommandLineHelperStep(
            filesystem,
            androidPlatformTarget,
            customProguardConfigs,
            sdkProguardConfig,
            optimizationPasses,
            inputAndOutputEntries,
            additionalLibraryJarsForProguard,
            proguardDirectory,
            pathToProGuardCommandLineArgsFile);

    if (skipProguard) {
      // Still touch mapping.txt so downstream rules/caching find the expected artifact.
      steps.add(commandLineHelperStep, new TouchStep(commandLineHelperStep.getMappingTxt()));
    } else {
      ProGuardObfuscateStep proGuardStep =
          new ProGuardObfuscateStep(
              androidPlatformTarget,
              javaRuntimeLauncher,
              filesystem,
              inputAndOutputEntries,
              pathToProGuardCommandLineArgsFile,
              skipProguard,
              proguardJarOverride,
              proguardMaxHeapSize,
              proguardJvmArgs,
              proguardAgentPath,
              ProjectFilesystemUtils.relativize(
                  filesystem.getRootPath(), buildContext.getBuildCellRootPath()),
              withDownwardApi);

      buildableContext.recordArtifact(commandLineHelperStep.getConfigurationTxt());
      buildableContext.recordArtifact(commandLineHelperStep.getMappingTxt());
      buildableContext.recordArtifact(commandLineHelperStep.getSeedsTxt());
      buildableContext.recordArtifact(commandLineHelperStep.getUsageTxt());

      steps.add(
          commandLineHelperStep,
          proGuardStep,
          // Some proguard configs can propagate the "-dontobfuscate" flag which disables
          // obfuscation and prevents the mapping.txt & usage.txt file from being generated.
          // So touch it here to guarantee it's around when we go to cache this rule.
          new TouchStep(commandLineHelperStep.getMappingTxt()),
          new TouchStep(commandLineHelperStep.getUsageTxt()));
    }
  }

  /**
   * @param inputAndOutputEntries Map of input/output pairs to proguard. The key represents an input
   *     jar (-injars); the value an output jar (-outjars).
   * @param pathToProGuardCommandLineArgsFile Path to file containing arguments to ProGuard.
   */
  private ProGuardObfuscateStep(
      AndroidPlatformTarget androidPlatformTarget,
      ImmutableList<String> javaRuntimeLauncher,
      ProjectFilesystem filesystem,
      Map<Path, Path> inputAndOutputEntries,
      Path pathToProGuardCommandLineArgsFile,
      boolean skipProguard,
      Optional<Path> proguardJarOverride,
      String proguardMaxHeapSize,
      Optional<List<String>> proguardJvmArgs,
      Optional<String> proguardAgentPath,
      RelPath cellPath,
      boolean withDownwardApi) {
    super(filesystem.getRootPath(), cellPath, withDownwardApi);
    this.androidPlatformTarget = androidPlatformTarget;
    this.javaRuntimeLauncher = javaRuntimeLauncher;
    this.filesystem = filesystem;
    this.inputAndOutputEntries = ImmutableMap.copyOf(inputAndOutputEntries);
    this.pathToProGuardCommandLineArgsFile = pathToProGuardCommandLineArgsFile;
    this.skipProguard = skipProguard;
    this.proguardJarOverride = proguardJarOverride;
    this.proguardMaxHeapSize = proguardMaxHeapSize;
    this.proguardJvmArgs = proguardJvmArgs;
    this.proguardAgentPath = proguardAgentPath;
  }

  @Override
  public String getShortName() {
    return "proguard_obfuscation";
  }

  @Override
  protected ImmutableList<String> getShellCommandInternal(IsolatedExecutionContext context) {
    // Run ProGuard as a standalone executable JAR file.
    Path proguardJar;
    if (proguardJarOverride.isPresent()) {
      proguardJar = filesystem.getPathForRelativePath(proguardJarOverride.get());
    } else {
      proguardJar = androidPlatformTarget.getProguardJar();
    }

    ImmutableList.Builder<String> args = ImmutableList.builder();
    args.addAll(javaRuntimeLauncher);
    proguardAgentPath.ifPresent(s -> args.add("-agentpath:" + s));
    proguardJvmArgs.ifPresent(args::addAll);
    // "@file" tells ProGuard to read its arguments from the file written by
    // CommandLineHelperStep, sidestepping ARG_MAX.
    args.add("-Xmx" + proguardMaxHeapSize)
        .add("-jar")
        .add(proguardJar.toString())
        .add("@" + pathToProGuardCommandLineArgsFile);
    return args.build();
  }

  @Override
  public StepExecutionResult executeIsolatedStep(IsolatedExecutionContext context)
      throws IOException, InterruptedException {
    StepExecutionResult executionResult = super.executeIsolatedStep(context);

    // proguard has a peculiar behaviour when multiple -injars/outjars pairs are specified in which
    // any -injars that would have been fully stripped away will not produce their matching -outjars
    // as requested (so the file won't exist). Our build steps are not sophisticated enough to
    // account for this and remove those entries from the classes to dex so we hack things here to
    // ensure that the files exist but are empty.
    if (executionResult.isSuccess() && !this.skipProguard) {
      return StepExecutionResult.of(ensureAllOutputsExist(context));
    }

    return executionResult;
  }

  /**
   * Creates an empty placeholder zip for every expected -outjars file that ProGuard did not
   * produce.
   *
   * @return 0 on success, 1 if a placeholder could not be created
   */
  private int ensureAllOutputsExist(IsolatedExecutionContext context) {
    for (Path outputJar : inputAndOutputEntries.values()) {
      if (!Files.exists(outputJar)) {
        try {
          createEmptyZip(outputJar);
        } catch (IOException e) {
          context.logError(e, "Error creating empty zip file at: %s.", outputJar);
          return 1;
        }
      }
    }
    return 0;
  }

  /** Writes a minimal valid zip at {@code file}, creating parent directories as needed. */
  @VisibleForTesting
  static void createEmptyZip(Path file) throws IOException {
    Files.createDirectories(file.getParent());
    // try-with-resources: previously the stream leaked if putNextEntry threw.
    try (CustomZipOutputStream out = ZipOutputStreams.newOutputStream(file)) {
      // Sun's java 6 runtime doesn't allow us to create a truly empty zip, but this should be
      // enough to pass through dx/split-zip without any issue.
      // ...and Sun's java 7 runtime doesn't let us use an empty string for the zip entry name.
      out.putNextEntry(new ZipEntry("proguard_no_result"));
    }
  }

  @Override
  public boolean equals(Object obj) {
    if (this == obj) {
      return true;
    } else if (!(obj instanceof ProGuardObfuscateStep)) {
      return false;
    }

    ProGuardObfuscateStep that = (ProGuardObfuscateStep) obj;
    return Objects.equal(this.inputAndOutputEntries, that.inputAndOutputEntries)
        && Objects.equal(
            this.pathToProGuardCommandLineArgsFile, that.pathToProGuardCommandLineArgsFile);
  }

  @Override
  public int hashCode() {
    return Objects.hashCode(inputAndOutputEntries, pathToProGuardCommandLineArgsFile);
  }

  /**
   * Helper class to run as a step before ProGuardObfuscateStep to write out the command-line
   * parameters to a file. The ProGuardObfuscateStep references this file when it runs using
   * ProGuard's '@' syntax. This allows for longer command-lines than would otherwise be supported.
   */
  @VisibleForTesting
  static class CommandLineHelperStep extends AbstractExecutionStep {

    private final ProjectFilesystem filesystem;
    private final AndroidPlatformTarget androidPlatformTarget;
    private final Set<Path> customProguardConfigs;
    private final Map<Path, Path> inputAndOutputEntries;
    private final ImmutableSet<Path> additionalLibraryJarsForProguard;
    private final SdkProguardType sdkProguardConfig;
    private final int optimizationPasses;
    private final Path proguardDirectory;
    private final Path pathToProGuardCommandLineArgsFile;

    /**
     * @param customProguardConfigs Main rule and its dependencies proguard configurations.
     * @param sdkProguardConfig Which proguard config from the Android SDK to use.
     * @param inputAndOutputEntries Map of input/output pairs to proguard. The key represents an
     *     input jar (-injars); the value an output jar (-outjars).
     * @param additionalLibraryJarsForProguard Libraries that are not operated upon by proguard but
     *     needed to resolve symbols.
     * @param proguardDirectory Output directory for various proguard-generated meta artifacts.
     * @param pathToProGuardCommandLineArgsFile Path to file containing arguments to ProGuard.
     */
    private CommandLineHelperStep(
        ProjectFilesystem filesystem,
        AndroidPlatformTarget androidPlatformTarget,
        Set<Path> customProguardConfigs,
        SdkProguardType sdkProguardConfig,
        int optimizationPasses,
        Map<Path, Path> inputAndOutputEntries,
        ImmutableSet<Path> additionalLibraryJarsForProguard,
        Path proguardDirectory,
        Path pathToProGuardCommandLineArgsFile) {
      super("write_proguard_command_line_parameters");
      this.filesystem = filesystem;
      this.androidPlatformTarget = androidPlatformTarget;
      this.customProguardConfigs = ImmutableSet.copyOf(customProguardConfigs);
      this.sdkProguardConfig = sdkProguardConfig;
      this.optimizationPasses = optimizationPasses;
      this.inputAndOutputEntries = ImmutableMap.copyOf(inputAndOutputEntries);
      this.additionalLibraryJarsForProguard = additionalLibraryJarsForProguard;
      this.proguardDirectory = proguardDirectory;
      this.pathToProGuardCommandLineArgsFile = pathToProGuardCommandLineArgsFile;
    }

    @Override
    public StepExecutionResult execute(StepExecutionContext context) throws IOException {
      String proGuardArguments =
          Joiner.on('\n').join(getParameters(filesystem.getRootPath().getPath()));
      filesystem.writeContentsToPath(proGuardArguments, pathToProGuardCommandLineArgsFile);

      return StepExecutionResults.SUCCESS;
    }

    /** @return the list of arguments to pass to ProGuard. */
    @VisibleForTesting
    ImmutableList<String> getParameters(Path workingDirectory) {
      ImmutableList.Builder<String> args = ImmutableList.builder();

      // Relative paths should be interpreted relative to project directory root, not the
      // written parameters file.
      args.add("-basedirectory").add(workingDirectory.toAbsolutePath().toString());

      // -include
      switch (sdkProguardConfig) {
        case OPTIMIZED:
          args.add("-include").add(androidPlatformTarget.getOptimizedProguardConfig().toString());
          args.add("-optimizationpasses").add(String.valueOf(optimizationPasses));
          break;
        case DEFAULT:
          args.add("-include").add(androidPlatformTarget.getProguardConfig().toString());
          break;
        case NONE:
          break;
        default:
          throw new RuntimeException("Illegal value for sdkProguardConfig: " + sdkProguardConfig);
      }
      for (Path proguardConfig : customProguardConfigs) {
        args.add("-include").add("\"" + proguardConfig.toString() + "\"");
      }

      // -injars and -outjars paired together for each input.
      for (Map.Entry<Path, Path> inputOutputEntry : inputAndOutputEntries.entrySet()) {
        args.add("-injars").add(inputOutputEntry.getKey().toString());
        args.add("-outjars").add(inputOutputEntry.getValue().toString());
      }

      // -libraryjars
      Iterable<Path> bootclasspathPaths =
          () ->
              androidPlatformTarget.getBootclasspathEntries().stream()
                  .map(AbsPath::getPath)
                  .iterator();
      Iterable<Path> libraryJars =
          Iterables.concat(bootclasspathPaths, additionalLibraryJarsForProguard);

      char separator = File.pathSeparatorChar;
      args.add("-libraryjars").add(Joiner.on(separator).join(libraryJars));

      // -dump
      args.add("-printmapping").add(getMappingTxt().toString());
      args.add("-printconfiguration").add(getConfigurationTxt().toString());
      args.add("-printseeds").add(getSeedsTxt().toString());
      args.add("-printusage").add(getUsageTxt().toString());

      return args.build();
    }

    public Path getConfigurationTxt() {
      return proguardDirectory.resolve("configuration.txt");
    }

    public Path getMappingTxt() {
      return proguardDirectory.resolve("mapping.txt");
    }

    public Path getSeedsTxt() {
      return proguardDirectory.resolve("seeds.txt");
    }

    public Path getUsageTxt() {
      return proguardDirectory.resolve("usage.txt");
    }

    @Override
    public boolean equals(Object obj) {
      if (!(obj instanceof CommandLineHelperStep)) {
        return false;
      }
      CommandLineHelperStep that = (CommandLineHelperStep) obj;

      return Objects.equal(sdkProguardConfig, that.sdkProguardConfig)
          && Objects.equal(additionalLibraryJarsForProguard, that.additionalLibraryJarsForProguard)
          && Objects.equal(customProguardConfigs, that.customProguardConfigs)
          && Objects.equal(inputAndOutputEntries, that.inputAndOutputEntries)
          && Objects.equal(proguardDirectory, that.proguardDirectory)
          && Objects.equal(
              pathToProGuardCommandLineArgsFile, that.pathToProGuardCommandLineArgsFile);
    }

    @Override
    public int hashCode() {
      return Objects.hashCode(
          sdkProguardConfig,
          additionalLibraryJarsForProguard,
          customProguardConfigs,
          inputAndOutputEntries,
          proguardDirectory,
          pathToProGuardCommandLineArgsFile);
    }
  }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.completion;
import com.intellij.codeInsight.lookup.InsertHandlerDecorator;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupElementBuilder;
import com.intellij.codeInsight.lookup.LookupElementDecorator;
import com.intellij.codeInsight.template.emmet.completion.EmmetAbbreviationCompletionProvider;
import com.intellij.featureStatistics.FeatureUsageTracker;
import com.intellij.ide.highlighter.XmlFileType;
import com.intellij.lang.ASTNode;
import com.intellij.lang.xml.XMLLanguage;
import com.intellij.openapi.actionSystem.IdeActions;
import com.intellij.openapi.editor.CaretModel;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.keymap.KeymapUtil;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.patterns.XmlPatterns;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.PsiReference;
import com.intellij.psi.search.PsiElementProcessor;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.tree.TokenSet;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.*;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ProcessingContext;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.text.CharArrayUtil;
import com.intellij.xml.XmlBundle;
import com.intellij.xml.XmlExtension;
import com.intellij.xml.util.XmlEnumeratedValueReference;
import com.intellij.xml.util.XmlUtil;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import static com.intellij.patterns.PlatformPatterns.psiElement;
import static com.intellij.xml.util.XmlUtil.VALUE_ATTR_NAME;
import static com.intellij.xml.util.XmlUtil.findDescriptorFile;
/**
* @author Dmitry Avdeev
*/
public final class XmlCompletionContributor extends CompletionContributor {
  // Marker set on lookup elements that can coexist with plain word completion.
  public static final Key<Boolean> WORD_COMPLETION_COMPATIBLE = Key.create("WORD_COMPLETION_COMPATIBLE");

  public static final EntityRefInsertHandler ENTITY_INSERT_HANDLER = new EntityRefInsertHandler();
  @NonNls public static final String TAG_NAME_COMPLETION_FEATURE = "tag.name.completion";

  // Insert handler used inside attribute values: when completion is finished with
  // a quote character, it suppresses inserting a second quote and instead steps
  // the caret over the closing quote that is already present in the document.
  private static final InsertHandler<LookupElementDecorator<LookupElement>> QUOTE_EATER = new InsertHandlerDecorator<>() {
    @Override
    public void handleInsert(@NotNull InsertionContext context, @NotNull LookupElementDecorator<LookupElement> item) {
      final char completionChar = context.getCompletionChar();
      if (completionChar == '\'' || completionChar == '\"') {
        // Don't let the platform append the typed quote; delegate the insert first.
        context.setAddCompletionChar(false);
        item.getDelegate().handleInsert(context);

        final Editor editor = context.getEditor();
        final Document document = editor.getDocument();
        int tailOffset = editor.getCaretModel().getOffset();
        if (document.getTextLength() > tailOffset) {
          final char c = document.getCharsSequence().charAt(tailOffset);
          // Skip over an existing closing quote rather than duplicating it.
          if (c == completionChar || completionChar == '\'') {
            editor.getCaretModel().moveToOffset(tailOffset + 1);
          }
        }
      }
      else {
        item.getDelegate().handleInsert(context);
      }
    }
  };
  /**
   * Registers the XML-specific completion providers: Emmet abbreviations,
   * entity references ({@code &...;}), attribute-value completion (with word
   * fallback), and schema-enumerated text values.
   */
  public XmlCompletionContributor() {
    extend(CompletionType.BASIC, psiElement().inside(XmlPatterns.xmlFile()), new EmmetAbbreviationCompletionProvider());

    // Entity-reference completion: triggers on text/attribute-value tokens that
    // follow or contain an ampersand.
    extend(CompletionType.BASIC, psiElement().inside(XmlPatterns.xmlFile()), new CompletionProvider<>() {
      @Override
      protected void addCompletions(@NotNull CompletionParameters parameters,
                                    @NotNull ProcessingContext context,
                                    @NotNull CompletionResultSet result) {
        PsiElement position = parameters.getPosition();
        IElementType type = position.getNode().getElementType();
        if (type != XmlTokenType.XML_DATA_CHARACTERS && type != XmlTokenType.XML_ATTRIBUTE_VALUE_TOKEN) {
          return;
        }
        if ((position.getPrevSibling() != null && position.getPrevSibling().textMatches("&")) || position.textContains('&')) {
          PrefixMatcher matcher = result.getPrefixMatcher();
          String prefix = matcher.getPrefix();
          // Strip everything up to and including the '&' so only the entity
          // name is matched against the available entities.
          if (prefix.startsWith("&")) {
            prefix = prefix.substring(1);
          }
          else if (prefix.contains("&")) {
            prefix = prefix.substring(prefix.indexOf("&") + 1);
          }

          addEntityRefCompletions(position, result.withPrefixMatcher(prefix));
        }
      }
    });

    // Attribute-value completion: wraps every other contributor's element with
    // QUOTE_EATER and falls back to plain word completion when nothing claims
    // the position.
    extend(CompletionType.BASIC,
           psiElement().inside(XmlPatterns.xmlAttributeValue()),
           new CompletionProvider<>() {
             @Override
             protected void addCompletions(@NotNull CompletionParameters parameters,
                                           @NotNull ProcessingContext context,
                                           @NotNull final CompletionResultSet result) {
               final PsiElement position = parameters.getPosition();
               if (!position.getLanguage().isKindOf(XMLLanguage.INSTANCE)) {
                 return;
               }

               final XmlAttributeValue attributeValue = PsiTreeUtil.getParentOfType(position, XmlAttributeValue.class, false);
               if (attributeValue == null) {
                 // we are injected, only getContext() returns attribute value
                 return;
               }

               final Set<String> usedWords = new HashSet<>();
               final Ref<Boolean> addWordVariants = Ref.create(true);
               result.runRemainingContributors(parameters, r -> {
                 // Any element not explicitly marked compatible suppresses the
                 // word-completion fallback.
                 if (r.getLookupElement().getUserData(WORD_COMPLETION_COMPATIBLE) == null) {
                   addWordVariants.set(false);
                 }
                 usedWords.add(r.getLookupElement().getLookupString());
                 result.passResult(r.withLookupElement(LookupElementDecorator.withInsertHandler(r.getLookupElement(), QUOTE_EATER)));
               });
               if (addWordVariants.get().booleanValue()) {
                 addWordVariants.set(attributeValue.getReferences().length == 0);
               }

               if (addWordVariants.get().booleanValue() && parameters.getInvocationCount() > 0) {
                 WordCompletionContributor.addWordCompletionVariants(result, parameters, usedWords);
               }
             }
           });

    // Text completion from the schema's simpleContent enumeration values.
    extend(CompletionType.BASIC, psiElement().withElementType(XmlTokenType.XML_DATA_CHARACTERS),
           new CompletionProvider<>() {
             @Override
             protected void addCompletions(@NotNull CompletionParameters parameters,
                                           @NotNull ProcessingContext context,
                                           @NotNull CompletionResultSet result) {
               XmlTag tag = PsiTreeUtil.getParentOfType(parameters.getPosition(), XmlTag.class, false);
               if (tag != null && !hasEnumerationReference(parameters, result)) {
                 final XmlTag simpleContent = XmlUtil.getSchemaSimpleContent(tag);
                 if (simpleContent != null) {
                   XmlUtil.processEnumerationValues(simpleContent, (element) -> {
                     String value = element.getAttributeValue(VALUE_ATTR_NAME);
                     assert value != null;
                     result.addElement(LookupElementBuilder.create(value));
                     return true;
                   });
                 }
               }
             }
           });
  }
static boolean hasEnumerationReference(CompletionParameters parameters, CompletionResultSet result) {
Ref<Boolean> hasRef = Ref.create(false);
LegacyCompletionContributor.processReferences(parameters, result, (reference, resultSet) -> {
if (reference instanceof XmlEnumeratedValueReference) {
hasRef.set(true);
}
});
return hasRef.get();
}
  /** Returns {@code true} when the caret sits on an XML_NAME token (tag/attribute name position). */
  public static boolean isXmlNameCompletion(final CompletionParameters parameters) {
    final ASTNode node = parameters.getPosition().getNode();
    return node != null && node.getElementType() == XmlTokenType.XML_NAME;
  }
@Override
public void fillCompletionVariants(@NotNull final CompletionParameters parameters, @NotNull final CompletionResultSet result) {
super.fillCompletionVariants(parameters, result);
if (result.isStopped()) {
return;
}
final PsiElement element = parameters.getPosition();
if (parameters.isExtendedCompletion()) {
completeTagName(parameters, result);
}
else if (parameters.getCompletionType() == CompletionType.SMART) {
new XmlSmartCompletionProvider().complete(parameters, result, element);
}
}
  /**
   * Offers tag names from all known namespaces for the tag under the caret.
   * When the tag already resolves in a prefixed namespace, simple completion is
   * rerun instead; otherwise the available tag names are collected from the
   * file's {@link XmlExtension} and added with namespace-aware insert handlers.
   */
  static void completeTagName(CompletionParameters parameters, CompletionResultSet result) {
    PsiElement element = parameters.getPosition();
    if (!isXmlNameCompletion(parameters)) return;
    PsiElement parent = element.getParent();
    if (!(parent instanceof XmlTag) ||
        !(parameters.getOriginalFile() instanceof XmlFile)) {
      return;
    }
    // This contributor fully owns the result set from here on.
    result.stopHere();
    final XmlTag tag = (XmlTag)parent;
    final String namespace = tag.getNamespace();
    final String prefix = result.getPrefixMatcher().getPrefix();
    final int pos = prefix.indexOf(':');

    final PsiReference reference = tag.getReference();
    String namespacePrefix = tag.getNamespacePrefix();

    if (reference != null && !namespace.isEmpty() && !namespacePrefix.isEmpty()) {
      // fallback to simple completion
      result.runRemainingContributors(parameters, true);
    }
    else {
      // Match only the local part after a "prefix:" if one was typed.
      final CompletionResultSet newResult = result.withPrefixMatcher(pos >= 0 ? prefix.substring(pos + 1) : prefix);

      final XmlFile file = (XmlFile)parameters.getOriginalFile();
      final List<XmlExtension.TagInfo> names = XmlExtension.getExtension(file).getAvailableTagNames(file, tag);
      for (XmlExtension.TagInfo info : names) {
        final LookupElement item = createLookupElement(info, info.namespace, namespacePrefix.isEmpty() ? null : namespacePrefix);
        newResult.addElement(item);
      }
    }
  }
/**
 * Builds a lookup element for a tag name whose insertion is handled by
 * {@link ExtendedTagInsertHandler} (which can also introduce the namespace declaration).
 *
 * @param tagInfo         the tag name/namespace pair to offer
 * @param tailText        shown greyed after the item (typically the namespace); may be empty
 * @param namespacePrefix prefix to use on insertion, or {@code null} for none
 */
public static LookupElement createLookupElement(XmlExtension.TagInfo tagInfo,
                                                final String tailText, @Nullable String namespacePrefix) {
  final ExtendedTagInsertHandler handler =
    new ExtendedTagInsertHandler(tagInfo.name, tagInfo.namespace, namespacePrefix);
  final LookupElementBuilder element = LookupElementBuilder.create(tagInfo, tagInfo.name).withInsertHandler(handler);
  if (StringUtil.isEmpty(tailText)) {
    return element;
  }
  return element.withTypeText(tailText, true);
}
@Override
public String advertise(@NotNull final CompletionParameters parameters) {
  // Advertise the extended tag-name completion only while doing basic completion on a tag name,
  // and only until the feature-usage tracker decides the user has seen it enough.
  final boolean onTagName =
    isXmlNameCompletion(parameters) && parameters.getCompletionType() == CompletionType.BASIC;
  if (onTagName
      && FeatureUsageTracker.getInstance().isToBeAdvertisedInLookup(TAG_NAME_COMPLETION_FEATURE, parameters.getPosition().getProject())) {
    final String shortcut = KeymapUtil.getFirstKeyboardShortcutText(IdeActions.ACTION_CODE_COMPLETION);
    return XmlBundle.message("tag.name.completion.hint", shortcut);
  }
  return super.advertise(parameters);
}
@Override
public void beforeCompletion(@NotNull final CompletionInitializationContext context) {
  final int offset = context.getStartOffset();
  final PsiFile file = context.getFile();
  // When the caret sits exactly at the start of an attribute value, suppress the dummy
  // identifier — presumably to keep it out of the quoted value; TODO confirm.
  final XmlAttributeValue attributeValue = PsiTreeUtil.findElementOfClassAtOffset(file, offset, XmlAttributeValue.class, true);
  if (attributeValue != null && offset == attributeValue.getTextRange().getStartOffset()) {
    context.setDummyIdentifier("");
  }
  // Completing inside an attribute name: extend the replaced range to the end of that name token.
  final PsiElement at = file.findElementAt(offset);
  if (at != null && at.getNode().getElementType() == XmlTokenType.XML_NAME && at.getParent() instanceof XmlAttribute) {
    context.getOffsetMap().addOffset(CompletionInitializationContext.IDENTIFIER_END_OFFSET, at.getTextRange().getEndOffset());
  }
  // Inside an attribute value that extends beyond the caret's line, clamp the replacement
  // range to the end of the current line.
  if (at != null && at.getParent() instanceof XmlAttributeValue) {
    final int end = at.getParent().getTextRange().getEndOffset();
    final Document document = context.getEditor().getDocument();
    final int lineEnd = document.getLineEndOffset(document.getLineNumber(offset));
    if (lineEnd < end) {
      context.setReplacementOffset(lineEnd);
    }
  }
}
/**
 * Collects entity declarations visible from {@code context} and adds a lookup item
 * for each one to {@code resultSet}.
 * Sources searched: the char-entity DTDs from the file's {@link XmlExtension}, the
 * descriptor file of the enclosing tag (as a fallback when no DTDs were found),
 * and — for plain XML files — the document prolog.
 */
private static void addEntityRefCompletions(PsiElement context, CompletionResultSet resultSet) {
  XmlFile containingFile = ObjectUtils.tryCast(context.getContainingFile(), XmlFile.class);
  if (containingFile == null) {
    return;
  }
  List<XmlFile> descriptorFiles = XmlExtension.getExtension(containingFile).getCharEntitiesDTDs(containingFile);
  final XmlTag tag = PsiTreeUtil.getParentOfType(context, XmlTag.class);
  if (tag != null && descriptorFiles.isEmpty()) {
    // No char-entity DTDs: fall back to the tag's descriptor file, if any.
    descriptorFiles = ContainerUtil.packNullables(findDescriptorFile(tag, containingFile));
  }
  // System (external) entities are only offered for plain XML files.
  final boolean acceptSystemEntities = containingFile.getFileType() == XmlFileType.INSTANCE;
  final PsiElementProcessor<PsiElement> processor = new PsiElementProcessor<>() {
    @Override
    public boolean execute(@NotNull final PsiElement element) {
      if (element instanceof XmlEntityDecl) {
        final XmlEntityDecl xmlEntityDecl = (XmlEntityDecl)element;
        if (xmlEntityDecl.isInternalReference() || acceptSystemEntities) {
          final LookupElementBuilder _item = buildEntityLookupItem(xmlEntityDecl);
          if (_item != null) {
            resultSet.addElement(_item);
            // A match was produced; keep other contributors from adding unrelated items.
            resultSet.stopHere();
          }
        }
      }
      // Always continue the traversal to collect every declaration.
      return true;
    }
  };
  for (XmlFile descriptorFile: descriptorFiles) {
    XmlUtil.processXmlElements(descriptorFile, processor, true);
  }
  // Entities may also be declared inline in the document prolog (internal DTD subset).
  final XmlDocument document = containingFile.getDocument();
  if (acceptSystemEntities && document != null) {
    final XmlProlog element = document.getProlog();
    if (element != null) XmlUtil.processXmlElements(element, processor, true);
  }
}
/**
 * Builds a lookup element for an entity declaration.
 * If the declaration's value is a single character-entity reference of the form
 * {@code &#NNN;}, the decoded character is attached as type text and as an extra
 * lookup string so the user can search by the character itself.
 *
 * @param decl the entity declaration to present
 * @return the lookup element, or {@code null} when the declaration has no name
 */
@Nullable
private static LookupElementBuilder buildEntityLookupItem(@NotNull final XmlEntityDecl decl) {
  final String name = decl.getName();
  if (name == null) {
    return null;
  }
  final LookupElementBuilder result = LookupElementBuilder.create(name).withInsertHandler(ENTITY_INSERT_HANDLER);
  final XmlAttributeValue value = decl.getValueElement();
  if (value == null) {
    // External/parameter entities may have no value element; offer the bare name.
    return result;
  }
  final ASTNode node = value.getNode();
  if (node != null) {
    final ASTNode[] nodes = node.getChildren(TokenSet.create(XmlTokenType.XML_CHAR_ENTITY_REF));
    if (nodes.length == 1) {
      final String valueText = nodes[0].getText();
      final int i = valueText.indexOf('#');
      if (i > 0) {
        String s = valueText.substring(i + 1);
        s = StringUtil.trimEnd(s, ";");
        try {
          // Decimal references only; hex ("#x41") fails parsing and falls through below.
          final char unicodeChar = (char)Integer.parseInt(s);
          return result.withTypeText(String.valueOf(unicodeChar)).withLookupString(String.valueOf(unicodeChar));
        }
        catch (NumberFormatException e) {
          return result;
        }
      }
    }
  }
  return result;
}
/**
 * Insert handler for entity-reference completion: appends the terminating {@code ';'}
 * (unless one already follows the caret) and places the caret behind it.
 */
private static class EntityRefInsertHandler extends BasicInsertHandler<LookupElement> {
  @Override
  public void handleInsert(@NotNull InsertionContext context, @NotNull LookupElement item) {
    super.handleInsert(context, item);
    // The typed completion char must not be inserted on top of the ';' we manage here.
    context.setAddCompletionChar(false);
    final Editor editor = context.getEditor();
    final Document document = editor.getDocument();
    final CaretModel caret = editor.getCaretModel();
    final int offset = caret.getOffset();
    final boolean semicolonAlreadyThere =
      CharArrayUtil.regionMatches(document.getCharsSequence(), offset, ";");
    if (!semicolonAlreadyThere) {
      document.insertString(offset, ";");
    }
    caret.moveToOffset(offset + 1);
  }
}
}
| |
/**
* Copyright (c) 2000-present Liferay, Inc. All rights reserved.
*
* This library is free software; you can redistribute it and/or modify it under
* the terms of the GNU Lesser General Public License as published by the Free
* Software Foundation; either version 2.1 of the License, or (at your option)
* any later version.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
* details.
*/
package org.oep.processmgt.model;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
/**
* This class is used by SOAP remote services, specifically {@link org.oep.processmgt.service.http.StepTransitionServiceSoap}.
*
* @author trungdk
* @see org.oep.processmgt.service.http.StepTransitionServiceSoap
* @generated
*/
public class StepTransitionSoap implements Serializable {

    /** Converts one service-layer model into its SOAP transport form. */
    public static StepTransitionSoap toSoapModel(StepTransition model) {
        StepTransitionSoap soap = new StepTransitionSoap();

        soap.setStepTransitionId(model.getStepTransitionId());
        soap.setUserId(model.getUserId());
        soap.setGroupId(model.getGroupId());
        soap.setCompanyId(model.getCompanyId());
        soap.setCreateDate(model.getCreateDate());
        soap.setModifiedDate(model.getModifiedDate());
        soap.setDossierProcessId(model.getDossierProcessId());
        soap.setPreDossierStepId(model.getPreDossierStepId());
        soap.setPostDossierStepId(model.getPostDossierStepId());
        soap.setAutoCondition(model.getAutoCondition());
        soap.setTransitionName(model.getTransitionName());
        soap.setDossierStatus(model.getDossierStatus());
        soap.setSendResults(model.getSendResults());
        soap.setUserAssignment(model.getUserAssignment());
        soap.setNewProcessOrder(model.getNewProcessOrder());

        return soap;
    }

    /** Converts an array of models element-by-element. */
    public static StepTransitionSoap[] toSoapModels(StepTransition[] models) {
        StepTransitionSoap[] soapModels = new StepTransitionSoap[models.length];

        for (int index = 0; index < models.length; index++) {
            soapModels[index] = toSoapModel(models[index]);
        }

        return soapModels;
    }

    /** Converts a two-dimensional array of models row-by-row. */
    public static StepTransitionSoap[][] toSoapModels(StepTransition[][] models) {
        StepTransitionSoap[][] soapModels = null;

        // Pre-size from the first row; every row reference is replaced below anyway.
        if (models.length > 0) {
            soapModels = new StepTransitionSoap[models.length][models[0].length];
        }
        else {
            soapModels = new StepTransitionSoap[0][0];
        }

        for (int row = 0; row < models.length; row++) {
            soapModels[row] = toSoapModels(models[row]);
        }

        return soapModels;
    }

    /** Converts a list of models into an array of SOAP models. */
    public static StepTransitionSoap[] toSoapModels(List<StepTransition> models) {
        List<StepTransitionSoap> soapModels = new ArrayList<StepTransitionSoap>(models.size());

        for (StepTransition model : models) {
            soapModels.add(toSoapModel(model));
        }

        return soapModels.toArray(new StepTransitionSoap[soapModels.size()]);
    }

    public StepTransitionSoap() {
    }

    /** The primary key is the step transition id. */
    public long getPrimaryKey() {
        return _stepTransitionId;
    }

    public void setPrimaryKey(long pk) {
        setStepTransitionId(pk);
    }

    public long getStepTransitionId() {
        return _stepTransitionId;
    }

    public void setStepTransitionId(long stepTransitionId) {
        _stepTransitionId = stepTransitionId;
    }

    public long getUserId() {
        return _userId;
    }

    public void setUserId(long userId) {
        _userId = userId;
    }

    public long getGroupId() {
        return _groupId;
    }

    public void setGroupId(long groupId) {
        _groupId = groupId;
    }

    public long getCompanyId() {
        return _companyId;
    }

    public void setCompanyId(long companyId) {
        _companyId = companyId;
    }

    public Date getCreateDate() {
        return _createDate;
    }

    public void setCreateDate(Date createDate) {
        _createDate = createDate;
    }

    public Date getModifiedDate() {
        return _modifiedDate;
    }

    public void setModifiedDate(Date modifiedDate) {
        _modifiedDate = modifiedDate;
    }

    public long getDossierProcessId() {
        return _dossierProcessId;
    }

    public void setDossierProcessId(long dossierProcessId) {
        _dossierProcessId = dossierProcessId;
    }

    public long getPreDossierStepId() {
        return _preDossierStepId;
    }

    public void setPreDossierStepId(long preDossierStepId) {
        _preDossierStepId = preDossierStepId;
    }

    public long getPostDossierStepId() {
        return _postDossierStepId;
    }

    public void setPostDossierStepId(long postDossierStepId) {
        _postDossierStepId = postDossierStepId;
    }

    public String getAutoCondition() {
        return _autoCondition;
    }

    public void setAutoCondition(String autoCondition) {
        _autoCondition = autoCondition;
    }

    public String getTransitionName() {
        return _transitionName;
    }

    public void setTransitionName(String transitionName) {
        _transitionName = transitionName;
    }

    public String getDossierStatus() {
        return _dossierStatus;
    }

    public void setDossierStatus(String dossierStatus) {
        _dossierStatus = dossierStatus;
    }

    public int getSendResults() {
        return _sendResults;
    }

    public void setSendResults(int sendResults) {
        _sendResults = sendResults;
    }

    public int getUserAssignment() {
        return _userAssignment;
    }

    public void setUserAssignment(int userAssignment) {
        _userAssignment = userAssignment;
    }

    public int getNewProcessOrder() {
        return _newProcessOrder;
    }

    public void setNewProcessOrder(int newProcessOrder) {
        _newProcessOrder = newProcessOrder;
    }

    private long _stepTransitionId;
    private long _userId;
    private long _groupId;
    private long _companyId;
    private Date _createDate;
    private Date _modifiedDate;
    private long _dossierProcessId;
    private long _preDossierStepId;
    private long _postDossierStepId;
    private String _autoCondition;
    private String _transitionName;
    private String _dossierStatus;
    private int _sendResults;
    private int _userAssignment;
    private int _newProcessOrder;
}
| |
/*
* $Id$
*
* SARL is an general-purpose agent programming language.
* More details on http://www.sarl.io
*
* Copyright (C) 2014-2021 the original authors or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.sarl.lang.core.tests.scoping.extensions.numbers.cast.longobject;
import static io.sarl.tests.api.tools.TestUtils.multilineString;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Tag;
import io.sarl.lang.SARLVersion;
import io.sarl.lang.sarl.SarlPackage;
import io.sarl.tests.api.AbstractSarlTest;
import io.sarl.tests.api.globalcompilation.GlobalCompilationSuite;
import io.sarl.tests.api.globalcompilation.GlobalCompilationTestContribution;
import io.sarl.tests.api.globalcompilation.ResourceSetGlobalCompilationContext;
/**
* @author $Author: sgalland$
* @version $FullVersion$
* @mavengroupid $GroupId$
* @mavenartifactid $ArtifactId$
* @see "https://github.com/eclipse/xtext-extras/issues/186"
*/
@SuppressWarnings("all")
@DisplayName("Compiling Long cast operator")
@GlobalCompilationSuite
@Tag("core")
@Tag("compileToJava")
public class CompilerTest extends AbstractSarlTest {

    // Each method below compiles a one-method SARL class that casts a Long and
    // asserts the exact Java source the SARL compiler is expected to generate.
    // The expected strings are the specification; do not edit them casually.

    /** {@code Long as byte} must generate a null-safe {@code byteValue()} call. */
    @GlobalCompilationTestContribution
    public void as_byte(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : byte {",
            " left as byte",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public byte fct(final Long left) {",
            " return (left == null ? 0 : left.byteValue());",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as short} must generate a null-safe {@code shortValue()} call. */
    @GlobalCompilationTestContribution
    public void as_short(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : short {",
            " left as short",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public short fct(final Long left) {",
            " return (left == null ? 0 : left.shortValue());",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as int} must generate a null-safe {@code intValue()} call. */
    @GlobalCompilationTestContribution
    public void as_int(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : int {",
            " left as int",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public int fct(final Long left) {",
            " return (left == null ? 0 : left.intValue());",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as long} must generate a null-safe {@code longValue()} call (note the extra parentheses in this variant). */
    @GlobalCompilationTestContribution
    public void as_long(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : long {",
            " left as long",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public long fct(final Long left) {",
            " return ((left) == null ? 0 : (left).longValue());",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /**
     * {@code Long as float}: the expected output calls {@code longValue()} (not
     * {@code floatValue()}) and appears to rely on Java's implicit long-to-float
     * widening — this mirrors current compiler output; see the linked xtext issue.
     */
    @GlobalCompilationTestContribution
    public void as_float(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : float {",
            " left as float",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public float fct(final Long left) {",
            " return ((left) == null ? 0 : (left).longValue());",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as double}: like {@code as_float}, the expected output uses {@code longValue()} plus implicit widening. */
    @GlobalCompilationTestContribution
    public void as_double(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : double {",
            " left as double",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public double fct(final Long left) {",
            " return ((left) == null ? 0 : (left).longValue());",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Byte} must generate a null-propagating {@code Byte.valueOf(byteValue())}. */
    @GlobalCompilationTestContribution
    public void as_Byte(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Byte {",
            " left as Byte",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Byte fct(final Long left) {",
            " return (left == null ? null : Byte.valueOf(left.byteValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Short} must generate a null-propagating {@code Short.valueOf(shortValue())}. */
    @GlobalCompilationTestContribution
    public void as_Short(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Short {",
            " left as Short",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Short fct(final Long left) {",
            " return (left == null ? null : Short.valueOf(left.shortValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Integer} must generate a null-propagating {@code Integer.valueOf(intValue())}. */
    @GlobalCompilationTestContribution
    public void as_Integer(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Integer {",
            " left as Integer",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Integer fct(final Long left) {",
            " return (left == null ? null : Integer.valueOf(left.intValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Long} is an identity cast: the value is returned unchanged. */
    @GlobalCompilationTestContribution
    public void as_Long(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Long {",
            " left as Long",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Long fct(final Long left) {",
            " return left;",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Float} must generate a null-propagating {@code Float.valueOf(floatValue())}. */
    @GlobalCompilationTestContribution
    public void as_Float(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Float {",
            " left as Float",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Float fct(final Long left) {",
            " return (left == null ? null : Float.valueOf(left.floatValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Double} must generate a null-propagating {@code Double.valueOf(doubleValue())}. */
    @GlobalCompilationTestContribution
    public void as_Double(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Double {",
            " left as Double",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Double fct(final Long left) {",
            " return (left == null ? null : Double.valueOf(left.doubleValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as AtomicInteger} must wrap the int value in a new {@code AtomicInteger} (null-propagating). */
    @GlobalCompilationTestContribution
    public void as_AtomicInteger(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "import java.util.concurrent.atomic.AtomicInteger",
            "class A {",
            " def fct(left : Long) : AtomicInteger {",
            " left as AtomicInteger",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import java.util.concurrent.atomic.AtomicInteger;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public AtomicInteger fct(final Long left) {",
            " return (left == null ? null : new AtomicInteger(left.intValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as AtomicLong} must wrap the long value in a new {@code AtomicLong} (null-propagating). */
    @GlobalCompilationTestContribution
    public void as_AtomicLong(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "import java.util.concurrent.atomic.AtomicLong",
            "class A {",
            " def fct(left : Long) : AtomicLong {",
            " left as AtomicLong",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import java.util.concurrent.atomic.AtomicLong;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public AtomicLong fct(final Long left) {",
            " return (left == null ? null : new AtomicLong(left.longValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as AtomicDouble} (Guava) must wrap the double value in a new {@code AtomicDouble} (null-propagating). */
    @GlobalCompilationTestContribution
    public void as_AtomicDouble(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "import com.google.common.util.concurrent.AtomicDouble",
            "class A {",
            " def fct(left : Long) : AtomicDouble {",
            " left as AtomicDouble",
            " }",
            "}"),
            multilineString(
            "import com.google.common.util.concurrent.AtomicDouble;",
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public AtomicDouble fct(final Long left) {",
            " return (left == null ? null : new AtomicDouble(left.doubleValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as BigInteger} must generate a null-propagating {@code BigInteger.valueOf(longValue())}. */
    @GlobalCompilationTestContribution
    public void as_BigInteger(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "import java.math.BigInteger",
            "class A {",
            " def fct(left : Long) : BigInteger {",
            " left as BigInteger",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import java.math.BigInteger;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public BigInteger fct(final Long left) {",
            " return (left == null ? null : BigInteger.valueOf(left.longValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as BigDecimal}: the expected output goes through {@code doubleValue()} into {@code BigDecimal.valueOf}. */
    @GlobalCompilationTestContribution
    public void as_BigDecimal(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "import java.math.BigDecimal",
            "class A {",
            " def fct(left : Long) : BigDecimal {",
            " left as BigDecimal",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import java.math.BigDecimal;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public BigDecimal fct(final Long left) {",
            " return (left == null ? null : BigDecimal.valueOf(left.doubleValue()));",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }

    /** {@code Long as Number} is a plain upcast: the value is returned unchanged. */
    @GlobalCompilationTestContribution
    public void as_Number(ResourceSetGlobalCompilationContext ctx) throws Exception {
        ctx.compileTo(multilineString(
            "class A {",
            " def fct(left : Long) : Number {",
            " left as Number",
            " }",
            "}"),
            multilineString(
            "import io.sarl.lang.annotation.SarlElementType;",
            "import io.sarl.lang.annotation.SarlSpecification;",
            "import io.sarl.lang.annotation.SyntheticMember;",
            "import org.eclipse.xtext.xbase.lib.Pure;",
            "",
            "@SarlSpecification(\"" + SARLVersion.SPECIFICATION_RELEASE_VERSION_STRING + "\")",
            "@SarlElementType(" + SarlPackage.SARL_CLASS + ")",
            "@SuppressWarnings(\"all\")",
            "public class A {",
            " @Pure",
            " public Number fct(final Long left) {",
            " return left;",
            " }",
            " ",
            " @SyntheticMember",
            " public A() {",
            " super();",
            " }",
            "}",
            ""));
    }
}
| |
package com.ibm.util.merge.template.content;
import static org.junit.Assert.*;
import java.io.ByteArrayOutputStream;
import java.util.HashMap;
import org.junit.Before;
import org.junit.Test;
import com.ibm.util.merge.exception.Merge500;
import com.ibm.util.merge.exception.MergeException;
import com.ibm.util.merge.template.Wrapper;
public class ContentTest {
// Sample template mixing literal JSON text with <bookmark=...> directives;
// used as the parse workload by testContentParseVsMergableTime below.
String complex = "[{\"companies\":[\n\t<bookmark=\"company\" group=\"test\" template=\"company\">\n\t]\n},\n<bookmark=\"employees.file\" group=\"test\" template=\"allEmployees\">,\n<bookmark=\"owners.file\" group=\"test\" template=\"allOwners\">\n]";
@Before
public void setUp() throws Exception {
    // No shared fixture is needed; each test constructs its own Content instance.
}
/**
 * Verifies that cloning an already-parsed template via {@code getMergable()} is
 * faster than re-parsing the template source the same number of times.
 * Uses primitive {@code long} for millisecond timestamps to avoid pointless autoboxing.
 */
@Test
public void testContentParseVsMergableTime() throws MergeException {
    Content test = new Content("{", "}", complex, TagSegment.ENCODE_NONE);
    long startParse = System.currentTimeMillis();
    for (int x = 1; x < 10000; x++) {
        test = new Content("{", "}", complex, TagSegment.ENCODE_NONE);
    }
    assertEquals(complex, test.getValue());
    long endParse = System.currentTimeMillis();
    test = new Content("{", "}", complex, TagSegment.ENCODE_NONE);
    long startMergable = System.currentTimeMillis();
    for (int x = 1; x < 10000; x++) {
        test = test.getMergable();
    }
    long endMergable = System.currentTimeMillis();
    // NOTE(review): wall-clock comparisons like this can be flaky on loaded CI hosts;
    // consider a generous ratio or an @Ignore-able performance category if it misbehaves.
    assertTrue((endMergable - startMergable) < (endParse - startParse));
}
@Test
public void testContent() throws Merge500 {
    // Construction from a Wrapper exposes the same open/close markers and plain text.
    Wrapper wrapper = new Wrapper("{", "}");
    Content content = new Content(wrapper, "This is a test string", TagSegment.ENCODE_NONE);
    assertEquals("This is a test string", content.getValue());
    assertEquals("This is a test string", content.getSource());
    assertEquals("{", content.getOpen());
    assertEquals("}", content.getClose());
    assertEquals(0, content.getBookmarks().size());
    assertEquals(0, content.getTags().size());
}
@Test
public void testContent1() throws Merge500 {
    // Direct open/close construction behaves like the Wrapper-based constructor.
    Content content = new Content("{", "}", "This is a test string", TagSegment.ENCODE_NONE);
    assertEquals("This is a test string", content.getValue());
    assertEquals("This is a test string", content.getSource());
    assertEquals("{", content.getOpen());
    assertEquals("}", content.getClose());
    assertEquals(0, content.getBookmarks().size());
    assertEquals(0, content.getTags().size());
}
@Test
public void testContent2() throws Merge500 {
    // A single {test} tag is detected while the raw value stays verbatim.
    Content content = new Content("{", "}", "This is a {test} string", TagSegment.ENCODE_NONE);
    assertEquals("This is a {test} string", content.getValue());
    assertEquals(1, content.getTags().size());
    assertEquals("test", content.getTags().get(0).getTag());
}
@Test
public void testContent3() throws Merge500 {
    // Multiple bare tags are detected in document order.
    Content content = new Content("{", "}", "This is a {test} string with {multiple} tags", TagSegment.ENCODE_NONE);
    assertEquals("This is a {test} string with {multiple} tags", content.getValue());
    assertEquals(2, content.getTags().size());
    assertEquals("test", content.getTags().get(0).getTag());
    assertEquals("multiple", content.getTags().get(1).getTag());
}
@Test
public void testContent4() throws Merge500 {
    // The explicit tag="name" form is parsed the same as the bare {name} form.
    Content content = new Content("{", "}", "This is a {test} string with {tag=\"multiple\"} tags", TagSegment.ENCODE_NONE);
    assertEquals("This is a {test} string with {tag=\"multiple\"} tags", content.getValue());
    assertEquals(2, content.getTags().size());
    assertEquals("test", content.getTags().get(0).getTag());
    assertEquals("multiple", content.getTags().get(1).getTag());
}
@Test
public void testContent5() throws Merge500 {
    // Bookmark directives are collected separately and removed from the rendered value.
    Content content = new Content("{", "}",
        "This is a {test} string with {tag=\"multiple\"} tags and a {bookmark=\"foo\" group=\"grp\" template=\"bar\"} bookmark",
        TagSegment.ENCODE_NONE);
    assertEquals("This is a {test} string with {tag=\"multiple\"} tags and a bookmark", content.getValue());
    assertEquals(2, content.getTags().size());
    assertEquals("test", content.getTags().get(0).getTag());
    assertEquals("multiple", content.getTags().get(1).getTag());
    assertEquals(1, content.getBookmarks().size());
    assertEquals("foo", content.getBookmarks().get(0).getBookmarkName());
}
@Test
public void testContent6() throws Merge500 {
    // Content that is nothing but a tag: the tag is also the first segment.
    Content content = new Content("{", "}", "{test}", TagSegment.ENCODE_NONE);
    assertEquals("{test}", content.getValue());
    assertEquals(1, content.getTags().size());
    assertEquals("test", content.getTags().get(0).getTag());
    assertEquals("{test}", content.getFirst().getValue());
}
@Test
public void testContent7() throws Merge500 {
    // Leading text before a tag: the tag ends up as the last segment.
    Content content = new Content("{", "}", "a{test}", TagSegment.ENCODE_NONE);
    assertEquals("a{test}", content.getValue());
    assertEquals(1, content.getTags().size());
    assertEquals("test", content.getTags().get(0).getTag());
    assertEquals("{test}", content.getLast().getValue());
}
@Test
public void testContent8() throws Merge500 {
// Trailing literal text after a tag: the tag is the first segment.
Content test = new Content("{","}","{test}b", TagSegment.ENCODE_NONE);
assertEquals("{test}b", test.getValue());
assertEquals(1, test.getTags().size());
assertEquals("test", test.getTags().get(0).getTag());
assertEquals("{test}", test.getFirst().getValue());
}
@Test
public void testContent9() throws Merge500 {
    // Three back-to-back tags with no literal text between them:
    // first/last segment accessors line up with the parsed tag list.
    Content content = new Content("{", "}", "{test1}{test2}{test3}", TagSegment.ENCODE_NONE);
    assertEquals("{test1}{test2}{test3}", content.getValue());
    assertEquals("{test1}", content.getFirst().getValue());
    assertEquals("{test3}", content.getLast().getValue());
    assertEquals(3, content.getTags().size());
    assertEquals("test2", content.getTags().get(1).getTag());
}
@Test
public void testContent10() throws Merge500 {
// Multi-character open/close delimiters ({-- and --}) parse the same way as single chars.
Content test = new Content("{--","--}","{--test1--}{--test2--}{--test3--}", TagSegment.ENCODE_NONE);
assertEquals("{--test1--}{--test2--}{--test3--}", test.getValue());
assertEquals(3, test.getTags().size());
assertEquals("test2", test.getTags().get(1).getTag());
assertEquals("{--test1--}", test.getFirst().getValue());
assertEquals("{--test3--}", test.getLast().getValue());
}
@Test
public void testGetMergable() throws Merge500 {
    // getMergable() must produce a copy whose value, segment chain, tags and
    // bookmarks are all identical to the source content. Pass 0 checks the
    // original; pass 1 checks the mergable copy against the same expectations.
    String[] expectedSegments = {
            "This is a ", "{test}", " string with ", "{tag=\"multiple\"}",
            " tags and a ", "", " bookmark"
    };
    Content test = new Content("{","}","This is a {test} string with {tag=\"multiple\"} tags and a {bookmark=\"foo\" group=\"grp\" template=\"bar\"} bookmark", TagSegment.ENCODE_NONE);
    for (int pass = 0; pass < 2; pass++) {
        if (pass == 1) {
            test = test.getMergable();
        }
        assertEquals("This is a {test} string with {tag=\"multiple\"} tags and a bookmark", test.getValue());
        Segment seg = test.getFirst();
        for (int i = 0; i < expectedSegments.length; i++) {
            assertEquals(expectedSegments[i], seg.getValue());
            if (i < expectedSegments.length - 1) {
                seg = seg.getNext();
            }
        }
        assertEquals(2, test.getTags().size());
        assertEquals("test", test.getTags().get(0).getTag());
        assertEquals("multiple", test.getTags().get(1).getTag());
        assertEquals(1, test.getBookmarks().size());
        assertEquals("foo", test.getBookmarks().get(0).getBookmarkName());
    }
}
@Test
public void testContentFail1() {
    // An unterminated tag ("{--test2" is never closed) must make the
    // constructor throw Merge500.
    try {
        new Content("{--", "--}", "{--test1--}{--test2", TagSegment.ENCODE_NONE);
        fail("Exception Expected");
    } catch (Merge500 e) {
        // expected - parse failure reported correctly
    }
}
@Test
public void testReplace1() throws Merge500 {
// Basic replace: every tag is substituted by its map value; the segment chain
// is circular through the Content itself (first.previous == last.next == content).
Content test = new Content("{","}","{test1}{test2}{test3}", TagSegment.ENCODE_NONE);
assertEquals("{test1}{test2}{test3}", test.getValue());
assertSame(test.getFirst().getPrevious(), test);
assertSame(test.getLast().getNext(), test);
assertEquals("{test1}", test.getFirst().getValue());
assertEquals("{test2}", test.getFirst().getNext().getValue());
assertEquals("{test3}", test.getLast().getValue());
HashMap<String,String> replace = new HashMap<String,String>();
replace.put("test1", "value1");
replace.put("test2", "value2");
replace.put("test3", "value3");
test.replace(replace, false, 3);
// After replace no tags remain and segments carry the substituted text.
assertEquals("value1value2value3", test.getValue());
assertEquals(0, test.getTags().size());
assertEquals("value1", test.getFirst().getValue());
assertEquals("value2", test.getFirst().getNext().getValue());
assertEquals("value3", test.getLast().getValue());
}
@Test
public void testReplace2() throws Merge500 {
// parseFirst: a replacement value containing tags is itself parsed and replaced.
Content test = new Content("{","}","here is {foo parseFirst} for us", Segment.ENCODE_NONE);
HashMap<String,String> replace = new HashMap<String,String>();
replace.put("foo", "text with {sub} tags");
replace.put("sub", "embeded");
test.replace(replace, true, 3);
assertEquals("here is text with embeded tags for us", test.getValue());
}
@Test
public void testReplace3() throws Merge500 {
// replace series
// A circular parseFirst chain (rock -> paper -> scissors -> rock) must be
// detected (the depth limit of 3 is exceeded) and reported as Merge500.
Content test = new Content("{","}","this is {rock parseFirst} test", Segment.ENCODE_NONE);
HashMap<String,String> replace = new HashMap<String,String>();
replace.put("rock", "{paper parseFirst}");
replace.put("paper", "{scissors parseFirst}");
replace.put("scissors", "{rock parseFirst}");
try {
test.replace(replace, true, 3);
} catch (Merge500 e) {
return;
}
fail("Exception Expected");
}
@Test
public void testReplace4() throws Merge500 {
// replace with parseFirst=true and looping replace
// A finite parseFirst chain (foo -> bar -> bam) resolves within the depth limit.
Content test = new Content("{","}","this is {foo parseFirst} test", Segment.ENCODE_NONE);
HashMap<String,String> replace = new HashMap<String,String>();
replace.put("foo", "{bar parseFirst}");
replace.put("bar", "bam");
test.replace(replace, true, 3);
assertEquals("this is bam test", test.getValue());
}
@Test
public void testReplace5() throws Merge500 {
    // Ten adjacent tags replaced in a single pass: each upper-case tag letter
    // maps to its lower-case value.
    Content test = new Content("<", ">", "<A><B><C><D><E><F><G><H><I><J>", Segment.ENCODE_NONE);
    HashMap<String, String> replace = new HashMap<String, String>();
    for (char tag = 'A'; tag <= 'J'; tag++) {
        replace.put(String.valueOf(tag), String.valueOf(Character.toLowerCase(tag)));
    }
    test.replace(replace, true, 3);
    assertEquals("abcdefghij", test.getValue());
}
@Test
public void testGetFirst() throws Merge500 {
// getFirst() returns the leading literal-text segment.
Content test = new Content("{","}","This is a {test} string with {multiple} tags", TagSegment.ENCODE_NONE);
assertEquals("This is a ", test.getFirst().getValue());
}
@Test
public void testGetLast() throws Merge500 {
// getLast() returns the trailing literal-text segment.
Content test = new Content("{","}","This is a {test} string", TagSegment.ENCODE_NONE);
assertEquals(" string", test.getLast().getValue());
}
@Test
public void testGetSource() throws Merge500 {
// getSource() returns the raw, unparsed input string.
Content test = new Content("{","}","This is a {test} string", TagSegment.ENCODE_NONE);
assertEquals("This is a {test} string", test.getSource());
}
@Test
public void testGetOpen() throws Merge500 {
// getOpen() returns the configured opening delimiter.
Content test = new Content("{","}","This is a {test} string", TagSegment.ENCODE_NONE);
assertEquals("{", test.getOpen());
}
@Test
public void testGetClose() throws Merge500 {
// getClose() returns the configured closing delimiter.
Content test = new Content("{","}","This is a {test} string", TagSegment.ENCODE_NONE);
assertEquals("}", test.getClose());
}
@Test
public void testGetTags() throws Merge500 {
// getTags() collects every tag segment found during parsing.
Content test = new Content("{","}","{test1}{test2}{test3}", TagSegment.ENCODE_NONE);
assertEquals(3, test.getTags().size());
}
@Test
public void testGetBookmarks() throws Merge500 {
// getBookmarks() collects every bookmark segment found during parsing.
Content test = new Content("{","}","{bookmark=\"foo\" group=\"grp\" template=\"temp\"}{bookmark=\"foo\" group=\"grp\" template=\"temp\"}{bookmark=\"foo\" group=\"grp\" template=\"temp\"}", TagSegment.ENCODE_NONE);
assertEquals(3, test.getBookmarks().size());
}
@Test
public void testGetBookmarks2() throws Merge500 {
// Bookmarks embedded inside a JSON-shaped template are still all found.
Content test = new Content("<",">","\t{\n\t\t\"company\" : \"<company>\",\n\t\t\"owners\" : \"<bookmark=\"owners\" group=\"test\" template=\"owner\">\",\n\t\t\"employees\" : \"<bookmark=\"employees\" group=\"test\" template=\"employee\">\",\n\t\t\"customers\" : \"<bookmark=\"customers\" group=\"test\" template=\"customer\">\",\n\t}", TagSegment.ENCODE_NONE);
assertEquals(3, test.getBookmarks().size());
}
@Test
public void testGetBookmarks3() throws Merge500 {
// A bookmark mixed with a tag and literal braces is found exactly once.
Content test = new Content("<",">","{owner=\"<idmuContext>\", <bookmark=\"owner\" group=\"test\" template=\"anOwner\">}<,>", TagSegment.ENCODE_NONE);
assertEquals(1, test.getBookmarks().size());
}
@Test
public void testRemoveBookmarks() throws Merge500 {
// removeBookmarks() drops every collected bookmark from the content.
Content test = new Content("{","}","{bookmark=\"foo\" group=\"grp\" template=\"temp\"}{bookmark=\"foo\" group=\"grp\" template=\"temp\"}{bookmark=\"foo\" group=\"grp\" template=\"temp\"}", TagSegment.ENCODE_NONE);
assertEquals(3, test.getBookmarks().size());
test.removeBookmarks();
assertEquals(0, test.getBookmarks().size());
}
@Test
public void testStremValue1() throws Merge500 {
    // streamValue() must write the rendered value (bookmark markup stripped)
    // to the output stream.
    String content = "This is a {test} string with {tag=\"multiple\"} tags and a {bookmark=\"foo\" group=\"grp\" template=\"bar\"} bookmark";
    String value = "This is a {test} string with {tag=\"multiple\"} tags and a bookmark";
    Content test = new Content("{","}",content, TagSegment.ENCODE_NONE);
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    test.streamValue(bos);
    // Bug fix: JUnit's assertEquals takes (expected, actual); the arguments
    // were reversed, which would yield a misleading message on failure.
    assertEquals(value, bos.toString());
}
}
| |
/*
* Copyright (C) 2018 Satomichi Nishihara
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package burai.app.project.editor.input.geom;
import java.util.List;
import javafx.scene.control.TableView;
import burai.atoms.element.ElementUtil;
import burai.input.card.QEAtomicPositions;
import burai.input.card.QECardEvent;
/**
 * Two-way binder between a JavaFX {@link TableView} of {@link AtomAnsatz} rows
 * and a QE ATOMIC_POSITIONS card ({@link QEAtomicPositions}). Edits made in the
 * table are pushed into the card via property listeners, and card events update
 * the table via a {@link QECardEvent} listener.
 */
public class AtomAnsatzBinder {

    /** Table showing one row per atom. */
    private TableView<AtomAnsatz> atomTable;

    /** Backing ATOMIC_POSITIONS card of the QE input. */
    private QEAtomicPositions atomicPositions;

    /**
     * @param atomTable table to bind, must not be null
     * @param atomicPositions card to bind, must not be null
     * @throws IllegalArgumentException if either argument is null
     */
    public AtomAnsatzBinder(TableView<AtomAnsatz> atomTable, QEAtomicPositions atomicPositions) {
        if (atomTable == null) {
            throw new IllegalArgumentException("atomTable is null.");
        }

        if (atomicPositions == null) {
            throw new IllegalArgumentException("atomicPositions is null.");
        }

        this.atomTable = atomTable;
        this.atomicPositions = atomicPositions;
    }

    /** Wires both directions: fills the table from the card and registers the card listener. */
    public void bindTable() {
        this.setupAtomTable();
        this.setupAtomicPositions();
    }

    /** Populates the table with one row per position currently held by the card. */
    private void setupAtomTable() {
        int numAtoms = this.atomicPositions.numPositions();
        for (int i = 0; i < numAtoms; i++) {
            AtomAnsatz atom = this.createAtom(i);
            if (atom != null) {
                this.atomTable.getItems().add(atom);
            }
        }
    }

    /**
     * Builds a table row for the atom at {@code index}.
     *
     * @return the new row, or null when the index is out of range or the card
     *         data for it is absent or malformed
     */
    private AtomAnsatz createAtom(int index) {
        if (index < 0 || this.atomicPositions.numPositions() <= index) {
            return null;
        }

        String label = this.atomicPositions.getLabel(index);
        if (label == null) {
            return null;
        }

        double[] position = this.atomicPositions.getPosition(index);
        if (position == null || position.length < 3) {
            return null;
        }

        boolean[] mobile = this.atomicPositions.getMobile(index);
        if (mobile == null || mobile.length < 3) {
            return null;
        }

        AtomAnsatz atom = new AtomAnsatz(index);
        atom.setElement(label);
        atom.setX(position[0], mobile[0]);
        atom.setY(position[1], mobile[1]);
        atom.setZ(position[2], mobile[2]);

        // Push table-side edits back into the card.
        atom.elementProperty().addListener(o -> this.actionOnElementChanged(atom));
        atom.xProperty().addListener(o -> this.actionOnXYZChanged(atom));
        atom.yProperty().addListener(o -> this.actionOnXYZChanged(atom));
        atom.zProperty().addListener(o -> this.actionOnXYZChanged(atom));

        return atom;
    }

    /** Writes an edited element symbol back into the card, normalized to an available name. */
    private void actionOnElementChanged(AtomAnsatz atom) {
        if (atom == null) {
            return;
        }

        int index = atom.getIndex();
        if (index < 0 || this.atomicPositions.numPositions() <= index) {
            return;
        }

        String element = atom.getElement();
        String label = ElementUtil.toAvailableName(element);
        this.atomicPositions.setLabel(index, label);
    }

    /** Writes edited coordinates/mobility flags back into the card. */
    private void actionOnXYZChanged(AtomAnsatz atom) {
        if (atom == null) {
            return;
        }

        int index = atom.getIndex();
        if (index < 0 || this.atomicPositions.numPositions() <= index) {
            return;
        }

        // Start from the card's current values so cells that fail to parse keep
        // their previous value (see createPosition).
        double[] position = this.atomicPositions.getPosition(index);
        boolean[] mobile = this.atomicPositions.getMobile(index);
        this.createPosition(atom, position, mobile);

        this.atomicPositions.setPosition(index, position);
        this.atomicPositions.setMobile(index, mobile);
    }

    /**
     * Copies the atom's x/y/z values and mobility flags into the given arrays.
     * Each component is read independently; a RuntimeException from a getter
     * (presumably when the cell text cannot be parsed - TODO confirm) leaves
     * that component of the array unchanged.
     */
    private void createPosition(AtomAnsatz atom, double[] position, boolean[] mobile) {
        if (atom == null) {
            return;
        }

        if (position == null || position.length < 3) {
            return;
        }

        if (mobile == null || mobile.length < 3) {
            return;
        }

        try {
            position[0] = atom.getXValue();
        } catch (RuntimeException e) {
            // NOP: keep previous x
        }

        try {
            position[1] = atom.getYValue();
        } catch (RuntimeException e) {
            // NOP: keep previous y
        }

        try {
            position[2] = atom.getZValue();
        } catch (RuntimeException e) {
            // NOP: keep previous z
        }

        try {
            mobile[0] = atom.isXMobile();
        } catch (RuntimeException e) {
            // NOP: keep previous x-mobility
        }

        try {
            mobile[1] = atom.isYMobile();
        } catch (RuntimeException e) {
            // NOP: keep previous y-mobility
        }

        try {
            mobile[2] = atom.isZMobile();
        } catch (RuntimeException e) {
            // NOP: keep previous z-mobility
        }
    }

    /** Registers the card listener that mirrors card events into the table. */
    private void setupAtomicPositions() {
        this.atomicPositions.addListener(event -> {
            if (event == null) {
                return;
            }

            int eventType = event.getEventType();
            int index = event.getAtomIndex();

            if (eventType == QECardEvent.EVENT_TYPE_ATOM_CHANGED) {
                this.actionOnAtomChanged(index);
            } else if (eventType == QECardEvent.EVENT_TYPE_ATOM_MOVED) {
                this.actionOnAtomChanged(index);
            } else if (eventType == QECardEvent.EVENT_TYPE_ATOM_ADDED) {
                this.actionOnAtomAdded(index);
            } else if (eventType == QECardEvent.EVENT_TYPE_ATOM_REMOVED) {
                this.actionOnAtomRemoved(index);
            } else if (eventType == QECardEvent.EVENT_TYPE_ATOM_CLEARED) {
                this.actionOnAtomsCleared();
            } else {
                // Unknown event: rebuild the whole table from the card.
                this.actionForAllAtoms();
            }
        });
    }

    /** @return the table row whose index equals {@code index}, or null if none. */
    private AtomAnsatz pickOutAtom(int index) {
        List<AtomAnsatz> atoms = this.atomTable.getItems();
        if (atoms == null) {
            return null;
        }

        for (AtomAnsatz atom : atoms) {
            if (atom == null) {
                continue;
            }
            if (index == atom.getIndex()) {
                return atom;
            }
        }

        return null;
    }

    /** Refreshes the table row at {@code index} from the card's current data. */
    private void actionOnAtomChanged(int index) {
        String label = this.atomicPositions.getLabel(index);
        if (label == null) {
            return;
        }

        double[] position = this.atomicPositions.getPosition(index);
        if (position == null || position.length < 3) {
            return;
        }

        boolean[] mobile = this.atomicPositions.getMobile(index);
        if (mobile == null || mobile.length < 3) {
            return;
        }

        AtomAnsatz atom = this.pickOutAtom(index);
        if (atom == null) {
            return;
        }

        atom.setElement(label);
        atom.setX(position[0], mobile[0]);
        atom.setY(position[1], mobile[1]);
        atom.setZ(position[2], mobile[2]);
    }

    /** Adds a table row for a newly added card position. */
    private void actionOnAtomAdded(int index) {
        AtomAnsatz atom = this.createAtom(index);
        if (atom != null) {
            this.atomTable.getItems().add(atom);
        }
    }

    /** Removes the matching table row and re-numbers the rows that followed it. */
    private void actionOnAtomRemoved(int index) {
        AtomAnsatz removedAtom = this.pickOutAtom(index);
        if (removedAtom == null) {
            return;
        }

        this.atomTable.getItems().remove(removedAtom);

        // Shift the indexes of the remaining rows down so they stay in sync with the card.
        for (AtomAnsatz atom : this.atomTable.getItems()) {
            if (atom == null) {
                // Fix: guard against null rows, consistently with pickOutAtom,
                // which treats null items in this same collection as possible.
                continue;
            }
            if (atom.getIndex() >= index) {
                atom.setIndex(atom.getIndex() - 1);
            }
        }
    }

    /** Clears every row from the table. */
    private void actionOnAtomsCleared() {
        this.atomTable.getItems().clear();
    }

    /** Rebuilds the whole table from scratch. */
    private void actionForAllAtoms() {
        this.atomTable.getItems().clear();
        this.setupAtomTable();
    }

    /**
     * Appends a new position to the card, initialized from the given row's
     * coordinates (defaulting to the origin, fully mobile, where unparsable).
     */
    public void addAtom(AtomAnsatz atom) {
        if (atom == null) {
            return;
        }

        String label = atom.getElement();
        double[] position = { 0.0, 0.0, 0.0 };
        boolean[] mobile = { true, true, true };
        this.createPosition(atom, position, mobile);

        this.atomicPositions.addPosition(label, position, mobile);
    }

    /** Deletes the card position corresponding to the given row. */
    public void removeAtom(AtomAnsatz atom) {
        if (atom == null) {
            return;
        }

        this.atomicPositions.removePosition(atom.getIndex());
    }
}
| |
/*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.api.tasks.diagnostics.internal.insight;
import org.gradle.api.artifacts.component.*;
import org.gradle.api.internal.artifacts.ivyservice.ivyresolve.strategy.VersionMatcher;
import org.gradle.api.tasks.diagnostics.internal.graph.nodes.DependencyEdge;
import org.gradle.util.CollectionUtils;
import java.util.Collection;
import java.util.Comparator;
/**
* Created: 17/08/2012
*/
public class DependencyResultSorter {
/**
 * sorts by group:name:version mostly.
 * If requested matches selected then it will override the version comparison
 * so that the dependency that was selected is more prominent.
 */
public static Collection<DependencyEdge> sort(Collection<DependencyEdge> input, VersionMatcher versionMatcher) {
return CollectionUtils.sort(input, new DependencyComparator(versionMatcher));
}
// Comparator over dependency edges. Ordering: project selectors sort before
// module selectors; two projects compare by project path; two modules compare
// by group, module, requested-matches-selected, dynamic-vs-static, version,
// and finally by the identifier of the depending ("from") component.
private static class DependencyComparator implements Comparator<DependencyEdge> {
private final VersionMatcher matcher;
private DependencyComparator(VersionMatcher matcher) {
this.matcher = matcher;
}
public int compare(DependencyEdge left, DependencyEdge right) {
// Both edges must carry a Project or Module component selector.
checkRequestedComponentSelectorType(left);
checkRequestedComponentSelectorType(right);
if(isLeftProjectButRightIsModuleComponentSelector(left, right)) {
return -1;
}
if(isLeftModuleButRightIsProjectComponentSelector(left, right)) {
return 1;
}
if(isLeftAndRightProjectComponentSelector(left, right)) {
return compareRequestedProjectComponentSelectors(left, right);
}
if(isLeftAndRightModuleComponentSelector(left, right)) {
return compareModuleComponentSelectors(left, right);
}
return 0;
}
// Rejects null edges and selector types other than Project/Module.
private void checkRequestedComponentSelectorType(DependencyEdge dependencyEdge) {
if(dependencyEdge == null || dependencyEdge.getRequested() == null) {
throw new IllegalArgumentException("Dependency edge or the requested component selector may not be null");
}
ComponentSelector requested = dependencyEdge.getRequested();
if(!isExpectedComponentSelector(requested)) {
throw new IllegalArgumentException("Unexpected component selector type for dependency edge: " + requested.getClass());
}
}
private boolean isExpectedComponentSelector(ComponentSelector componentSelector) {
return componentSelector instanceof ProjectComponentSelector || componentSelector instanceof ModuleComponentSelector;
}
private boolean isLeftProjectButRightIsModuleComponentSelector(DependencyEdge left, DependencyEdge right) {
return left.getRequested() instanceof ProjectComponentSelector && right.getRequested() instanceof ModuleComponentSelector;
}
private boolean isLeftModuleButRightIsProjectComponentSelector(DependencyEdge left, DependencyEdge right) {
return left.getRequested() instanceof ModuleComponentSelector && right.getRequested() instanceof ProjectComponentSelector;
}
private boolean isLeftAndRightProjectComponentSelector(DependencyEdge left, DependencyEdge right) {
return left.getRequested() instanceof ProjectComponentSelector && right.getRequested() instanceof ProjectComponentSelector;
}
private boolean isLeftAndRightModuleComponentSelector(DependencyEdge left, DependencyEdge right) {
return left.getRequested() instanceof ModuleComponentSelector && right.getRequested() instanceof ModuleComponentSelector;
}
// group, then module, then the override/dynamic/version/from rules below.
private int compareModuleComponentSelectors(DependencyEdge left, DependencyEdge right) {
ModuleComponentSelector leftRequested = (ModuleComponentSelector)left.getRequested();
ModuleComponentSelector rightRequested = (ModuleComponentSelector)right.getRequested();
int byGroup = leftRequested.getGroup().compareTo(rightRequested.getGroup());
if (byGroup != 0) {
return byGroup;
}
int byModule = leftRequested.getModule().compareTo(rightRequested.getModule());
if (byModule != 0) {
return byModule;
}
//if selected matches requested version comparison is overridden
boolean leftMatches = leftRequested.matchesStrictly(left.getActual());
boolean rightMatches = rightRequested.matchesStrictly(right.getActual());
if (leftMatches && !rightMatches) {
return -1;
} else if (!leftMatches && rightMatches) {
return 1;
}
//order dynamic selectors after static selectors
boolean leftDynamic = matcher.isDynamic(leftRequested.getVersion());
boolean rightDynamic = matcher.isDynamic(rightRequested.getVersion());
if (leftDynamic && !rightDynamic) {
return 1;
} else if (!leftDynamic && rightDynamic) {
return -1;
}
int byVersion;
if (leftDynamic && rightDynamic) {
// order dynamic selectors lexicographically
byVersion = leftRequested.getVersion().compareTo(rightRequested.getVersion());
} else {
// order static selectors semantically
byVersion = matcher.compare(leftRequested.getVersion(), rightRequested.getVersion());
}
if (byVersion != 0) {
return byVersion;
}
// final tie-break: which component the dependency comes from
return compareFromComponentIdentifiers(left.getFrom(), right.getFrom());
}
private int compareRequestedProjectComponentSelectors(DependencyEdge left, DependencyEdge right) {
ProjectComponentSelector leftRequested = (ProjectComponentSelector)left.getRequested();
ProjectComponentSelector rightRequested = (ProjectComponentSelector)right.getRequested();
return leftRequested.getProjectPath().compareTo(rightRequested.getProjectPath());
}
// Same project-before-module ordering applied to the "from" identifiers.
public int compareFromComponentIdentifiers(ComponentIdentifier left, ComponentIdentifier right) {
if(isLeftAndRightFromProjectComponentIdentifier(left, right)) {
return compareFromProjectComponentIdentifiers(left, right);
}
if(isLeftAndRightFromModuleComponentIdentifier(left, right)) {
return compareFromModuleComponentIdentifiers(left, right);
}
return isLeftFromProjectButRightIsModuleComponentIdentifier(left, right) ? -1 : 1;
}
private int compareFromProjectComponentIdentifiers(ComponentIdentifier left, ComponentIdentifier right) {
ProjectComponentIdentifier leftFrom = (ProjectComponentIdentifier)left;
ProjectComponentIdentifier rightFrom = (ProjectComponentIdentifier)right;
return leftFrom.getProjectPath().compareTo(rightFrom.getProjectPath());
}
private int compareFromModuleComponentIdentifiers(ComponentIdentifier left, ComponentIdentifier right) {
ModuleComponentIdentifier leftFrom = (ModuleComponentIdentifier)left;
ModuleComponentIdentifier rightFrom = (ModuleComponentIdentifier)right;
int byGroup = leftFrom.getGroup().compareTo(rightFrom.getGroup());
if (byGroup != 0) {
return byGroup;
}
int byModule = leftFrom.getModule().compareTo(rightFrom.getModule());
if (byModule != 0) {
return byModule;
}
return matcher.compare(leftFrom.getVersion(), rightFrom.getVersion());
}
private boolean isLeftAndRightFromProjectComponentIdentifier(ComponentIdentifier left, ComponentIdentifier right) {
return left instanceof ProjectComponentIdentifier && right instanceof ProjectComponentIdentifier;
}
private boolean isLeftAndRightFromModuleComponentIdentifier(ComponentIdentifier left, ComponentIdentifier right) {
return left instanceof ModuleComponentIdentifier && right instanceof ModuleComponentIdentifier;
}
private boolean isLeftFromProjectButRightIsModuleComponentIdentifier(ComponentIdentifier left, ComponentIdentifier right) {
return left instanceof ProjectComponentIdentifier && right instanceof ModuleComponentIdentifier;
}
}
}
| |
package liquibase.snapshot.jvm;
import liquibase.database.AbstractJdbcDatabase;
import liquibase.database.Database;
import liquibase.database.core.*;
import liquibase.diff.compare.DatabaseObjectComparatorFactory;
import liquibase.exception.DatabaseException;
import liquibase.snapshot.CachedRow;
import liquibase.snapshot.DatabaseSnapshot;
import liquibase.snapshot.InvalidExampleException;
import liquibase.snapshot.JdbcDatabaseSnapshot;
import liquibase.structure.DatabaseObject;
import liquibase.structure.core.*;
import liquibase.util.StringUtil;
import java.sql.DatabaseMetaData;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Analyses the properties of a database index and creates an object representation ("snapshot").
*/
public class IndexSnapshotGenerator extends JdbcSnapshotGenerator {
// Snapshots Index objects; indexes may be attached to tables, views, and
// (as backing indexes) to foreign keys and unique constraints.
public IndexSnapshotGenerator() {
super(Index.class, new Class[]{Table.class, View.class, ForeignKey.class, UniqueConstraint.class});
}
@Override
protected void addTo(DatabaseObject foundObject, DatabaseSnapshot snapshot) throws DatabaseException, InvalidExampleException {
if (!snapshot.getSnapshotControl().shouldInclude(Index.class)) {
return;
}
if (foundObject instanceof Table || foundObject instanceof View) {
if (foundObject instanceof View && !addToViews(snapshot.getDatabase())) {
return;
}
Relation relation = (Relation) foundObject;
Database database = snapshot.getDatabase();
Schema schema;
schema = relation.getSchema();
List<CachedRow> rs = null;
JdbcDatabaseSnapshot.CachingDatabaseMetaData databaseMetaData = null;
try {
databaseMetaData = ((JdbcDatabaseSnapshot) snapshot).getMetaDataFromCache();
rs = databaseMetaData.getIndexInfo(((AbstractJdbcDatabase) database).getJdbcCatalogName(schema), ((AbstractJdbcDatabase) database).getJdbcSchemaName(schema), relation.getName(), null);
Map<String, Index> foundIndexes = new HashMap<>();
for (CachedRow row : rs) {
String indexName = row.getString("INDEX_NAME");
if (indexName == null) {
continue;
}
if ((database instanceof AbstractDb2Database) && "SYSIBM".equals(row.getString("INDEX_QUALIFIER"))) {
continue;
}
Index index = foundIndexes.get(indexName);
if (index == null) {
index = new Index();
index.setName(indexName);
index.setRelation(relation);
short type = row.getShort("TYPE");
if (type == DatabaseMetaData.tableIndexClustered) {
index.setClustered(true);
} else if (database instanceof MSSQLDatabase) {
index.setClustered(false);
}
foundIndexes.put(indexName, index);
}
String ascOrDesc;
if (database instanceof Db2zDatabase) {
ascOrDesc = row.getString("ORDER");
} else {
ascOrDesc = row.getString("ASC_OR_DESC");
}
Boolean descending = "D".equals(ascOrDesc) ? Boolean.TRUE : ("A".equals(ascOrDesc) ? Boolean
.FALSE : null);
index.addColumn(new Column(row.getString("COLUMN_NAME")).setComputed(false).setDescending(descending).setRelation(index.getRelation()));
}
//add clustered indexes first, than all others in case there is a clustered and non-clustered version of the same index. Prefer the clustered version
List<Index> stillToAdd = new ArrayList<>();
for (Index exampleIndex : foundIndexes.values()) {
if ((exampleIndex.getClustered() != null) && exampleIndex.getClustered()) {
relation.getIndexes().add(exampleIndex);
} else {
stillToAdd.add(exampleIndex);
}
}
for (Index exampleIndex : stillToAdd) {
boolean alreadyAddedSimilar = false;
for (Index index : relation.getIndexes()) {
if (DatabaseObjectComparatorFactory.getInstance().isSameObject(index, exampleIndex, null, database)) {
alreadyAddedSimilar = true;
}
}
if (!alreadyAddedSimilar) {
relation.getIndexes().add(exampleIndex);
}
}
} catch (Exception e) {
throw new DatabaseException(e);
}
}
if ((foundObject instanceof UniqueConstraint) && (((UniqueConstraint) foundObject).getBackingIndex() == null)
&& !(snapshot.getDatabase() instanceof DB2Database) && !(snapshot.getDatabase() instanceof DerbyDatabase)) {
Index exampleIndex = new Index().setRelation(((UniqueConstraint) foundObject).getRelation());
exampleIndex.getColumns().addAll(((UniqueConstraint) foundObject).getColumns());
((UniqueConstraint) foundObject).setBackingIndex(exampleIndex);
}
if ((foundObject instanceof ForeignKey) && (((ForeignKey) foundObject).getBackingIndex() == null)) {
Index exampleIndex = new Index().setRelation(((ForeignKey) foundObject).getForeignKeyTable());
exampleIndex.getColumns().addAll(((ForeignKey) foundObject).getForeignKeyColumns());
((ForeignKey) foundObject).setBackingIndex(exampleIndex);
}
}
@Override
protected DatabaseObject snapshotObject(DatabaseObject example, DatabaseSnapshot snapshot) throws DatabaseException, InvalidExampleException {
Database database = snapshot.getDatabase();
Relation exampleIndex = ((Index) example).getRelation();
String tableName = null;
Schema schema = null;
if (exampleIndex != null) {
tableName = exampleIndex.getName();
schema = exampleIndex.getSchema();
}
if (schema == null) {
schema = new Schema(database.getDefaultCatalogName(), database.getDefaultSchemaName());
}
for (int i = 0; i < ((Index) example).getColumns().size(); i++) {
((Index) example).getColumns().set(i, ((Index) example).getColumns().get(i));
}
String exampleName = example.getName();
if (exampleName != null) {
exampleName = database.correctObjectName(exampleName, Index.class);
}
Map<String, Index> foundIndexes = new HashMap<>();
JdbcDatabaseSnapshot.CachingDatabaseMetaData databaseMetaData = null;
List<CachedRow> rs = null;
try {
databaseMetaData = ((JdbcDatabaseSnapshot) snapshot).getMetaDataFromCache();
rs = databaseMetaData.getIndexInfo(((AbstractJdbcDatabase) database).getJdbcCatalogName(schema), ((AbstractJdbcDatabase) database).getJdbcSchemaName(schema), tableName, exampleName);
for (CachedRow row : rs) {
String rawIndexName = row.getString("INDEX_NAME");
String indexName = cleanNameFromDatabase(rawIndexName, database);
String correctedIndexName = database.correctObjectName(indexName, Index.class);
if (indexName == null) {
continue;
}
if ((exampleName != null) && !exampleName.equals(correctedIndexName)) {
continue;
}
/*
* TODO Informix generates indexnames with a leading blank if no name given.
* An identifier with a leading blank is not allowed.
* So here is it replaced.
*/
if ((database instanceof InformixDatabase) && indexName.startsWith(" ")) {
continue; // suppress creation of generated_index records
}
short type = row.getShort("TYPE");
Boolean nonUnique = row.getBoolean("NON_UNIQUE");
if (nonUnique == null) {
nonUnique = true;
}
String columnName = cleanNameFromDatabase(row.getString("COLUMN_NAME"), database);
short position = row.getShort("ORDINAL_POSITION");
String definition = StringUtil.trimToNull(row.getString("FILTER_CONDITION"));
if (definition != null) {
if (!(database instanceof OracleDatabase)) { //TODO: this replaceAll code has been there for a long time but we don't know why. Investigate when it is ever needed and modify it to be smarter
definition = definition.replaceAll("\"", "");
}
}
if ((columnName == null) && (definition == null)) {
//nothing to index, not sure why these come through sometimes
continue;
}
if (type == DatabaseMetaData.tableIndexStatistic) {
continue;
}
/*
* In Oracle database, ALL_IND_COLUMNS/ALL_IND_EXPRESSIONS (the views from which we bulk-fetch the
* column definitions for a given index) can show a strange behaviour if an index column consists of
* a regular table column, but its sort order is DESC(ending). In this case, we get something like
* this (example values):
* ALL_IND_COLUMNS.COLUMN_NAME=SYS_NC00006$
* ALL_IND_EXPRESSIONS.COLUMN_EXPRESSIONS="COLUMN1FORDESC"
* Note that the quote characters (") are part of the actual column value!
* Our strategy here is: If the expression would be a valid Oracle identifier, but not a valid Oracle
* function name, then we assume it is the name of a regular column.
*/
if ((database instanceof OracleDatabase) && (definition != null) && (columnName != null)) {
String potentialColumnExpression = definition.replaceFirst("^\"?(.*?)\"?$", "$1");
OracleDatabase oracle = (OracleDatabase) database;
if (oracle.isValidOracleIdentifier(potentialColumnExpression, Index.class)
&& (!oracle.isFunction(potentialColumnExpression))) {
columnName = potentialColumnExpression;
definition = null;
}
}
// Have we already seen/found this index? If not, let's read its properties!
Index returnIndex = foundIndexes.get(correctedIndexName);
if (returnIndex == null) {
returnIndex = new Index();
Relation relation = new Table();
if ("V".equals(row.getString("INTERNAL_OBJECT_TYPE"))) {
relation = new View();
}
returnIndex.setRelation(relation.setName(row.getString("TABLE_NAME")).setSchema(schema));
returnIndex.setName(indexName);
returnIndex.setUnique(!nonUnique);
String tablespaceName = row.getString("TABLESPACE_NAME");
if ((tablespaceName != null) && database.supportsTablespaces()) {
returnIndex.setTablespace(tablespaceName);
}
if (type == DatabaseMetaData.tableIndexClustered) {
returnIndex.setClustered(true);
} else if (database instanceof MSSQLDatabase) {
returnIndex.setClustered(false);
}
if (database instanceof MSSQLDatabase) {
Boolean recompute = (Boolean) row.get("NO_RECOMPUTE");
if (recompute != null) {
recompute = !recompute;
}
returnIndex.setAttribute("padIndex", row.get("IS_PADDED"));
returnIndex.setAttribute("fillFactor", row.get("FILL_FACTOR"));
returnIndex.setAttribute("ignoreDuplicateKeys", row.get("IGNORE_DUP_KEY"));
returnIndex.setAttribute("recomputeStatistics", recompute);
returnIndex.setAttribute("incrementalStatistics", row.get("IS_INCREMENTAL"));
returnIndex.setAttribute("allowRowLocks", row.get("ALLOW_ROW_LOCKS"));
returnIndex.setAttribute("allowPageLocks", row.get("ALLOW_PAGE_LOCKS"));
}
foundIndexes.put(correctedIndexName, returnIndex);
}
if ((database instanceof MSSQLDatabase) && (Boolean) row.get("IS_INCLUDED_COLUMN")) {
List<String> includedColumns = returnIndex.getAttribute("includedColumns", List.class);
if (includedColumns == null) {
includedColumns = new ArrayList<>();
returnIndex.setAttribute("includedColumns", includedColumns);
}
includedColumns.add(columnName);
} else {
if (position != 0) { //if really a column, position is 1-based.
for (int i = returnIndex.getColumns().size(); i < position; i++) {
returnIndex.getColumns().add(null);
}
// Is this column a simple column (definition == null)
// or is it a computed expression (definition != null)
if (definition == null) {
String ascOrDesc;
if (database instanceof Db2zDatabase) {
ascOrDesc = row.getString("ORDER");
} else {
ascOrDesc = row.getString("ASC_OR_DESC");
}
Boolean descending = "D".equals(ascOrDesc) ? Boolean.TRUE : ("A".equals(ascOrDesc) ?
Boolean.FALSE : null);
returnIndex.getColumns().set(position - 1, new Column(columnName)
.setDescending(descending).setRelation(returnIndex.getRelation()));
} else {
returnIndex.getColumns().set(position - 1, new Column()
.setRelation(returnIndex.getRelation()).setName(definition, true));
}
}
}
}
} catch (Exception e) {
throw new DatabaseException(e);
}
if (exampleName != null) {
Index index = foundIndexes.get(exampleName);
return index;
} else {
//prefer clustered version of the index
List<Index> nonClusteredIndexes = new ArrayList<>();
for (Index index : foundIndexes.values()) {
if (DatabaseObjectComparatorFactory.getInstance().isSameObject(index.getRelation(), exampleIndex, snapshot.getSchemaComparisons(), database)) {
boolean actuallyMatches = false;
if (database.isCaseSensitive()) {
if (index.getColumnNames().equals(((Index) example).getColumnNames())) {
actuallyMatches = true;
}
} else {
if (index.getColumnNames().equalsIgnoreCase(((Index) example).getColumnNames())) {
actuallyMatches = true;
}
}
if (actuallyMatches) {
if ((index.getClustered() != null) && index.getClustered()) {
return finalizeIndex(schema, tableName, index, snapshot);
} else {
nonClusteredIndexes.add(index);
}
}
}
}
if (!nonClusteredIndexes.isEmpty()) {
return finalizeIndex(schema, tableName, nonClusteredIndexes.get(0), snapshot);
}
return null;
}
}
/**
 * Post-processes a snapshotted index before returning it.
 * <p>
 * If the index is not already marked unique, checks whether its column list
 * exactly matches the table's primary key in the snapshot; if so, the index
 * is implicitly unique and is marked as such.
 *
 * @param schema    schema the index belongs to
 * @param tableName name of the table the index is on
 * @param index     the index to finalize (mutated in place)
 * @param snapshot  snapshot used to look up the table's primary key
 * @return the same {@code index} instance, possibly with unique=true set
 */
protected Index finalizeIndex(Schema schema, String tableName, Index index, DatabaseSnapshot snapshot) {
    if ((index.isUnique() == null) || !index.isUnique()) {
        List<Column> columns = index.getColumns();
        // Idiomatic zero-length array form; avoids re-reading the column list size.
        PrimaryKey tablePK = new PrimaryKey(null, schema.getCatalogName(), schema.getName(), tableName,
                columns.toArray(new Column[0]));
        if (snapshot.get(tablePK) != null) { //actually is unique since it's the PK
            index.setUnique(true);
        }
    }
    return index;
}
/**
 * Whether index snapshotting should also consider views for the given database.
 * In this generator only MSSQL qualifies (it supports indexed views).
 *
 * @param database the database being snapshotted
 * @return true only for {@code MSSQLDatabase}
 */
protected boolean addToViews(Database database) {
    return database instanceof MSSQLDatabase;
}
}
| |
package com.crawljax.plugins.testilizer.generated.wolfcms_RAND;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import org.junit.*;
import static org.junit.Assert.*;
import org.openqa.selenium.*;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.firefox.FirefoxProfile;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.NodeList;
import com.crawljax.forms.RandomInputValueGenerator;
import com.crawljax.util.DomUtils;
/*
* Generated @ Sun Apr 13 22:36:39 PDT 2014
*/
public class GeneratedTestCase0 {
private WebDriver driver;                         // Selenium driver for the session under test; created in setUp()
private String url;                               // entry URL of the app under test; set in setUp()
private boolean acceptNextAlert = true;           // NOTE(review): never read in the visible code — generator boilerplate
private StringBuffer verificationErrors = new StringBuffer(); // soft-assertion buffer; failures reported in tearDown()
private DOMElement element;                       // scratch holder: element under test in pattern assertions
private DOMElement parentElement;                 // scratch holder: expected parent in pattern assertions
private ArrayList<DOMElement> childrenElements = new ArrayList<DOMElement>(); // scratch holder: expected children
private String DOM = null;                        // NOTE(review): unused in the visible portion of this class
boolean getCoverageReport = false;                // JS code-coverage switch; initialized in setUp()
@Before
public void setUp() throws Exception {
    // The JS code-coverage switch decides whether the browser must be
    // started with the proxying profile (see getProfile()).
    getCoverageReport = com.crawljax.plugins.testilizer.Testilizer.getCoverageReport();
    driver = getCoverageReport ? new FirefoxDriver(getProfile()) : new FirefoxDriver();
    url = "http://localhost:8888/wolfcms/?/admin/";
    driver.manage().timeouts().implicitlyWait(30, TimeUnit.SECONDS);
}
/**
 * Builds a Firefox profile that routes all HTTP traffic — including
 * localhost — through a local proxy on port 3128 (presumably the JS
 * coverage proxy; see tearDown()'s jscoverage_report call).
 */
public static FirefoxProfile getProfile() {
    final FirefoxProfile proxiedProfile = new FirefoxProfile();
    proxiedProfile.setPreference("network.proxy.type", 1);
    proxiedProfile.setPreference("network.proxy.http", "localhost");
    proxiedProfile.setPreference("network.proxy.http_port", 3128);
    /* use proxy for everything, including localhost */
    proxiedProfile.setPreference("network.proxy.no_proxies_on", "");
    return proxiedProfile;
}
@After
public void tearDown() throws Exception {
    // Dump the JS coverage report (if enabled) before shutting the browser down.
    if (getCoverageReport)
        ((JavascriptExecutor) driver).executeScript(" if (window.jscoverage_report) {return jscoverage_report('CodeCoverageReport');}");
    driver.quit();
    // Any soft verification errors collected during the test fail it here.
    final String collectedErrors = verificationErrors.toString();
    if (!collectedErrors.isEmpty()) {
        fail(collectedErrors);
    }
}
/*
* Test Cases
*/
/**
 * Generated crawl-path replay: index (state 0) -> state 2 (login form) -> state 25.
 * Before each transition the DOM is mutated (mutateDOMTree) and every
 * assertion suite for the current state is executed.
 * NOTE(review): mutateDOMTree/checkState* helpers are not visible in this
 * chunk — presumably defined later in this class or a superclass; confirm
 * before refactoring.
 */
@Test
public void method0(){
    driver.get(url);
    //From state 0 to state 2
    //Eventable{eventType=click, identification=xpath /HTML[1]/BODY[1]/P[1]/A[1], element=Element{node=[A: null], tag=A, text=Wolf CMS, attributes={href=http://localhost:8888/wolfcms/}}, source=StateVertexImpl{id=0, name=index}, target=StateVertexImpl{id=2, name=state2}}
    mutateDOMTree(0);
    checkState0_OriginalAssertions();
    checkState0_ReusedAssertions();
    checkState0_GeneratedAssertions();
    checkState0_LearnedAssertions();
    checkState0_AllAssertions();
    checkState0_RandAssertions1();
    checkState0_RandAssertions2();
    checkState0_RandAssertions3();
    checkState0_RandAssertions4();
    checkState0_RandAssertions5();
    // Fill in the admin credentials, then follow the link to transition.
    driver.findElement(By.id("login-username")).clear();
    driver.findElement(By.id("login-username")).sendKeys("admin");
    driver.findElement(By.id("login-password")).clear();
    driver.findElement(By.id("login-password")).sendKeys("admin");
    driver.findElement(By.xpath("/HTML[1]/BODY[1]/P[1]/A[1]")).click();
    //From state 2 to state 25
    //Eventable{eventType=click, identification=xpath /HTML[1]/BODY[1]/DIV[2]/UL[1]/LI[3]/A[1], element=Element{node=[A: null], tag=A, text=RND-GjoR, attributes={class=current, href=http://localhost:8888/wolfcms/?}}, source=StateVertexImpl{id=2, name=state2}, target=StateVertexImpl{id=25, name=state25}}
    mutateDOMTree(2);
    checkState2_OriginalAssertions();
    checkState2_ReusedAssertions();
    checkState2_GeneratedAssertions();
    checkState2_LearnedAssertions();
    checkState2_AllAssertions();
    checkState2_RandAssertions1();
    checkState2_RandAssertions2();
    checkState2_RandAssertions3();
    checkState2_RandAssertions4();
    checkState2_RandAssertions5();
    driver.findElement(By.xpath("/HTML[1]/BODY[1]/DIV[2]/UL[1]/LI[3]/A[1]")).click();
    //Sink node at state 25
    mutateDOMTree(25);
    checkState25_OriginalAssertions();
    checkState25_ReusedAssertions();
    checkState25_GeneratedAssertions();
    checkState25_LearnedAssertions();
    checkState25_AllAssertions();
    checkState25_RandAssertions1();
    checkState25_RandAssertions2();
    checkState25_RandAssertions3();
    checkState25_RandAssertions4();
    checkState25_RandAssertions5();
}
// Intentionally empty: the generator produced no "original" assertions for state 0.
public void checkState0_OriginalAssertions(){
}
// Intentionally empty: the generator produced no "reused" assertions for state 0.
public void checkState0_ReusedAssertions(){
}
// Intentionally empty: the generator produced no "generated" assertions for state 0.
public void checkState0_GeneratedAssertions(){
}
// Intentionally empty: the generator produced no "learned" assertions for state 0.
public void checkState0_LearnedAssertions(){
}
// Intentionally empty: no combined assertion set was generated for state 0.
public void checkState0_AllAssertions(){
}
/**
 * Machine-generated random assertions for state 0 (sample #1): XPath presence
 * checks plus parent/element/children DOM pattern checks on the login page.
 * NOTE(review): some attribute lists appear shifted onto sibling #text nodes
 * (e.g. the H1 child carrying form attributes) — a generator artifact, left as-is.
 */
public void checkState0_RandAssertions1(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/SCRIPT"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV/FORM/DIV[5]/INPUT"))); // Random element assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"dialog\"")));
    parentElement = new DOMElement("BODY", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("H1", "", new ArrayList<String>(Arrays.asList("action=\"http://localhost:8888/wolfcms/?/admin/login/login\"","method=\"post\""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("H1", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"dialog\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList("for=\"login-username\"")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"login-username-div\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/**
 * Machine-generated random assertions for state 0 (sample #2).
 * Same pattern as sample #1; attribute lists on #text nodes are generator noise.
 */
public void checkState0_RandAssertions2(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV/FORM/DIV[4]/INPUT"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/HEAD/SCRIPT[2]"))); // Random element assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("class=\"clean\"")));
    parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"http://localhost:8888/wolfcms/?/admin/login/login\"","method=\"post\"")));
    childrenElements.clear();
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("P", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("BODY", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/\""))));
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"login-password-div\"")));
    parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"http://localhost:8888/wolfcms/?/admin/login/login\"","method=\"post\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("for=\"login-password\""))));
    childrenElements.add(new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList("class=\"medium\"","id=\"login-password\"","name=\"login[password]\"","type=\"password\"","value=\"\""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/** Machine-generated random assertions for state 0 (sample #3). */
public void checkState0_RandAssertions3(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV/FORM/DIV[4]/LABEL"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV/FORM/DIV[5]/SPAN/A"))); // Random element assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/\"")));
    parentElement = new DOMElement("P", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList("for=\"login-username\"")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"login-username-div\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"login_submit\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?/admin/login/forgot\""))));
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/** Machine-generated random assertions for state 0 (sample #4). */
public void checkState0_RandAssertions4(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/P/A"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/HEAD/SCRIPT"))); // Random element assertion
    element = new DOMElement("SCRIPT", "", new ArrayList<String>(Arrays.asList("type=\"text/javascript\"")));
    parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("style=\"margin-top: 6px\"")));
    parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"http://localhost:8888/wolfcms/?/admin/login/login\"","method=\"post\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"checkbox\"","id=\"login-remember-me\"","name=\"login[remember]\"","type=\"checkbox\"","value=\"checked\""))));
    childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("id=\"login-redirect\"","name=\"login[redirect]\"","type=\"hidden\"","value=\"/wolfcms/?/admin/\""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"checkbox\"","for=\"login-remember-me\""))));
    childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList("class=\"checkbox\"","for=\"login-remember-me\"")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("style=\"margin-top: 6px\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/** Machine-generated random assertions for state 0 (sample #5). */
public void checkState0_RandAssertions5(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV/H1"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV"))); // Random element assertion
    element = new DOMElement("TITLE", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"login_submit\"")));
    parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"http://localhost:8888/wolfcms/?/admin/login/login\"","method=\"post\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("accesskey=\"s\"","class=\"submit\"","type=\"submit\"","value=\"Login\""))));
    childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("SPAN", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("style=\"margin-top: 6px\"")));
    parentElement = new DOMElement("FORM", "", new ArrayList<String>(Arrays.asList("action=\"http://localhost:8888/wolfcms/?/admin/login/login\"","method=\"post\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"checkbox\"","id=\"login-remember-me\"","name=\"login[remember]\"","type=\"checkbox\"","value=\"checked\""))));
    childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList("id=\"login-redirect\"","name=\"login[redirect]\"","type=\"hidden\"","value=\"/wolfcms/?/admin/\""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList("class=\"checkbox\"","for=\"login-remember-me\""))));
    childrenElements.add(new DOMElement("INPUT", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("LABEL", "", new ArrayList<String>(Arrays.asList(""))));
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
// Intentionally empty: the generator produced no "original" assertions for state 2.
public void checkState2_OriginalAssertions(){
}
// Intentionally empty: the generator produced no "reused" assertions for state 2.
public void checkState2_ReusedAssertions(){
}
// Intentionally empty: the generator produced no "generated" assertions for state 2.
public void checkState2_GeneratedAssertions(){
}
// Intentionally empty: the generator produced no "learned" assertions for state 2.
public void checkState2_LearnedAssertions(){
}
// Intentionally empty: no combined assertion set was generated for state 2.
public void checkState2_AllAssertions(){
}
/**
 * Machine-generated random assertions for state 2 (sample #1).
 * NOTE(review): the commented-out assertions were presumably disabled because
 * they reference randomized content unstable across runs — left as generated.
 */
public void checkState2_RandAssertions1(){
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[41]"))); // Random element assertion
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[44]/A"))); // Random element assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-yafn.html\""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1wrap\"")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colright\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1pad\""))));
    childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colmask\"")));
    parentElement = new DOMElement("BODY", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colmid\""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-nedy.html\""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/**
 * Machine-generated random assertions for state 2 (sample #2).
 * Commented-out checks were disabled by the generator; left as generated.
 */
public void checkState2_RandAssertions2(){
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[40]/A"))); // Random element assertion
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[24]"))); // Random element assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-zkkp.html\""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-kgji.html\""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-dsjv.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-taqu.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/** Machine-generated random assertions for state 2 (sample #3); disabled checks left as generated. */
public void checkState2_RandAssertions3(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL"))); // Random element assertion
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[39]/A"))); // Random element assertion
    element = new DOMElement("LINK", "", new ArrayList<String>(Arrays.asList("href=\"/wolfcms/public/themes/simple/images/favicon.ico\"","rel=\"favourites icon\"")));
    parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-uzjc.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1wrap\"")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colright\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1pad\""))));
    childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-yafn.html\""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/** Machine-generated random assertions for state 2 (sample #4); disabled checks left as generated. */
public void checkState2_RandAssertions4(){
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[38]/A"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[12]"))); // Random element assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-cpya.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-coxy.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-kgji.html\""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-coxy.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/** Machine-generated random assertions for state 2 (sample #5); disabled checks left as generated. */
public void checkState2_RandAssertions5(){
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[3]/DIV/DIV/DIV[2]/UL[2]"))); // Random element assertion
    assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[10]/A"))); // Random element assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-gboq.html\""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-zcfk.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-estb.html\"")));
    parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-estb.html\""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
// Intentionally empty: the generator produced no "original" assertions for state 25.
public void checkState25_OriginalAssertions(){
}
// Intentionally empty: the generator produced no "reused" assertions for state 25.
public void checkState25_ReusedAssertions(){
}
// Intentionally empty: the generator produced no "generated" assertions for state 25.
public void checkState25_GeneratedAssertions(){
}
// Intentionally empty: the generator produced no "learned" assertions for state 25.
public void checkState25_LearnedAssertions(){
}
// Intentionally empty: no combined assertion set was generated for state 25.
public void checkState25_AllAssertions(){
}
/** Machine-generated random assertions for state 25 (sample #1); disabled checks left as generated. */
public void checkState25_RandAssertions1(){
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[40]/A"))); // Random element assertion
    // assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[41]"))); // Random element assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-zkkp.html\""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1wrap\"")));
    parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colright\"")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1pad\""))));
    childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
    assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colmask\"")));
    parentElement = new DOMElement("BODY", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colmid\""))));
    assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
    element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
    parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
    childrenElements.clear();
    childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-nedy.html\""))));
    // assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
// Generated (Crawljax/Testilizer) random DOM assertions for crawl state 25, batch 2.
// Commented-out assertTrue(...) lines were disabled by the generator; kept for traceability.
public void checkState25_RandAssertions2(){
// assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[24]"))); // Random element assertion
assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL"))); // Random element assertion
element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-kgji.html\""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
// Pattern: the favicon LINK in HEAD, expected to have no children.
element = new DOMElement("LINK", "", new ArrayList<String>(Arrays.asList("href=\"/wolfcms/public/themes/simple/images/favicon.ico\"","rel=\"favourites icon\"")));
parentElement = new DOMElement("HEAD", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-taqu.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-dsjv.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
// Generated (Crawljax/Testilizer) random DOM assertions for crawl state 25, batch 3.
// Commented-out assertTrue(...) lines were disabled by the generator; kept for traceability.
public void checkState25_RandAssertions3(){
// assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[39]/A"))); // Random element assertion
// assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[38]/A"))); // Random element assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-uzjc.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-coxy.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
// Full-pattern check: tag names AND attributes of element, parent and children must match.
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1wrap\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"colright\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1pad\""))));
childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-kgji.html\""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
// Generated (Crawljax/Testilizer) random DOM assertions for crawl state 25, batch 4.
// Commented-out assertTrue(...) lines were disabled by the generator; kept for traceability.
public void checkState25_RandAssertions4(){
assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[12]"))); // Random element assertion
assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[10]/A"))); // Random element assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-zcfk.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-uzjc.html\""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-coxy.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-estb.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
// Generated (Crawljax/Testilizer) random DOM assertions for crawl state 25, batch 5.
// Commented-out assertTrue(...) lines were disabled by the generator; kept for traceability.
public void checkState25_RandAssertions5(){
assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[3]/DIV/DIV/DIV[2]/UL[2]"))); // Random element assertion
assertTrue(isElementPresent(By.xpath("/HTML/BODY/DIV[2]/UL/LI[35]/A"))); // Random element assertion
// Pattern: the #col1 content DIV with its mixed text/comment/heading children.
element = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1\"")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col1pad\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("H2", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#comment", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("DIV", "", new ArrayList<String>(Arrays.asList("id=\"col2\"")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("LI", "", new ArrayList<String>(Arrays.asList(""))));
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
assertTrue(isElementPatternTagPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
parentElement = new DOMElement("UL", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-estb.html\""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
element = new DOMElement("A", "", new ArrayList<String>(Arrays.asList("href=\"http://localhost:8888/wolfcms/?rnd-lgcq.html\"")));
parentElement = new DOMElement("LI", "", new ArrayList<String>(Arrays.asList("")));
childrenElements.clear();
childrenElements.add(new DOMElement("#text", "", new ArrayList<String>(Arrays.asList(""))));
// assertTrue(isElementPatternFullPresent(parentElement , element, childrenElements)); // Random pattern assertion
}
/*
* Auxiliary methods
*/
/**
 * Returns true if an element located by {@code by} exists in the current page.
 * WebDriver signals absence via NoSuchElementException, which is deliberately
 * swallowed here and converted into a boolean.
 */
private boolean isElementPresent(By by) {
try {
driver.findElement(by);
return true;
} catch (NoSuchElementException e) {
return false;
}
}
/**
 * Reports whether the current page DOM contains an element matching the given pattern
 * by TAG NAMES ONLY: the element's own tag, its parent's tag, and the set of its
 * children's node names. Attributes are deliberately NOT compared here — that is the
 * job of the stricter isElementPatternFullPresent.
 *
 * @param parent   expected parent; only its tag name is used
 * @param element  element to look for; only its tag name is used
 * @param children expected children; compared as an unordered set of node names
 *                 (order and duplicates are ignored)
 * @return true if at least one matching element exists in the page, false otherwise
 *         (also false when the page source cannot be parsed)
 */
private boolean isElementPatternTagPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
    // The expected child-name set does not depend on the candidate element,
    // so build it once instead of on every loop iteration (was rebuilt per candidate).
    HashSet<String> expectedChildNames = new HashSet<String>();
    for (int k = 0; k < children.size(); k++)
        expectedChildNames.add(children.get(k).getTagName());
    try {
        String source = driver.getPageSource();
        Document dom = DomUtils.asDocument(source);
        NodeList nodeList = dom.getElementsByTagName(element.getTagName());
        for (int i = 0; i < nodeList.getLength(); i++) {
            org.w3c.dom.Element sourceElement = (org.w3c.dom.Element) nodeList.item(i);
            // check parent node's tag name (attributes intentionally ignored in this variant)
            if (!sourceElement.getParentNode().getNodeName().equals(parent.getTagName()))
                continue;
            // check children nodes' names, compared as a set
            HashSet<String> actualChildNames = new HashSet<String>();
            NodeList childNodes = sourceElement.getChildNodes();
            for (int j = 0; j < childNodes.getLength(); j++)
                actualChildNames.add(childNodes.item(j).getNodeName());
            if (expectedChildNames.equals(actualChildNames))
                return true;
        }
    } catch (IOException e) {
        // Page source could not be parsed into a Document; treat as "pattern not present".
        e.printStackTrace();
    }
    return false;
}
/**
 * Strict variant of isElementPatternTagPresent: in addition to tag names, the
 * element's own attributes, the parent's attributes, and every child's attribute
 * set must match. Attributes are compared as sets of name="value" strings;
 * children are compared as unordered sets.
 *
 * @return true if at least one fully matching element exists, false otherwise
 *         (also false when the page source cannot be parsed)
 */
private boolean isElementPatternFullPresent(DOMElement parent, DOMElement element, ArrayList<DOMElement> children) {
    // Expected child data is loop-invariant; build it once (was rebuilt per candidate).
    HashSet<String> expectedChildNames = new HashSet<String>();
    HashSet<HashSet<String>> expectedChildAtts = new HashSet<HashSet<String>>();
    for (int k = 0; k < children.size(); k++) {
        expectedChildNames.add(children.get(k).getTagName());
        expectedChildAtts.add(children.get(k).getAttributes());
    }
    try {
        String source = driver.getPageSource();
        Document dom = DomUtils.asDocument(source);
        NodeList nodeList = dom.getElementsByTagName(element.getTagName());
        for (int i = 0; i < nodeList.getLength(); i++) {
            org.w3c.dom.Element sourceElement = (org.w3c.dom.Element) nodeList.item(i);
            // check the candidate's own attributes
            if (!element.getAttributes().equals(attributeSet(sourceElement.getAttributes())))
                continue;
            // check parent node's tag and attributes
            // (attributeSet also guards against getAttributes() returning null for
            // non-Element parents such as the Document node, which previously NPE'd)
            if (!sourceElement.getParentNode().getNodeName().equals(parent.getTagName()))
                continue;
            if (!parent.getAttributes().equals(attributeSet(sourceElement.getParentNode().getAttributes())))
                continue;
            // check children nodes' names, compared as a set
            NodeList childNodes = sourceElement.getChildNodes();
            HashSet<String> actualChildNames = new HashSet<String>();
            for (int j = 0; j < childNodes.getLength(); j++)
                actualChildNames.add(childNodes.item(j).getNodeName());
            if (!expectedChildNames.equals(actualChildNames))
                continue;
            // check children nodes' attribute sets
            HashSet<HashSet<String>> actualChildAtts = new HashSet<HashSet<String>>();
            for (int j = 0; j < childNodes.getLength(); j++)
                actualChildAtts.add(attributeSet(childNodes.item(j).getAttributes()));
            if (!expectedChildAtts.equals(actualChildAtts))
                continue;
            return true;
        }
    } catch (IOException e) {
        // Page source could not be parsed into a Document; treat as "pattern not present".
        e.printStackTrace();
    }
    return false;
}

/**
 * Collects a node's attributes as a set of name="value" strings, matching the
 * serialization used by DOMElement. A null attribute map (text/comment nodes,
 * Document parents) yields an empty set. Extracted from three duplicated loops.
 */
private static HashSet<String> attributeSet(NamedNodeMap atts) {
    HashSet<String> result = new HashSet<String>();
    if (atts != null)
        for (int i = 0; i < atts.getLength(); i++)
            result.add(atts.item(i).getNodeName() + "=\"" + atts.item(i).getNodeValue() + "\"");
    return result;
}
/**
 * Returns true if a JavaScript alert/confirm/prompt is currently open.
 * Switching to a non-existent alert throws NoAlertPresentException, which is
 * converted into a boolean here. Note: a successful switchTo().alert() does
 * not close or modify the alert.
 */
private boolean isAlertPresent() {
try {
driver.switchTo().alert();
return true;
} catch (NoAlertPresentException e) {
return false;
}
}
/**
 * Captures the text of the currently open alert, then accepts it when
 * acceptNextAlert is true or dismisses it otherwise.
 * The flag is reset to true in the finally block, so it is restored even if
 * no alert is present and switchTo().alert() throws.
 *
 * @return the alert's text
 */
private String closeAlertAndGetItsText() {
try {
Alert alert = driver.switchTo().alert();
String alertText = alert.getText();
if (acceptNextAlert) {
alert.accept();
} else {
alert.dismiss();
}
return alertText;
} finally {
acceptNextAlert = true;
}
}
public class DOMElement {
private String tagName;
private String textContent;
private HashSet<String> attributes = new HashSet<String>();
public DOMElement(String tagName, String textContent, ArrayList<String> attributes){
this.tagName = tagName;
this.textContent = textContent;
if (attributes.get(0)!="")
for (int i=0; i<attributes.size();i++)
this.attributes.add(attributes.get(i));
}
public String getTagName() {
return tagName;
}
public String getTextContent() {
return textContent;
}
public HashSet<String> getAttributes() {
return attributes;
}
}
/**
 * Asks Testilizer for a JavaScript snippet that mutates the DOM of the given crawl
 * state and executes it in the browser. The script returns the id of the DOM element
 * it randomly selected, which is recorded in Testilizer's bookkeeping matrix indexed
 * by [mutation operator][mutated state]. No-op when Testilizer returns no code.
 * NOTE(review): the long script result is narrowed to int for storage — presumably
 * element ids fit in an int; confirm against Testilizer.
 */
private void mutateDOMTree(int stateID){
// execute JavaScript code to mutate DOM
String code = com.crawljax.plugins.testilizer.Testilizer.mutateDOMTreeCode(stateID);
if (code!= null){
long RandomlySelectedDOMElementID = (long) ((JavascriptExecutor)driver).executeScript(code);
int MutationOperatorCode = com.crawljax.plugins.testilizer.Testilizer.MutationOperatorCode;
int StateToBeMutated = com.crawljax.plugins.testilizer.Testilizer.StateToBeMutated;
com.crawljax.plugins.testilizer.Testilizer.SelectedRandomElementInDOM[MutationOperatorCode][StateToBeMutated]
= (int) RandomlySelectedDOMElementID;
}
}
}
| |
package com.ghstudios.android.components;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.AttributeSet;
import android.view.View;
/*
* Draws a sharpness level by values
*
* Max sharpness units combined should not exceed the value of int maxsharpness
*/
/*
 * Draws a sharpness gauge: three stacked horizontal bars, each split into up to
 * seven coloured segments (red, orange, yellow, green, blue, white, purple).
 *
 * Max sharpness units combined in one bar should not exceed MAX_SHARPNESS.
 */
public class DrawSharpness extends View {
    // Error Tag
    private static final String TAG = "DrawSharpness";

    public static int orangeColor = Color.rgb(255, 150, 0);
    public static int purpleColor = Color.rgb(120, 81, 169);
    public static int blueColor = Color.rgb(20, 131, 208);

    private static final int NUM_BARS = 3;
    private static final int NUM_SEGMENTS = 7;
    // Max combined sharpness units per bar; used to scale segments to the bar width.
    private static final int MAX_SHARPNESS = 45;

    // mSharpness[bar][segment] — replaces the former 21 mRed1..mPurple3 fields.
    // Segment order: red, orange, yellow, green, blue, white, purple.
    private final int[][] mSharpness = new int[NUM_BARS][NUM_SEGMENTS];

    // Segment colours in draw order, parallel to the segment axis of mSharpness.
    private final int[] mSegmentColors = {
            Color.RED, orangeColor, Color.YELLOW, Color.GREEN, blueColor, Color.WHITE, purpleColor
    };

    // Measured size, cached by onMeasure for use in onDraw.
    private int mheight;
    private int mwidth;

    Paint paint = new Paint();

    public DrawSharpness(Context context, AttributeSet attrs) {
        super(context, attrs);
        // If previewing in the layout editor, set some fake display data
        if (isInEditMode()) {
            init(new int[]{6, 5, 11, 9, 4, 0, 0},
                 new int[]{6, 5, 11, 9, 6, 0, 0},
                 new int[]{6, 5, 11, 9, 6, 3, 0});
        }
    }

    /**
     * Assigns the sharpness values of the three bars. Each array must hold
     * NUM_SEGMENTS (7) entries ordered red, orange, yellow, green, blue, white, purple.
     * Values are copied, so callers may reuse their arrays.
     */
    public void init(int[] sharpness1, int[] sharpness2, int[] sharpness3) {
        System.arraycopy(sharpness1, 0, mSharpness[0], 0, NUM_SEGMENTS);
        System.arraycopy(sharpness2, 0, mSharpness[1], 0, NUM_SEGMENTS);
        System.arraycopy(sharpness3, 0, mSharpness[2], 0, NUM_SEGMENTS);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        super.onMeasure(widthMeasureSpec, heightMeasureSpec);
        // Width should be no greater than 500px; height no greater than 60px.
        // (The previous comment said 50px but the code has always capped at 60.)
        int width = Math.min(500, MeasureSpec.getSize(widthMeasureSpec));
        int height = Math.min(60, MeasureSpec.getSize(heightMeasureSpec));
        mwidth = width;
        mheight = height;
        setMeasuredDimension(width, height);
    }

    @Override
    public void requestLayout() {
        // Intentionally empty: relayout requests are suppressed.
        // NOTE(review): this freezes the view's size after the first layout pass —
        // confirm this is deliberate before changing.
    }

    @Override
    public void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        // Outer margin is height/8 (the old comment claiming height/7 was stale).
        int outer_margin = mheight / 8;
        int inner_margin = 3; // margin between the three bars
        // Scale factor converts sharpness units into pixels so that a full bar
        // exactly fills the drawable width. Kept as float until pixel conversion.
        float scalefactor = (float) (mwidth - (outer_margin * 2)) / MAX_SHARPNESS;
        int barwidth = (int) (scalefactor * MAX_SHARPNESS) + (outer_margin * 2);
        int totalBarHeight = mheight - (2 * outer_margin) - (2 * inner_margin);

        // Distribute totalBarHeight across one main bar and two sub bars, absorbing
        // the 0/1/2 leftover pixels of the integer division by three:
        //   remainder 0: main bar takes 2px from each sub bar
        //   remainder 1: the extra pixel also goes to the main bar
        //   remainder 2: one extra pixel per sub bar, reducing the size difference
        // Degenerate (negative) totalBarHeight leaves all heights 0, as before.
        int mainBarHeight = 0;
        int subBarHeight = 0;
        if (totalBarHeight >= 0) {
            int barheight = totalBarHeight / 3;
            switch (totalBarHeight % 3) {
                case 1:
                    mainBarHeight = barheight + 5;
                    subBarHeight = barheight - 2;
                    break;
                case 2:
                    mainBarHeight = barheight + 4;
                    subBarHeight = barheight - 1;
                    break;
                default: // remainder 0
                    mainBarHeight = barheight + 4;
                    subBarHeight = barheight - 2;
                    break;
            }
        }

        // Draw the background
        paint.setColor(Color.BLACK);
        paint.setStrokeWidth(4);
        canvas.drawRect(0, 0, barwidth, mheight, paint);

        // Draw the main bar followed by the two sub bars, top to bottom.
        int bartop = outer_margin;
        for (int bar = 0; bar < NUM_BARS; bar++) {
            int barbottom = bartop + (bar == 0 ? mainBarHeight : subBarHeight);
            drawBar(canvas, outer_margin, scalefactor, bartop, barbottom, mSharpness[bar]);
            bartop = barbottom + inner_margin;
        }
    }

    /**
     * Draws one bar between bartop and barbottom: walks the seven segments left to
     * right, accumulating each segment's scaled width from the previous end point.
     * Replaces seven copy-pasted colour blocks with a loop over mSegmentColors.
     */
    private void drawBar(Canvas canvas, int margin, float scalefactor, int bartop, int barbottom, int[] values) {
        paint.setStrokeWidth(0);
        int start = margin;
        for (int i = 0; i < NUM_SEGMENTS; i++) {
            int end = start + (int) (values[i] * scalefactor);
            paint.setColor(mSegmentColors[i]);
            canvas.drawRect(start, bartop, end, barbottom, paint);
            start = end;
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.indexing;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
/**
*/
/**
 * Per-shard indexing statistics service. Tracks total and per-type stats for
 * create/index/delete operations, forwards operation lifecycle events to the
 * registered {@link IndexingOperationListener}s, and feeds the indexing slow log.
 */
public class ShardIndexingService extends AbstractIndexShardComponent {

    private final IndexingSlowLog slowLog;

    private final StatsHolder totalStats = new StatsHolder();

    private final CopyOnWriteArrayList<IndexingOperationListener> listeners = new CopyOnWriteArrayList<>();

    // Copy-on-write map: writers replace the whole immutable map under
    // "synchronized (this)", so readers never need a lock.
    private volatile Map<String, StatsHolder> typesStats = ImmutableMap.of();

    public ShardIndexingService(ShardId shardId, Settings indexSettings) {
        super(shardId, indexSettings);
        this.slowLog = new IndexingSlowLog(shardId, indexSettings);
    }

    /**
     * Returns the stats, including type specific stats. If the types are null/0 length, then nothing
     * is returned for them. If they are set, then only types provided will be returned, or
     * <tt>_all</tt> for all types.
     */
    public IndexingStats stats(String... types) {
        IndexingStats.Stats total = totalStats.stats();
        Map<String, IndexingStats.Stats> typesSt = null;
        if (types != null && types.length > 0) {
            typesSt = new HashMap<>(typesStats.size());
            if (types.length == 1 && types[0].equals("_all")) {
                for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
                    typesSt.put(entry.getKey(), entry.getValue().stats());
                }
            } else {
                for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
                    if (Regex.simpleMatch(types, entry.getKey())) {
                        typesSt.put(entry.getKey(), entry.getValue().stats());
                    }
                }
            }
        }
        return new IndexingStats(total, typesSt);
    }

    public void addListener(IndexingOperationListener listener) {
        listeners.add(listener);
    }

    public void removeListener(IndexingOperationListener listener) {
        listeners.remove(listener);
    }

    /** Marks a create operation as in-flight and lets listeners rewrite it. */
    public Engine.Create preCreate(Engine.Create create) {
        totalStats.indexCurrent.inc();
        typeStats(create.type()).indexCurrent.inc();
        for (IndexingOperationListener listener : listeners) {
            create = listener.preCreate(create);
        }
        return create;
    }

    public void postCreateUnderLock(Engine.Create create) {
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postCreateUnderLock(create);
            } catch (Exception e) {
                logger.warn("postCreateUnderLock listener [{}] failed", e, listener);
            }
        }
    }

    public void throttlingActivated() {
        totalStats.setThrottled(true);
    }

    public void throttlingDeactivated() {
        totalStats.setThrottled(false);
    }

    /** Records a successful create: updates timing metrics, slow log, and listeners. */
    public void postCreate(Engine.Create create) {
        long took = create.endTime() - create.startTime();
        totalStats.indexMetric.inc(took);
        totalStats.indexCurrent.dec();
        StatsHolder typeStats = typeStats(create.type());
        typeStats.indexMetric.inc(took);
        typeStats.indexCurrent.dec();
        slowLog.postCreate(create, took);
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postCreate(create);
            } catch (Exception e) {
                logger.warn("postCreate listener [{}] failed", e, listener);
            }
        }
    }

    /** Notifies listeners of a failed create. Stats are not touched here. */
    public void postCreate(Engine.Create create, Throwable ex) {
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postCreate(create, ex);
            } catch (Throwable t) {
                logger.warn("postCreate listener [{}] failed", t, listener);
            }
        }
    }

    /** Marks an index operation as in-flight and lets listeners rewrite it. */
    public Engine.Index preIndex(Engine.Index index) {
        totalStats.indexCurrent.inc();
        typeStats(index.type()).indexCurrent.inc();
        for (IndexingOperationListener listener : listeners) {
            index = listener.preIndex(index);
        }
        return index;
    }

    public void postIndexUnderLock(Engine.Index index) {
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postIndexUnderLock(index);
            } catch (Exception e) {
                logger.warn("postIndexUnderLock listener [{}] failed", e, listener);
            }
        }
    }

    /** Records a successful index operation: timing metrics, slow log, listeners. */
    public void postIndex(Engine.Index index, boolean created) {
        long took = index.endTime() - index.startTime();
        totalStats.indexMetric.inc(took);
        totalStats.indexCurrent.dec();
        StatsHolder typeStats = typeStats(index.type());
        typeStats.indexMetric.inc(took);
        typeStats.indexCurrent.dec();
        slowLog.postIndex(index, took);
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postIndex(index, created);
            } catch (Exception e) {
                logger.warn("postIndex listener [{}] failed", e, listener);
            }
        }
    }

    /** Records a failed index operation: decrements in-flight, bumps failure counters. */
    public void postIndex(Engine.Index index, Throwable ex) {
        totalStats.indexCurrent.dec();
        typeStats(index.type()).indexCurrent.dec();
        totalStats.indexFailed.inc();
        typeStats(index.type()).indexFailed.inc();
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postIndex(index, ex);
            } catch (Throwable t) {
                logger.warn("postIndex listener [{}] failed", t, listener);
            }
        }
    }

    /** Marks a delete operation as in-flight and lets listeners rewrite it. */
    public Engine.Delete preDelete(Engine.Delete delete) {
        totalStats.deleteCurrent.inc();
        typeStats(delete.type()).deleteCurrent.inc();
        for (IndexingOperationListener listener : listeners) {
            delete = listener.preDelete(delete);
        }
        return delete;
    }

    public void postDeleteUnderLock(Engine.Delete delete) {
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postDeleteUnderLock(delete);
            } catch (Exception e) {
                logger.warn("postDeleteUnderLock listener [{}] failed", e, listener);
            }
        }
    }

    /** Records a successful delete: updates timing metrics and listeners. */
    public void postDelete(Engine.Delete delete) {
        long took = delete.endTime() - delete.startTime();
        totalStats.deleteMetric.inc(took);
        totalStats.deleteCurrent.dec();
        StatsHolder typeStats = typeStats(delete.type());
        typeStats.deleteMetric.inc(took);
        typeStats.deleteCurrent.dec();
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postDelete(delete);
            } catch (Exception e) {
                logger.warn("postDelete listener [{}] failed", e, listener);
            }
        }
    }

    /** Records a failed delete: decrements the in-flight counters, notifies listeners. */
    public void postDelete(Engine.Delete delete, Throwable ex) {
        totalStats.deleteCurrent.dec();
        typeStats(delete.type()).deleteCurrent.dec();
        for (IndexingOperationListener listener : listeners) {
            try {
                listener.postDelete(delete, ex);
            } catch (Throwable t) {
                logger.warn("postDelete listener [{}] failed", t, listener);
            }
        }
    }

    /** Counts an update that turned out to be a no-op (document unchanged). */
    public void noopUpdate(String type) {
        totalStats.noopUpdates.inc();
        typeStats(type).noopUpdates.inc();
    }

    /**
     * Clears accumulated metrics. Per-type holders are only retained (and cleared)
     * while they still have in-flight operations; idle types are dropped entirely.
     */
    public void clear() {
        totalStats.clear();
        synchronized (this) {
            if (!typesStats.isEmpty()) {
                MapBuilder<String, StatsHolder> typesStatsBuilder = MapBuilder.newMapBuilder();
                for (Map.Entry<String, StatsHolder> typeStats : typesStats.entrySet()) {
                    if (typeStats.getValue().totalCurrent() > 0) {
                        typeStats.getValue().clear();
                        typesStatsBuilder.put(typeStats.getKey(), typeStats.getValue());
                    }
                }
                typesStats = typesStatsBuilder.immutableMap();
            }
        }
    }

    /** Lazily creates the per-type stats holder, double-checked under "this". */
    private StatsHolder typeStats(String type) {
        StatsHolder stats = typesStats.get(type);
        if (stats == null) {
            synchronized (this) {
                stats = typesStats.get(type);
                if (stats == null) {
                    stats = new StatsHolder();
                    typesStats = MapBuilder.newMapBuilder(typesStats).put(type, stats).immutableMap();
                }
            }
        }
        return stats;
    }

    public void onRefreshSettings(Settings settings) {
        slowLog.onRefreshSettings(settings);
    }

    /** Mutable metric bundle for either the shard total or a single type. */
    static class StatsHolder {
        public final MeanMetric indexMetric = new MeanMetric();
        public final MeanMetric deleteMetric = new MeanMetric();
        public final CounterMetric indexCurrent = new CounterMetric();
        public final CounterMetric indexFailed = new CounterMetric();
        public final CounterMetric deleteCurrent = new CounterMetric();
        public final CounterMetric noopUpdates = new CounterMetric();
        public final CounterMetric throttleTimeMillisMetric = new CounterMetric();
        volatile boolean isThrottled = false;
        volatile long startOfThrottleNS;

        /** Snapshots the metrics, folding any currently-open throttle window into the throttle time. */
        public IndexingStats.Stats stats() {
            long currentThrottleNS = 0;
            if (isThrottled && startOfThrottleNS != 0) {
                currentThrottleNS += System.nanoTime() - startOfThrottleNS;
                if (currentThrottleNS < 0) {
                    // Paranoia (System.nanoTime() is supposed to be monotonic): time slip must have happened, have to ignore this value
                    currentThrottleNS = 0;
                }
            }
            return new IndexingStats.Stats(
                    indexMetric.count(), TimeUnit.NANOSECONDS.toMillis(indexMetric.sum()), indexCurrent.count(), indexFailed.count(),
                    deleteMetric.count(), TimeUnit.NANOSECONDS.toMillis(deleteMetric.sum()), deleteCurrent.count(),
                    noopUpdates.count(), isThrottled,
                    // both operands are already milliseconds; the previous
                    // TimeUnit.MILLISECONDS.toMillis(...) wrapper was a no-op
                    throttleTimeMillisMetric.count() + TimeValue.nsecToMSec(currentThrottleNS));
        }

        /** Tracks throttle-state transitions and accumulates completed throttle windows. */
        void setThrottled(boolean isThrottled) {
            if (!this.isThrottled && isThrottled) {
                startOfThrottleNS = System.nanoTime();
            } else if (this.isThrottled && !isThrottled) {
                assert startOfThrottleNS > 0 : "Bad state of startOfThrottleNS";
                long throttleTimeNS = System.nanoTime() - startOfThrottleNS;
                if (throttleTimeNS >= 0) {
                    // Paranoia (System.nanoTime() is supposed to be monotonic): time slip may have occurred but never want to add a negative number
                    throttleTimeMillisMetric.inc(TimeValue.nsecToMSec(throttleTimeNS));
                }
            }
            this.isThrottled = isThrottled;
        }

        /** Number of operations currently in flight (used by clear() to keep active types). */
        public long totalCurrent() {
            // BUG FIX: previously summed deleteMetric.count() — the number of COMPLETED
            // deletes — instead of deleteCurrent.count() (in-flight deletes). That made
            // clear() retain stats for any type that had ever performed a delete.
            return indexCurrent.count() + deleteCurrent.count();
        }

        /** Resets the timing metrics; counters for in-flight/failed ops are kept. */
        public void clear() {
            indexMetric.clear();
            deleteMetric.clear();
        }
    }
}
| |
package redis.clients.jedis;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Pattern;
import redis.clients.jedis.BinaryClient.LIST_POSITION;
import redis.clients.util.Hashing;
public class ShardedJedis extends BinaryShardedJedis implements JedisCommands {
/** Creates a sharded client over the given shards with the superclass's default hashing and key-tag handling. */
public ShardedJedis(List<JedisShardInfo> shards) {
super(shards);
}
/** Creates a sharded client using the supplied hashing algorithm for shard selection. */
public ShardedJedis(List<JedisShardInfo> shards, Hashing algo) {
super(shards, algo);
}
/** Creates a sharded client using the supplied key-tag pattern to select the hashed portion of keys. */
public ShardedJedis(List<JedisShardInfo> shards, Pattern keyTagPattern) {
super(shards, keyTagPattern);
}
/** Creates a sharded client with both a custom hashing algorithm and a key-tag pattern. */
public ShardedJedis(List<JedisShardInfo> shards, Hashing algo,
Pattern keyTagPattern) {
super(shards, algo, keyTagPattern);
}
/**
 * Disconnects from every shard: sends QUIT to each backing Jedis connection
 * and then closes its socket.
 */
public void disconnect() {
    for (Jedis shard : getAllShards()) {
        shard.quit();
        shard.disconnect();
    }
}
// ---------------------------------------------------------------------------
// Key/value and string commands. Each call resolves the shard that owns the
// key via getShard(key) and delegates the command unchanged to that shard's
// Jedis connection.
// ---------------------------------------------------------------------------

/** SET on the shard owning {@code key}. */
public String set(String key, String value) {
    return getShard(key).set(key, value);
}

/** GET on the shard owning {@code key}. */
public String get(String key) {
    return getShard(key).get(key);
}

/** EXISTS on the shard owning {@code key}. */
public Boolean exists(String key) {
    return getShard(key).exists(key);
}

/** TYPE on the shard owning {@code key}. */
public String type(String key) {
    return getShard(key).type(key);
}

/** EXPIRE on the shard owning {@code key}. */
public Long expire(String key, int seconds) {
    return getShard(key).expire(key, seconds);
}

/** EXPIREAT on the shard owning {@code key}. */
public Long expireAt(String key, long unixTime) {
    return getShard(key).expireAt(key, unixTime);
}

/** TTL on the shard owning {@code key}. */
public Long ttl(String key) {
    return getShard(key).ttl(key);
}

/** SETBIT on the shard owning {@code key}. */
public Boolean setbit(String key, long offset, boolean value) {
    return getShard(key).setbit(key, offset, value);
}

/** GETBIT on the shard owning {@code key}. */
public Boolean getbit(String key, long offset) {
    return getShard(key).getbit(key, offset);
}

/** SETRANGE on the shard owning {@code key}. */
public Long setrange(String key, long offset, String value) {
    return getShard(key).setrange(key, offset, value);
}

/** GETRANGE on the shard owning {@code key}. */
public String getrange(String key, long startOffset, long endOffset) {
    return getShard(key).getrange(key, startOffset, endOffset);
}

/** GETSET on the shard owning {@code key}. */
public String getSet(String key, String value) {
    return getShard(key).getSet(key, value);
}

/** SETNX on the shard owning {@code key}. */
public Long setnx(String key, String value) {
    return getShard(key).setnx(key, value);
}

/** SETEX on the shard owning {@code key}. */
public String setex(String key, int seconds, String value) {
    return getShard(key).setex(key, seconds, value);
}

/** DECRBY on the shard owning {@code key}. */
public Long decrBy(String key, long integer) {
    return getShard(key).decrBy(key, integer);
}

/** DECR on the shard owning {@code key}. */
public Long decr(String key) {
    return getShard(key).decr(key);
}

/** INCRBY on the shard owning {@code key}. */
public Long incrBy(String key, long integer) {
    return getShard(key).incrBy(key, integer);
}

/** INCR on the shard owning {@code key}. */
public Long incr(String key) {
    return getShard(key).incr(key);
}

/** APPEND on the shard owning {@code key}. */
public Long append(String key, String value) {
    return getShard(key).append(key, value);
}

/** SUBSTR on the shard owning {@code key}. */
public String substr(String key, int start, int end) {
    return getShard(key).substr(key, start, end);
}
public Long hset(String key, String field, String value) {
Jedis j = getShard(key);
return j.hset(key, field, value);
}
public String hget(String key, String field) {
Jedis j = getShard(key);
return j.hget(key, field);
}
public Long hsetnx(String key, String field, String value) {
Jedis j = getShard(key);
return j.hsetnx(key, field, value);
}
public String hmset(String key, Map<String, String> hash) {
Jedis j = getShard(key);
return j.hmset(key, hash);
}
public List<String> hmget(String key, String... fields) {
Jedis j = getShard(key);
return j.hmget(key, fields);
}
public Long hincrBy(String key, String field, long value) {
Jedis j = getShard(key);
return j.hincrBy(key, field, value);
}
public Boolean hexists(String key, String field) {
Jedis j = getShard(key);
return j.hexists(key, field);
}
public Long del(String key) {
Jedis j = getShard(key);
return j.del(key);
}
public Long hdel(String key, String... fields) {
Jedis j = getShard(key);
return j.hdel(key, fields);
}
public Long hlen(String key) {
Jedis j = getShard(key);
return j.hlen(key);
}
public Set<String> hkeys(String key) {
Jedis j = getShard(key);
return j.hkeys(key);
}
public List<String> hvals(String key) {
Jedis j = getShard(key);
return j.hvals(key);
}
public Map<String, String> hgetAll(String key) {
Jedis j = getShard(key);
return j.hgetAll(key);
}
public Long rpush(String key, String... strings) {
Jedis j = getShard(key);
return j.rpush(key, strings);
}
public Long lpush(String key, String... strings) {
Jedis j = getShard(key);
return j.lpush(key, strings);
}
public Long lpushx(String key, String string) {
Jedis j = getShard(key);
return j.lpushx(key, string);
}
public Long rpushx(String key, String string) {
Jedis j = getShard(key);
return j.rpushx(key, string);
}
public Long llen(String key) {
Jedis j = getShard(key);
return j.llen(key);
}
public List<String> lrange(String key, long start, long end) {
Jedis j = getShard(key);
return j.lrange(key, start, end);
}
public String ltrim(String key, long start, long end) {
Jedis j = getShard(key);
return j.ltrim(key, start, end);
}
public String lindex(String key, long index) {
Jedis j = getShard(key);
return j.lindex(key, index);
}
public String lset(String key, long index, String value) {
Jedis j = getShard(key);
return j.lset(key, index, value);
}
public Long lrem(String key, long count, String value) {
Jedis j = getShard(key);
return j.lrem(key, count, value);
}
public String lpop(String key) {
Jedis j = getShard(key);
return j.lpop(key);
}
public String rpop(String key) {
Jedis j = getShard(key);
return j.rpop(key);
}
public Long sadd(String key, String... members) {
Jedis j = getShard(key);
return j.sadd(key, members);
}
public Set<String> smembers(String key) {
Jedis j = getShard(key);
return j.smembers(key);
}
public Long srem(String key, String... members) {
Jedis j = getShard(key);
return j.srem(key, members);
}
public String spop(String key) {
Jedis j = getShard(key);
return j.spop(key);
}
public Long scard(String key) {
Jedis j = getShard(key);
return j.scard(key);
}
public Boolean sismember(String key, String member) {
Jedis j = getShard(key);
return j.sismember(key, member);
}
public String srandmember(String key) {
Jedis j = getShard(key);
return j.srandmember(key);
}
public Long zadd(String key, double score, String member) {
Jedis j = getShard(key);
return j.zadd(key, score, member);
}
public Long zadd(String key, Map<Double, String> scoreMembers) {
Jedis j = getShard(key);
return j.zadd(key, scoreMembers);
}
public Set<String> zrange(String key, long start, long end) {
Jedis j = getShard(key);
return j.zrange(key, start, end);
}
public Long zrem(String key, String... members) {
Jedis j = getShard(key);
return j.zrem(key, members);
}
public Double zincrby(String key, double score, String member) {
Jedis j = getShard(key);
return j.zincrby(key, score, member);
}
public Long zrank(String key, String member) {
Jedis j = getShard(key);
return j.zrank(key, member);
}
public Long zrevrank(String key, String member) {
Jedis j = getShard(key);
return j.zrevrank(key, member);
}
public Set<String> zrevrange(String key, long start, long end) {
Jedis j = getShard(key);
return j.zrevrange(key, start, end);
}
public Set<Tuple> zrangeWithScores(String key, long start, long end) {
Jedis j = getShard(key);
return j.zrangeWithScores(key, start, end);
}
public Set<Tuple> zrevrangeWithScores(String key, long start, long end) {
Jedis j = getShard(key);
return j.zrevrangeWithScores(key, start, end);
}
public Long zcard(String key) {
Jedis j = getShard(key);
return j.zcard(key);
}
public Double zscore(String key, String member) {
Jedis j = getShard(key);
return j.zscore(key, member);
}
public List<String> sort(String key) {
Jedis j = getShard(key);
return j.sort(key);
}
public List<String> sort(String key, SortingParams sortingParameters) {
Jedis j = getShard(key);
return j.sort(key, sortingParameters);
}
public Long zcount(String key, double min, double max) {
Jedis j = getShard(key);
return j.zcount(key, min, max);
}
public Long zcount(String key, String min, String max) {
Jedis j = getShard(key);
return j.zcount(key, min, max);
}
public Set<String> zrangeByScore(String key, double min, double max) {
Jedis j = getShard(key);
return j.zrangeByScore(key, min, max);
}
public Set<String> zrevrangeByScore(String key, double max, double min) {
Jedis j = getShard(key);
return j.zrevrangeByScore(key, max, min);
}
public Set<String> zrangeByScore(String key, double min, double max,
int offset, int count) {
Jedis j = getShard(key);
return j.zrangeByScore(key, min, max, offset, count);
}
public Set<String> zrevrangeByScore(String key, double max, double min,
int offset, int count) {
Jedis j = getShard(key);
return j.zrevrangeByScore(key, max, min, offset, count);
}
public Set<Tuple> zrangeByScoreWithScores(String key, double min, double max) {
Jedis j = getShard(key);
return j.zrangeByScoreWithScores(key, min, max);
}
public Set<Tuple> zrevrangeByScoreWithScores(String key, double max,
double min) {
Jedis j = getShard(key);
return j.zrevrangeByScoreWithScores(key, max, min);
}
public Set<Tuple> zrangeByScoreWithScores(String key, double min,
double max, int offset, int count) {
Jedis j = getShard(key);
return j.zrangeByScoreWithScores(key, min, max, offset, count);
}
public Set<Tuple> zrevrangeByScoreWithScores(String key, double max,
double min, int offset, int count) {
Jedis j = getShard(key);
return j.zrevrangeByScoreWithScores(key, max, min, offset, count);
}
public Set<String> zrangeByScore(String key, String min, String max) {
Jedis j = getShard(key);
return j.zrangeByScore(key, min, max);
}
public Set<String> zrevrangeByScore(String key, String max, String min) {
Jedis j = getShard(key);
return j.zrevrangeByScore(key, max, min);
}
public Set<String> zrangeByScore(String key, String min, String max,
int offset, int count) {
Jedis j = getShard(key);
return j.zrangeByScore(key, min, max, offset, count);
}
public Set<String> zrevrangeByScore(String key, String max, String min,
int offset, int count) {
Jedis j = getShard(key);
return j.zrevrangeByScore(key, max, min, offset, count);
}
public Set<Tuple> zrangeByScoreWithScores(String key, String min, String max) {
Jedis j = getShard(key);
return j.zrangeByScoreWithScores(key, min, max);
}
public Set<Tuple> zrevrangeByScoreWithScores(String key, String max,
String min) {
Jedis j = getShard(key);
return j.zrevrangeByScoreWithScores(key, max, min);
}
public Set<Tuple> zrangeByScoreWithScores(String key, String min,
String max, int offset, int count) {
Jedis j = getShard(key);
return j.zrangeByScoreWithScores(key, min, max, offset, count);
}
public Set<Tuple> zrevrangeByScoreWithScores(String key, String max,
String min, int offset, int count) {
Jedis j = getShard(key);
return j.zrevrangeByScoreWithScores(key, max, min, offset, count);
}
public Long zremrangeByRank(String key, long start, long end) {
Jedis j = getShard(key);
return j.zremrangeByRank(key, start, end);
}
public Long zremrangeByScore(String key, double start, double end) {
Jedis j = getShard(key);
return j.zremrangeByScore(key, start, end);
}
public Long zremrangeByScore(String key, String start, String end) {
Jedis j = getShard(key);
return j.zremrangeByScore(key, start, end);
}
public Long linsert(String key, LIST_POSITION where, String pivot,
String value) {
Jedis j = getShard(key);
return j.linsert(key, where, pivot, value);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment;
import static org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreUtils.isShared;
import static org.apache.jackrabbit.oak.segment.SegmentNotFoundExceptionListener.IGNORE_SNFE;
import static org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions.RETAINED_GENERATIONS_DEFAULT;
import static org.apache.jackrabbit.oak.segment.file.FileStoreBuilder.fileStoreBuilder;
import static org.apache.jackrabbit.oak.spi.cluster.ClusterRepositoryInfo.getOrCreateId;
import java.io.ByteArrayInputStream;
import java.io.Closeable;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import com.google.common.io.Closer;
import org.apache.jackrabbit.commons.SimpleValueFactory;
import org.apache.jackrabbit.oak.api.Descriptors;
import org.apache.jackrabbit.oak.api.jmx.CacheStatsMBean;
import org.apache.jackrabbit.oak.api.jmx.CheckpointMBean;
import org.apache.jackrabbit.oak.api.jmx.FileStoreBackupRestoreMBean;
import org.apache.jackrabbit.oak.backup.impl.FileStoreBackupRestoreImpl;
import org.apache.jackrabbit.oak.cache.CacheStats;
import org.apache.jackrabbit.oak.plugins.blob.BlobGC;
import org.apache.jackrabbit.oak.plugins.blob.BlobGCMBean;
import org.apache.jackrabbit.oak.plugins.blob.BlobGarbageCollector;
import org.apache.jackrabbit.oak.plugins.blob.BlobTrackingStore;
import org.apache.jackrabbit.oak.plugins.blob.MarkSweepGarbageCollector;
import org.apache.jackrabbit.oak.plugins.blob.SharedDataStore;
import org.apache.jackrabbit.oak.plugins.blob.datastore.BlobIdTracker;
import org.apache.jackrabbit.oak.plugins.blob.datastore.SharedDataStoreUtils;
import org.apache.jackrabbit.oak.segment.compaction.SegmentGCOptions;
import org.apache.jackrabbit.oak.segment.compaction.SegmentRevisionGC;
import org.apache.jackrabbit.oak.segment.compaction.SegmentRevisionGCMBean;
import org.apache.jackrabbit.oak.segment.file.FileStore;
import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder;
import org.apache.jackrabbit.oak.segment.file.FileStoreGCMonitor;
import org.apache.jackrabbit.oak.segment.file.FileStoreStatsMBean;
import org.apache.jackrabbit.oak.segment.file.InvalidFileStoreVersionException;
import org.apache.jackrabbit.oak.segment.file.MetricsIOMonitor;
import org.apache.jackrabbit.oak.segment.spi.persistence.SegmentNodeStorePersistence;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.blob.GarbageCollectableBlobStore;
import org.apache.jackrabbit.oak.spi.cluster.ClusterRepositoryInfo;
import org.apache.jackrabbit.oak.spi.commit.ObserverTracker;
import org.apache.jackrabbit.oak.spi.descriptors.GenericDescriptors;
import org.apache.jackrabbit.oak.spi.gc.GCMonitor;
import org.apache.jackrabbit.oak.spi.gc.GCMonitorTracker;
import org.apache.jackrabbit.oak.spi.state.NodeStore;
import org.apache.jackrabbit.oak.spi.state.RevisionGC;
import org.apache.jackrabbit.oak.spi.state.RevisionGCMBean;
import org.apache.jackrabbit.oak.spi.whiteboard.AbstractServiceTracker;
import org.apache.jackrabbit.oak.spi.whiteboard.Registration;
import org.apache.jackrabbit.oak.spi.whiteboard.Whiteboard;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardExecutor;
import org.apache.jackrabbit.oak.spi.whiteboard.WhiteboardUtils;
import org.apache.jackrabbit.oak.stats.Clock;
import org.apache.jackrabbit.oak.stats.StatisticsProvider;
import org.osgi.framework.BundleContext;
import org.osgi.framework.Constants;
import org.slf4j.Logger;
/**
 * Wires a {@link SegmentNodeStore} (and its backing {@link FileStore}) into the
 * whiteboard/OSGi environment: builds the store from a {@link Configuration},
 * registers the related MBeans, repository descriptors and services, and hands
 * every registration to the configuration's {@link Closer} for later teardown.
 */
class SegmentNodeStoreRegistrar {
// Convenience entry point: build a registrar for the given configuration and run it.
static SegmentNodeStore registerSegmentNodeStore(Configuration cfg) throws IOException {
return new SegmentNodeStoreRegistrar(cfg).register();
}
/**
 * Everything the registrar needs from its environment: role flags, revision-GC
 * tuning, FileStore layout/cache sizes, optional custom blob/segment stores,
 * and collaborators (whiteboard, closer, logger, stats provider, bundle context).
 */
interface Configuration {
// --- role of this node-store instance ---
boolean isPrimarySegmentStore();
boolean isSecondarySegmentStore();
boolean isStandbyInstance();
String getRole();
// --- revision GC tuning ---
int getRetainedGenerations();
int getDefaultRetainedGenerations();
boolean getPauseCompaction();
int getRetryCount();
int getForceCompactionTimeout();
long getSizeDeltaEstimation();
int getMemoryThreshold();
boolean getDisableEstimation();
long getGCProcessLog();
// --- FileStore location, caches and limits ---
File getSegmentDirectory();
int getSegmentCacheSize();
int getStringCacheSize();
int getTemplateCacheSize();
int getStringDeduplicationCacheSize();
int getTemplateDeduplicationCacheSize();
int getNodeDeduplicationCacheSize();
int getMaxFileSize();
boolean getMemoryMapping();
// --- optional custom blob store / segment persistence ---
boolean hasCustomBlobStore();
boolean hasCustomSegmentStore();
boolean registerDescriptors();
String getRepositoryHome();
long getBlobSnapshotInterval();
long getBlobGcMaxAge();
File getBackupDirectory();
// --- collaborators ---
Whiteboard getWhiteboard();
Closer getCloser();
Logger getLogger();
StatisticsProvider getStatisticsProvider();
BlobStore getBlobStore();
SegmentNodeStorePersistence getSegmentNodeStorePersistence();
BundleContext getBundleContext();
}
private final Configuration cfg;
private SegmentNodeStoreRegistrar(Configuration cfg) {
this.cfg = cfg;
}
/**
 * Builds the FileStore and SegmentNodeStore and registers all associated
 * services and MBeans.
 *
 * @return the initialized node store, or {@code null} when a required
 *         dependency (custom blob store or custom segment store) is not yet
 *         available, or when the on-disk store version is incompatible.
 * @throws IOException if the FileStore cannot be built or the repository id
 *         cannot be registered in a shared data store.
 */
private SegmentNodeStore register() throws IOException {
// Defer initialization until externally-provided stores become available.
if (cfg.getBlobStore() == null && (cfg.hasCustomBlobStore() || cfg.isSecondarySegmentStore())) {
cfg.getLogger().info("BlobStore enabled. SegmentNodeStore will be initialized once the blob store becomes available");
return null;
}
if (cfg.getSegmentNodeStorePersistence() == null && cfg.hasCustomSegmentStore()) {
cfg.getLogger().info("customSegmentStore enabled. SegmentNodeStore will be initialized once the custom segment store becomes available");
return null;
}
// Listen for GCMonitor services (primary instances only)
GCMonitor gcMonitor = GCMonitor.EMPTY;
if (cfg.isPrimarySegmentStore()) {
GCMonitorTracker tracker = new GCMonitorTracker();
tracker.start(cfg.getWhiteboard());
registerCloseable(tracker);
gcMonitor = tracker;
}
// Create the gc options
if (cfg.getRetainedGenerations() != cfg.getDefaultRetainedGenerations()) {
// NOTE(review): the message always prints RETAINED_GENERATIONS_DEFAULT,
// not cfg.getDefaultRetainedGenerations() — confirm the two always agree.
cfg.getLogger().warn(
"The number of retained generations defaults to {} and can't be " +
"changed. This configuration option is considered deprecated " +
"and will be removed in the future.",
RETAINED_GENERATIONS_DEFAULT
);
}
SegmentGCOptions gcOptions = new SegmentGCOptions(cfg.getPauseCompaction(), cfg.getRetryCount(), cfg.getForceCompactionTimeout())
.setGcSizeDeltaEstimation(cfg.getSizeDeltaEstimation())
.setMemoryThreshold(cfg.getMemoryThreshold())
.setEstimationDisabled(cfg.getDisableEstimation())
.setGCLogInterval(cfg.getGCProcessLog());
// Standby instances keep only a single generation.
if (cfg.isStandbyInstance()) {
gcOptions.setRetainedGenerations(1);
}
// Build the FileStore
FileStoreBuilder builder = fileStoreBuilder(cfg.getSegmentDirectory())
.withSegmentCacheSize(cfg.getSegmentCacheSize())
.withStringCacheSize(cfg.getStringCacheSize())
.withTemplateCacheSize(cfg.getTemplateCacheSize())
.withStringDeduplicationCacheSize(cfg.getStringDeduplicationCacheSize())
.withTemplateDeduplicationCacheSize(cfg.getTemplateDeduplicationCacheSize())
.withNodeDeduplicationCacheSize(cfg.getNodeDeduplicationCacheSize())
.withMaxFileSize(cfg.getMaxFileSize())
.withMemoryMapping(cfg.getMemoryMapping())
.withGCMonitor(gcMonitor)
.withIOMonitor(new MetricsIOMonitor(cfg.getStatisticsProvider()))
.withStatisticsProvider(cfg.getStatisticsProvider())
.withGCOptions(gcOptions);
if (cfg.hasCustomBlobStore() && cfg.getBlobStore() != null) {
cfg.getLogger().info("Initializing SegmentNodeStore with BlobStore [{}]", cfg.getBlobStore());
builder.withBlobStore(cfg.getBlobStore());
}
if (cfg.hasCustomSegmentStore() && cfg.getSegmentNodeStorePersistence() != null) {
cfg.getLogger().info("Initializing SegmentNodeStore with custom persistence [{}]", cfg.getSegmentNodeStorePersistence());
builder.withCustomPersistence(cfg.getSegmentNodeStorePersistence());
}
// Standby instances suppress SegmentNotFoundException reporting (IGNORE_SNFE).
if (cfg.isStandbyInstance()) {
builder.withSnfeListener(IGNORE_SNFE);
}
FileStore store;
try {
store = builder.build();
} catch (InvalidFileStoreVersionException e) {
// Incompatible on-disk format: log and bail out without registering anything further.
cfg.getLogger().error("The storage format is not compatible with this version of Oak Segment Tar", e);
return null;
}
registerCloseable(store);
// Listen for Executor services on the whiteboard
WhiteboardExecutor executor = new WhiteboardExecutor();
executor.start(cfg.getWhiteboard());
registerCloseable(executor);
// Expose stats about the segment cache
CacheStatsMBean segmentCacheStats = store.getSegmentCacheStats();
registerCloseable(registerMBean(
CacheStatsMBean.class,
segmentCacheStats,
CacheStats.TYPE,
segmentCacheStats.getName()
));
// Expose stats about the string and template caches
CacheStatsMBean stringCacheStats = store.getStringCacheStats();
registerCloseable(registerMBean(
CacheStatsMBean.class,
stringCacheStats,
CacheStats.TYPE,
stringCacheStats.getName()
));
CacheStatsMBean templateCacheStats = store.getTemplateCacheStats();
registerCloseable(registerMBean(
CacheStatsMBean.class,
templateCacheStats,
CacheStats.TYPE,
templateCacheStats.getName()
));
// Deduplication cache stats are optional; register each MBean only when present.
WriterCacheManager cacheManager = builder.getCacheManager();
CacheStatsMBean stringDeduplicationCacheStats = cacheManager.getStringCacheStats();
if (stringDeduplicationCacheStats != null) {
registerCloseable(registerMBean(
CacheStatsMBean.class,
stringDeduplicationCacheStats,
CacheStats.TYPE,
stringDeduplicationCacheStats.getName()
));
}
CacheStatsMBean templateDeduplicationCacheStats = cacheManager.getTemplateCacheStats();
if (templateDeduplicationCacheStats != null) {
registerCloseable(registerMBean(
CacheStatsMBean.class,
templateDeduplicationCacheStats,
CacheStats.TYPE,
templateDeduplicationCacheStats.getName()
));
}
CacheStatsMBean nodeDeduplicationCacheStats = cacheManager.getNodeCacheStats();
if (nodeDeduplicationCacheStats != null) {
registerCloseable(registerMBean(
CacheStatsMBean.class,
nodeDeduplicationCacheStats,
CacheStats.TYPE,
nodeDeduplicationCacheStats.getName()
));
}
// Expose an MBean for managing and monitoring garbage collection
FileStoreGCMonitor monitor = new FileStoreGCMonitor(Clock.SIMPLE);
registerCloseable(register(
GCMonitor.class,
monitor
));
if (!cfg.isStandbyInstance()) {
registerCloseable(registerMBean(
SegmentRevisionGC.class,
new SegmentRevisionGCMBean(store, gcOptions, monitor),
SegmentRevisionGC.TYPE,
"Segment node store revision garbage collection"
));
}
registerCloseable(registerMBean(
RevisionGCMBean.class,
new RevisionGC(store.getGCRunner(), store::cancelGC, monitor::getStatus, executor),
RevisionGCMBean.TYPE,
"Revision garbage collection"
));
// Expose statistics about the FileStore
registerCloseable(registerMBean(
FileStoreStatsMBean.class,
store.getStats(),
FileStoreStatsMBean.TYPE,
"FileStore statistics"
));
// Register the segment node store itself; only a primary store dispatches observation events.
SegmentNodeStore.SegmentNodeStoreBuilder segmentNodeStoreBuilder = SegmentNodeStoreBuilders.builder(store).withStatisticsProvider(cfg.getStatisticsProvider());
if (cfg.isStandbyInstance() || !cfg.isPrimarySegmentStore()) {
segmentNodeStoreBuilder.dispatchChanges(false);
}
SegmentNodeStore segmentNodeStore = segmentNodeStoreBuilder.build();
if (cfg.isPrimarySegmentStore()) {
ObserverTracker observerTracker = new ObserverTracker(segmentNodeStore);
observerTracker.start(cfg.getBundleContext());
registerCloseable(observerTracker);
}
if (cfg.isPrimarySegmentStore()) {
registerCloseable(registerMBean(
CheckpointMBean.class,
new SegmentCheckpointMBean(segmentNodeStore),
CheckpointMBean.TYPE,
"Segment node store checkpoint management"
));
}
if (cfg.registerDescriptors()) {
// ensure a clusterId is initialized
// and expose it as 'oak.clusterid' repository descriptor
GenericDescriptors clusterIdDesc = new GenericDescriptors();
clusterIdDesc.put(
ClusterRepositoryInfo.OAK_CLUSTERID_REPOSITORY_DESCRIPTOR_KEY,
new SimpleValueFactory().createValue(getOrCreateId(segmentNodeStore)),
true,
false
);
registerCloseable(register(Descriptors.class, clusterIdDesc));
// Register "discovery lite" descriptors
registerCloseable(register(Descriptors.class, new SegmentDiscoveryLiteDescriptors(segmentNodeStore)));
}
// If a shared data store, register the repository id in the data store
if (cfg.isPrimarySegmentStore() && isShared(cfg.getBlobStore())) {
SharedDataStore sharedDataStore = (SharedDataStore) cfg.getBlobStore();
try {
sharedDataStore.addMetadataRecord(new ByteArrayInputStream(new byte[0]), SharedDataStoreUtils.SharedStoreRecordType.REPOSITORY.getNameFromId(getOrCreateId(segmentNodeStore)));
} catch (Exception e) {
throw new IOException("Could not register a unique repositoryId", e);
}
if (cfg.getBlobStore() instanceof BlobTrackingStore) {
BlobTrackingStore trackingStore = (BlobTrackingStore) cfg.getBlobStore();
// Replace any previously installed tracker, closing the old one first.
if (trackingStore.getTracker() != null) {
trackingStore.getTracker().close();
}
trackingStore.addTracker(new BlobIdTracker(cfg.getRepositoryHome(), getOrCreateId(segmentNodeStore), cfg.getBlobSnapshotInterval(), sharedDataStore));
}
}
// Blob (binary) garbage collection is only wired up on the primary store
// and only when the blob store supports collection.
if (cfg.isPrimarySegmentStore() && cfg.getBlobStore() instanceof GarbageCollectableBlobStore) {
BlobGarbageCollector gc = new MarkSweepGarbageCollector(
new SegmentBlobReferenceRetriever(store),
(GarbageCollectableBlobStore) cfg.getBlobStore(),
executor,
TimeUnit.SECONDS.toMillis(cfg.getBlobGcMaxAge()),
getOrCreateId(segmentNodeStore),
cfg.getWhiteboard(),
cfg.getStatisticsProvider()
);
registerCloseable(registerMBean(
BlobGCMBean.class,
new BlobGC(gc, executor),
BlobGCMBean.TYPE,
"Segment node store blob garbage collection"
));
}
// Expose an MBean for backup/restore operations
registerCloseable(registerMBean(
FileStoreBackupRestoreMBean.class,
new FileStoreBackupRestoreImpl(
segmentNodeStore,
store.getRevisions(),
store.getReader(),
cfg.getBackupDirectory(),
executor
),
FileStoreBackupRestoreMBean.TYPE,
"Segment node store backup/restore"
));
// Expose statistics about the SegmentNodeStore
registerCloseable(registerMBean(
SegmentNodeStoreStatsMBean.class,
segmentNodeStore.getStats(),
SegmentNodeStoreStatsMBean.TYPE,
"SegmentNodeStore statistics"
));
if (cfg.isPrimarySegmentStore()) {
cfg.getLogger().info("Primary SegmentNodeStore initialized");
} else {
cfg.getLogger().info("Secondary SegmentNodeStore initialized, role={}", cfg.getRole());
}
// Register a factory service to expose the FileStore
registerCloseable(register(
SegmentStoreProvider.class,
new DefaultSegmentStoreProvider(store)
));
// Standby instances stop here: they never expose the NodeStore service.
if (cfg.isStandbyInstance()) {
return segmentNodeStore;
}
if (cfg.isPrimarySegmentStore()) {
Map<String, Object> props = new HashMap<>();
props.put(Constants.SERVICE_PID, SegmentNodeStore.class.getName());
props.put("oak.nodestore.description", new String[] {"nodeStoreType=segment"});
registerCloseable(register(NodeStore.class, segmentNodeStore, props));
}
return segmentNodeStore;
}
// Registers an MBean with no extra JMX attributes.
private <T> Registration registerMBean(Class<T> clazz, T bean, String type, String name) {
return registerMBean(clazz, bean, type, name, new HashMap<>());
}
// Registers an MBean on the whiteboard, tagging its name and attributes with
// the configured role (when a role is set).
private <T> Registration registerMBean(Class<T> clazz, T bean, String type, String name, Map<String, String> attributes) {
return WhiteboardUtils.registerMBean(cfg.getWhiteboard(), clazz, bean, type, maybeAppendRole(name), maybePutRoleAttribute(attributes));
}
// Registers a plain whiteboard service with no extra properties.
private <T> Registration register(Class<T> clazz, T service) {
return register(clazz, service, new HashMap<>());
}
// Registers a whiteboard service, adding a "role" property when a role is configured.
private <T> Registration register(Class<T> clazz, T service, Map<String, Object> properties) {
return cfg.getWhiteboard().register(clazz, service, maybePutRoleProperty(properties));
}
// Appends " - <role>" to the name when a role is configured; otherwise returns it unchanged.
private String maybeAppendRole(String name) {
if (cfg.getRole() != null) {
return name + " - " + cfg.getRole();
}
return name;
}
// Sanitizes the role by replacing ':' with '-' (presumably to keep JMX
// ObjectNames valid — confirm). Callers must ensure getRole() is non-null.
private String jmxRole() {
return cfg.getRole().replaceAll(":", "-");
}
// Adds the sanitized role as a "role" JMX attribute when a role is configured.
private Map<String, String> maybePutRoleAttribute(Map<String, String> attributes) {
if (cfg.getRole() != null) {
attributes.put("role", jmxRole());
}
return attributes;
}
// Adds the raw (unsanitized) role as a "role" service property when configured.
private Map<String, Object> maybePutRoleProperty(Map<String, Object> attributes) {
if (cfg.getRole() != null) {
attributes.put("role", cfg.getRole());
}
return attributes;
}
// All registerCloseable overloads funnel teardown into the configuration's Closer.
private void registerCloseable(Closeable c) {
cfg.getCloser().register(c);
}
private void registerCloseable(final AbstractServiceTracker<?> t) {
registerCloseable((Closeable) t::stop);
}
private void registerCloseable(final Registration r) {
registerCloseable((Closeable) r::unregister);
}
private void registerCloseable(final ObserverTracker t) {
registerCloseable((Closeable) t::stop);
}
}
| |
/*
* Copyright 2015 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kie.remote.client.jaxb;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlSchemaType;
import org.kie.api.command.Command;
import org.kie.remote.jaxb.gen.*;
import org.kie.remote.jaxb.gen.AbortWorkItemCommand;
import org.kie.remote.jaxb.gen.ActivateTaskCommand;
import org.kie.remote.jaxb.gen.AddCommentCommand;
import org.kie.remote.jaxb.gen.AddTaskCommand;
import org.kie.remote.jaxb.gen.AuditCommand;
import org.kie.remote.jaxb.gen.CancelDeadlineCommand;
import org.kie.remote.jaxb.gen.ClaimNextAvailableTaskCommand;
import org.kie.remote.jaxb.gen.ClaimTaskCommand;
import org.kie.remote.jaxb.gen.ClearHistoryLogsCommand;
import org.kie.remote.jaxb.gen.CompleteTaskCommand;
import org.kie.remote.jaxb.gen.CompleteWorkItemCommand;
import org.kie.remote.jaxb.gen.DelegateTaskCommand;
import org.kie.remote.jaxb.gen.DeleteCommand;
import org.kie.remote.jaxb.gen.DeleteCommentCommand;
import org.kie.remote.jaxb.gen.ExecuteTaskRulesCommand;
import org.kie.remote.jaxb.gen.ExitTaskCommand;
import org.kie.remote.jaxb.gen.FailTaskCommand;
import org.kie.remote.jaxb.gen.FindActiveProcessInstancesCommand;
import org.kie.remote.jaxb.gen.FindNodeInstancesCommand;
import org.kie.remote.jaxb.gen.FindProcessInstanceCommand;
import org.kie.remote.jaxb.gen.FindProcessInstancesCommand;
import org.kie.remote.jaxb.gen.FindSubProcessInstancesCommand;
import org.kie.remote.jaxb.gen.FindVariableInstancesByNameCommand;
import org.kie.remote.jaxb.gen.FindVariableInstancesCommand;
import org.kie.remote.jaxb.gen.FireAllRulesCommand;
import org.kie.remote.jaxb.gen.ForwardTaskCommand;
import org.kie.remote.jaxb.gen.GetAllCommentsCommand;
import org.kie.remote.jaxb.gen.GetAttachmentCommand;
import org.kie.remote.jaxb.gen.GetCommentCommand;
import org.kie.remote.jaxb.gen.GetContentByIdCommand;
import org.kie.remote.jaxb.gen.GetContentByIdForUserCommand;
import org.kie.remote.jaxb.gen.GetFactCountCommand;
import org.kie.remote.jaxb.gen.GetGlobalCommand;
import org.kie.remote.jaxb.gen.GetIdCommand;
import org.kie.remote.jaxb.gen.GetProcessIdsCommand;
import org.kie.remote.jaxb.gen.GetProcessInstanceByCorrelationKeyCommand;
import org.kie.remote.jaxb.gen.GetProcessInstanceCommand;
import org.kie.remote.jaxb.gen.GetProcessInstancesCommand;
import org.kie.remote.jaxb.gen.GetTaskAssignedAsBusinessAdminCommand;
import org.kie.remote.jaxb.gen.GetTaskAssignedAsPotentialOwnerCommand;
import org.kie.remote.jaxb.gen.GetTaskByWorkItemIdCommand;
import org.kie.remote.jaxb.gen.GetTaskCommand;
import org.kie.remote.jaxb.gen.GetTaskContentCommand;
import org.kie.remote.jaxb.gen.GetTasksByProcessInstanceIdCommand;
import org.kie.remote.jaxb.gen.GetTasksByStatusByProcessInstanceIdCommand;
import org.kie.remote.jaxb.gen.GetTasksByVariousFieldsCommand;
import org.kie.remote.jaxb.gen.GetTasksOwnedCommand;
import org.kie.remote.jaxb.gen.GetVariableCommand;
import org.kie.remote.jaxb.gen.GetWorkItemCommand;
import org.kie.remote.jaxb.gen.InsertObjectCommand;
import org.kie.remote.jaxb.gen.NominateTaskCommand;
import org.kie.remote.jaxb.gen.ProcessSubTaskCommand;
import org.kie.remote.jaxb.gen.ReleaseTaskCommand;
import org.kie.remote.jaxb.gen.ResumeTaskCommand;
import org.kie.remote.jaxb.gen.SetGlobalCommand;
import org.kie.remote.jaxb.gen.SetProcessInstanceVariablesCommand;
import org.kie.remote.jaxb.gen.SetTaskPropertyCommand;
import org.kie.remote.jaxb.gen.SignalEventCommand;
import org.kie.remote.jaxb.gen.SkipTaskCommand;
import org.kie.remote.jaxb.gen.StartCorrelatedProcessCommand;
import org.kie.remote.jaxb.gen.StartProcessCommand;
import org.kie.remote.jaxb.gen.StartTaskCommand;
import org.kie.remote.jaxb.gen.StopTaskCommand;
import org.kie.remote.jaxb.gen.SuspendTaskCommand;
import org.kie.remote.jaxb.gen.TaskCommand;
import org.kie.remote.jaxb.gen.TaskQueryWhereCommand;
import org.kie.remote.jaxb.gen.UpdateCommand;
import org.kie.services.shared.ServicesVersion;
@XmlRootElement(name = "command-request")
@XmlAccessorType(XmlAccessType.FIELD)
@SuppressWarnings("rawtypes")
public class JaxbCommandsRequest {
@XmlElement(name = "deployment-id")
@XmlSchemaType(name = "string")
private String deploymentId;
@XmlElement(name = "process-instance-id")
@XmlSchemaType(name = "long")
private Long processInstanceId;
@XmlElement(name = "ver")
@XmlSchemaType(name = "string")
private String version = ServicesVersion.VERSION;
@XmlElement
@XmlSchemaType(name = "string")
private String user;
@XmlElement
@XmlSchemaType(name = "string")
private String correlationKeyString;
// This list should match the list in AcceptedCommands
@XmlElements({
@XmlElement(name = "complete-work-item", type = CompleteWorkItemCommand.class),
@XmlElement(name = "abort-work-item", type = AbortWorkItemCommand.class),
@XmlElement(name = "get-workitem", type = GetWorkItemCommand.class),
@XmlElement(name = "abort-process-instance", type = AbortProcessInstanceCommand.class),
@XmlElement(name = "get-process-ids", type = GetProcessIdsCommand.class),
@XmlElement(name = "get-process-instance-by-correlation-key", type = GetProcessInstanceByCorrelationKeyCommand.class),
@XmlElement(name = "get-process-instance", type = GetProcessInstanceCommand.class),
@XmlElement(name = "get-process-instances", type = GetProcessInstancesCommand.class),
@XmlElement(name = "set-process-instance-vars", type = SetProcessInstanceVariablesCommand.class),
@XmlElement(name = "signal-event", type = SignalEventCommand.class),
@XmlElement(name = "start-correlated-process", type = StartCorrelatedProcessCommand.class),
@XmlElement(name = "start-process", type = StartProcessCommand.class),
@XmlElement(name = "get-variable", type = GetVariableCommand.class),
@XmlElement(name = "get-fact-count", type = GetFactCountCommand.class),
@XmlElement(name = "get-global", type = GetGlobalCommand.class),
@XmlElement(name = "get-id", type = GetIdCommand.class),
@XmlElement(name = "set-global", type = SetGlobalCommand.class),
@XmlElement(name = "delete", type = DeleteCommand.class),
@XmlElement(name = "fire-all-rules", type = FireAllRulesCommand.class),
@XmlElement(name = "insert-object", type = InsertObjectCommand.class),
@XmlElement(name = "update", type = UpdateCommand.class),
// task
@XmlElement(name = "activate-task", type = ActivateTaskCommand.class),
@XmlElement(name = "add-task", type = AddTaskCommand.class),
@XmlElement(name = "claim-next-available-task", type = ClaimNextAvailableTaskCommand.class),
@XmlElement(name = "claim-task", type = ClaimTaskCommand.class),
@XmlElement(name = "complete-task", type = CompleteTaskCommand.class),
@XmlElement(name = "delegate-task", type = DelegateTaskCommand.class),
@XmlElement(name = "exit-task", type = ExitTaskCommand.class),
@XmlElement(name = "fail-task", type = FailTaskCommand.class),
@XmlElement(name = "forward-task", type = ForwardTaskCommand.class),
@XmlElement(name = "get-attachment", type = GetAttachmentCommand.class),
@XmlElement(name = "get-content", type = GetContentByIdCommand.class),
@XmlElement(name = "get-task-content", type = GetTaskContentCommand.class),
@XmlElement(name = "delete-comment", type = DeleteCommentCommand.class),
@XmlElement(name = "add-comment", type = AddCommentCommand.class),
@XmlElement(name = "get-all-comments", type = GetAllCommentsCommand.class),
@XmlElement(name = "get-comment", type = GetCommentCommand.class),
@XmlElement(name = "set-task-property", type = SetTaskPropertyCommand.class),
@XmlElement(name = "add-content-from-user", type = AddContentFromUserCommand.class),
@XmlElement(name = "get-content-by-id", type = GetContentByIdForUserCommand.class),
@XmlElement(name = "get-content-map-for-user", type = GetContentMapForUserCommand.class),
@XmlElement(name = "get-task-as-potential-owner", type = GetTaskAssignedAsPotentialOwnerCommand.class),
@XmlElement(name = "get-task-by-workitemid", type = GetTaskByWorkItemIdCommand.class),
@XmlElement(name = "get-task", type = GetTaskCommand.class),
@XmlElement(name = "get-tasks-by-processinstanceid", type = GetTasksByProcessInstanceIdCommand.class),
@XmlElement(name = "get-tasks-by-status-by-processinstanceid", type = GetTasksByStatusByProcessInstanceIdCommand.class),
@XmlElement(name = "get-tasks-by-various", type = GetTasksByVariousFieldsCommand.class),
@XmlElement(name = "get-tasks-owned", type = GetTasksOwnedCommand.class),
@XmlElement(name = "task-query-where", type = TaskQueryWhereCommand.class),
@XmlElement(name = "nominate-task", type = NominateTaskCommand.class),
@XmlElement(name = "release-task", type = ReleaseTaskCommand.class),
@XmlElement(name = "resume-task", type = ResumeTaskCommand.class),
@XmlElement(name = "skip-task", type = SkipTaskCommand.class),
@XmlElement(name = "start-task", type = StartTaskCommand.class),
@XmlElement(name = "stop-task", type = StopTaskCommand.class),
@XmlElement(name = "suspend-task", type = SuspendTaskCommand.class),
@XmlElement(name = "process-sub-tasks", type = ProcessSubTaskCommand.class),
@XmlElement(name = "execute-task-rules", type = ExecuteTaskRulesCommand.class),
@XmlElement(name = "cancel-deadline", type = CancelDeadlineCommand.class),
// audit
@XmlElement(name = "clear-history-logs", type = ClearHistoryLogsCommand.class),
@XmlElement(name = "find-active-process-instances", type = FindActiveProcessInstancesCommand.class),
@XmlElement(name = "find-node-instances", type = FindNodeInstancesCommand.class),
@XmlElement(name = "find-process-instance", type = FindProcessInstanceCommand.class),
@XmlElement(name = "find-process-instances", type = FindProcessInstancesCommand.class),
@XmlElement(name = "find-subprocess-instances", type = FindSubProcessInstancesCommand.class),
@XmlElement(name = "find-variable-instances", type = FindVariableInstancesCommand.class),
@XmlElement(name = "find-variable-instances-by-name", type = FindVariableInstancesByNameCommand.class)
})
protected List<Command> commands;
public JaxbCommandsRequest() {
// Default constructor
}
public JaxbCommandsRequest(Command command) {
checkThatCommandIsAccepted(command);
this.commands = new ArrayList<Command>();
this.commands.add(command);
checkThatCommandsContainDeploymentIdIfNeeded(this.commands);
}
public JaxbCommandsRequest(List<Command> commands) {
checkThatCommandsAreAccepted(commands);
this.commands = new ArrayList<Command>();
this.commands.addAll(commands);
checkThatCommandsContainDeploymentIdIfNeeded(this.commands);
}
private void checkThatCommandsContainDeploymentIdIfNeeded(List<Command> checkCommands) {
for( Object command : checkCommands ) {
if( ! (command instanceof TaskCommand) && ! (command instanceof AuditCommand) ) {
throw new UnsupportedOperationException( "A " + command.getClass().getSimpleName() + " requires that the deployment id has been set!" );
}
}
}
public JaxbCommandsRequest(String deploymentId, Command command) {
checkThatCommandIsAccepted(command);
this.deploymentId = deploymentId;
this.commands = new ArrayList<Command>();
this.commands.add(command);
}
public JaxbCommandsRequest(String deploymentId, List<Command> commands) {
checkThatCommandsAreAccepted(commands);
this.deploymentId = deploymentId;
this.commands = new ArrayList<Command>();
this.commands.addAll(commands);
}
private void checkThatCommandsAreAccepted(Collection<Command> cmds) {
for( Object cmd : cmds ) {
checkThatCommandIsAccepted(cmd);
}
}
private void checkThatCommandIsAccepted(Object cmd) {
if( ! AcceptedClientCommands.isAcceptedCommandClass(cmd.getClass()) ) {
throw new UnsupportedOperationException(cmd.getClass().getName() + " is not an accepted command." );
}
}
public String getDeploymentId() {
return deploymentId;
}
public void setDeploymentId(String deploymentId) {
this.deploymentId = deploymentId;
}
public Long getProcessInstanceId() {
return processInstanceId;
}
public void setProcessInstanceId(Long processInstanceId) {
this.processInstanceId = processInstanceId;
}
public String getVersion() {
return version;
}
public void setVersion(String version) {
this.version = version;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getCorrelationKeyString() {
return correlationKeyString;
}
public void setCorrelationKeyString(String correlationKeyString) {
this.correlationKeyString = correlationKeyString;
}
public void setCommands(List<Command> commands) {
checkThatCommandsAreAccepted(commands);
this.commands = commands;
}
public List<Command> getCommands() {
if( this.commands == null ) {
this.commands = new ArrayList<Command>();
}
return this.commands;
}
public String toString() {
StringBuffer result = new StringBuffer("JaxbCommandsRequest " + deploymentId + "\n");
if (commands != null) {
for (Object command: commands) {
result.append(command.getClass().getSimpleName() + "\n");
}
}
return result.toString();
}
}
| |
/*
* Copyright (c) 2003, 2009, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt.shell;
import java.awt.Image;
import java.awt.Toolkit;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.*;
import javax.swing.SwingConstants;
// NOTE: This class supersedes Win32ShellFolder, which was removed from
// distribution after version 1.4.2.
/**
* Win32 Shell Folders
* <P>
* <BR>
* There are two fundamental types of shell folders : file system folders
* and non-file system folders. File system folders are relatively easy
* to deal with. Non-file system folders are items such as My Computer,
* Network Neighborhood, and the desktop. Some of these non-file system
* folders have special values and properties.
* <P>
* <BR>
* Win32 keeps two basic data structures for shell folders. The first
* of these is called an ITEMIDLIST. Usually a pointer, called an
* LPITEMIDLIST, or more frequently just "PIDL". This structure holds
* a series of identifiers and can be either relative to the desktop
* (an absolute PIDL), or relative to the shell folder that contains them.
* Some Win32 functions can take absolute or relative PIDL values, and
* others can only accept relative values.
* <BR>
* The second data structure is an IShellFolder COM interface. Using
* this interface, one can enumerate the relative PIDLs in a shell
* folder, get attributes, etc.
* <BR>
* All Win32ShellFolder2 objects which are folder types (even non-file
* system folders) contain an IShellFolder object. Files are named in
* directories via relative PIDLs.
*
* @author Michael Martak
* @author Leif Samuelsson
* @author Kenneth Russell
* @since 1.4 */
final class Win32ShellFolder2 extends ShellFolder {
    // Initializes JNI field/method IDs used by the native code below; must
    // run before any native method is called, hence the static initializer.
    private static native void initIDs();
    static {
        initIDs();
    }

    // Win32 Shell Folder Constants (CSIDL values identifying special folders)
    public static final int DESKTOP = 0x0000;
    public static final int INTERNET = 0x0001;
    public static final int PROGRAMS = 0x0002;
    public static final int CONTROLS = 0x0003;
    public static final int PRINTERS = 0x0004;
    public static final int PERSONAL = 0x0005;
    public static final int FAVORITES = 0x0006;
    public static final int STARTUP = 0x0007;
    public static final int RECENT = 0x0008;
    public static final int SENDTO = 0x0009;
    public static final int BITBUCKET = 0x000a;
    public static final int STARTMENU = 0x000b;
    public static final int DESKTOPDIRECTORY = 0x0010;
    public static final int DRIVES = 0x0011;
    public static final int NETWORK = 0x0012;
    public static final int NETHOOD = 0x0013;
    public static final int FONTS = 0x0014;
    public static final int TEMPLATES = 0x0015;
    public static final int COMMON_STARTMENU = 0x0016;
    public static final int COMMON_PROGRAMS = 0X0017;
    public static final int COMMON_STARTUP = 0x0018;
    public static final int COMMON_DESKTOPDIRECTORY = 0x0019;
    public static final int APPDATA = 0x001a;
    public static final int PRINTHOOD = 0x001b;
    public static final int ALTSTARTUP = 0x001d;
    public static final int COMMON_ALTSTARTUP = 0x001e;
    public static final int COMMON_FAVORITES = 0x001f;
    public static final int INTERNET_CACHE = 0x0020;
    public static final int COOKIES = 0x0021;
    public static final int HISTORY = 0x0022;

    // Win32 shell folder attributes (SFGAO_* bit flags)
    public static final int ATTRIB_CANCOPY = 0x00000001;
    public static final int ATTRIB_CANMOVE = 0x00000002;
    public static final int ATTRIB_CANLINK = 0x00000004;
    public static final int ATTRIB_CANRENAME = 0x00000010;
    public static final int ATTRIB_CANDELETE = 0x00000020;
    public static final int ATTRIB_HASPROPSHEET = 0x00000040;
    public static final int ATTRIB_DROPTARGET = 0x00000100;
    public static final int ATTRIB_LINK = 0x00010000;
    public static final int ATTRIB_SHARE = 0x00020000;
    public static final int ATTRIB_READONLY = 0x00040000;
    public static final int ATTRIB_GHOSTED = 0x00080000;
    // NOTE(review): GHOSTED and HIDDEN share the same bit value here; native
    // SFGAO_GHOSTED is a different bit (0x8000) — confirm the aliasing is intended.
    public static final int ATTRIB_HIDDEN = 0x00080000;
    public static final int ATTRIB_FILESYSANCESTOR = 0x10000000;
    public static final int ATTRIB_FOLDER = 0x20000000;
    public static final int ATTRIB_FILESYSTEM = 0x40000000;
    public static final int ATTRIB_HASSUBFOLDER = 0x80000000;
    public static final int ATTRIB_VALIDATE = 0x01000000;
    public static final int ATTRIB_REMOVABLE = 0x02000000;
    public static final int ATTRIB_COMPRESSED = 0x04000000;
    public static final int ATTRIB_BROWSABLE = 0x08000000;
    public static final int ATTRIB_NONENUMERATED = 0x00100000;
    public static final int ATTRIB_NEWCONTENT = 0x00200000;

    // IShellFolder::GetDisplayNameOf constants (SHGDN_* flags)
    public static final int SHGDN_NORMAL = 0;
    public static final int SHGDN_INFOLDER = 1;
    public static final int SHGDN_INCLUDE_NONFILESYS= 0x2000;
    public static final int SHGDN_FORADDRESSBAR = 0x4000;
    public static final int SHGDN_FORPARSING = 0x8000;
// Values for system call LoadIcon()
public enum SystemIcon {
IDI_APPLICATION(32512),
IDI_HAND(32513),
IDI_ERROR(32513),
IDI_QUESTION(32514),
IDI_EXCLAMATION(32515),
IDI_WARNING(32515),
IDI_ASTERISK(32516),
IDI_INFORMATION(32516),
IDI_WINLOGO(32517);
private final int iconID;
SystemIcon(int iconID) {
this.iconID = iconID;
}
public int getIconID() {
return iconID;
}
}
    /**
     * Holds the native COM/PIDL resources for one shell folder so they can be
     * released by sun.java2d.Disposer after the owning Win32ShellFolder2 is
     * garbage collected (see the Disposer.addRecord calls in the constructors).
     */
    static class FolderDisposer implements sun.java2d.DisposerRecord {
        /*
         * This is cached as a concession to getFolderType(), which needs
         * an absolute PIDL.
         */
        long absolutePIDL;
        /*
         * We keep track of shell folders through the IShellFolder
         * interface of their parents plus their relative PIDL.
         */
        long pIShellFolder;
        long relativePIDL;
        // Guards against releasing the native resources twice.
        boolean disposed;
        public void dispose() {
            if (disposed) return;
            // Native release must happen on the COM thread; invoke() marshals
            // the Callable there.
            invoke(new Callable<Void>() {
                public Void call() {
                    if (relativePIDL != 0) {
                        releasePIDL(relativePIDL);
                    }
                    if (absolutePIDL != 0) {
                        releasePIDL(absolutePIDL);
                    }
                    if (pIShellFolder != 0) {
                        releaseIShellFolder(pIShellFolder);
                    }
                    return null;
                }
            });
            // NOTE(review): 'disposed' is set only after the release completes
            // and is not synchronized — assumes dispose() is not called
            // concurrently; confirm Disposer guarantees that.
            disposed = true;
        }
    }
    // Per-instance record of native resources; registered with the Disposer.
    FolderDisposer disposer = new FolderDisposer();

    // Stores the folder's own IShellFolder pointer in the disposer record.
    private void setIShellFolder(long pIShellFolder) {
        disposer.pIShellFolder = pIShellFolder;
    }

    // Stores the folder's PIDL (relative to its parent) in the disposer record.
    private void setRelativePIDL(long relativePIDL) {
        disposer.relativePIDL = relativePIDL;
    }
    /*
     * The following are for caching various shell folder properties.
     */
    private long pIShellIcon = -1L;     // -1 = not yet queried (0 would mean "no IShellIcon")
    private String folderType = null;
    private String displayName = null;
    private Image smallIcon = null;
    private Image largeIcon = null;
    private Boolean isDir = null;       // tri-state: null = not yet determined

    /*
     * The following is to identify the My Documents folder as being special
     */
    private boolean isPersonal;

    /**
     * Returns the file-system path for a special-folder constant, or a
     * synthetic "ShellFolder: 0x.." placeholder for non-file-system folders
     * (e.g. My Computer) that have no path.
     */
    private static String composePathForCsidl(int csidl) throws IOException, InterruptedException {
        String path = getFileSystemPath(csidl);
        return path == null
                ? ("ShellFolder: 0x" + Integer.toHexString(csidl))
                : path;
    }
    /**
     * Create a system special shell folder, such as the
     * desktop or Network Neighborhood, identified by its CSIDL constant.
     *
     * @param csidl one of the CSIDL constants declared on this class
     * @throws IOException if the folder's path cannot be determined
     * @throws InterruptedException if the COM-thread call is interrupted
     */
    Win32ShellFolder2(final int csidl) throws IOException, InterruptedException {
        // Desktop is parent of DRIVES and NETWORK, not necessarily
        // other special shell folders.
        super(null, composePathForCsidl(csidl));
        invoke(new Callable<Void>() {
            public Void call() throws InterruptedException {
                if (csidl == DESKTOP) {
                    initDesktop();
                } else {
                    initSpecial(getDesktop().getIShellFolder(), csidl);
                    // At this point, the native method initSpecial() has set our relativePIDL
                    // relative to the Desktop, which may not be our immediate parent. We need
                    // to traverse this ID list and break it into a chain of shell folders from
                    // the top, with each one having an immediate parent and a relativePIDL
                    // relative to that parent.
                    long pIDL = disposer.relativePIDL;
                    parent = getDesktop();
                    while (pIDL != 0) {
                        // Get a child pidl relative to 'parent'
                        long childPIDL = copyFirstPIDLEntry(pIDL);
                        if (childPIDL != 0) {
                            // Get a handle to the rest of the ID list,
                            // i.e., parent's grandchildren and down
                            pIDL = getNextPIDLEntry(pIDL);
                            if (pIDL != 0) {
                                // Now we know that parent isn't immediate to 'this' because it
                                // has a continued ID list. Create a shell folder for this child
                                // pidl and make it the new 'parent'.
                                parent = new Win32ShellFolder2((Win32ShellFolder2) parent, childPIDL);
                            } else {
                                // No grandchildren means we have arrived at the parent of 'this',
                                // and childPIDL is directly relative to parent.
                                disposer.relativePIDL = childPIDL;
                            }
                        } else {
                            break;
                        }
                    }
                }
                return null;
            }
        }, InterruptedException.class);
        // Register the native resources for cleanup after GC.
        sun.java2d.Disposer.addRecord(this, disposer);
    }
    /**
     * Create a system shell folder from an already-resolved IShellFolder
     * pointer and relative PIDL; ownership of both native handles transfers
     * to this instance (released via the disposer record).
     */
    Win32ShellFolder2(Win32ShellFolder2 parent, long pIShellFolder, long relativePIDL, String path) {
        // A null path means a non-file-system folder; use a placeholder.
        super(parent, (path != null) ? path : "ShellFolder: ");
        this.disposer.pIShellFolder = pIShellFolder;
        this.disposer.relativePIDL = relativePIDL;
        sun.java2d.Disposer.addRecord(this, disposer);
    }
    /**
     * Creates a shell folder with a parent and relative PIDL; the path is
     * resolved from the parent's IShellFolder on the COM thread. Ownership of
     * relativePIDL transfers to this instance.
     *
     * @throws InterruptedException if the COM-thread call is interrupted
     */
    Win32ShellFolder2(final Win32ShellFolder2 parent, final long relativePIDL) throws InterruptedException {
        super(parent,
            invoke(new Callable<String>() {
                public String call() {
                    return getFileSystemPath(parent.getIShellFolder(), relativePIDL);
                }
            }, RuntimeException.class)
        );
        this.disposer.relativePIDL = relativePIDL;
        sun.java2d.Disposer.addRecord(this, disposer);
    }
    // Initializes the desktop shell folder
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private native void initDesktop();

    // Initializes a special, non-file system shell folder
    // from one of the above constants
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private native void initSpecial(long desktopIShellFolder, int csidl);

    /** Marks this folder as being the My Documents (Personal) folder */
    public void setIsPersonal() {
        // One-way flag; there is no way to clear it once set.
        isPersonal = true;
    }
    /**
     * This method is implemented to make sure that no instances
     * of <code>ShellFolder</code> are ever serialized. If <code>isFileSystem()</code> returns
     * <code>true</code>, then the object is representable with an instance of
     * <code>java.io.File</code> instead. If not, then the object depends
     * on native PIDL state and should not be serialized.
     *
     * @return a <code>java.io.File</code> replacement object. If the folder
     * is a not a normal directory, then returns the first non-removable
     * drive (normally "C:\").
     */
    protected Object writeReplace() throws java.io.ObjectStreamException {
        return invoke(new Callable<File>() {
            public File call() {
                if (isFileSystem()) {
                    return new File(getPath());
                } else {
                    // Non-file-system folder: substitute the first fixed drive.
                    Win32ShellFolder2 drives = Win32ShellFolderManager2.getDrives();
                    if (drives != null) {
                        File[] driveRoots = drives.listFiles();
                        if (driveRoots != null) {
                            for (int i = 0; i < driveRoots.length; i++) {
                                if (driveRoots[i] instanceof Win32ShellFolder2) {
                                    Win32ShellFolder2 sf = (Win32ShellFolder2) driveRoots[i];
                                    if (sf.isFileSystem() && !sf.hasAttribute(ATTRIB_REMOVABLE)) {
                                        return new File(sf.getPath());
                                    }
                                }
                            }
                        }
                    }
                    // Ouch, we have no hard drives. Return something "valid" anyway.
                    return new File("C:\\");
                }
            }
        });
    }
    /**
     * Finalizer to clean up any COM objects or PIDLs used by this object.
     * Delegates to the disposer record, which is idempotent.
     */
    protected void dispose() {
        disposer.dispose();
    }
    // Given a (possibly multi-level) relative PIDL (with respect to
    // the desktop, at least in all of the usage cases in this code),
    // return a pointer to the next entry. Does not mutate the PIDL in
    // any way. Returns 0 if the null terminator is reached.
    // Needs to be accessible to Win32ShellFolderManager2
    static native long getNextPIDLEntry(long pIDL);

    // Given a (possibly multi-level) relative PIDL (with respect to
    // the desktop, at least in all of the usage cases in this code),
    // copy the first entry into a newly-allocated PIDL. Returns 0 if
    // the PIDL is at the end of the list.
    // The returned PIDL must be freed with releasePIDL().
    // Needs to be accessible to Win32ShellFolderManager2
    static native long copyFirstPIDLEntry(long pIDL);

    // Given a parent's absolute PIDL and our relative PIDL, build an absolute PIDL
    // (newly allocated; must be freed with releasePIDL()).
    private static native long combinePIDLs(long ppIDL, long pIDL);

    // Release a PIDL object
    // Needs to be accessible to Win32ShellFolderManager2
    static native void releasePIDL(long pIDL);

    // Release an IShellFolder object
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private static native void releaseIShellFolder(long pIShellFolder);
/**
* Accessor for IShellFolder
*/
private long getIShellFolder() {
if (disposer.pIShellFolder == 0) {
try {
disposer.pIShellFolder = invoke(new Callable<Long>() {
public Long call() {
assert(isDirectory());
assert(parent != null);
long parentIShellFolder = getParentIShellFolder();
if (parentIShellFolder == 0) {
throw new InternalError("Parent IShellFolder was null for "
+ getAbsolutePath());
}
// We are a directory with a parent and a relative PIDL.
// We want to bind to the parent so we get an
// IShellFolder instance associated with us.
long pIShellFolder = bindToObject(parentIShellFolder,
disposer.relativePIDL);
if (pIShellFolder == 0) {
throw new InternalError("Unable to bind "
+ getAbsolutePath() + " to parent");
}
return pIShellFolder;
}
}, RuntimeException.class);
} catch (InterruptedException e) {
// Ignore error
}
}
return disposer.pIShellFolder;
}
/**
* Get the parent ShellFolder's IShellFolder interface
*/
public long getParentIShellFolder() {
Win32ShellFolder2 parent = (Win32ShellFolder2)getParentFile();
if (parent == null) {
// Parent should only be null if this is the desktop, whose
// relativePIDL is relative to its own IShellFolder.
return getIShellFolder();
}
return parent.getIShellFolder();
}
/**
* Accessor for relative PIDL
*/
public long getRelativePIDL() {
if (disposer.relativePIDL == 0) {
throw new InternalError("Should always have a relative PIDL");
}
return disposer.relativePIDL;
}
private long getAbsolutePIDL() {
if (parent == null) {
// This is the desktop
return getRelativePIDL();
} else {
if (disposer.absolutePIDL == 0) {
disposer.absolutePIDL = combinePIDLs(((Win32ShellFolder2)parent).getAbsolutePIDL(), getRelativePIDL());
}
return disposer.absolutePIDL;
}
}
    /**
     * Helper function to return the desktop shell folder singleton.
     */
    public Win32ShellFolder2 getDesktop() {
        return Win32ShellFolderManager2.getDesktop();
    }

    /**
     * Helper function to return the desktop IShellFolder interface pointer.
     */
    public long getDesktopIShellFolder() {
        return getDesktop().getIShellFolder();
    }
private static boolean pathsEqual(String path1, String path2) {
// Same effective implementation as Win32FileSystem
return path1.equalsIgnoreCase(path2);
}
    /**
     * Check to see if two ShellFolder objects are the same.
     * Non-Win32ShellFolder2 Files are compared by path; two shell folders are
     * compared by path (file-system folders) or by native PIDL comparison
     * (non-file-system folders), and must have equal parents either way.
     * NOTE(review): hashCode() is not visible in this chunk — confirm it is
     * consistent with this definition of equality.
     */
    public boolean equals(Object o) {
        if (o == null || !(o instanceof Win32ShellFolder2)) {
            // Short-circuit circuitous delegation path
            if (!(o instanceof File)) {
                return super.equals(o);
            }
            return pathsEqual(getPath(), ((File) o).getPath());
        }
        Win32ShellFolder2 rhs = (Win32ShellFolder2) o;
        // Exactly one side lacking a parent means they cannot be equal.
        if ((parent == null && rhs.parent != null) ||
            (parent != null && rhs.parent == null)) {
            return false;
        }
        if (isFileSystem() && rhs.isFileSystem()) {
            // Only folders with identical parents can be equal
            return (pathsEqual(getPath(), rhs.getPath()) &&
                    (parent == rhs.parent || parent.equals(rhs.parent)));
        }
        if (parent == rhs.parent || parent.equals(rhs.parent)) {
            try {
                return pidlsEqual(getParentIShellFolder(), disposer.relativePIDL, rhs.disposer.relativePIDL);
            } catch (InterruptedException e) {
                // NOTE(review): interrupt status is not restored here — the
                // caller cannot observe the interruption; consider
                // Thread.currentThread().interrupt().
                return false;
            }
        }
        return false;
    }
    /**
     * Compares two PIDLs, both relative to the same parent IShellFolder, for
     * equality via the native IShellFolder::CompareIDs call on the COM thread.
     */
    private static boolean pidlsEqual(final long pIShellFolder, final long pidl1, final long pidl2)
            throws InterruptedException {
        return invoke(new Callable<Boolean>() {
            public Boolean call() {
                // CompareIDs returns 0 when the two IDs are equal.
                return compareIDs(pIShellFolder, pidl1, pidl2) == 0;
            }
        }, RuntimeException.class);
    }

    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private static native int compareIDs(long pParentIShellFolder, long pidl1, long pidl2);
private volatile Boolean cachedIsFileSystem;
/**
* @return Whether this is a file system shell folder
*/
public boolean isFileSystem() {
if (cachedIsFileSystem == null) {
cachedIsFileSystem = hasAttribute(ATTRIB_FILESYSTEM);
}
return cachedIsFileSystem;
}
    /**
     * Return whether the given attribute flag is set for this object.
     * The query runs on the COM thread; if it fails (invoke returns null),
     * the attribute is reported as not set.
     */
    public boolean hasAttribute(final int attribute) {
        Boolean result = invoke(new Callable<Boolean>() {
            public Boolean call() {
                // Caching at this point doesn't seem to be cost efficient
                return (getAttributes0(getParentIShellFolder(),
                        getRelativePIDL(), attribute)
                        & attribute) != 0;
            }
        });
        return result != null && result;
    }

    /**
     * Returns the queried attributes specified in attrsMask.
     *
     * Could plausibly be used for attribute caching but have to be
     * very careful not to touch network drives and file system roots
     * with a full attrsMask
     * NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
     */
    private static native int getAttributes0(long pParentIShellFolder, long pIDL, int attrsMask);
    // Return the path to the underlying file system object.
    // Should be called from the COM thread.
    // For linked folders directly under the Network folder, resolves the link
    // target first and prefers its UNC ("\\...") path.
    private static String getFileSystemPath(final long parentIShellFolder, final long relativePIDL) {
        int linkedFolder = ATTRIB_LINK | ATTRIB_FOLDER;
        if (parentIShellFolder == Win32ShellFolderManager2.getNetwork().getIShellFolder() &&
            getAttributes0(parentIShellFolder, relativePIDL, linkedFolder) == linkedFolder) {
            // Resolve the link (without UI) and get its parsing path from the desktop.
            String s =
                getFileSystemPath(Win32ShellFolderManager2.getDesktop().getIShellFolder(),
                                  getLinkLocation(parentIShellFolder, relativePIDL, false));
            if (s != null && s.startsWith("\\\\")) {
                return s;
            }
        }
        // Default: the folder's parsing name (usually its file-system path).
        return getDisplayNameOf(parentIShellFolder, relativePIDL, SHGDN_FORPARSING);
    }
    // Returns the file-system path for a CSIDL special-folder constant, or
    // null if the folder has no file-system path. Runs on the COM thread.
    // Needs to be accessible to Win32ShellFolderManager2
    static String getFileSystemPath(final int csidl) throws IOException, InterruptedException {
        return invoke(new Callable<String>() {
            public String call() throws IOException {
                return getFileSystemPath0(csidl);
            }
        }, IOException.class);
    }

    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private static native String getFileSystemPath0(int csidl) throws IOException;
// Return whether the path is a network root.
// Path is assumed to be non-null
private static boolean isNetworkRoot(String path) {
return (path.equals("\\\\") || path.equals("\\") || path.equals("//") || path.equals("/"));
}
    /**
     * @return The parent shell folder of this shell folder, null if
     * there is no parent (i.e. this is the desktop)
     */
    public File getParentFile() {
        return parent;
    }
    /**
     * Returns whether this folder should be treated as a traversable
     * directory. The answer is computed once and cached in {@code isDir}.
     * Links count as directories only when their target is a directory.
     */
    public boolean isDirectory() {
        if (isDir == null) {
            // Folders with SFGAO_BROWSABLE have "shell extension" handlers and are
            // not traversable in JFileChooser.
            if (hasAttribute(ATTRIB_FOLDER) && !hasAttribute(ATTRIB_BROWSABLE)) {
                isDir = Boolean.TRUE;
            } else if (isLink()) {
                // Resolve the link (without UI) and defer to its target.
                ShellFolder linkLocation = getLinkLocation(false);
                isDir = Boolean.valueOf(linkLocation != null && linkLocation.isDirectory());
            } else {
                isDir = Boolean.FALSE;
            }
        }
        return isDir.booleanValue();
    }
    /*
     * Functions for enumerating an IShellFolder's children
     */

    // Returns an IEnumIDList interface for an IShellFolder. The value
    // returned must be released using releaseEnumObjects().
    // Runs on the COM thread via invoke().
    private long getEnumObjects(final boolean includeHiddenFiles) throws InterruptedException {
        return invoke(new Callable<Long>() {
            public Long call() {
                // The desktop folder needs special handling in the native code.
                boolean isDesktop = disposer.pIShellFolder == getDesktopIShellFolder();
                return getEnumObjects(disposer.pIShellFolder, isDesktop, includeHiddenFiles);
            }
        }, RuntimeException.class);
    }

    // Returns an IEnumIDList interface for an IShellFolder. The value
    // returned must be released using releaseEnumObjects().
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private native long getEnumObjects(long pIShellFolder, boolean isDesktop,
                                       boolean includeHiddenFiles);

    // Returns the next sequential child as a relative PIDL
    // from an IEnumIDList interface. The value returned must
    // be released using releasePIDL().
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private native long getNextChild(long pEnumObjects);

    // Releases the IEnumIDList interface
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private native void releaseEnumObjects(long pEnumObjects);

    // Returns the IShellFolder of a child from a parent IShellFolder
    // and a relative PIDL. The value returned must be released
    // using releaseIShellFolder().
    // NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
    private static native long bindToObject(long parentIShellFolder, long pIDL);
    /**
     * @return An array of shell folders that are children of this shell folder
     *         object. The array will be empty if the folder is empty. Returns
     *         <code>null</code> if this shellfolder does not denote a directory.
     *         Also returns an empty array if the enumeration is interrupted.
     */
    public File[] listFiles(final boolean includeHiddenFiles) {
        SecurityManager security = System.getSecurityManager();
        if (security != null) {
            security.checkRead(getPath());
        }
        try {
            return invoke(new Callable<File[]>() {
                public File[] call() throws InterruptedException {
                    if (!isDirectory()) {
                        return null;
                    }
                    // Links to directories are not directories and cannot be parents.
                    // This does not apply to folders in My Network Places (NetHood)
                    // because they are both links and real directories!
                    if (isLink() && !hasAttribute(ATTRIB_FOLDER)) {
                        return new File[0];
                    }
                    Win32ShellFolder2 desktop = Win32ShellFolderManager2.getDesktop();
                    Win32ShellFolder2 personal = Win32ShellFolderManager2.getPersonal();
                    // If we are a directory, we have a parent and (at least) a
                    // relative PIDL. We must first ensure we are bound to the
                    // parent so we have an IShellFolder to query.
                    long pIShellFolder = getIShellFolder();
                    // Now we can enumerate the objects in this folder.
                    ArrayList<Win32ShellFolder2> list = new ArrayList<Win32ShellFolder2>();
                    long pEnumObjects = getEnumObjects(includeHiddenFiles);
                    if (pEnumObjects != 0) {
                        try {
                            long childPIDL;
                            // Only keep children that are file-system objects
                            // or ancestors of file-system objects.
                            int testedAttrs = ATTRIB_FILESYSTEM | ATTRIB_FILESYSANCESTOR;
                            do {
                                childPIDL = getNextChild(pEnumObjects);
                                boolean releasePIDL = true;
                                if (childPIDL != 0 &&
                                    (getAttributes0(pIShellFolder, childPIDL, testedAttrs) & testedAttrs) != 0) {
                                    Win32ShellFolder2 childFolder;
                                    // Reuse the canonical Personal (My Documents)
                                    // instance instead of creating a duplicate.
                                    if (Win32ShellFolder2.this.equals(desktop)
                                            && personal != null
                                            && pidlsEqual(pIShellFolder, childPIDL, personal.disposer.relativePIDL)) {
                                        childFolder = personal;
                                    } else {
                                        // The new child takes ownership of childPIDL.
                                        childFolder = new Win32ShellFolder2(Win32ShellFolder2.this, childPIDL);
                                        releasePIDL = false;
                                    }
                                    list.add(childFolder);
                                }
                                if (releasePIDL) {
                                    releasePIDL(childPIDL);
                                }
                            } while (childPIDL != 0 && !Thread.currentThread().isInterrupted());
                        } finally {
                            releaseEnumObjects(pEnumObjects);
                        }
                    }
                    return Thread.currentThread().isInterrupted()
                            ? new File[0]
                            : list.toArray(new ShellFolder[list.size()]);
                }
            }, InterruptedException.class);
        } catch (InterruptedException e) {
            // NOTE(review): interrupt status is not restored here — confirm
            // callers rely on the flag being cleared.
            return new File[0];
        }
    }
/**
* Look for (possibly special) child folder by it's path
*
* @return The child shellfolder, or null if not found.
*/
/**
 * Look for (possibly special) child folder by its path.
 *
 * @return The child shellfolder, or null if not found.
 */
Win32ShellFolder2 getChildByPath(final String filePath) throws InterruptedException {
    return invoke(new Callable<Win32ShellFolder2>() {
        public Win32ShellFolder2 call() throws InterruptedException {
            long pIShellFolder = getIShellFolder();
            long pEnumObjects = getEnumObjects(true);
            Win32ShellFolder2 child = null;
            long childPIDL;
            // Guarantee the enumerator is released even if a native call
            // throws mid-iteration (matches the try/finally discipline used
            // by the sibling enumeration code in this class).
            try {
                while ((childPIDL = getNextChild(pEnumObjects)) != 0) {
                    if (getAttributes0(pIShellFolder, childPIDL, ATTRIB_FILESYSTEM) != 0) {
                        String path = getFileSystemPath(pIShellFolder, childPIDL);
                        if (path != null && path.equalsIgnoreCase(filePath)) {
                            long childIShellFolder = bindToObject(pIShellFolder, childPIDL);
                            // Ownership of childPIDL transfers to the new
                            // shell folder, so it is NOT released here.
                            child = new Win32ShellFolder2(Win32ShellFolder2.this,
                                    childIShellFolder, childPIDL, path);
                            break;
                        }
                    }
                    releasePIDL(childPIDL);
                }
            } finally {
                releaseEnumObjects(pEnumObjects);
            }
            return child;
        }
    }, InterruptedException.class);
}
// Lazily computed cache of the ATTRIB_LINK attribute check; volatile so a
// value computed on one thread is visible to others.
private volatile Boolean cachedIsLink;
/**
 * @return Whether this shell folder is a link
 */
public boolean isLink() {
    // Benign race: two threads may both compute the value, but the
    // attribute is presumed stable for the lifetime of this object, so
    // both arrive at the same answer.
    if (cachedIsLink == null) {
        cachedIsLink = hasAttribute(ATTRIB_LINK);
    }
    return cachedIsLink;
}
/**
 * @return Whether this shell folder is marked as hidden
 */
public boolean isHidden() {
    // Not cached, unlike isLink(); queried from the shell each time.
    return hasAttribute(ATTRIB_HIDDEN);
}
// Return the link location of a shell folder
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native long getLinkLocation(long parentIShellFolder,
                                           long relativePIDL, boolean resolve);
/**
 * @return The shell folder linked to by this shell folder, or null
 * if this shell folder is not a link or is a broken or invalid link
 */
public ShellFolder getLinkLocation() {
    // resolve=true asks the shell to repair/refresh the link target.
    return getLinkLocation(true);
}
private ShellFolder getLinkLocation(final boolean resolve) {
    // Runs on the COM thread via invoke(); see ComInvoker.
    return invoke(new Callable<ShellFolder>() {
        public ShellFolder call() {
            if (!isLink()) {
                return null;
            }
            ShellFolder location = null;
            long linkLocationPIDL = getLinkLocation(getParentIShellFolder(),
                                                    getRelativePIDL(), resolve);
            if (linkLocationPIDL != 0) {
                try {
                    location =
                        Win32ShellFolderManager2.createShellFolderFromRelativePIDL(getDesktop(),
                                                                                   linkLocationPIDL);
                } catch (InterruptedException e) {
                    // Return null
                } catch (InternalError e) {
                    // Could be a link to a non-bindable object, such as a network connection
                    // TODO: getIShellFolder() should throw FileNotFoundException instead
                }
            }
            return location;
        }
    });
}
// Parse a display name into a PIDL relative to the current IShellFolder.
// The caller owns the returned PIDL and is responsible for releasing it.
long parseDisplayName(final String name) throws IOException, InterruptedException {
    return invoke(new Callable<Long>() {
        public Long call() throws IOException {
            return parseDisplayName0(getIShellFolder(), name);
        }
    }, IOException.class);
}
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native long parseDisplayName0(long pIShellFolder, String name) throws IOException;
// Return the display name of a shell folder
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native String getDisplayNameOf(long parentIShellFolder,
                                              long relativePIDL,
                                              int attrs);
/**
 * @return The name used to display this shell folder
 */
public String getDisplayName() {
    // Lazily computed and cached in the displayName field; the shell is
    // asked for the SHGDN_NORMAL (user-facing) form of the name.
    if (displayName == null) {
        displayName =
            invoke(new Callable<String>() {
                public String call() {
                    return getDisplayNameOf(getParentIShellFolder(),
                                            getRelativePIDL(), SHGDN_NORMAL);
                }
            });
    }
    return displayName;
}
// Return the folder type of a shell folder
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native String getFolderType(long pIDL);
/**
 * @return The type of shell folder as a string
 */
public String getFolderType() {
    // Lazily computed and cached in the folderType field. The absolute
    // PIDL is captured outside the Callable so the native call sees a
    // stable value on the COM thread.
    if (folderType == null) {
        final long absolutePIDL = getAbsolutePIDL();
        folderType =
            invoke(new Callable<String>() {
                public String call() {
                    return getFolderType(absolutePIDL);
                }
            });
    }
    return folderType;
}
// Return the executable type of a file system shell folder
private native String getExecutableType(String path);
/**
 * @return The executable type as a string, or null if this folder is
 * not part of the file system (only file system objects have one)
 */
public String getExecutableType() {
    if (!isFileSystem()) {
        return null;
    }
    return getExecutableType(getAbsolutePath());
}
// Icons
// Per-type icon caches keyed by the icon's index in the Windows system
// image list. Typed generics replace the raw Map/HashMap declarations;
// existing raw-typed lookups elsewhere in this class still compile.
private static Map<Integer, Image> smallSystemImages = new HashMap<Integer, Image>();
private static Map<Integer, Image> largeSystemImages = new HashMap<Integer, Image>();
private static Map<Integer, Image> smallLinkedSystemImages = new HashMap<Integer, Image>();
private static Map<Integer, Image> largeLinkedSystemImages = new HashMap<Integer, Image>();
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native long getIShellIcon(long pIShellFolder);
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native int getIconIndex(long parentIShellIcon, long relativePIDL);
// Return the icon of a file system shell folder in the form of an HICON
private static native long getIcon(String absolutePath, boolean getLargeIcon);
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native long extractIcon(long parentIShellFolder, long relativePIDL,
                                       boolean getLargeIcon);
// Returns an icon from the Windows system icon list in the form of an HICON
private static native long getSystemIcon(int iconID);
private static native long getIconResource(String libName, int iconID,
                                           int cxDesired, int cyDesired,
                                           boolean useVGAColors);
// Note: useVGAColors is ignored on XP and later
// Return the bits from an HICON. This has a side effect of setting
// the imageHash variable for efficient caching / comparing.
private static native int[] getIconBits(long hIcon, int iconSize);
// Dispose the HICON
private static native void disposeIcon(long hIcon);
static native int[] getStandardViewButton0(int iconIndex);
// Should be called from the COM thread
private long getIShellIcon() {
    // Lazily acquired; -1L is the sentinel for "not yet fetched".
    if (pIShellIcon == -1L) {
        pIShellIcon = getIShellIcon(getIShellFolder());
    }
    return pIShellIcon;
}
// Convert a native HICON handle into a Java image. Returns null when the
// handle is invalid (0 or -1) or its pixel bits cannot be read.
private static Image makeIcon(long hIcon, boolean getLargeIcon) {
    if (hIcon == 0L || hIcon == -1L) {
        return null;
    }
    int size = getLargeIcon ? 32 : 16;
    // Side effect: reading the bits also sets the imageHash value for this object.
    int[] bits = getIconBits(hIcon, size);
    if (bits == null) {
        return null;
    }
    BufferedImage image = new BufferedImage(size, size, BufferedImage.TYPE_INT_ARGB);
    image.setRGB(0, 0, size, size, bits, 0, size);
    return image;
}
/**
 * @return The icon image used to display this shell folder
 */
public Image getIcon(final boolean getLargeIcon) {
    // Per-object cache: largeIcon / smallIcon fields.
    Image icon = getLargeIcon ? largeIcon : smallIcon;
    if (icon == null) {
        icon =
            invoke(new Callable<Image>() {
                public Image call() {
                    Image newIcon = null;
                    if (isFileSystem()) {
                        long parentIShellIcon = (parent != null)
                            ? ((Win32ShellFolder2) parent).getIShellIcon()
                            : 0L;
                        long relativePIDL = getRelativePIDL();
                        // These are cached per type (using the index in the system image list)
                        int index = getIconIndex(parentIShellIcon, relativePIDL);
                        if (index > 0) {
                            // Links get separate caches because the shell
                            // overlays the link arrow onto their icons.
                            Map imageCache;
                            if (isLink()) {
                                imageCache = getLargeIcon ? largeLinkedSystemImages : smallLinkedSystemImages;
                            } else {
                                imageCache = getLargeIcon ? largeSystemImages : smallSystemImages;
                            }
                            newIcon = (Image) imageCache.get(Integer.valueOf(index));
                            if (newIcon == null) {
                                long hIcon = getIcon(getAbsolutePath(), getLargeIcon);
                                newIcon = makeIcon(hIcon, getLargeIcon);
                                disposeIcon(hIcon);
                                if (newIcon != null) {
                                    imageCache.put(Integer.valueOf(index), newIcon);
                                }
                            }
                        }
                    }
                    if (newIcon == null) {
                        // These are only cached per object
                        long hIcon = extractIcon(getParentIShellFolder(),
                                                 getRelativePIDL(), getLargeIcon);
                        newIcon = makeIcon(hIcon, getLargeIcon);
                        disposeIcon(hIcon);
                    }
                    if (newIcon == null) {
                        // Last resort: the generic ShellFolder icon.
                        newIcon = Win32ShellFolder2.super.getIcon(getLargeIcon);
                    }
                    return newIcon;
                }
            });
        if (getLargeIcon) {
            largeIcon = icon;
        } else {
            smallIcon = icon;
        }
    }
    return icon;
}
/**
 * Gets an icon from the Windows system icon list as an <code>Image</code>
 */
static Image getSystemIcon(SystemIcon iconType) {
    long hIcon = getSystemIcon(iconType.getIconID());
    // Always the large (32x32) variant; the HICON is disposed after copy.
    Image icon = makeIcon(hIcon, true);
    disposeIcon(hIcon);
    return icon;
}
/**
 * Gets an icon from the Windows system icon list as an <code>Image</code>
 *
 * @return the icon, or null if the resource could not be loaded
 */
static Image getShell32Icon(int iconID, boolean getLargeIcon) {
    boolean useVGAColors = true; // Will be ignored on XP and later
    int size = getLargeIcon ? 32 : 16;
    // Honor the desktop's shell icon color depth; "4" bpp means VGA colors.
    Toolkit toolkit = Toolkit.getDefaultToolkit();
    String shellIconBPP = (String)toolkit.getDesktopProperty("win.icon.shellIconBPP");
    if (shellIconBPP != null) {
        useVGAColors = shellIconBPP.equals("4");
    }
    long hIcon = getIconResource("shell32.dll", iconID, size, size, useVGAColors);
    if (hIcon != 0) {
        Image icon = makeIcon(hIcon, getLargeIcon);
        disposeIcon(hIcon);
        return icon;
    }
    return null;
}
/**
 * Returns the canonical form of this abstract pathname. Equivalent to
 * <code>new Win32ShellFolder2(getParentFile(), this.{@link java.io.File#getCanonicalPath}())</code>.
 *
 * @see java.io.File#getCanonicalFile
 */
public File getCanonicalFile() throws IOException {
    // Shell folders are already canonical; no resolution is performed.
    return this;
}
/*
 * Indicates whether this is a special folder (includes My Documents).
 * A folder is special if it is the personal (My Documents) folder,
 * not part of the file system, or the desktop itself.
 */
public boolean isSpecial() {
    if (isPersonal) {
        return true;
    }
    if (!isFileSystem()) {
        return true;
    }
    return this == getDesktop();
}
/**
 * Compares this object with the specified object for order.
 *
 * @see sun.awt.shell.ShellFolder#compareTo(File)
 */
public int compareTo(File file2) {
    if (!(file2 instanceof Win32ShellFolder2)) {
        // Ordinary files compare by path; special/virtual shell folders
        // always sort before plain files.
        if (isFileSystem() && !isSpecial()) {
            return super.compareTo(file2);
        } else {
            return -1; // Non-file shellfolders sort before files
        }
    }
    return Win32ShellFolderManager2.compareShellFolders(this, (Win32ShellFolder2) file2);
}
// native constants from commctrl.h (LVCFMT_* column format flags)
private static final int LVCFMT_LEFT = 0;
private static final int LVCFMT_RIGHT = 1;
private static final int LVCFMT_CENTER = 2;
/**
 * Returns the detail-view columns the shell reports for this folder,
 * with native LVCFMT alignments translated to SwingConstants and a
 * per-column comparator attached. Null column slots are dropped.
 */
public ShellFolderColumnInfo[] getFolderColumns() {
    return invoke(new Callable<ShellFolderColumnInfo[]>() {
        public ShellFolderColumnInfo[] call() {
            ShellFolderColumnInfo[] columns = doGetColumnInfo(getIShellFolder());
            if (columns != null) {
                List<ShellFolderColumnInfo> notNullColumns =
                    new ArrayList<ShellFolderColumnInfo>();
                for (int i = 0; i < columns.length; i++) {
                    ShellFolderColumnInfo column = columns[i];
                    if (column != null) {
                        // Map native alignment to the Swing constant.
                        column.setAlignment(column.getAlignment() == LVCFMT_RIGHT
                            ? SwingConstants.RIGHT
                            : column.getAlignment() == LVCFMT_CENTER
                                ? SwingConstants.CENTER
                                : SwingConstants.LEADING);
                        // Comparator index i matches the native column index.
                        column.setComparator(new ColumnComparator(getIShellFolder(), i));
                        notNullColumns.add(column);
                    }
                }
                columns = new ShellFolderColumnInfo[notNullColumns.size()];
                notNullColumns.toArray(columns);
            }
            return columns;
        }
    });
}
/**
 * Returns the value of the given detail-view column for this folder,
 * as reported by the shell.
 */
public Object getFolderColumnValue(final int column) {
    return invoke(new Callable<Object>() {
        public Object call() {
            return doGetColumnValue(getParentIShellFolder(), getRelativePIDL(), column);
        }
    });
}
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private native ShellFolderColumnInfo[] doGetColumnInfo(long iShellFolder2);
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private native Object doGetColumnValue(long parentIShellFolder2, long childPIDL, int columnIdx);
// NOTE: this method uses COM and must be called on the 'COM thread'. See ComInvoker for the details
private static native int compareIDsByColumn(long pParentIShellFolder, long pidl1, long pidl2, int columnIdx);
/**
 * Sorts the given children of this folder in place using the shell's
 * column-0 ordering.
 */
public void sortChildren(final List<? extends File> files) {
    // To avoid loads of synchronizations with Invoker and improve performance we
    // synchronize the whole code of the sort method once
    invoke(new Callable<Void>() {
        public Void call() {
            Collections.sort(files, new ColumnComparator(getIShellFolder(), 0));
            return null;
        }
    });
}
// Compares two children of a folder by a detail-view column, delegating
// to the native shell comparison on the COM thread.
private static class ColumnComparator implements Comparator<File> {
    private final long parentIShellFolder;
    private final int columnIdx;
    public ColumnComparator(long parentIShellFolder, int columnIdx) {
        this.parentIShellFolder = parentIShellFolder;
        this.columnIdx = columnIdx;
    }
    // compares 2 objects within this folder by the specified column
    public int compare(final File o, final File o1) {
        Integer result = invoke(new Callable<Integer>() {
            public Integer call() {
                if (o instanceof Win32ShellFolder2
                    && o1 instanceof Win32ShellFolder2) {
                    // delegates comparison to native method
                    return compareIDsByColumn(parentIShellFolder,
                                              ((Win32ShellFolder2) o).getRelativePIDL(),
                                              ((Win32ShellFolder2) o1).getRelativePIDL(),
                                              columnIdx);
                }
                // Non-shell entries are treated as equal.
                return 0;
            }
        });
        // invoke() may return null (e.g. interrupted); treat as equal.
        return result == null ? 0 : result;
    }
}
}
| |
/*
* Copyright 2006-2012 Amazon Technologies, Inc. or its affiliates.
* Amazon, Amazon.com and Carbonado are trademarks or registered trademarks
* of Amazon Technologies, Inc. or its affiliates. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazon.carbonado.repo.replicated;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.ObjectStreamClass;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import com.amazon.carbonado.CorruptEncodingException;
import com.amazon.carbonado.Cursor;
import com.amazon.carbonado.OptimisticLockException;
import com.amazon.carbonado.PersistException;
import com.amazon.carbonado.Repository;
import com.amazon.carbonado.RepositoryBuilder;
import com.amazon.carbonado.Storable;
import com.amazon.carbonado.Storage;
import com.amazon.carbonado.Trigger;
import com.amazon.carbonado.UniqueConstraintException;
import com.amazon.carbonado.capability.ResyncCapability;
import com.amazon.carbonado.layout.StoredLayout;
import com.amazon.carbonado.layout.StoredLayoutProperty;
import com.amazon.carbonado.repo.replicated.ReplicatedRepository;
import com.amazon.carbonado.repo.replicated.ReplicatedRepositoryBuilder;
import com.amazon.carbonado.repo.sleepycat.BDBRepositoryBuilder;
import com.amazon.carbonado.TestUtilities;
import com.amazon.carbonado.stored.StorableTestBasic;
import com.amazon.carbonado.stored.StorableTestMinimal;
import com.amazon.carbonado.stored.StorableVersioned;
import com.amazon.carbonado.layout.TestLayout;
import org.cojen.classfile.TypeDesc;
/**
*
*
* @author Brian S O'Neill
*/
public class TestRepair extends TestCase {
// Base names for the temporary replica and master repositories.
private static final String REPLICA_NAME = "rr-replica";
private static final String MASTER_NAME = "rr-master";
public static void main(String[] args) {
    junit.textui.TestRunner.run(suite());
}
public static TestSuite suite() {
    return new TestSuite(TestRepair.class);
}
// The replica and master halves, and the replicated repository that
// wraps them; created in setUp()/reOpenPersistent(), closed in tearDown().
private Repository mReplica;
private Repository mMaster;
private Repository mReplicated;
public TestRepair(String name) {
    super(name);
}
// Builds an in-memory (temp) replicated repository pair for each test and
// keeps direct handles to the replica and master for fault injection.
protected void setUp() throws Exception {
    RepositoryBuilder replica = TestUtilities.newTempRepositoryBuilder(REPLICA_NAME);
    RepositoryBuilder master = TestUtilities.newTempRepositoryBuilder(MASTER_NAME);
    ReplicatedRepositoryBuilder builder = new ReplicatedRepositoryBuilder();
    builder.setName("rr");
    builder.setReplicaRepositoryBuilder(replica);
    builder.setMasterRepositoryBuilder(master);
    ReplicatedRepository rr = (ReplicatedRepository) builder.build();
    mReplica = rr.getReplicaRepository();
    mMaster = rr.getMasterRepository();
    mReplicated = rr;
}
// Closes the replicated wrapper first, then the underlying repositories.
// Chained try/finally guarantees every close() is attempted and the
// fields are nulled even if an earlier close() throws, so one failing
// test cannot leak open repositories into the next test.
protected void tearDown() throws Exception {
    try {
        if (mReplicated != null) {
            mReplicated.close();
        }
    } finally {
        try {
            if (mReplica != null) {
                mReplica.close();
            }
        } finally {
            try {
                if (mMaster != null) {
                    mMaster.close();
                }
            } finally {
                mReplica = null;
                mMaster = null;
                mReplicated = null;
            }
        }
    }
}
/**
 * Tears down the current repositories and re-opens them as on-disk BDB
 * repositories, so state survives the close/open cycle. Passing null
 * allocates fresh test directories; passing a previous return value
 * re-opens the same directories.
 *
 * @param locations previous {replica, master} locations, or null
 * @return repository locations as {replica, master}
 */
private String[] reOpenPersistent(String[] locations) throws Exception {
    tearDown();
    String replicaLocation, masterLocation;
    if (locations != null) {
        replicaLocation = locations[0];
        masterLocation = locations[1];
    } else {
        replicaLocation = TestUtilities.makeTestDirectoryString(REPLICA_NAME);
        masterLocation = TestUtilities.makeTestDirectoryString(MASTER_NAME);
    }
    {
        BDBRepositoryBuilder replica = new BDBRepositoryBuilder();
        replica.setName(REPLICA_NAME);
        // NoSync speeds tests up; durability is not under test here.
        replica.setTransactionNoSync(true);
        replica.setEnvironmentHome(replicaLocation);
        BDBRepositoryBuilder master = new BDBRepositoryBuilder();
        master.setName(MASTER_NAME);
        master.setTransactionNoSync(true);
        master.setEnvironmentHome(masterLocation);
        ReplicatedRepositoryBuilder builder = new ReplicatedRepositoryBuilder();
        builder.setName("rr");
        builder.setReplicaRepositoryBuilder(replica);
        builder.setMasterRepositoryBuilder(master);
        ReplicatedRepository rr = (ReplicatedRepository) builder.build();
        mReplica = rr.getReplicaRepository();
        mMaster = rr.getMasterRepository();
        mReplicated = rr;
    }
    return new String[] {replicaLocation, masterLocation};
}
// A record present in the master but missing from the replica should be
// repaired (copied to the replica) after an insert conflict exposes it.
public void testMissingEntry() throws Exception {
    // Insert an entry into master.
    {
        Storage<StorableTestBasic> storage = mMaster.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        stb.setStringProp("hello");
        stb.setIntProp(1);
        stb.setLongProp(1L);
        stb.setDoubleProp(1.0);
        stb.insert();
    }
    // Verify not available from rr.
    {
        Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        assertFalse(stb.tryLoad());
    }
    // Insert into rr. The master already has id 5, so the insert must
    // fail, which triggers the background repair of the replica.
    {
        Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        stb.setStringProp("world");
        stb.setIntProp(1);
        stb.setLongProp(1L);
        stb.setDoubleProp(1.0);
        try {
            stb.insert();
            fail();
        } catch (UniqueConstraintException e) {
        }
    }
    // Wait a moment for repair thread to finish.
    // NOTE(review): fixed sleep may be flaky on slow machines.
    Thread.sleep(1000);
    // Verify available from rr; the master's value ("hello") must win.
    {
        Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        assertTrue(stb.tryLoad());
        assertEquals("hello", stb.getStringProp());
    }
}
// Same as testMissingEntry, but for a versioned storable: the repaired
// replica copy must also carry the master's record version.
public void testMissingVersionedEntry() throws Exception {
    // Insert an entry into master.
    {
        Storage<StorableVersioned> storage = mMaster.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.setValue("hello");
        sv.insert();
    }
    // Verify not available from rr.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        assertFalse(sv.tryLoad());
    }
    // Insert into rr; fails on the master's unique constraint and
    // triggers the background repair.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.setValue("world");
        try {
            sv.insert();
            fail();
        } catch (UniqueConstraintException e) {
        }
    }
    // Wait a moment for repair thread to finish.
    // NOTE(review): fixed sleep may be flaky on slow machines.
    Thread.sleep(1000);
    // Verify available from rr with the master's value and version 1.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        assertTrue(sv.tryLoad());
        assertEquals("hello", sv.getValue());
        assertEquals(1, sv.getVersion());
    }
}
// A replica entry that is stale (older version than master) should be
// repaired after an optimistic lock failure exposes the staleness.
public void testStaleEntry() throws Exception {
    // Insert an entry into rr.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.setValue("hello");
        sv.insert();
    }
    // Update master entry directly, bypassing replication, so the
    // replica is now one version behind.
    {
        Storage<StorableVersioned> storage = mMaster.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.load();
        sv.setValue("world");
        sv.update();
    }
    // Verify old version in replica.
    {
        Storage<StorableVersioned> storage = mReplica.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.load();
        assertEquals(1, sv.getVersion());
        assertEquals("hello", sv.getValue());
    }
    // Attempt to update rr entry; the stale version causes an
    // OptimisticLockException and triggers the repair.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.load();
        assertEquals(1, sv.getVersion());
        assertEquals("hello", sv.getValue());
        sv.setValue("ciao");
        try {
            sv.update();
            fail();
        } catch (OptimisticLockException e) {
        }
    }
    // Wait a moment for repair thread to finish.
    // NOTE(review): fixed sleep may be flaky on slow machines.
    Thread.sleep(1000);
    // Verify new version in rr and update it.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.load();
        assertEquals(2, sv.getVersion());
        assertEquals("world", sv.getValue());
        sv.setValue("ciao");
        sv.update();
    }
}
// Variant of testStaleEntry that retries the failed update with the
// exception's backoff helper instead of sleeping explicitly; the retry
// must eventually succeed once the repair lands.
public void testStaleEntryAndBackoff() throws Exception {
    // Insert an entry into rr.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.setValue("hello");
        sv.insert();
    }
    // Update master entry directly, making the replica stale.
    {
        Storage<StorableVersioned> storage = mMaster.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.load();
        sv.setValue("world");
        sv.update();
    }
    // Attempt to update rr entry, retrying with backoff until the
    // repair thread has refreshed the replica.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        int failCount = 0;
        for (int retryCount = 3;;) {
            try {
                sv.load();
                sv.setValue("ciao");
                sv.update();
                break;
            } catch (OptimisticLockException e) {
                failCount++;
                // backoff() sleeps and decrements the remaining retries.
                retryCount = e.backoff(e, retryCount, 1000);
            }
        }
        // At least one optimistic lock failure must have occurred.
        assertTrue(failCount > 0);
    }
    // Verify new version in rr: v1 insert, v2 master update, v3 retry.
    {
        Storage<StorableVersioned> storage = mReplicated.storageFor(StorableVersioned.class);
        StorableVersioned sv = storage.prepare();
        sv.setID(5);
        sv.load();
        assertEquals(3, sv.getVersion());
        assertEquals("ciao", sv.getValue());
    }
}
// A record deleted from the master but still present in the replica
// should be removed from the replica by a targeted resync.
public void testDeletedEntry() throws Exception {
    // Insert some entries into rr.
    for (int i=0; i<10; i++) {
        Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(i);
        stb.setStringProp("hello");
        stb.setIntProp(1);
        stb.setLongProp(1L);
        stb.setDoubleProp(1.0);
        stb.insert();
    }
    // Delete one record from master, bypassing replication.
    {
        Storage<StorableTestBasic> storage = mMaster.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        stb.delete();
    }
    // Verify record is still available from rr (replica is now stale).
    {
        Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        assertTrue(stb.tryLoad());
        assertEquals("hello", stb.getStringProp());
    }
    // Resync just the affected record at full (1.0) throughput.
    ResyncCapability cap = mReplicated.getCapability(ResyncCapability.class);
    cap.resync(StorableTestBasic.class, 1.0, "id=?", 5);
    // Verify record is not available from rr anymore.
    {
        Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
        StorableTestBasic stb = storage.prepare();
        stb.setId(5);
        assertFalse(stb.tryLoad());
    }
}
// Whole-storage resync, deletes permitted.
public void testCorruptEntry() throws Exception {
    testCorruptEntry(false, false);
}
// Whole-storage resync with a trigger installed that would veto deletes;
// verifies the trigger is not invoked during repair.
public void testCorruptEntryPreventDelete() throws Exception {
    testCorruptEntry(false, true);
}
// Record-by-record resync instead of a single bulk pass.
public void testIndividualCorruptEntry() throws Exception {
    testCorruptEntry(true, false);
}
/**
 * Corrupts the replica by deleting its knowledge of a storable layout
 * generation, verifies reads then fail with CorruptEncodingException
 * (including serialization of the exception's primary-key storable), and
 * finally repairs the replica via resync.
 *
 * @param individualRepair resync one record at a time instead of in bulk
 * @param preventDelete install a delete-vetoing trigger and assert it
 *        never runs during repair
 */
private void testCorruptEntry(boolean individualRepair, boolean preventDelete)
    throws Exception
{
    // Close and open repository again, this time on disk. We need to close
    // and re-open the repository as part of the test.
    String[] locations = reOpenPersistent(null);
    // Insert different versions of the same record...
    // Two layout generations of the same type name: generation 0 has one
    // int property, generation 1 has two.
    final String recordName = "test.TheTestRecord";
    final Class<? extends StorableTestMinimal> type0 =
        TestLayout.defineStorable(recordName, 1, TypeDesc.INT);
    Class<? extends StorableTestMinimal> type1 =
        TestLayout.defineStorable(recordName, 2, TypeDesc.INT);
    Storage<? extends StorableTestMinimal> storage0 = mReplicated.storageFor(type0);
    Storage<? extends StorableTestMinimal> storage1 = mReplicated.storageFor(type1);
    final int seed = 5469232;
    final int count = 20;
    {
        // Randomly mix records of both generations.
        Random rnd = new Random(seed);
        Method prop_0_of_0 = type0.getMethod("setProp0", int.class);
        Method prop_0_of_1 = type1.getMethod("setProp0", int.class);
        Method prop_1_of_1 = type1.getMethod("setProp1", int.class);
        boolean anyType0 = false;
        boolean anyType1 = false;
        for (int i=0; i<count; i++) {
            StorableTestMinimal stm;
            if (rnd.nextBoolean()) {
                stm = storage0.prepare();
                prop_0_of_0.invoke(stm, i + 1000);
                anyType0 = true;
            } else {
                stm = storage1.prepare();
                prop_0_of_1.invoke(stm, i + 2000);
                prop_1_of_1.invoke(stm, i + 4000);
                anyType1 = true;
            }
            stm.setId(i);
            stm.insert();
        }
        // Assert mix of types.
        assertTrue(anyType0);
        assertTrue(anyType1);
    }
    // Verify records can be read via storage0, which will ignore the new property.
    {
        Cursor<? extends StorableTestMinimal> cursor = storage0.query().fetch();
        while (cursor.hasNext()) {
            StorableTestMinimal stm = cursor.next();
            //System.out.println(stm);
        }
    }
    // Verify records can be read via storage1, which may have zero for the new property.
    {
        Cursor<? extends StorableTestMinimal> cursor = storage1.query().fetch();
        while (cursor.hasNext()) {
            StorableTestMinimal stm = cursor.next();
            //System.out.println(stm);
        }
    }
    // Close and open only replica repository and create corruption by
    // deleting all information regarding generation 1 in the replica.
    locations = reOpenPersistent(locations);
    storage0 = mReplicated.storageFor(type0);
    // Replace all the masters with only type 0 records.
    {
        mMaster.storageFor(type0).query().deleteAll();
        Method prop_0_of_0 = type0.getMethod("setProp0", int.class);
        for (int i=0; i<count; i++) {
            StorableTestMinimal stm = mMaster.storageFor(type0).prepare();
            prop_0_of_0.invoke(stm, i + 1000);
            stm.setId(i);
            stm.insert();
        }
    }
    // Delete all knowledge of type 1: remove the generation-1 layout
    // record and its properties from the replicated layout storage.
    StoredLayout layout = mReplicated.storageFor(StoredLayout.class).prepare();
    layout.setStorableTypeName(recordName);
    layout.setGeneration(1);
    layout.load();
    layout.delete();
    mReplicated.storageFor(StoredLayoutProperty.class)
        .query("layoutID = ?").with(layout.getLayoutID()).deleteAll();
    // Close and open to rebuild replicated repository.
    locations = reOpenPersistent(locations);
    storage0 = mReplicated.storageFor(type0);
    // Verify corruption. (Replica is unable to figure out what layout generation 1 is)
    try {
        Cursor<? extends StorableTestMinimal> cursor = storage0.query().fetch();
        while (cursor.hasNext()) {
            StorableTestMinimal stm = cursor.next();
        }
        fail();
    } catch (CorruptEncodingException e) {
        // Verify serialization of primary key storable.
        assertNotNull(e.getStorableWithPrimaryKey());
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(bout);
        out.writeObject(e);
        out.close();
        byte[] bytes = bout.toByteArray();
        ByteArrayInputStream bin = new ByteArrayInputStream(bytes);
        ObjectInputStream in = new ObjectInputStream(bin) {
            // Special handling to load generated class.
            @Override
            protected Class<?> resolveClass(ObjectStreamClass desc)
                throws IOException, ClassNotFoundException
            {
                if (desc.getName().equals(recordName)) {
                    return type0;
                }
                return super.resolveClass(desc);
            }
        };
        CorruptEncodingException e2 = (CorruptEncodingException) in.readObject();
        // Round-tripped storable is a distinct object with equal keys.
        Storable s1 = e.getStorableWithPrimaryKey();
        Storable s2 = e2.getStorableWithPrimaryKey();
        assertFalse(s1 == s2);
        assertTrue(s1.equalPrimaryKeys(s2));
        assertTrue(s2.equalPrimaryKeys(s1));
    }
    // Resync to repair.
    class Prevent extends Trigger {
        volatile boolean didRun;
        @Override
        public Object beforeDelete(Object s) throws PersistException {
            didRun = true;
            throw new PersistException("Cannot delete me!");
        }
    }
    Prevent prevent = null;
    if (preventDelete) {
        // This is a partially vestigial test. It used to be the case that
        // triggers would run during a resync or replication repair. This
        // is no longer the case. Instead, use this as an opportunity to
        // ensure the trigger does not run.
        prevent = new Prevent();
        storage0.addTrigger(prevent);
    }
    ResyncCapability cap = mReplicated.getCapability(ResyncCapability.class);
    if (individualRepair) {
        for (int i=0; i<count; i++) {
            cap.resync(type0, 1.0, "id=?", i);
        }
    } else {
        cap.resync(type0, 1.0, null);
    }
    if (preventDelete) {
        // Again, this is a partially vestigial test. The trigger should
        // not have run at all during the resync.
        assertFalse(prevent.didRun);
        storage0.removeTrigger(prevent);
        cap.resync(type0, 1.0, null);
    }
    {
        // Verify records can be read out now.
        Cursor<? extends StorableTestMinimal> cursor = storage0.query().fetch();
        int actual = 0;
        while (cursor.hasNext()) {
            StorableTestMinimal stm = cursor.next();
            //System.out.println(stm);
            actual++;
        }
        assertEquals(count, actual);
    }
    // The generation-1 view must also read cleanly after repair.
    storage1 = mReplicated.storageFor(type1);
    {
        Cursor<? extends StorableTestMinimal> cursor = storage1.query().fetch();
        int actual = 0;
        while (cursor.hasNext()) {
            StorableTestMinimal stm = cursor.next();
            //System.out.println(stm);
            actual++;
        }
        assertEquals(count, actual);
    }
}
// Verifies the resync listener observes exactly one insert (id 1, master
// only), one update (id 3, conflicting values) and one delete (id 2,
// replica only); the matching id 4 entries produce no events.
public void testResyncListener() throws Exception {
    prepareOutOfSyncEntries();
    final List<Storable> inserted = new ArrayList<Storable>();
    final List<Storable[]> updated = new ArrayList<Storable[]>();
    final List<Storable> deleted = new ArrayList<Storable>();
    ResyncCapability.Listener<Storable> listener = new ResyncCapability.Listener<Storable>() {
        @Override
        public void afterInsert(Storable newStorable, Object state) {
            inserted.add(newStorable);
        }
        @Override
        public Object beforeUpdate(Storable oldStorable, Storable newStorable) {
            updated.add(new Storable[] {oldStorable, newStorable});
            return null;
        }
        @Override
        public void afterDelete(Storable oldStorable, Object state) {
            deleted.add(oldStorable);
        }
    };
    ResyncCapability cap = mReplicated.getCapability(ResyncCapability.class);
    cap.resync(StorableTestBasic.class, listener, 1.0, null);
    // Master-only record was inserted into the replica.
    assertEquals(1, inserted.size());
    assertEquals(1, ((StorableTestBasic) inserted.get(0)).getId());
    assertEquals("hello", ((StorableTestBasic) inserted.get(0)).getStringProp());
    // Conflicting record updated: old replica value "bar" -> master "foo".
    assertEquals(1, updated.size());
    assertEquals(3, ((StorableTestBasic) (updated.get(0)[0])).getId());
    assertEquals("bar", ((StorableTestBasic) (updated.get(0)[0])).getStringProp());
    assertEquals(3, ((StorableTestBasic) (updated.get(0)[1])).getId());
    assertEquals("foo", ((StorableTestBasic) (updated.get(0)[1])).getStringProp());
    // Replica-only record was deleted.
    assertEquals(1, deleted.size());
    assertEquals(2, ((StorableTestBasic) deleted.get(0)).getId());
    assertEquals("world", ((StorableTestBasic) deleted.get(0)).getStringProp());
}
// Verifies a listener can mutate storables before insert/update during
// resync, deliberately leaving the replica partially out of sync.
public void testResyncListenerUnrepair() throws Exception {
    prepareOutOfSyncEntries();
    ResyncCapability.Listener<StorableTestBasic> listener =
        new ResyncCapability.Listener<StorableTestBasic>()
    {
        @Override
        public Object beforeInsert(StorableTestBasic storable) {
            // Force to be partially out of sync.
            storable.setDoubleProp(123.456);
            return null;
        }
        @Override
        public Object beforeUpdate(StorableTestBasic storable) {
            // Force to be partially out of sync.
            storable.setDoubleProp(654.321);
            return null;
        }
    };
    ResyncCapability cap = mReplicated.getCapability(ResyncCapability.class);
    cap.resync(StorableTestBasic.class, listener, 1.0, null);
    // Scan all records and confirm both marker values made it through.
    boolean insertUnrepair = false;
    boolean updateUnrepair = false;
    Storage<StorableTestBasic> storage = mReplicated.storageFor(StorableTestBasic.class);
    Cursor<StorableTestBasic> cursor = storage.query().fetch();
    while (cursor.hasNext()) {
        StorableTestBasic storable = cursor.next();
        insertUnrepair |= storable.getDoubleProp() == 123.456;
        updateUnrepair |= storable.getDoubleProp() == 654.321;
    }
    assertTrue(insertUnrepair);
    assertTrue(updateUnrepair);
}
/**
 * Seeds the master and replica with a known out-of-sync state:
 * id 1 exists only in the master, id 2 only in the replica, id 3 has
 * conflicting values, and id 4 matches in both.
 */
private void prepareOutOfSyncEntries() throws Exception {
    // Insert an entry into master.
    insertBasic(mMaster, 1, "hello");
    // Insert an entry into replica.
    insertBasic(mReplica, 2, "world");
    // Insert conflicting entries into master and replica.
    insertBasic(mMaster, 3, "foo");
    insertBasic(mReplica, 3, "bar");
    // Insert matching entries into master and replica.
    insertBasic(mMaster, 4, "good");
    insertBasic(mReplica, 4, "good");
}
/**
 * Inserts a StorableTestBasic with the given id and string property into
 * the given repository; the remaining properties are fixed filler values.
 */
private static void insertBasic(Repository repo, int id, String stringProp)
    throws Exception
{
    Storage<StorableTestBasic> storage = repo.storageFor(StorableTestBasic.class);
    StorableTestBasic stb = storage.prepare();
    stb.setId(id);
    stb.setStringProp(stringProp);
    stb.setIntProp(1);
    stb.setLongProp(1L);
    stb.setDoubleProp(1.0);
    stb.insert();
}
}
| |
/*
* Copyright 2015-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.pcepio.protocol.ver1;
import java.util.LinkedList;
import java.util.ListIterator;
import org.jboss.netty.buffer.ChannelBuffer;
import org.onosproject.pcepio.exceptions.PcepParseException;
import org.onosproject.pcepio.protocol.PcepCloseMsg;
import org.onosproject.pcepio.protocol.PcepMessageReader;
import org.onosproject.pcepio.protocol.PcepMessageWriter;
import org.onosproject.pcepio.protocol.PcepType;
import org.onosproject.pcepio.protocol.PcepVersion;
import org.onosproject.pcepio.types.PcepObjectHeader;
import org.onosproject.pcepio.types.PcepValueType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.MoreObjects;
/**
* Provides PCEP Close Message.
*/
class PcepCloseMsgVer1 implements PcepCloseMsg {
    /*
     * RFC : 5440 , section : 6.8
     * <Close Message> ::= <Common Header> <CLOSE>
     *
        0                   1                   2                   3
        0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       | Ver |  Flags  |  Message-Type |       Message-Length          |
       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       | Object-Class  |   OT  |Res|P|I|   Object Length (bytes)       |
       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       |          Reserved             |      Flags    |    Reason     |
       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
       |                                                               |
       //                         Optional TLVs                       //
       |                                                               |
       +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     */
    protected static final Logger log = LoggerFactory.getLogger(PcepCloseMsgVer1.class);
    // Pcep version: 1
    public static final byte PACKET_VERSION = 1;
    // Common header (4 bytes) plus minimum CLOSE object (8 bytes).
    public static final int PACKET_MINIMUM_LENGTH = 12;
    public static final PcepType MSG_TYPE = PcepType.CLOSE;
    public static final byte CLOSE_OBJ_TYPE = 1;
    public static final byte CLOSE_OBJ_CLASS = 15;
    public static final byte CLOSE_OBJECT_VERSION = 1;
    public static final byte DEFAULT_REASON = 1; // Default reason to close
    public static final short CLOSE_OBJ_MINIMUM_LENGTH = 8;
    // The 3-bit version field occupies the top bits of the first header byte.
    public static final int SHIFT_FLAG = 5;
    static final PcepObjectHeader DEFAULT_CLOSE_HEADER = new PcepObjectHeader(CLOSE_OBJ_CLASS, CLOSE_OBJ_TYPE,
            PcepObjectHeader.REQ_OBJ_OPTIONAL_PROCESS, PcepObjectHeader.RSP_OBJ_PROCESSED, CLOSE_OBJ_MINIMUM_LENGTH);
    // Header of the CLOSE object carried by this message.
    private final PcepObjectHeader closeObjHeader;
    // Reason code from the CLOSE object body.
    private byte yReason;
    // Optional TLVs carried in the CLOSE object body.
    private LinkedList<PcepValueType> llOptionalTlv;
    public static final PcepCloseMsgVer1.Reader READER = new Reader();
    /**
     * Reader class for reading close message from channel buffer.
     */
    static class Reader implements PcepMessageReader<PcepCloseMsg> {
        PcepObjectHeader closeObjHeader;
        byte yReason;
        // Optional TLV
        private LinkedList<PcepValueType> llOptionalTlv;
        @Override
        public PcepCloseMsg readFrom(ChannelBuffer cb) throws PcepParseException {
            if (cb.readableBytes() < PACKET_MINIMUM_LENGTH) {
                throw new PcepParseException("Packet size is less than the minimum length.");
            }
            // fixed value property version == 1
            byte version = cb.readByte();
            version = (byte) (version >> SHIFT_FLAG);
            if (version != PACKET_VERSION) {
                throw new PcepParseException("Wrong version. Expected=PcepVersion.PCEP_1(1), got=" + version);
            }
            // fixed value property type == 7
            byte type = cb.readByte();
            if (type != MSG_TYPE.getType()) {
                throw new PcepParseException("Wrong type. Expected=PcepType.CLOSE(7), got=" + type);
            }
            short length = cb.readShort();
            if (length < PACKET_MINIMUM_LENGTH) {
                throw new PcepParseException("Wrong length. Expected to be >= " + PACKET_MINIMUM_LENGTH + ", was: "
                        + length);
            }
            // CLOSE object header (class 15, type 1).
            closeObjHeader = PcepObjectHeader.read(cb);
            // Reserved
            cb.readShort();
            // Flags
            cb.readByte();
            // Reason
            yReason = cb.readByte();
            // parse optional TLV
            llOptionalTlv = parseOptionalTlv(cb);
            return new PcepCloseMsgVer1(closeObjHeader, yReason, llOptionalTlv);
        }
    }
    /**
     * Parse the list of Optional Tlvs.
     *
     * @param cb channel buffer
     * @return list of Optional Tlvs
     * @throws PcepParseException when fails to parse optional tlvs
     */
    public static LinkedList<PcepValueType> parseOptionalTlv(ChannelBuffer cb) throws PcepParseException {
        LinkedList<PcepValueType> llOptionalTlv = new LinkedList<>();
        /*
        rfc 5440:
        Optional TLVs may be included within the CLOSE object body. The
        specification of such TLVs is outside the scope of this document.
        Therefore this implementation intentionally returns an empty list.
        */
        return llOptionalTlv;
    }
    /**
     * Constructor to initialize PCEP close Message with all the parameters.
     *
     * @param closeObjHeader object header for close message
     * @param yReason reason for closing the channel
     * @param llOptionalTlv list of optional tlvs
     */
    PcepCloseMsgVer1(PcepObjectHeader closeObjHeader, byte yReason, LinkedList<PcepValueType> llOptionalTlv) {
        this.closeObjHeader = closeObjHeader;
        this.yReason = yReason;
        this.llOptionalTlv = llOptionalTlv;
    }
    /**
     * Builder class for PCEP close message. Unset fields fall back to
     * DEFAULT_CLOSE_HEADER and DEFAULT_REASON when build() is called.
     */
    static class Builder implements PcepCloseMsg.Builder {
        // PCEP Close message fields
        private boolean bIsHeaderSet = false;
        private PcepObjectHeader closeObjHeader;
        private boolean bIsReasonSet = false;
        private byte yReason;
        private LinkedList<PcepValueType> llOptionalTlv = new LinkedList<>();
        private boolean bIsPFlagSet = false;
        private boolean bPFlag;
        private boolean bIsIFlagSet = false;
        private boolean bIFlag;
        @Override
        public PcepVersion getVersion() {
            return PcepVersion.PCEP_1;
        }
        @Override
        public PcepType getType() {
            return PcepType.CLOSE;
        }
        @Override
        public PcepCloseMsg build() {
            // Fall back to defaults for anything the caller did not set.
            PcepObjectHeader closeObjHeader = this.bIsHeaderSet ? this.closeObjHeader : DEFAULT_CLOSE_HEADER;
            byte yReason = this.bIsReasonSet ? this.yReason : DEFAULT_REASON;
            if (bIsPFlagSet) {
                closeObjHeader.setPFlag(bPFlag);
            }
            if (bIsIFlagSet) {
                closeObjHeader.setIFlag(bIFlag);
            }
            return new PcepCloseMsgVer1(closeObjHeader, yReason, this.llOptionalTlv);
        }
        @Override
        public PcepObjectHeader getCloseObjHeader() {
            return this.closeObjHeader;
        }
        @Override
        public Builder setCloseObjHeader(PcepObjectHeader obj) {
            this.closeObjHeader = obj;
            this.bIsHeaderSet = true;
            return this;
        }
        @Override
        public byte getReason() {
            return this.yReason;
        }
        @Override
        public Builder setReason(byte value) {
            this.yReason = value;
            this.bIsReasonSet = true;
            return this;
        }
        @Override
        public Builder setOptionalTlv(LinkedList<PcepValueType> llOptionalTlv) {
            this.llOptionalTlv = llOptionalTlv;
            return this;
        }
        @Override
        public LinkedList<PcepValueType> getOptionalTlv() {
            return this.llOptionalTlv;
        }
        @Override
        public Builder setPFlag(boolean value) {
            this.bPFlag = value;
            this.bIsPFlagSet = true;
            return this;
        }
        @Override
        public Builder setIFlag(boolean value) {
            this.bIFlag = value;
            this.bIsIFlagSet = true;
            return this;
        }
    }
    @Override
    public void writeTo(ChannelBuffer cb) throws PcepParseException {
        WRITER.write(cb, this);
    }
    static final Writer WRITER = new Writer();
    /**
     * Writer class for writing close message to channel buffer.
     */
    static class Writer implements PcepMessageWriter<PcepCloseMsgVer1> {
        @Override
        public void write(ChannelBuffer cb, PcepCloseMsgVer1 message) throws PcepParseException {
            int startIndex = cb.writerIndex();
            // first 3 bits set to version
            cb.writeByte((byte) (PACKET_VERSION << SHIFT_FLAG));
            // message type
            cb.writeByte(MSG_TYPE.getType());
            // length is length of variable message, will be updated at the end
            // Store the position of message
            // length in buffer
            int msgLenIndex = cb.writerIndex();
            cb.writeShort((short) 0);
            int objStartIndex = cb.writerIndex();
            // objLenIndex is the buffer position of the object-length field
            // inside the just-written CLOSE object header.
            int objLenIndex = message.closeObjHeader.write(cb);
            if (objLenIndex <= 0) {
                throw new PcepParseException("Failed to write Close object header.");
            }
            // CLOSE object body: Reserved (2 bytes) and Flags (1 byte) are zero.
            cb.writeShort(0); // Reserved
            cb.writeByte(0); // Flags
            cb.writeByte(message.yReason);
            // Pack optional TLV
            packOptionalTlv(cb, message);
            // NOTE(review): the object length is written before padding is
            // appended below, so pad bytes are excluded from the object length
            // but included in the message length — confirm against peers.
            int length = cb.writerIndex() - objStartIndex;
            cb.setShort(objLenIndex, (short) length);
            // will be helpful during print().
            message.closeObjHeader.setObjLen((short) length);
            // As per RFC the length of object should be
            // multiples of 4
            int pad = length % 4;
            if (pad != 0) {
                pad = 4 - pad;
                for (int i = 0; i < pad; i++) {
                    cb.writeByte((byte) 0);
                }
                length = length + pad;
            }
            // update message length field
            length = cb.writerIndex() - startIndex;
            cb.setShort(msgLenIndex, (short) length);
        }
        /**
         * Writes each optional TLV of the message to the buffer, in order.
         *
         * @param cb channel buffer to write into
         * @param message message whose TLVs are written
         */
        public void packOptionalTlv(ChannelBuffer cb, PcepCloseMsgVer1 message) {
            LinkedList<PcepValueType> llOptionalTlv = message.llOptionalTlv;
            ListIterator<PcepValueType> listIterator = llOptionalTlv.listIterator();
            while (listIterator.hasNext()) {
                listIterator.next().write(cb);
            }
        }
    }
    @Override
    public PcepVersion getVersion() {
        return PcepVersion.PCEP_1;
    }
    @Override
    public PcepType getType() {
        return MSG_TYPE;
    }
    @Override
    public byte getReason() {
        return this.yReason;
    }
    @Override
    public void setReason(byte value) {
        this.yReason = value;
    }
    @Override
    public LinkedList<PcepValueType> getOptionalTlv() {
        return this.llOptionalTlv;
    }
    @Override
    public void setOptionalTlv(LinkedList<PcepValueType> llOptionalTlv) {
        this.llOptionalTlv = llOptionalTlv;
    }
    @Override
    public String toString() {
        return MoreObjects.toStringHelper(getClass())
                .add("closeObjectHeader", closeObjHeader).add("Reason", yReason)
                .add("OptionalTlvlist", llOptionalTlv).toString();
    }
}
| |
package org.apereo.cas.oidc.profile;
import org.apereo.cas.authentication.principal.Principal;
import org.apereo.cas.authentication.principal.PrincipalFactory;
import org.apereo.cas.authentication.principal.Service;
import org.apereo.cas.oidc.OidcConstants;
import org.apereo.cas.oidc.claims.BaseOidcScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcAddressScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcCustomScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcEmailScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcPhoneScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.OidcProfileScopeAttributeReleasePolicy;
import org.apereo.cas.oidc.claims.mapping.OidcAttributeToScopeClaimMapper;
import org.apereo.cas.services.ChainingAttributeReleasePolicy;
import org.apereo.cas.services.DenyAllAttributeReleasePolicy;
import org.apereo.cas.services.OidcRegisteredService;
import org.apereo.cas.services.RegisteredService;
import org.apereo.cas.services.ServicesManager;
import org.apereo.cas.support.oauth.profile.DefaultOAuth20ProfileScopeToAttributesFilter;
import org.apereo.cas.support.oauth.util.OAuth20Utils;
import org.jooq.lambda.Unchecked;
import org.pac4j.core.context.J2EContext;
import org.reflections.Reflections;
import org.reflections.scanners.SubTypesScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.reflections.util.FilterBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* This is {@link OidcProfileScopeToAttributesFilter}.
*
* @author Misagh Moayyed
* @since 5.1.0
*/
public class OidcProfileScopeToAttributesFilter extends DefaultOAuth20ProfileScopeToAttributesFilter {
    private static final Logger LOGGER = LoggerFactory.getLogger(OidcProfileScopeToAttributesFilter.class);

    /** Scope name -> attribute release policy, for both built-in and user-defined scopes. */
    private final Map<String, BaseOidcScopeAttributeReleasePolicy> filters;
    /** User-defined scope policies supplied via configuration. */
    private final Collection<BaseOidcScopeAttributeReleasePolicy> userScopes;
    private final OidcAttributeToScopeClaimMapper attributeToScopeClaimMapper;
    private final PrincipalFactory principalFactory;
    private final ServicesManager servicesManager;

    /**
     * Builds the filter, discovering every built-in scope release policy on the
     * classpath and registering the user-defined ones on top of them.
     *
     * @param principalFactory factory used to rebuild the filtered principal
     * @param servicesManager registry used to persist reconciled services
     * @param userScopes user-defined scope release policies
     * @param attributeToScopeClaimMapper mapper between attributes and claims
     */
    public OidcProfileScopeToAttributesFilter(final PrincipalFactory principalFactory,
                                              final ServicesManager servicesManager,
                                              final Collection<BaseOidcScopeAttributeReleasePolicy> userScopes,
                                              final OidcAttributeToScopeClaimMapper attributeToScopeClaimMapper) {
        this.attributeToScopeClaimMapper = attributeToScopeClaimMapper;
        this.filters = new HashMap<>();

        // Scan the claims package for every BaseOidcScopeAttributeReleasePolicy
        // subtype and index each instance by its scope name.
        final String packageName = BaseOidcScopeAttributeReleasePolicy.class.getPackage().getName();
        final Reflections reflections =
                new Reflections(new ConfigurationBuilder()
                        .filterInputsBy(new FilterBuilder().includePackage(packageName))
                        .setUrls(ClasspathHelper.forPackage(packageName))
                        .setScanners(new SubTypesScanner(true)));
        final Set<Class<? extends BaseOidcScopeAttributeReleasePolicy>> subTypes =
                reflections.getSubTypesOf(BaseOidcScopeAttributeReleasePolicy.class);
        subTypes.forEach(Unchecked.consumer(t -> {
            final BaseOidcScopeAttributeReleasePolicy ex = t.newInstance();
            filters.put(ex.getScopeName(), ex);
        }));

        // User-defined policies override discovered ones with the same scope name.
        userScopes.forEach(t -> filters.put(t.getScopeName(), t));
        this.principalFactory = principalFactory;
        this.servicesManager = servicesManager;
        this.userScopes = userScopes;
    }

    /**
     * Filters the principal's attributes down to those released by the scopes
     * requested in the context plus those statically defined on the service.
     * Non-OIDC services, or requests without the {@code openid} scope, pass
     * through the parent OAuth filtering unchanged.
     */
    @Override
    public Principal filter(final Service service, final Principal profile,
                            final RegisteredService registeredService, final J2EContext context) {
        final Principal principal = super.filter(service, profile, registeredService, context);
        if (registeredService instanceof OidcRegisteredService) {
            final OidcRegisteredService oidcService = (OidcRegisteredService) registeredService;
            // Union of scopes requested at runtime and scopes defined on the service.
            final Collection<String> scopes = new ArrayList<>(OAuth20Utils.getRequestedScopes(context));
            scopes.addAll(oidcService.getScopes());
            if (!scopes.contains(OidcConstants.OPENID)) {
                LOGGER.debug("Request does not indicate a scope [{}] that can identify OpenID Connect", scopes);
                return principal;
            }
            final Map<String, Object> attributes = new HashMap<>();
            filterAttributesByScope(scopes, attributes, principal, service, oidcService);
            return this.principalFactory.createPrincipal(profile.getId(), attributes);
        }
        return principal;
    }

    /**
     * Collects into {@code attributes} everything released by the policies
     * registered for the given scopes; unknown scopes are ignored.
     */
    private void filterAttributesByScope(final Collection<String> stream,
                                         final Map<String, Object> attributes,
                                         final Principal principal,
                                         final Service service,
                                         final RegisteredService registeredService) {
        stream.stream()
                .distinct()
                .filter(this.filters::containsKey)
                .forEach(s -> {
                    final BaseOidcScopeAttributeReleasePolicy policy = filters.get(s);
                    attributes.putAll(policy.getAttributes(principal, service, registeredService));
                });
    }

    /**
     * Rebuilds the service's attribute release policy from its declared scopes
     * and saves the service back into the registry if anything changed.
     */
    @Override
    public void reconcile(final RegisteredService service) {
        if (!(service instanceof OidcRegisteredService)) {
            super.reconcile(service);
            return;
        }
        LOGGER.debug("Reconciling scopes and claims for [{}]", service.getServiceId());
        final List<String> otherScopes = new ArrayList<>();
        final ChainingAttributeReleasePolicy policy = new ChainingAttributeReleasePolicy();
        final OidcRegisteredService oidc = OidcRegisteredService.class.cast(service);
        oidc.getScopes().forEach(s -> {
            LOGGER.debug("Reviewing scope [{}] for [{}]", s, service.getServiceId());
            switch (s.trim().toLowerCase()) {
                case OidcConstants.EMAIL:
                    LOGGER.debug("Mapped [{}] to attribute release policy [{}]", s, OidcEmailScopeAttributeReleasePolicy.class.getSimpleName());
                    policy.getPolicies().add(new OidcEmailScopeAttributeReleasePolicy());
                    break;
                case OidcConstants.ADDRESS:
                    LOGGER.debug("Mapped [{}] to attribute release policy [{}]", s,
                            OidcAddressScopeAttributeReleasePolicy.class.getSimpleName());
                    policy.getPolicies().add(new OidcAddressScopeAttributeReleasePolicy());
                    break;
                case OidcConstants.PROFILE:
                    LOGGER.debug("Mapped [{}] to attribute release policy [{}]", s,
                            OidcProfileScopeAttributeReleasePolicy.class.getSimpleName());
                    policy.getPolicies().add(new OidcProfileScopeAttributeReleasePolicy());
                    break;
                case OidcConstants.PHONE:
                    // Fixed copy-paste: log the phone policy that is actually added,
                    // not the profile policy.
                    LOGGER.debug("Mapped [{}] to attribute release policy [{}]", s,
                            OidcPhoneScopeAttributeReleasePolicy.class.getSimpleName());
                    policy.getPolicies().add(new OidcPhoneScopeAttributeReleasePolicy());
                    break;
                case OidcConstants.OFFLINE_ACCESS:
                    LOGGER.debug("Given scope [{}], service [{}] is marked to generate refresh tokens", s, service.getId());
                    oidc.setGenerateRefreshToken(Boolean.TRUE);
                    break;
                case OidcCustomScopeAttributeReleasePolicy.SCOPE_CUSTOM:
                    LOGGER.debug("Found custom scope [{}] for service [{}]", s, service.getId());
                    otherScopes.add(s.trim());
                    break;
                default:
                    LOGGER.debug("[{}] appears to be a user-defined scope and does not match any of the predefined standard scopes. "
                            + "Checking [{}] against user-defined scopes provided as [{}]", s, s, userScopes);
                    final BaseOidcScopeAttributeReleasePolicy userPolicy = userScopes.stream()
                            .filter(t -> t.getScopeName().equals(s.trim()))
                            .findFirst()
                            .orElse(null);
                    if (userPolicy != null) {
                        LOGGER.debug("Mapped user-defined scope [{}] to attribute release policy [{}]", s, userPolicy);
                        policy.getPolicies().add(userPolicy);
                    }
            }
        });
        otherScopes.remove(OidcConstants.OPENID);
        if (!otherScopes.isEmpty()) {
            LOGGER.debug("Mapped scopes [{}] to attribute release policy [{}]", otherScopes,
                    OidcCustomScopeAttributeReleasePolicy.class.getSimpleName());
            policy.getPolicies().add(new OidcCustomScopeAttributeReleasePolicy(otherScopes));
        }
        if (policy.getPolicies().isEmpty()) {
            LOGGER.warn("No attribute release policy could be determined based on given scopes. "
                    + "No claims/attributes will be released to [{}]", service.getId());
            oidc.setAttributeReleasePolicy(new DenyAllAttributeReleasePolicy());
        } else {
            oidc.setAttributeReleasePolicy(policy);
        }
        LOGGER.debug("Scope/claim reconciliation for service [{}] resulted in the following attribute release policy [{}]",
                service.getServiceId(), oidc.getAttributeReleasePolicy());
        if (!oidc.equals(service)) {
            LOGGER.debug("Saving scope/claim reconciliation results for service [{}] into registry", service.getServiceId());
            this.servicesManager.save(oidc);
            LOGGER.debug("Saved service [{}] into registry", service.getServiceId());
        } else {
            LOGGER.debug("No changes detected in service [{}] after scope/claim reconciliation", service.getId());
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tvm.android.demo;
import android.Manifest;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.content.res.AssetManager;
import android.app.AlertDialog;
import android.app.ProgressDialog;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Matrix;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.SystemClock;
import android.provider.MediaStore;
import android.support.v4.content.FileProvider;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.View;
import android.widget.ImageView;
import android.widget.TextView;
import android.widget.Toast;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.InputStream;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Vector;
import org.apache.tvm.Function;
import org.apache.tvm.Module;
import org.apache.tvm.NDArray;
import org.apache.tvm.TVMContext;
import org.apache.tvm.TVMValue;
import org.apache.tvm.TVMType;
public class MainActivity extends AppCompatActivity {
private static final String TAG = MainActivity.class.getSimpleName();
private static final int PERMISSIONS_REQUEST = 100;
private static final int PICTURE_FROM_GALLERY = 101;
private static final int PICTURE_FROM_CAMERA = 102;
private static final int IMAGE_PREVIEW_WIDTH = 960;
private static final int IMAGE_PREVIEW_HEIGHT = 720;
// TVM constants
private static final int OUTPUT_INDEX = 0;
private static final int IMG_CHANNEL = 3;
private static final String INPUT_NAME = "data";
// Configuration values for extraction model. Note that the graph, lib and params is not
// included with TVM and must be manually placed in the assets/ directory by the user.
// Graphs and models downloaded from https://github.com/pjreddie/darknet/blob/ may be
// converted e.g. via define_and_compile_model.py.
private static final boolean EXE_GPU = false;
private static final int MODEL_INPUT_SIZE = 224;
private static final String MODEL_CL_LIB_FILE = "file:///android_asset/deploy_lib_opencl.so";
private static final String MODEL_CPU_LIB_FILE = "file:///android_asset/deploy_lib_cpu.so";
private static final String MODEL_GRAPH_FILE = "file:///android_asset/deploy_graph.json";
private static final String MODEL_PARAM_FILE = "file:///android_asset/deploy_param.params";
private static final String MODEL_LABEL_FILE = "file:///android_asset/imagenet.shortnames.list";
private Uri mCameraImageUri;
private ImageView mImageView;
private TextView mResultView;
private AssetManager assetManager;
private Module graphRuntimeModule;
private Vector<String> labels = new Vector<String>();
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Toolbar toolbar = findViewById(R.id.toolbar);
setSupportActionBar(toolbar);
assetManager = getAssets();
mImageView = (ImageView) findViewById(R.id.imageView);
mResultView = (TextView) findViewById(R.id.resultTextView);
findViewById(R.id.btnPickImage).setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
showPictureDialog();
}
});
if (hasPermission()) {
// instantiate tvm runtime and setup environment on background after application begin
new LoadModleAsyncTask().execute();
} else {
requestPermission();
}
}
/*
Load precompiled model on TVM graph runtime and init the system.
*/
private class LoadModleAsyncTask extends AsyncTask<Void, Void, Integer> {
ProgressDialog dialog = new ProgressDialog(MainActivity.this);
@Override
protected Integer doInBackground(Void... args) {
// load synset name
String lableFilename = MODEL_LABEL_FILE.split("file:///android_asset/")[1];
Log.i(TAG, "Reading synset name from: " + lableFilename);
try {
String labelsContent = new String(getBytesFromFile(assetManager, lableFilename));
for (String line : labelsContent.split("\\r?\\n")) {
labels.add(line);
}
} catch (IOException e) {
Log.e(TAG, "Problem reading synset name file!" + e);
return -1;//failure
}
// load json graph
String modelGraph = null;
String graphFilename = MODEL_GRAPH_FILE.split("file:///android_asset/")[1];
Log.i(TAG, "Reading json graph from: " + graphFilename);
try {
modelGraph = new String(getBytesFromFile(assetManager, graphFilename));
} catch (IOException e) {
Log.e(TAG, "Problem reading json graph file!" + e);
return -1;//failure
}
// upload tvm compiled function on application cache folder
String libCacheFilePath = null;
String libFilename = EXE_GPU ? MODEL_CL_LIB_FILE.split("file:///android_asset/")[1] :
MODEL_CPU_LIB_FILE.split("file:///android_asset/")[1];
Log.i(TAG, "Uploading compiled function to cache folder");
try {
libCacheFilePath = getTempLibFilePath(libFilename);
byte[] modelLibByte = getBytesFromFile(assetManager, libFilename);
FileOutputStream fos = new FileOutputStream(libCacheFilePath);
fos.write(modelLibByte);
fos.close();
} catch (IOException e) {
Log.e(TAG, "Problem uploading compiled function!" + e);
return -1;//failure
}
// load parameters
byte[] modelParams = null;
String paramFilename = MODEL_PARAM_FILE.split("file:///android_asset/")[1];
try {
modelParams = getBytesFromFile(assetManager, paramFilename);
} catch (IOException e) {
Log.e(TAG, "Problem reading params file!" + e);
return -1;//failure
}
// create java tvm context
TVMContext tvmCtx = EXE_GPU ? TVMContext.opencl() : TVMContext.cpu();
// tvm module for compiled functions
Module modelLib = Module.load(libCacheFilePath);
// get global function module for graph runtime
Function runtimeCreFun = Function.getFunction("tvm.graph_runtime.create");
TVMValue runtimeCreFunRes = runtimeCreFun.pushArg(modelGraph)
.pushArg(modelLib)
.pushArg(tvmCtx.deviceType)
.pushArg(tvmCtx.deviceId)
.invoke();
graphRuntimeModule = runtimeCreFunRes.asModule();
// get the function from the module(load parameters)
Function loadParamFunc = graphRuntimeModule.getFunction("load_params");
loadParamFunc.pushArg(modelParams).invoke();
// release tvm local variables
modelLib.release();
loadParamFunc.release();
runtimeCreFun.release();
return 0;//success
}
@Override
protected void onPreExecute() {
dialog.setCancelable(false);
dialog.setMessage("Loading Model...");
dialog.show();
super.onPreExecute();
}
@Override
protected void onPostExecute(Integer status) {
if (dialog != null && dialog.isShowing()) {
dialog.dismiss();
}
if (status != 0) {
showDialog("Error", "Fail to initialized model, check compiled model");
}
}
}
    /*
     Executes prediction on the TVM graph runtime for each input bitmap:
     re-encodes/decodes the JPEG, center-crops to the model input size,
     normalizes pixels, transposes HWC -> CHW, runs the graph and reports
     the arg-max label and elapsed time. doInBackground returns 0 when the
     runtime was available, -1 when the model has not been loaded yet.
     */
    private class ModelRunAsyncTask extends AsyncTask<Bitmap, Void, Integer> {
        ProgressDialog dialog = new ProgressDialog(MainActivity.this);
        @Override
        protected Integer doInBackground(Bitmap... bitmaps) {
            if (null != graphRuntimeModule) {
                int count = bitmaps.length;
                for (int i = 0 ; i < count ; i++) {
                    long processingTimeMs = SystemClock.uptimeMillis();
                    Log.i(TAG, "Decode JPEG image content");
                    // extract the jpeg content
                    ByteArrayOutputStream stream = new ByteArrayOutputStream();
                    bitmaps[i].compress(Bitmap.CompressFormat.JPEG,100,stream);
                    byte[] byteArray = stream.toByteArray();
                    Bitmap imageBitmap = BitmapFactory.decodeByteArray(byteArray, 0, byteArray.length);
                    // crop input image at centre to model input size
                    // commecial deploy note:: instead of cropying image do resize
                    // image to model input size so we never lost the image content
                    Bitmap cropImageBitmap = Bitmap.createBitmap(MODEL_INPUT_SIZE, MODEL_INPUT_SIZE, Bitmap.Config.ARGB_8888);
                    Matrix frameToCropTransform = getTransformationMatrix(imageBitmap.getWidth(), imageBitmap.getHeight(),
                            MODEL_INPUT_SIZE, MODEL_INPUT_SIZE, 0, true);
                    Canvas canvas = new Canvas(cropImageBitmap);
                    canvas.drawBitmap(imageBitmap, frameToCropTransform, null);
                    // image pixel int values (ARGB packed, one int per pixel)
                    int[] pixelValues = new int[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE];
                    // image RGB float values, interleaved HWC layout
                    float[] imgRgbValues = new float[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE * IMG_CHANNEL];
                    // image RGB transpose float values, planar CHW layout
                    float[] imgRgbTranValues = new float[MODEL_INPUT_SIZE * MODEL_INPUT_SIZE * IMG_CHANNEL];
                    // pre-process the image data from 0-255 int to normalized float based on the
                    // provided parameters. Bit shifts unpack R (bits 16-23), G (8-15), B (0-7).
                    cropImageBitmap.getPixels(pixelValues, 0, MODEL_INPUT_SIZE, 0, 0, MODEL_INPUT_SIZE, MODEL_INPUT_SIZE);
                    for (int j = 0; j < pixelValues.length; ++j) {
                        imgRgbValues[j * 3 + 0] = ((pixelValues[j] >> 16) & 0xFF)/255.0f;
                        imgRgbValues[j * 3 + 1] = ((pixelValues[j] >> 8) & 0xFF)/255.0f;
                        imgRgbValues[j * 3 + 2] = (pixelValues[j] & 0xFF)/255.0f;
                    }
                    // pre-process the image rgb data transpose based on the provided parameters:
                    // dst is CHW-indexed (k=channel, l=row, m=col), src is HWC-indexed.
                    for (int k = 0; k < IMG_CHANNEL; ++k) {
                        for (int l = 0; l < MODEL_INPUT_SIZE; ++l) {
                            for (int m = 0; m < MODEL_INPUT_SIZE; ++m) {
                                int dst_index = m + MODEL_INPUT_SIZE*l + MODEL_INPUT_SIZE*MODEL_INPUT_SIZE*k;
                                int src_index = k + IMG_CHANNEL*m + IMG_CHANNEL*MODEL_INPUT_SIZE*l;
                                imgRgbTranValues[dst_index] = imgRgbValues[src_index];
                            }
                        }
                    }
                    // get the function from the module(set input data)
                    Log.i(TAG, "set input data");
                    NDArray inputNdArray = NDArray.empty(new long[]{1, IMG_CHANNEL, MODEL_INPUT_SIZE, MODEL_INPUT_SIZE}, new TVMType("float32"));;
                    inputNdArray.copyFrom(imgRgbTranValues);
                    Function setInputFunc = graphRuntimeModule.getFunction("set_input");
                    setInputFunc.pushArg(INPUT_NAME).pushArg(inputNdArray).invoke();
                    // release tvm local variables
                    inputNdArray.release();
                    setInputFunc.release();
                    // get the function from the module(run it)
                    Log.i(TAG, "run function on target");
                    Function runFunc = graphRuntimeModule.getFunction("run");
                    runFunc.invoke();
                    // release tvm local variables
                    runFunc.release();
                    // get the function from the module(get output data)
                    Log.i(TAG, "get output data");
                    NDArray outputNdArray = NDArray.empty(new long[]{1, 1000}, new TVMType("float32"));
                    Function getOutputFunc = graphRuntimeModule.getFunction("get_output");
                    getOutputFunc.pushArg(OUTPUT_INDEX).pushArg(outputNdArray).invoke();
                    float[] output = outputNdArray.asFloatArray();
                    // release tvm local variables
                    outputNdArray.release();
                    getOutputFunc.release();
                    // display the result from extracted output data
                    if (null != output) {
                        int maxPosition = -1;
                        float maxValue = 0;
                        // NOTE(review): if every output value is <= 0, maxPosition
                        // stays -1 and labels.get(-1) below would throw — confirm
                        // the model emits non-negative scores.
                        for (int j = 0; j < output.length; ++j) {
                            if (output[j] > maxValue) {
                                maxValue = output[j];
                                maxPosition = j;
                            }
                        }
                        processingTimeMs = SystemClock.uptimeMillis() - processingTimeMs;
                        String label = "Prediction Result : ";
                        label += labels.size() > maxPosition ? labels.get(maxPosition) : "unknown";
                        label += "\nPrediction Time : " + processingTimeMs + "ms";
                        mResultView.setText(label);
                    }
                    Log.i(TAG, "prediction finished");
                }
                return 0;
            }
            return -1;
        }
        @Override
        protected void onPreExecute() {
            dialog.setCancelable(false);
            dialog.setMessage("Prediction running on image...");
            dialog.show();
            super.onPreExecute();
        }
        @Override
        protected void onPostExecute(Integer status) {
            if (dialog != null && dialog.isShowing()) {
                dialog.dismiss();
            }
            if (status != 0) {
                showDialog("Error", "Fail to predict image, GraphRuntime exception");
            }
        }
    }
@Override
protected void onDestroy() {
// release tvm local variables
if (null != graphRuntimeModule)
graphRuntimeModule.release();
super.onDestroy();
}
/**
 * Read a file from assets and return its complete content as a byte array.
 *
 * @param assets The asset manager to be used to load assets.
 * @param fileName The filepath of read file.
 * @return byte[] file content
 * @throws IOException if the asset cannot be opened or read
 */
private byte[] getBytesFromFile(AssetManager assets, String fileName) throws IOException {
    // BUGFIX: the previous implementation sized the destination array with
    // is.available(), which is only an estimate of the remaining bytes; if it
    // under-reported, the file was silently truncated. Stream into a growing
    // buffer instead so the entire asset is always read.
    InputStream is = assets.open(fileName);
    try {
        java.io.ByteArrayOutputStream buffer =
                new java.io.ByteArrayOutputStream(Math.max(is.available(), 1024));
        byte[] chunk = new byte[8192];
        int numRead;
        while ((numRead = is.read(chunk)) != -1) {
            buffer.write(chunk, 0, numRead);
        }
        return buffer.toByteArray();
    } finally {
        is.close();
    }
}
/**
 * Dialog show pick option for select image from Gallery or Camera.
 */
private void showPictureDialog() {
    final String[] pictureDialogItems = {
            "Select photo from gallery",
            "Capture photo from camera" };
    AlertDialog.Builder pictureDialog = new AlertDialog.Builder(this);
    pictureDialog.setTitle("Select Action");
    pictureDialog.setItems(pictureDialogItems, new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int which) {
            // Item index matches the order of pictureDialogItems above.
            if (which == 0) {
                choosePhotoFromGallery();
            } else if (which == 1) {
                takePhotoFromCamera();
            }
        }
    });
    pictureDialog.show();
}
/**
 * Request to pick image from Gallery.
 * The selected image URI is delivered to onActivityResult with the
 * PICTURE_FROM_GALLERY request code.
 */
public void choosePhotoFromGallery() {
    Intent galleryIntent = new Intent(Intent.ACTION_PICK,
            android.provider.MediaStore.Images.Media.EXTERNAL_CONTENT_URI);
    startActivityForResult(galleryIntent, PICTURE_FROM_GALLERY);
}
/**
 * Request to capture image from Camera.
 * The capture target URI is stored in {@code mCameraImageUri} and read back in
 * onActivityResult with the PICTURE_FROM_CAMERA request code.
 */
private void takePhotoFromCamera() {
    // BUGFIX: createImageFile() returns null when the image file cannot be
    // created; the previous code dereferenced that null and crashed with an NPE.
    File photoFile = createImageFile();
    if (photoFile == null) {
        Toast.makeText(this, "Unable to create image file for camera capture", Toast.LENGTH_LONG).show();
        return;
    }
    Intent intent = new Intent(android.provider.MediaStore.ACTION_IMAGE_CAPTURE);
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N) {
        mCameraImageUri = Uri.fromFile(photoFile);
    } else {
        // file:// URIs are rejected from N onwards; route through the app's FileProvider.
        mCameraImageUri = FileProvider.getUriForFile(getApplicationContext(),
                getApplicationContext().getPackageName() + ".provider", photoFile);
    }
    intent.putExtra(MediaStore.EXTRA_OUTPUT, mCameraImageUri);
    startActivityForResult(intent, PICTURE_FROM_CAMERA);
}
/**
 * Receives the picked/captured image, shows a scaled preview and kicks off
 * the background prediction.
 */
@Override
public void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (resultCode == RESULT_CANCELED) {
        return;
    }
    // Resolve the image URI from whichever picker produced the result.
    Uri contentURI = null;
    if (requestCode == PICTURE_FROM_GALLERY) {
        if (data != null) {
            contentURI = data.getData();
        }
    } else if (requestCode == PICTURE_FROM_CAMERA) {
        contentURI = mCameraImageUri;
    }
    if (null != contentURI) {
        try {
            Bitmap bitmap = MediaStore.Images.Media.getBitmap(this.getContentResolver(), contentURI);
            // BUGFIX: createScaledBitmap takes (src, dstWidth, dstHeight, filter);
            // the width and height arguments were previously swapped.
            Bitmap scaled = Bitmap.createScaledBitmap(bitmap, IMAGE_PREVIEW_WIDTH, IMAGE_PREVIEW_HEIGHT, true);
            mImageView.setImageBitmap(scaled);
            new ModelRunAsyncTask().execute(scaled);
        } catch (IOException e) {
            Log.e(TAG, "Failed to load image from " + contentURI, e);
        }
    }
}
/**
 * Get application cache path where to place compiled functions.
 *
 * @param fileName library file name.
 * @return String path of fileName inside a freshly created temporary directory
 * @throws IOException when the temporary directory cannot be created
 */
private final String getTempLibFilePath(String fileName) throws IOException {
    // createTempFile yields a unique name; swap the file for a directory of the same name.
    File tempDir = File.createTempFile("tvm4j_demo_", "");
    boolean replacedWithDir = tempDir.delete() && tempDir.mkdir();
    if (!replacedWithDir) {
        throw new IOException("Couldn't create directory " + tempDir.getAbsolutePath());
    }
    return tempDir.getPath() + File.separator + fileName;
}
/**
 * Create image file under storage where camera application save captured image.
 *
 * @return File image file under sdcard where camera can save image, or
 *         {@code null} when the file could not be created (callers must check).
 */
private File createImageFile() {
    // Timestamp-based prefix keeps captured files unique and sortable.
    String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    String imageFileName = "JPEG_" + timeStamp + "_";
    File storageDir = Environment.getExternalStoragePublicDirectory(
            Environment.DIRECTORY_PICTURES);
    try {
        return File.createTempFile(
                imageFileName, // prefix
                ".jpg",        // suffix
                storageDir     // directory
        );
    } catch (IOException e) {
        // IMPROVED: log through the app logger rather than printStackTrace so
        // the failure is visible in logcat under the app TAG.
        Log.e(TAG, "Failed to create image file in " + storageDir, e);
    }
    return null;
}
/**
 * Show dialog to user.
 *
 * @param title dialog display title
 * @param msg dialog display message
 */
private void showDialog(String title, String msg) {
    // A single OK button dismisses the dialog and then closes this activity.
    DialogInterface.OnClickListener okListener = new DialogInterface.OnClickListener() {
        public void onClick(DialogInterface dialog, int id) {
            dialog.cancel();
            finish();
        }
    };
    new AlertDialog.Builder(this)
            .setTitle(title)
            .setMessage(msg)
            .setCancelable(true)
            .setNeutralButton(android.R.string.ok, okListener)
            .create()
            .show();
}
/**
 * Handles the camera/storage permission grant result: starts model loading
 * when every requested permission was granted, otherwise re-requests them.
 */
@Override
public void onRequestPermissionsResult(final int requestCode, final String[] permissions, final int[] grantResults) {
    if (requestCode != PERMISSIONS_REQUEST) {
        return;
    }
    // BUGFIX: the previous code indexed grantResults[1] after only checking
    // length > 0, throwing ArrayIndexOutOfBoundsException when the request was
    // cancelled or fewer results were delivered. Check every entry instead.
    boolean allGranted = grantResults.length > 0;
    for (int result : grantResults) {
        if (result != PackageManager.PERMISSION_GRANTED) {
            allGranted = false;
            break;
        }
    }
    if (allGranted) {
        // instantiate tvm runtime and setup environment on background after application begin
        new LoadModleAsyncTask().execute();
    } else {
        requestPermission();
    }
}
/**
 * Whether application has required mandatory permissions to run.
 */
private boolean hasPermission() {
    // Runtime permissions only exist from Android M onwards; earlier releases
    // grant everything at install time.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
        return true;
    }
    if (checkSelfPermission(Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
        return false;
    }
    return checkSelfPermission(Manifest.permission.WRITE_EXTERNAL_STORAGE) == PackageManager.PERMISSION_GRANTED;
}
/**
 * Request required mandatory permission for application to run.
 */
private void requestPermission() {
    // Runtime permission requests are a no-op before Android M.
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.M) {
        return;
    }
    boolean shouldExplain = shouldShowRequestPermissionRationale(Manifest.permission.CAMERA)
            || shouldShowRequestPermissionRationale(Manifest.permission.WRITE_EXTERNAL_STORAGE);
    if (shouldExplain) {
        Toast.makeText(this,
                "Camera AND storage permission are required for this demo", Toast.LENGTH_LONG).show();
    }
    requestPermissions(new String[] {Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSIONS_REQUEST);
}
/**
 * Returns a transformation matrix from one reference frame into another.
 * Handles cropping (if maintaining aspect ratio is desired) and rotation.
 *
 * @param srcWidth Width of source frame.
 * @param srcHeight Height of source frame.
 * @param dstWidth Width of destination frame.
 * @param dstHeight Height of destination frame.
 * @param applyRotation Amount of rotation to apply from one frame to another.
 *                      Must be a multiple of 90.
 * @param maintainAspectRatio If true, will ensure that scaling in x and y remains constant,
 *                            cropping the image if necessary.
 * @return The transformation fulfilling the desired requirements.
 */
public static Matrix getTransformationMatrix(
        final int srcWidth,
        final int srcHeight,
        final int dstWidth,
        final int dstHeight,
        final int applyRotation,
        final boolean maintainAspectRatio) {
    final Matrix matrix = new Matrix();
    if (applyRotation != 0) {
        if (applyRotation % 90 != 0) {
            // BUGFIX: the old message mixed a %d placeholder with string
            // concatenation and logged the literal "%d % 90 != 0".
            Log.w(TAG, "Rotation of " + applyRotation + " is not a multiple of 90");
        }
        // Translate so center of image is at origin.
        matrix.postTranslate(-srcWidth / 2.0f, -srcHeight / 2.0f);
        // Rotate around origin.
        matrix.postRotate(applyRotation);
    }
    // Account for the already applied rotation, if any, and then determine how
    // much scaling is needed for each axis. 90/270 degree rotations swap axes.
    final boolean transpose = (Math.abs(applyRotation) + 90) % 180 == 0;
    final int inWidth = transpose ? srcHeight : srcWidth;
    final int inHeight = transpose ? srcWidth : srcHeight;
    // Apply scaling if necessary.
    if (inWidth != dstWidth || inHeight != dstHeight) {
        final float scaleFactorX = dstWidth / (float) inWidth;
        final float scaleFactorY = dstHeight / (float) inHeight;
        if (maintainAspectRatio) {
            // Scale by the maximum factor so that dst is filled completely while
            // maintaining the aspect ratio; some of the image may fall off the edge.
            // (FIXED comment: it previously said "minimum", contradicting Math.max.)
            final float scaleFactor = Math.max(scaleFactorX, scaleFactorY);
            matrix.postScale(scaleFactor, scaleFactor);
        } else {
            // Scale exactly to fill dst from src.
            matrix.postScale(scaleFactorX, scaleFactorY);
        }
    }
    if (applyRotation != 0) {
        // Translate back from origin centered reference to destination frame.
        matrix.postTranslate(dstWidth / 2.0f, dstHeight / 2.0f);
    }
    return matrix;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/route53-2013-04-01/ListHostedZonesByVPC" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListHostedZonesByVPCResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * One <code>HostedZoneSummary</code> element for each hosted zone that the specified Amazon VPC is associated
     * with; each element carries the hosted zone name and ID plus information about who owns the hosted zone.
     */
    private com.amazonaws.internal.SdkInternalList<HostedZoneSummary> hostedZoneSummaries;

    /** The <code>MaxItems</code> value from the most recent <code>ListHostedZonesByVPC</code> request. */
    private String maxItems;

    /** The <code>NextToken</code> value to use in the next <code>ListHostedZonesByVPC</code> request. */
    private String nextToken;

    /**
     * Returns the hosted zone summaries, lazily creating an empty list the first time they are requested.
     *
     * @return one <code>HostedZoneSummary</code> per hosted zone associated with the VPC; never <code>null</code>.
     */
    public java.util.List<HostedZoneSummary> getHostedZoneSummaries() {
        if (this.hostedZoneSummaries == null) {
            this.hostedZoneSummaries = new com.amazonaws.internal.SdkInternalList<HostedZoneSummary>();
        }
        return this.hostedZoneSummaries;
    }

    /**
     * Replaces the hosted zone summaries with a copy of the given collection, or clears them when <code>null</code>.
     *
     * @param hostedZoneSummaries one <code>HostedZoneSummary</code> per hosted zone associated with the VPC.
     */
    public void setHostedZoneSummaries(java.util.Collection<HostedZoneSummary> hostedZoneSummaries) {
        this.hostedZoneSummaries = (hostedZoneSummaries == null)
                ? null
                : new com.amazonaws.internal.SdkInternalList<HostedZoneSummary>(hostedZoneSummaries);
    }

    /**
     * Appends the given summaries to the existing list (creating it if necessary).
     * <p>
     * <b>NOTE:</b> use {@link #setHostedZoneSummaries(java.util.Collection)} or
     * {@link #withHostedZoneSummaries(java.util.Collection)} if you want to override the existing values.
     *
     * @param hostedZoneSummaries summaries to append.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListHostedZonesByVPCResult withHostedZoneSummaries(HostedZoneSummary... hostedZoneSummaries) {
        if (this.hostedZoneSummaries == null) {
            setHostedZoneSummaries(new com.amazonaws.internal.SdkInternalList<HostedZoneSummary>(hostedZoneSummaries.length));
        }
        for (HostedZoneSummary summary : hostedZoneSummaries) {
            this.hostedZoneSummaries.add(summary);
        }
        return this;
    }

    /**
     * Replaces the hosted zone summaries with the given collection.
     *
     * @param hostedZoneSummaries one <code>HostedZoneSummary</code> per hosted zone associated with the VPC.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListHostedZonesByVPCResult withHostedZoneSummaries(java.util.Collection<HostedZoneSummary> hostedZoneSummaries) {
        setHostedZoneSummaries(hostedZoneSummaries);
        return this;
    }

    /**
     * @param maxItems the <code>MaxItems</code> value from the most recent <code>ListHostedZonesByVPC</code> request.
     */
    public void setMaxItems(String maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * @return the <code>MaxItems</code> value from the most recent <code>ListHostedZonesByVPC</code> request.
     */
    public String getMaxItems() {
        return this.maxItems;
    }

    /**
     * @param maxItems the <code>MaxItems</code> value from the most recent <code>ListHostedZonesByVPC</code> request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListHostedZonesByVPCResult withMaxItems(String maxItems) {
        setMaxItems(maxItems);
        return this;
    }

    /**
     * @param nextToken the <code>NextToken</code> value for the next <code>ListHostedZonesByVPC</code> request.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return the <code>NextToken</code> value for the next <code>ListHostedZonesByVPC</code> request.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * @param nextToken the <code>NextToken</code> value for the next <code>ListHostedZonesByVPC</code> request.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListHostedZonesByVPCResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getHostedZoneSummaries() != null) {
            sb.append("HostedZoneSummaries: ").append(getHostedZoneSummaries()).append(",");
        }
        if (getMaxItems() != null) {
            sb.append("MaxItems: ").append(getMaxItems()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    /** Null-tolerant equality used by {@link #equals(Object)}. */
    private static boolean fieldEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListHostedZonesByVPCResult)) {
            return false;
        }
        ListHostedZonesByVPCResult other = (ListHostedZonesByVPCResult) obj;
        return fieldEquals(other.getHostedZoneSummaries(), this.getHostedZoneSummaries())
                && fieldEquals(other.getMaxItems(), this.getMaxItems())
                && fieldEquals(other.getNextToken(), this.getNextToken());
    }

    /** Null-safe hash used by {@link #hashCode()}. */
    private static int hashOf(Object value) {
        return (value == null) ? 0 : value.hashCode();
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + hashOf(getHostedZoneSummaries());
        hashCode = prime * hashCode + hashOf(getMaxItems());
        hashCode = prime * hashCode + hashOf(getNextToken());
        return hashCode;
    }

    @Override
    public ListHostedZonesByVPCResult clone() {
        try {
            return (ListHostedZonesByVPCResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.mapper;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.DocIdSet;
import org.apache.lucene.search.DocIdSetIterator;
import org.apache.lucene.search.Filter;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchGenerationException;
import org.elasticsearch.Version;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.text.StringAndBytesText;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.concurrent.ReleasableLock;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.Mapping.SourceTransform;
import org.elasticsearch.index.mapper.internal.AllFieldMapper;
import org.elasticsearch.index.mapper.internal.FieldNamesFieldMapper;
import org.elasticsearch.index.mapper.internal.IdFieldMapper;
import org.elasticsearch.index.mapper.internal.IndexFieldMapper;
import org.elasticsearch.index.mapper.internal.ParentFieldMapper;
import org.elasticsearch.index.mapper.internal.RoutingFieldMapper;
import org.elasticsearch.index.mapper.internal.SourceFieldMapper;
import org.elasticsearch.index.mapper.internal.TTLFieldMapper;
import org.elasticsearch.index.mapper.internal.TimestampFieldMapper;
import org.elasticsearch.index.mapper.internal.TypeFieldMapper;
import org.elasticsearch.index.mapper.internal.UidFieldMapper;
import org.elasticsearch.index.mapper.internal.VersionFieldMapper;
import org.elasticsearch.index.mapper.object.ObjectMapper;
import org.elasticsearch.index.mapper.object.RootObjectMapper;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptService;
import org.elasticsearch.script.ScriptService.ScriptType;
import org.elasticsearch.search.internal.SearchContext;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
*
*/
public class DocumentMapper implements ToXContent {
public static class Builder {

    // Metadata ("root") mappers keyed by implementation class. LinkedHashMap:
    // the insertion order established in the constructor below is significant
    // (UID registered first, _field_names last).
    private Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> rootMappers = new LinkedHashMap<>();
    // Source transformation scripts applied before indexing, in insertion order.
    private List<SourceTransform> sourceTransforms = new ArrayList<>(1);
    private final Settings indexSettings;
    private final RootObjectMapper rootObjectMapper;
    // _meta content attached to the mapping; empty by default.
    private ImmutableMap<String, Object> meta = ImmutableMap.of();
    private final Mapper.BuilderContext builderContext;

    public Builder(Settings indexSettings, RootObjectMapper.Builder builder, MapperService mapperService) {
        this.indexSettings = indexSettings;
        this.builderContext = new Mapper.BuilderContext(indexSettings, new ContentPath(1));
        this.rootObjectMapper = builder.build(builderContext);
        // TODO: find a cleaner way to handle existing root mappings and using their field type as the default.
        // the vast majority of these root mappers only need the existing type for backwards compatibility, since
        // the pre 2.0 field type settings could be modified
        // UID first so it will be the first stored field to load (so will benefit from "fields: []" early termination
        this.rootMappers.put(UidFieldMapper.class, new UidFieldMapper(indexSettings, mapperService.fullName(UidFieldMapper.NAME)));
        this.rootMappers.put(IdFieldMapper.class, new IdFieldMapper(indexSettings, mapperService.fullName(IdFieldMapper.NAME)));
        this.rootMappers.put(RoutingFieldMapper.class, new RoutingFieldMapper(indexSettings, mapperService.fullName(RoutingFieldMapper.NAME)));
        // add default mappers, order is important (for example analyzer should come before the rest to set context.analyzer)
        this.rootMappers.put(IndexFieldMapper.class, new IndexFieldMapper(indexSettings, mapperService.fullName(IndexFieldMapper.NAME)));
        this.rootMappers.put(SourceFieldMapper.class, new SourceFieldMapper(indexSettings));
        this.rootMappers.put(TypeFieldMapper.class, new TypeFieldMapper(indexSettings, mapperService.fullName(TypeFieldMapper.NAME)));
        this.rootMappers.put(AllFieldMapper.class, new AllFieldMapper(indexSettings, mapperService.fullName(AllFieldMapper.NAME)));
        this.rootMappers.put(TimestampFieldMapper.class, new TimestampFieldMapper(indexSettings, mapperService.fullName(TimestampFieldMapper.NAME)));
        this.rootMappers.put(TTLFieldMapper.class, new TTLFieldMapper(indexSettings));
        this.rootMappers.put(VersionFieldMapper.class, new VersionFieldMapper(indexSettings));
        this.rootMappers.put(ParentFieldMapper.class, new ParentFieldMapper(indexSettings, mapperService.fullName(ParentFieldMapper.NAME), /* parent type */builder.name()));
        // _field_names last so that it can see all other fields
        this.rootMappers.put(FieldNamesFieldMapper.class, new FieldNamesFieldMapper(indexSettings, mapperService.fullName(FieldNamesFieldMapper.NAME)));
    }

    /** Attaches the given _meta map to the resulting mapping. */
    public Builder meta(ImmutableMap<String, Object> meta) {
        this.meta = meta;
        return this;
    }

    /** Registers (or replaces) a metadata field mapper built from the given builder. */
    public Builder put(MetadataFieldMapper.Builder<?, ?> mapper) {
        MetadataFieldMapper metadataMapper = mapper.build(builderContext);
        rootMappers.put(metadataMapper.getClass(), metadataMapper);
        return this;
    }

    /** Adds a script-based source transform, run on the document source before indexing. */
    public Builder transform(ScriptService scriptService, Script script) {
        sourceTransforms.add(new ScriptTransform(scriptService, script));
        return this;
    }

    /**
     * @deprecated Use {@link #transform(ScriptService, Script)} instead.
     */
    @Deprecated
    public Builder transform(ScriptService scriptService, String script, ScriptType scriptType, String language,
                             Map<String, Object> parameters) {
        sourceTransforms.add(new ScriptTransform(scriptService, new Script(script, scriptType, language, parameters)));
        return this;
    }

    /** Builds the DocumentMapper; the root object mapper must already be set. */
    public DocumentMapper build(MapperService mapperService, DocumentMapperParser docMapperParser) {
        Preconditions.checkNotNull(rootObjectMapper, "Mapper builder must have the root object mapper set");
        return new DocumentMapper(mapperService, indexSettings, docMapperParser, rootObjectMapper, meta, rootMappers, sourceTransforms, mapperService.mappingLock);
    }
}
private final MapperService mapperService;
// Document type name and its pre-encoded text form.
private final String type;
private final StringAndBytesText typeText;
// Serialized form of the current mapping; rebuilt by refreshSource() after merges.
private volatile CompressedXContent mappingSource;
private final Mapping mapping;
private final DocumentParser documentParser;
// Volatile snapshots; replaced under mappingWriteLock in addMappers().
private volatile DocumentFieldMappers fieldMappers;
private volatile ImmutableMap<String, ObjectMapper> objectMappers = ImmutableMap.of();
private boolean hasNestedObjects = false;
// Write side of the shared mapping lock; the read side is handed to the DocumentParser.
private final ReleasableLock mappingWriteLock;
private final ReentrantReadWriteLock mappingLock;
/**
 * Builds the mapping view for a single document type: assembles the {@link Mapping},
 * wires up the document parser, and collects every object/field mapper reachable
 * from the root object mapper.
 */
public DocumentMapper(MapperService mapperService, @Nullable Settings indexSettings, DocumentMapperParser docMapperParser,
                      RootObjectMapper rootObjectMapper,
                      ImmutableMap<String, Object> meta,
                      Map<Class<? extends MetadataFieldMapper>, MetadataFieldMapper> rootMappers,
                      List<SourceTransform> sourceTransforms,
                      ReentrantReadWriteLock mappingLock) {
    this.mapperService = mapperService;
    this.type = rootObjectMapper.name();
    this.typeText = new StringAndBytesText(this.type);
    this.mapping = new Mapping(
            Version.indexCreated(indexSettings),
            rootObjectMapper,
            rootMappers.values().toArray(new MetadataFieldMapper[rootMappers.values().size()]),
            sourceTransforms.toArray(new SourceTransform[sourceTransforms.size()]),
            meta);
    // The parser holds the read side of the shared mapping lock; writers use mappingWriteLock.
    this.documentParser = new DocumentParser(indexSettings, docMapperParser, this, new ReleasableLock(mappingLock.readLock()));
    this.mappingWriteLock = new ReleasableLock(mappingLock.writeLock());
    this.mappingLock = mappingLock;
    if (rootMapper(ParentFieldMapper.class).active()) {
        // mark the routing field mapper as required
        rootMapper(RoutingFieldMapper.class).markAsRequired();
    }
    // collect all the mappers for this type
    List<ObjectMapper> newObjectMappers = new ArrayList<>();
    List<FieldMapper> newFieldMappers = new ArrayList<>();
    for (MetadataFieldMapper metadataMapper : this.mapping.metadataMappers) {
        if (metadataMapper instanceof FieldMapper) {
            newFieldMappers.add(metadataMapper);
        }
    }
    MapperUtils.collect(this.mapping.root, newObjectMappers, newFieldMappers);
    this.fieldMappers = new DocumentFieldMappers(docMapperParser.analysisService).copyAndAllAll(newFieldMappers);
    // Index object mappers by their full path for fast lookup.
    this.objectMappers = Maps.uniqueIndex(newObjectMappers, new Function<ObjectMapper, String>() {
        @Override
        public String apply(ObjectMapper mapper) {
            return mapper.fullPath();
        }
    });
    for (ObjectMapper objectMapper : newObjectMappers) {
        if (objectMapper.nested().isNested()) {
            hasNestedObjects = true;
        }
    }
    // Serialize the assembled mapping into mappingSource.
    refreshSource();
}
/** The full {@link Mapping} (root object mapper, metadata mappers, transforms, meta). */
public Mapping mapping() {
    return mapping;
}

/** The document type name handled by this mapper. */
public String type() {
    return this.type;
}

/** The document type as a pre-encoded {@link Text}. */
public Text typeText() {
    return this.typeText;
}

/** The _meta map attached to the mapping. */
public ImmutableMap<String, Object> meta() {
    return mapping.meta;
}

/** The serialized mapping, as last produced by refreshSource(). */
public CompressedXContent mappingSource() {
    return this.mappingSource;
}

/** The root object mapper of this type. */
public RootObjectMapper root() {
    return mapping.root;
}

/** Shortcut for the UID metadata mapper. */
public UidFieldMapper uidMapper() {
    return rootMapper(UidFieldMapper.class);
}

/** Looks up a metadata mapper by its class. */
@SuppressWarnings({"unchecked"})
public <T extends MetadataFieldMapper> T rootMapper(Class<T> type) {
    return mapping.rootMapper(type);
}

/** Shortcut for the index metadata mapper. */
public IndexFieldMapper indexMapper() {
    return rootMapper(IndexFieldMapper.class);
}

/** Shortcut for the type metadata mapper. */
public TypeFieldMapper typeMapper() {
    return rootMapper(TypeFieldMapper.class);
}

/** Shortcut for the source metadata mapper. */
public SourceFieldMapper sourceMapper() {
    return rootMapper(SourceFieldMapper.class);
}

/** Shortcut for the "all" metadata mapper. */
public AllFieldMapper allFieldMapper() {
    return rootMapper(AllFieldMapper.class);
}

/** Shortcut for the id metadata mapper. */
public IdFieldMapper idFieldMapper() {
    return rootMapper(IdFieldMapper.class);
}

/** Shortcut for the routing metadata mapper. */
public RoutingFieldMapper routingFieldMapper() {
    return rootMapper(RoutingFieldMapper.class);
}

/** Shortcut for the parent metadata mapper. */
public ParentFieldMapper parentFieldMapper() {
    return rootMapper(ParentFieldMapper.class);
}

/** Shortcut for the timestamp metadata mapper. */
public TimestampFieldMapper timestampFieldMapper() {
    return rootMapper(TimestampFieldMapper.class);
}

/** Shortcut for the TTL metadata mapper. */
public TTLFieldMapper TTLFieldMapper() {
    return rootMapper(TTLFieldMapper.class);
}

/** Shortcut for the index metadata mapper (duplicate of {@link #indexMapper()}). */
public IndexFieldMapper IndexFieldMapper() {
    return rootMapper(IndexFieldMapper.class);
}

/** A term query matching all documents of this type. */
public Query typeFilter() {
    return typeMapper().fieldType().termQuery(type, null);
}

/** Whether any object mapper of this type is nested. */
public boolean hasNestedObjects() {
    return hasNestedObjects;
}

/** All field mappers collected for this type. */
public DocumentFieldMappers mappers() {
    return this.fieldMappers;
}

/** All object mappers, keyed by full path. */
public ImmutableMap<String, ObjectMapper> objectMappers() {
    return this.objectMappers;
}

/** Parses a document given its raw source plus index/type/id coordinates. */
public ParsedDocument parse(String index, String type, String id, BytesReference source) throws MapperParsingException {
    return parse(SourceToParse.source(source).index(index).type(type).id(id));
}

/** Parses the given source into a {@link ParsedDocument}. */
public ParsedDocument parse(SourceToParse source) throws MapperParsingException {
    return documentParser.parseDocument(source);
}
/**
 * Returns the best nested {@link ObjectMapper} instances that is in the scope of the specified nested docId.
 * "Best" is the deepest matching nested mapper (the one with the longest full path).
 */
public ObjectMapper findNestedObjectMapper(int nestedDocId, SearchContext sc, LeafReaderContext context) throws IOException {
    ObjectMapper nestedObjectMapper = null;
    for (ObjectMapper objectMapper : objectMappers().values()) {
        if (!objectMapper.nested().isNested()) {
            continue;
        }
        Filter filter = objectMapper.nestedTypeFilter();
        if (filter == null) {
            continue;
        }
        // We can pass down 'null' as acceptedDocs, because nestedDocId is a doc to be fetched and
        // therefor is guaranteed to be a live doc.
        DocIdSet nestedTypeSet = filter.getDocIdSet(context, null);
        if (nestedTypeSet == null) {
            continue;
        }
        DocIdSetIterator iterator = nestedTypeSet.iterator();
        if (iterator == null) {
            continue;
        }
        // advance() lands exactly on nestedDocId iff this mapper's filter matches the doc.
        if (iterator.advance(nestedDocId) == nestedDocId) {
            if (nestedObjectMapper == null) {
                nestedObjectMapper = objectMapper;
            } else {
                // Prefer the mapper with the longer full path (the more deeply nested one).
                if (nestedObjectMapper.fullPath().length() < objectMapper.fullPath().length()) {
                    nestedObjectMapper = objectMapper;
                }
            }
        }
    }
    return nestedObjectMapper;
}
/**
 * Returns the parent {@link ObjectMapper} instance of the specified object mapper or <code>null</code> if there
 * isn't any (i.e. the mapper's full path has no dot-separated parent segment).
 */
// TODO: We should add: ObjectMapper#getParentObjectMapper()
public ObjectMapper findParentObjectMapper(ObjectMapper objectMapper) {
    String fullPath = objectMapper.fullPath();
    int lastDot = fullPath.lastIndexOf('.');
    if (lastDot == -1) {
        return null;
    }
    return objectMappers().get(fullPath.substring(0, lastDot));
}
/**
 * Transform the source when it is expressed as a map. This is public so that the source can also be
 * transformed when it is loaded (not only at index time).
 * @param sourceAsMap source to transform. This may be mutated by the script.
 * @return transformed version of the source. This may actually be the same object as sourceAsMap
 */
public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
    return DocumentParser.transformSourceAsMap(mapping, sourceAsMap);
}
/** Whether the given type is registered as a parent type in the owning mapper service. */
public boolean isParent(String type) {
    return mapperService.getParentTypes().contains(type);
}
/**
 * Adds freshly discovered object/field mappers to this document mapper and to the
 * index-wide mapper service. Must be called while holding the mapping write lock.
 */
private void addMappers(Collection<ObjectMapper> objectMappers, Collection<FieldMapper> fieldMappers, boolean updateAllTypes) {
    assert mappingLock.isWriteLockedByCurrentThread();
    // first ensure we don't have any incompatible new fields
    mapperService.checkNewMappersCompatibility(objectMappers, fieldMappers, updateAllTypes);
    // update mappers for this document type
    MapBuilder<String, ObjectMapper> builder = MapBuilder.newMapBuilder(this.objectMappers);
    for (ObjectMapper objectMapper : objectMappers) {
        builder.put(objectMapper.fullPath(), objectMapper);
        if (objectMapper.nested().isNested()) {
            hasNestedObjects = true;
        }
    }
    this.objectMappers = builder.immutableMap();
    this.fieldMappers = this.fieldMappers.copyAndAllAll(fieldMappers);
    // finally update for the entire index
    mapperService.addMappers(objectMappers, fieldMappers);
}
    /**
     * Merges the given mapping into this mapper's mapping under the mapping write lock. When
     * {@code simulate} is false the new object/field mappers produced by the merge are registered
     * and the serialized mapping source is refreshed; when true only the merge result is computed.
     */
    public MergeResult merge(Mapping mapping, boolean simulate, boolean updateAllTypes) {
        try (ReleasableLock lock = mappingWriteLock.acquire()) {
            final MergeResult mergeResult = new MergeResult(simulate, updateAllTypes);
            this.mapping.merge(mapping, mergeResult);
            if (simulate == false) {
                addMappers(mergeResult.getNewObjectMappers(), mergeResult.getNewFieldMappers(), updateAllTypes);
                refreshSource();
            }
            return mergeResult;
        }
    }
    /**
     * Re-serializes this mapper to JSON and stores the result as the compressed mapping source.
     *
     * @throws ElasticsearchGenerationException if serializing the mapping for this type fails
     */
    private void refreshSource() throws ElasticsearchGenerationException {
        try {
            mappingSource = new CompressedXContent(this, XContentType.JSON, ToXContent.EMPTY_PARAMS);
        } catch (Exception e) {
            throw new ElasticsearchGenerationException("failed to serialize source for type [" + type + "]", e);
        }
    }
    /** Releases resources held by the underlying document parser. */
    public void close() {
        documentParser.close();
    }
    /** Delegates XContent rendering to the wrapped mapping. */
    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        return mapping.toXContent(builder, params);
    }
/**
* Script based source transformation.
*/
private static class ScriptTransform implements SourceTransform {
private final ScriptService scriptService;
/**
* The script to transform the source document before indexing.
*/
private final Script script;
public ScriptTransform(ScriptService scriptService, Script script) {
this.scriptService = scriptService;
this.script = script;
}
@Override
@SuppressWarnings("unchecked")
public Map<String, Object> transformSourceAsMap(Map<String, Object> sourceAsMap) {
try {
// We use the ctx variable and the _source name to be consistent with the update api.
ExecutableScript executable = scriptService.executable(script, ScriptContext.Standard.MAPPING, null);
Map<String, Object> ctx = new HashMap<>(1);
ctx.put("_source", sourceAsMap);
executable.setNextVar("ctx", ctx);
executable.run();
ctx = (Map<String, Object>) executable.unwrap(ctx);
return (Map<String, Object>) ctx.get("_source");
} catch (Exception e) {
throw new IllegalArgumentException("failed to execute script", e);
}
}
@Override
public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
return script.toXContent(builder, params);
}
}
}
| |
/*
* Copyright (c) 2008, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.carbon.registry.core.jdbc;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.registry.core.*;
import org.wso2.carbon.registry.core.config.StaticConfiguration;
import org.wso2.carbon.registry.core.dao.*;
import org.wso2.carbon.registry.core.dataaccess.DataAccessManager;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import org.wso2.carbon.registry.core.jdbc.dataobjects.ResourceDO;
import org.wso2.carbon.registry.core.jdbc.utils.VersionInputStream;
import org.wso2.carbon.registry.core.jdbc.utils.VersionRetriever;
import org.wso2.carbon.registry.core.secure.AuthorizationFailedException;
import org.wso2.carbon.registry.core.session.CurrentSession;
import org.wso2.carbon.registry.core.utils.AuthorizationUtils;
import org.wso2.carbon.registry.core.utils.RegistryUtils;
import org.wso2.carbon.registry.core.utils.VersionedPath;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
/**
* Repository for versioned resources.
*/
public class VersionRepository {

    private static final Log log = LogFactory.getLog(VersionRepository.class);

    private DataAccessManager dataAccessManager;
    private ResourceDAO resourceDAO;
    private ResourceVersionDAO resourceVersionDAO;
    private CommentsDAO commentsDAO;
    private RatingsDAO ratingsDAO;
    private AssociationDAO associationDAO;
    private TagsDAO tagsDAO;

    /**
     * Constructor accepting data access manager.
     *
     * @param dataAccessManager the data access manager that is used for database communications.
     */
    public VersionRepository(DataAccessManager dataAccessManager) {
        this.dataAccessManager = dataAccessManager;
        this.resourceDAO = dataAccessManager.getDAOManager().getResourceDAO();
        this.resourceVersionDAO = dataAccessManager.getDAOManager().getResourceVersionDAO();
        this.commentsDAO = dataAccessManager.getDAOManager().getCommentsDAO(
                StaticConfiguration.isVersioningComments());
        this.ratingsDAO = dataAccessManager.getDAOManager().getRatingsDAO(
                StaticConfiguration.isVersioningRatings());
        this.associationDAO = dataAccessManager.getDAOManager().getAssociationDAO();
        this.tagsDAO = dataAccessManager.getDAOManager().getTagsDAO(
                StaticConfiguration.isVersioningTags());
    }

    /**
     * Method used to create a snapshot of a given resource.
     *
     * @param resource       the resource.
     * @param isRenewing     whether we are renewing.
     * @param keepProperties whether to preserve properties.
     *
     * @throws RegistryException if the operation failed.
     */
    public void createSnapshot(Resource resource,
                               boolean isRenewing,
                               boolean keepProperties) throws RegistryException {
        ResourceImpl resourceImpl = (ResourceImpl) resource;
        // archiving the old root resource
        createVersionForResource(resourceImpl, isRenewing, keepProperties);
        long version = resourceImpl.getVersionNumber();
        boolean isCollection = resourceImpl instanceof CollectionImpl;
        ResourceIDImpl rootResourceID = resourceImpl.getResourceIDImpl();
        ArrayList<Long> versionList = new ArrayList<Long>();
        versionList.add(version);
        if (isCollection) {
            // for collection we have to iterate through children
            addDescendants(rootResourceID, versionList, isRenewing, keepProperties);
        }
        // wrap the array list into stream
        InputStream versionsInputStream = new VersionInputStream(versionList);
        int pathId = rootResourceID.getPathID();
        String resourceName = rootResourceID.getName();
        long snapshotID =
                resourceVersionDAO.createSnapshot(pathId, resourceName, versionsInputStream);
        // Associations can be created only once we have created the snapshot, since we need to know
        // the snapshotID.
        if (snapshotID != -1) {
            VersionedPath versionedPath = new VersionedPath();
            versionedPath.setVersion(snapshotID);
            versionedPath.setPath(resourceImpl.getPath());
            associationDAO.copyAssociations(resourceImpl.getPath(), versionedPath.toString());
        }
    }

    /**
     * Method used to create a snapshot of a given resource.
     *
     * @param resourcePath   the resource path.
     * @param isRenewing     whether we are renewing.
     * @param keepProperties whether to preserve properties.
     *
     * @throws RegistryException if the operation failed.
     */
    public void createSnapshot(ResourcePath resourcePath,
                               boolean isRenewing,
                               boolean keepProperties) throws RegistryException {
        if (!resourcePath.isCurrentVersion()) {
            String msg = "Failed to create snapshot of the resource " + resourcePath +
                    ". Given path refers to an archived version of the resource.";
            log.error(msg);
            throw new RegistryException(msg);
        }
        String path = resourcePath.getPath();
        path = RegistryUtils.getPureResourcePath(path);
        ResourceIDImpl resourceID = resourceDAO.getResourceID(path);
        if (resourceID == null) {
            String msg = "Failed to get resource id to create a snapshot to the resource " +
                    path + ". ";
            throw new RegistryException(msg);
        }
        ResourceImpl resource = resourceDAO.getResourceMetaData(resourceID);
        createSnapshot(resource, isRenewing, keepProperties);
    }

    /**
     * Here the versions of the descendants of the resource is added to the versionList array.
     * immediate descendants will be added to the list before others
     *
     * @param resourceID     the resource identifier
     * @param versionList    a list of versions.
     * @param isRenewing     whether we are renewing.
     * @param keepProperties whether to preserve properties.
     *
     * @throws RegistryException if the operation failed.
     */
    public void addDescendants(ResourceIDImpl resourceID,
                               ArrayList<Long> versionList,
                               boolean isRenewing,
                               boolean keepProperties) throws RegistryException {
        List<ResourceIDImpl> childIds = resourceDAO.getChildPathIds(resourceID);
        // immediate children will be added before others
        for (ResourceIDImpl childId : childIds) {
            ResourceImpl childResourceImpl = resourceDAO.getResourceWithNoUpdate(childId);
            long version = childResourceImpl.getVersionNumber();
            versionList.add(version);
            // we are archiving all the resources
            createVersionForResource(childResourceImpl, isRenewing, keepProperties);
        }
        // then next immediate levels will be added..
        for (ResourceIDImpl childId : childIds) {
            if (childId.isCollection()) {
                // add descendants recursively
                addDescendants(childId, versionList, isRenewing, keepProperties);
            }
        }
        // we have to explicitly copy the collections to the history table.
        // The ResourceDO lookup is only needed for collections, so fetch it inside the branch to
        // avoid a needless database round trip for plain resources.
        if (resourceID.isCollection()) {
            ResourceDO resourceDO = resourceDAO.getResourceDO(resourceID);
            if (!resourceVersionDAO.isResourceHistoryExist(resourceDO.getVersion())) {
                resourceVersionDAO.putResourceToHistory(resourceDO);
            }
        }
    }

    // Method to create version for a resource.
    private void createVersionForResource(ResourceImpl resourceImpl,
                                          boolean isRenewing,
                                          boolean keepProperties) throws RegistryException {
        ResourceDO resourceDO = resourceImpl.getResourceDO();
        if (resourceDO.getVersion() <= 0) {
            // we need to fetch the resource from the database
            resourceDO = resourceDAO.getResourceDO(resourceImpl.getResourceIDImpl());
            isRenewing = false;
        }
        if (isRenewing) {
            // retrieve the old content and properties before versioning
            if (!(resourceImpl instanceof CollectionImpl)) {
                // we have to load the content
                resourceDAO.fillResourceContentWithNoUpdate(resourceImpl);
            }
            if (StaticConfiguration.isVersioningProperties() || !keepProperties) {
                // we need to load the properties as well
                resourceDAO.fillResourcePropertiesWithNoUpdate(resourceImpl);
            }
        }
        resourceVersionDAO.versionResource(resourceDO, keepProperties);
        if (isRenewing) {
            // we add a new copy to the resource table with a newer version
            ResourceImpl renewedResourceImpl = resourceImpl.getShallowCopy();
            if (!(renewedResourceImpl instanceof CollectionImpl)) {
                resourceDAO.addContent(renewedResourceImpl);
            }
            resourceDAO.addResourceWithNoUpdate(renewedResourceImpl);
            if (StaticConfiguration.isVersioningProperties() || !keepProperties) {
                // if the properties are not versioned, we can here safely assume
                // the properties are already there, so no need to add twice
                resourceDAO.addProperties(renewedResourceImpl);
            }
            commentsDAO.copyComments(resourceImpl, renewedResourceImpl);
            tagsDAO.copyTags(resourceImpl, renewedResourceImpl);
            ratingsDAO.copyRatings(resourceImpl, renewedResourceImpl);
        }
    }

    /**
     * Method to obtain a list of versioned paths. for a given path.
     *
     * @param resourcePath the resource path.
     *
     * @return array of version paths.
     * @throws RegistryException if the operation failed.
     */
    public String[] getVersions(String resourcePath) throws RegistryException {
        resourcePath = RegistryUtils.getPureResourcePath(resourcePath);
        Long[] snapshotNumbers = resourceVersionDAO.getSnapshotIDs(resourcePath);
        List<String> versionPaths = new ArrayList<String>();
        for (Long snapshotNumber : snapshotNumbers) {
            String versionPath = resourcePath + RegistryConstants.URL_SEPARATOR +
                    "version:" + snapshotNumber;
            versionPaths.add(versionPath);
        }
        return versionPaths.toArray(new String[versionPaths.size()]);
    }

    /**
     * Gets the meta data of resource referred by the given path.
     *
     * @param versionedPath Path of a versioned resource.
     *
     * @return Resource referred by the given path. Resource can be a file or a collection.
     * @throws RegistryException if the operation failed.
     */
    public Resource getMetaData(VersionedPath versionedPath) throws RegistryException {
        ResourceIDImpl resourceID = resourceDAO.getResourceID(versionedPath.getPath());
        if (!AuthorizationUtils.authorize(versionedPath.getPath(), ActionConstants.GET)) {
            String msg = "User " + CurrentSession.getUser() + " is not authorized to " +
                    "read the resource " + versionedPath + ".";
            log.warn(msg);
            throw new AuthorizationFailedException(msg);
        }
        long snapshotID = versionedPath.getVersion();
        ResourceImpl resourceImpl = resourceVersionDAO.get(resourceID, snapshotID);
        if (resourceImpl == null) {
            String msg = "Resource " + versionedPath.getPath() +
                    " does not have a version " + versionedPath.getVersion();
            log.error(msg);
            throw new RegistryException(msg);
        }
        resourceImpl.setDataAccessManager(dataAccessManager);
        resourceImpl.setPath(versionedPath.getPath());
        resourceImpl.setSnapshotID(snapshotID);
        resourceImpl.setMatchingSnapshotID(snapshotID);
        return resourceImpl;
    }

    /**
     * Checks if a pure resource exists in the given path.
     *
     * @param versionedPath Path of a versioned resource.
     *
     * @return true if a resource exists in the given path. false otherwise.
     * @throws RegistryException if the operation failed.
     */
    public boolean resourceExists(VersionedPath versionedPath) throws RegistryException {
        ResourceIDImpl resourceID = resourceDAO.getResourceID(versionedPath.getPath());
        long snapshotID = versionedPath.getVersion();
        return resourceVersionDAO.resourceExists(resourceID, snapshotID);
    }

    /**
     * Gets the pure resource referred by the given path.
     *
     * @param versionedPath Path of a versioned resource.
     *
     * @return Resource referred by the given path. Resource can be a file or a collection.
     * @throws RegistryException if the operation failed.
     */
    public Resource get(VersionedPath versionedPath) throws RegistryException {
        ResourceIDImpl resourceID = resourceDAO.getResourceID(versionedPath.getPath());
        if (!AuthorizationUtils.authorize(versionedPath.getPath(), ActionConstants.GET)) {
            String msg = "User " + CurrentSession.getUser() + " is not authorized to " +
                    "read the resource " + versionedPath + ".";
            log.warn(msg);
            throw new AuthorizationFailedException(msg);
        }
        long snapshotID = versionedPath.getVersion();
        ResourceImpl resourceImpl = resourceVersionDAO.get(resourceID, snapshotID);
        if (resourceImpl == null) {
            String msg = "Resource " + versionedPath.getPath() +
                    " does not have a version " + versionedPath.getVersion();
            log.error(msg);
            throw new RegistryException(msg);
        }
        int contentId = resourceImpl.getDbBasedContentID();
        if (contentId > 0) {
            resourceVersionDAO.fillResourceContentArchived(resourceImpl);
        }
        resourceDAO.fillResourcePropertiesWithNoUpdate(resourceImpl);
        resourceImpl.setDataAccessManager(dataAccessManager);
        resourceImpl.setUserName(CurrentSession.getUser());
        resourceImpl.setTenantId(CurrentSession.getTenantId());
        resourceImpl.setUserRealm(CurrentSession.getUserRealm());
        resourceImpl.setPath(versionedPath.getPath());
        resourceImpl.setSnapshotID(snapshotID);
        resourceImpl.setMatchingSnapshotID(snapshotID);
        return resourceImpl;
    }

    /**
     * Method to get a paged collection.
     *
     * @param versionedPath the collection path (which also contains the version).
     * @param start         the starting index.
     * @param pageLen       the page length.
     *
     * @return collection with resources on the given page.
     * @throws RegistryException if the operation failed.
     */
    public Collection get(VersionedPath versionedPath, int start, int pageLen)
            throws RegistryException {
        ResourceIDImpl resourceID = resourceDAO.getResourceID(versionedPath.getPath());
        long snapshotID = versionedPath.getVersion();
        CollectionImpl collectionImpl =
                resourceVersionDAO.get(resourceID, snapshotID, start, pageLen);
        if (collectionImpl == null) {
            String msg = "Resource " + versionedPath.getPath() +
                    " does not have a version " + versionedPath.getVersion();
            log.error(msg);
            throw new RegistryException(msg);
        }
        collectionImpl.setDataAccessManager(dataAccessManager);
        collectionImpl.setUserName(CurrentSession.getUser());
        collectionImpl.setTenantId(CurrentSession.getTenantId());
        collectionImpl.setUserRealm(CurrentSession.getUserRealm());
        collectionImpl.setPath(versionedPath.getPath());
        collectionImpl.setSnapshotID(snapshotID);
        collectionImpl.setMatchingSnapshotID(snapshotID);
        return collectionImpl;
    }

    /**
     * Method to restore a version.
     *
     * @param resourcePath the resource path (which also contains the version).
     *
     * @throws RegistryException if the operation failed.
     */
    public void restoreVersion(ResourcePath resourcePath) throws RegistryException {
        VersionedPath versionedPath = RegistryUtils.getVersionedPath(resourcePath);
        if (versionedPath.getVersion() == -1) {
            String msg = "Failed to restore resource. " +
                    versionedPath + " is not a valid version path.";
            log.error(msg);
            throw new RegistryException(msg);
        }
        String versionedResourcePath = versionedPath.getPath();
        long snapshotID = versionedPath.getVersion();
        ResourceIDImpl resourceID = resourceDAO.getResourceID(versionedResourcePath);
        if (!AuthorizationUtils.authorize(versionedResourcePath, ActionConstants.PUT)) {
            String msg = "User " + CurrentSession.getUser() + " is not authorized to " +
                    "restore the resource " + versionedResourcePath + ".";
            log.warn(msg);
            throw new AuthorizationFailedException(msg);
        }
        restoreSnapshotNetwork(resourceID, snapshotID);
    }

    // Utility method to restore version-by-version using a version retriever.
    private void restoreSnapshotNetwork(ResourceIDImpl resourceID, long snapshotID)
            throws RegistryException {
        // the algorithm to restore snapshot now..
        // check if resource exist in the snapshot
        //     if no return failure
        // delete the current resource.
        // restore the versioned things
        //     get each resource from resource_history and copy to resource
        //     restore contents
        // That's all
        VersionRetriever versionRetriever =
                resourceVersionDAO.getVersionList(resourceID, snapshotID);
        if (versionRetriever == null) {
            String msg = "The snapshot " + snapshotID + " doesn't contain the " +
                    "resource " + resourceID.getPath();
            log.warn(msg);
            // a missing snapshot entry is a data/lookup failure, not an authorization failure, so
            // report it as a plain RegistryException (previously AuthorizationFailedException).
            throw new RegistryException(msg);
        }
        int versionIndex = 0;
        while (true) {
            long version = versionRetriever.getVersion(versionIndex);
            if (version == -1) {
                // no more stream
                break;
            }
            // restore resource and content
            resourceVersionDAO.restoreResources(version, snapshotID);
            versionIndex++;
        }
    }

    /**
     * Removes the version history (tags, comments, ratings, properties and the snapshot itself)
     * associated with the given snapshot.
     *
     * @param path       the resource path used for the delete authorization check.
     * @param snapshotId the identifier of the snapshot to remove.
     *
     * @return true if the history was removed, false when the snapshot holds no versions.
     * @throws RegistryException if the operation failed or the user is not authorized.
     */
    public boolean removeVersionHistory(String path, long snapshotId)
            throws RegistryException {
        if (!AuthorizationUtils.authorize(path, ActionConstants.DELETE)) {
            String msg = "User " + CurrentSession.getUser() + " is not authorized to " +
                    "remove the version of the resource " + path + ".";
            log.warn(msg);
            throw new AuthorizationFailedException(msg);
        }
        VersionRetriever versionRetriever =
                resourceVersionDAO.getVersionList(snapshotId);
        if (versionRetriever == null) {
            String msg = "The snapshot with the ID: " + snapshotId + " doesn't exists";
            log.warn(msg);
            // a missing snapshot is not an authorization problem; previously this incorrectly
            // surfaced as AuthorizationFailedException.
            throw new RegistryException(msg);
        }
        int versionIndex = 0;
        long regVersion = versionRetriever.getVersion(versionIndex);
        if (regVersion == -1) {
            return false;
        }
        // Remove the tags /
        tagsDAO.removeVersionTags(regVersion);
        // Remove the comments /
        commentsDAO.removeVersionComments(regVersion);
        // Remove the ratings /
        ratingsDAO.removeVersionRatings(regVersion);
        // Remove the Property
        resourceVersionDAO.removePropertyValues(regVersion);
        // Remove the snapshot
        resourceVersionDAO.removeSnapshot(snapshotId);
        return true;
    }
}
| |
package org.opencb.opencga.storage.core.variant.search;
import htsjdk.variant.vcf.VCFHeader;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.collections.MapUtils;
import org.apache.commons.lang.StringUtils;
import org.junit.Rule;
import org.junit.Test;
import org.opencb.biodata.models.variant.Variant;
import org.opencb.biodata.models.variant.avro.ConsequenceType;
import org.opencb.biodata.models.variant.avro.Score;
import org.opencb.biodata.models.variant.avro.VariantAnnotation;
import org.opencb.biodata.tools.variant.VariantVcfHtsjdkReader;
import org.opencb.cellbase.client.rest.CellBaseClient;
import org.opencb.commons.datastore.core.*;
import org.opencb.commons.datastore.solr.FacetQueryParser;
import org.opencb.commons.utils.ListUtils;
import org.opencb.opencga.core.common.JacksonUtils;
import org.opencb.opencga.core.response.VariantQueryResult;
import org.opencb.opencga.storage.core.metadata.VariantStorageMetadataManager;
import org.opencb.opencga.storage.core.metadata.models.StudyMetadata;
import org.opencb.opencga.storage.core.variant.VariantStorageBaseTest;
import org.opencb.opencga.storage.core.variant.adaptors.VariantQueryParam;
import org.opencb.opencga.storage.core.variant.dummy.DummyVariantStorageTest;
import org.opencb.opencga.storage.core.variant.search.solr.VariantSearchManager;
import org.opencb.opencga.storage.core.variant.solr.VariantSolrExternalResource;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.*;
import java.util.stream.Collectors;
import static org.junit.Assert.fail;
public class VariantSearchTest extends VariantStorageBaseTest implements DummyVariantStorageTest {
@Rule
public VariantSolrExternalResource solr = new VariantSolrExternalResource();
    /**
     * Round-trips annotated variants through Solr and compares, per Ensembl transcript, the
     * consequence-type data before insertion and after querying: biotype, annotation flags,
     * cDNA/CDS positions, codon and (when present) the protein variant annotation.
     * NOTE(review): comparisons use plain {@code assert}, which only fires when the JVM runs
     * with -ea.
     */
    @Test
    public void testTranscriptInfo() throws Exception {
        int limit = 500;
        VariantStorageMetadataManager scm = variantStorageEngine.getMetadataManager();
        solr.configure(variantStorageEngine);
        VariantSearchManager variantSearchManager = variantStorageEngine.getVariantSearchManager();
        System.out.println(smallInputUri.getPath());
        StudyMetadata studyMetadata = metadataManager.createStudy("s1");
        List<Variant> variants = getVariants(limit);
        List<Variant> annotatedVariants = annotatedVariants(variants, studyMetadata.getName());
        String collection = solr.coreName;
        variantSearchManager.create(collection);
        variantSearchManager.insert(collection, annotatedVariants);
        VariantQueryResult<Variant> results = variantSearchManager.query(collection, new Query(),
                new QueryOptions(QueryOptions.LIMIT, limit));
        // compare each inserted variant with the corresponding variant returned by Solr
        for (int i = 0; i < limit; i++) {
            Map<String, ConsequenceType> inMap = getConsequenceTypeMap(annotatedVariants.get(i));
            Map<String, ConsequenceType> outMap = getConsequenceTypeMap(results.getResults().get(i));
            System.out.println(inMap.size() + " vs " + outMap.size());
            assert(inMap.size() == outMap.size());
            for (String key: inMap.keySet()) {
                ConsequenceType inCT = inMap.get(key);
                ConsequenceType outCT = outMap.get(key);
                // Check biotype
                System.out.println(inCT.getBiotype() + " vs " + outCT.getBiotype());
                assert(inCT.getBiotype().equals(outCT.getBiotype()));
                // Check annotation flags
                System.out.println("inCT, annotation flags:");
                if (ListUtils.isNotEmpty(inCT.getTranscriptAnnotationFlags())) {
                    System.out.println("\t" + StringUtils.join(inCT.getTranscriptAnnotationFlags(), ","));
                }
                System.out.println();
                System.out.println("outCT, annotation flags:");
                if (ListUtils.isNotEmpty(outCT.getTranscriptAnnotationFlags())) {
                    System.out.println("\t" + StringUtils.join(outCT.getTranscriptAnnotationFlags(), ","));
                }
                System.out.println();
                // flags must either match element-by-element, or be empty on both sides
                if (ListUtils.isNotEmpty(inCT.getTranscriptAnnotationFlags())
                        && ListUtils.isNotEmpty(outCT.getTranscriptAnnotationFlags())) {
                    if (inCT.getTranscriptAnnotationFlags().size() == outCT.getTranscriptAnnotationFlags().size()) {
                        for (int j = 0; j < inCT.getTranscriptAnnotationFlags().size(); j++) {
                            if (!inCT.getTranscriptAnnotationFlags().get(j)
                                    .equals(outCT.getTranscriptAnnotationFlags().get(j))) {
                                fail("Annotation flags mismatch: " + inCT.getTranscriptAnnotationFlags().get(j) + " vs "
                                        + outCT.getTranscriptAnnotationFlags().get(j));
                            }
                        }
                    } else {
                        fail("Annotation flags mismatch (size)");
                    }
                } else if (ListUtils.isNotEmpty(inCT.getTranscriptAnnotationFlags())
                        || ListUtils.isNotEmpty(outCT.getTranscriptAnnotationFlags())) {
                    fail("Annotation flags mismatch");
                }
                // Check cdnaPosition, cdsPostion and codon (null normalized to 0 / empty string)
                int inCdnaPosition = inCT.getCdnaPosition() == null ? 0 : inCT.getCdnaPosition();
                int inCdsPosition = inCT.getCdsPosition() == null ? 0 : inCT.getCdsPosition();
                int outCdnaPosition = outCT.getCdnaPosition() == null ? 0 : outCT.getCdnaPosition();
                int outCdsPosition = outCT.getCdsPosition() == null ? 0 : outCT.getCdsPosition();
                String inCodon = inCT.getCodon() == null ? "" : inCT.getCodon().trim();
                String outCodon = outCT.getCodon() == null ? "" : outCT.getCodon().trim();
                System.out.println(inCdnaPosition + " vs " + outCdnaPosition
                        + " ; " + inCdsPosition + " vs " + outCdsPosition
                        + " ; " + inCodon + " vs " + outCodon);
                assert(inCdnaPosition == outCdnaPosition);
                assert(inCdsPosition == outCdsPosition);
                assert(inCodon.equals(outCodon));
                if (inCT.getProteinVariantAnnotation() != null && outCT.getProteinVariantAnnotation() != null) {
                    // Check sift and polyphen values
                    checkScore(inCT.getProteinVariantAnnotation().getSubstitutionScores(), outCT.getProteinVariantAnnotation().getSubstitutionScores(), "sift");
                    checkScore(inCT.getProteinVariantAnnotation().getSubstitutionScores(), outCT.getProteinVariantAnnotation().getSubstitutionScores(), "polyphen");
                    // uniprot identifiers (null normalized to empty string before comparison)
                    String inUniprotAccession = inCT.getProteinVariantAnnotation().getUniprotAccession() == null ? "" : inCT.getProteinVariantAnnotation().getUniprotAccession();
                    String outUniprotAccession = outCT.getProteinVariantAnnotation().getUniprotAccession() == null ? "" : outCT.getProteinVariantAnnotation().getUniprotAccession();
                    String inUniprotName = inCT.getProteinVariantAnnotation().getUniprotName() == null ? "" : inCT.getProteinVariantAnnotation().getUniprotName();
                    String outUniprotName = outCT.getProteinVariantAnnotation().getUniprotName() == null ? "" : outCT.getProteinVariantAnnotation().getUniprotName();
                    String inUniprotVariantId = inCT.getProteinVariantAnnotation().getUniprotVariantId() == null ? "" : inCT.getProteinVariantAnnotation().getUniprotVariantId();
                    String outUniprotVariantId = outCT.getProteinVariantAnnotation().getUniprotVariantId() == null ? "" : outCT.getProteinVariantAnnotation().getUniprotVariantId();
                    System.out.println(inUniprotAccession + " vs " + outUniprotAccession
                            + " ; " + inUniprotName + " vs " + outUniprotName
                            + " ; " + inUniprotVariantId + " vs " + outUniprotVariantId);
                    assert(inUniprotAccession.equals(outUniprotAccession));
                    assert(inUniprotName.equals(outUniprotName));
                    assert(inUniprotVariantId.equals(outUniprotVariantId));
                    // protein position and reference/alternate amino acids
                    int inPosition = inCT.getProteinVariantAnnotation().getPosition() == null ? 0 : inCT.getProteinVariantAnnotation().getPosition();
                    int outPosition = outCT.getProteinVariantAnnotation().getPosition() == null ? 0 : outCT.getProteinVariantAnnotation().getPosition();
                    String inRef = inCT.getProteinVariantAnnotation().getReference() == null ? "" : inCT.getProteinVariantAnnotation().getReference();
                    String outRef = outCT.getProteinVariantAnnotation().getReference() == null ? "" : outCT.getProteinVariantAnnotation().getReference();
                    String inAlt = inCT.getProteinVariantAnnotation().getAlternate() == null ? "" : inCT.getProteinVariantAnnotation().getAlternate();
                    String outAlt = outCT.getProteinVariantAnnotation().getAlternate() == null ? "" : outCT.getProteinVariantAnnotation().getAlternate();
                    System.out.println(inPosition + " vs " + outPosition
                            + " ; " + inRef + " vs " + outRef
                            + " ; " + inAlt + " vs " + outAlt);
                    assert(inPosition == outPosition);
                    assert(inRef.equals(outRef));
                    assert(inAlt.equals(outAlt));
                } else if (inCT.getProteinVariantAnnotation() == null && outCT.getProteinVariantAnnotation() == null) {
                    continue;
                } else {
                    fail("Mismatch protein variant annotation");
                }
            }
        }
        System.out.println("#variants = " + variants.size());
        System.out.println("#annotations = " + annotatedVariants.size());
        System.out.println("#variants from Solr = " + results.getResults().size());
    }
    /**
     * Inserts a single annotated variant whose study id, file id and a sample name contain
     * non-alphanumeric characters ("abyu12", "a.vcf", "A-A") and queries it back by study, file,
     * filter and clinical significance. NOTE(review): this only prints the outcome — it does not
     * assert that the variant was actually found.
     */
    @Test
    public void testSpecialCharacter() throws Exception {
        int limit = 1;
        VariantStorageMetadataManager scm = variantStorageEngine.getMetadataManager();
        solr.configure(variantStorageEngine);
        VariantSearchManager variantSearchManager = variantStorageEngine.getVariantSearchManager();
        System.out.println(smallInputUri.getPath());
        List<Variant> variants = getVariants(limit);
        List<Variant> annotatedVariants = annotatedVariants(variants);
        String studyId = "abyu12";
        String fileId = "a.vcf";
        variants.get(0).getStudies().get(0).getFiles().get(0).setFileId(fileId);
        System.out.println(variants.get(0).getStudies().get(0).getFiles().get(0).getFileId());
        //System.exit(-1);
        scm.createStudy(studyId);
        String collection = solr.coreName;
        variantSearchManager.create(collection);
        // sample names, including one with a dash, in explicit positions
        LinkedHashMap<String, Integer> samplePosition = new LinkedHashMap<>();
        samplePosition.put("A-A", 0);
        samplePosition.put("B", 1);
        samplePosition.put("C", 2);
        samplePosition.put("D", 3);
        annotatedVariants.get(0).getStudies().get(0).setStudyId(studyId).setSortedSamplesPosition(samplePosition);
        variantSearchManager.insert(collection, annotatedVariants);
        Query query = new Query();
        query.put(VariantQueryParam.STUDY.key(), studyId);
        // query.put(VariantQueryParam.SAMPLE.key(), samplePosition.keySet().toArray()[0]);
        query.put(VariantQueryParam.FILE.key(), fileId);
        query.put(VariantQueryParam.FILTER.key(), "PASS");
        query.put(VariantQueryParam.ANNOT_CLINICAL_SIGNIFICANCE.key(), "benign");
        VariantQueryResult<Variant> results = variantSearchManager.query(collection, query,
                new QueryOptions(QueryOptions.LIMIT, limit));
        if (results.getResults().size() > 0) {
            System.out.println(results.getResults().get(0).toJson());
        } else {
            System.out.println("Not found!!!!");
        }
    }
    /**
     * Loads annotated variants into Solr and runs a nested faceted query (genes >> type),
     * printing the JSON result. NOTE(review): no assertions — this is effectively a smoke test.
     */
    @Test
    public void testGeneFacet() throws Exception {
        int limit = 500;
        VariantStorageMetadataManager scm = variantStorageEngine.getMetadataManager();
        solr.configure(variantStorageEngine);
        VariantSearchManager variantSearchManager = variantStorageEngine.getVariantSearchManager();
        System.out.println(smallInputUri.getPath());
        List<Variant> variants = getVariants(limit);
        List<Variant> annotatedVariants = annotatedVariants(variants);
        metadataManager.createStudy("s1");
        String collection = solr.coreName;
        variantSearchManager.create(collection);
        variantSearchManager.insert(collection, annotatedVariants);
        QueryOptions queryOptions = new QueryOptions();
        //String facet = "type[SNV,TOTO]>>biotypes";
        String facet = "genes[CDK11A,WDR78,ENSG00000115183,TOTO]>>type[INDEL,DELETION,SNV]";
        queryOptions.put(QueryOptions.FACET, facet);
        DataResult<FacetField> facetQueryResult = variantSearchManager.facetedQuery(collection, new Query(), queryOptions);
        String s = JacksonUtils.getDefaultObjectMapper().writerWithDefaultPrettyPrinter().writeValueAsString(facetQueryResult);
        System.out.println(s);
        // System.out.println(facetQueryResult.toString());
    }
public void regex() throws Exception {
String facet = "genes[G1,G2]>>type[INDEL,SNV];aggr(genes);biotypes";
Map<String, Set<String>> includeMap = new FacetQueryParser().getIncludingValuesMap(facet);
System.out.println(facet);
if (MapUtils.isNotEmpty(includeMap)) {
for (String key : includeMap.keySet()) {
System.out.println("key: " + key);
if (includeMap.containsKey(key) && CollectionUtils.isNotEmpty(includeMap.get(key))) {
for (String value : includeMap.get(key)) {
System.out.println("\t" + value);
}
}
}
}
}
private Map<String, ConsequenceType> getConsequenceTypeMap (Variant variant){
Map<String, ConsequenceType> map = new HashMap<>();
if (variant.getAnnotation() != null && ListUtils.isNotEmpty(variant.getAnnotation().getConsequenceTypes())) {
for (ConsequenceType consequenceType: variant.getAnnotation().getConsequenceTypes()) {
if (StringUtils.isNotEmpty(consequenceType.getEnsemblTranscriptId())) {
map.put(consequenceType.getEnsemblTranscriptId(), consequenceType);
}
}
}
return map;
}
/**
 * Finds the first score in {@code scores} whose source equals {@code source}.
 *
 * @param scores the scores to search; may be null or empty
 * @param source the source to match; may be null or empty
 * @return the first matching score, or null when the list/source is empty or
 *         no score matches
 */
private Score getScore(List<Score> scores, String source) {
    if (!ListUtils.isNotEmpty(scores) || !org.apache.commons.lang3.StringUtils.isNotEmpty(source)) {
        return null;
    }
    return scores.stream()
            .filter(score -> source.equals(score.getSource()))
            .findFirst()
            .orElse(null);
}
/**
 * Compares the score identified by {@code source} between the input and
 * output score lists. When both sides have the score, prints both values and
 * descriptions for eyeballing; when only one side has it, fails the test.
 * When neither side has it, does nothing.
 *
 * @param inScores  scores attached to the input variant
 * @param outScores scores attached to the output (round-tripped) variant
 * @param source    the score source to compare (e.g. a predictor name)
 */
private void checkScore(List<Score> inScores, List<Score> outScores, String source) {
    Score inScore = getScore(inScores, source);
    Score outScore = getScore(outScores, source);
    if (inScore != null && outScore != null) {
        // Null score values/descriptions are normalised so the printout is stable.
        double inValue = inScore.getScore() == null ? 0 : inScore.getScore();
        double outValue = outScore.getScore() == null ? 0 : outScore.getScore();
        String inDescription = inScore.getDescription() == null ? "" : inScore.getDescription();
        String outDescription = outScore.getDescription() == null ? "" : outScore.getDescription();
        System.out.println(source + ": " + inValue + " vs " + outValue
                + " ; " + inDescription + " vs " + outDescription);
    } else if (inScore != null || outScore != null) {
        // Fixed typo in the failure message ("Mismatchtch" -> "Mismatch").
        fail("Mismatch " + source + " values");
    }
}
/**
 * Reads up to {@code limit} variants from the small test VCF.
 *
 * @param limit maximum number of variants to read
 * @return the variants read from the file
 * @throws Exception if the file cannot be opened or read
 */
private List<Variant> getVariants(int limit) throws Exception {
    VariantVcfHtsjdkReader reader = variantReaderUtils.getVariantVcfReader(Paths.get(smallInputUri.getPath()), null);
    // try/finally guarantees the reader is closed even if read() throws
    // (the original leaked it on failure). The unused vcfHeader local was
    // also dropped.
    try {
        reader.open();
        reader.pre();
        List<Variant> variants = reader.read(limit);
        reader.post();
        return variants;
    } finally {
        reader.close();
    }
}
/**
 * Annotates the given variants via CellBase, assigning an empty study id to
 * each variant's first study.
 *
 * @param variants variants to annotate; modified in place
 * @return the same list instance with annotations set
 * @throws IOException if the CellBase query fails
 */
private List<Variant> annotatedVariants(List<Variant> variants) throws IOException {
    return annotatedVariants(variants, "");
}
/**
 * Annotates the given variants by querying CellBase with each variant's
 * string form, attaching the returned annotation to the variant in place and
 * overwriting the study id of the variant's first study.
 *
 * @param variants variants to annotate; modified in place
 * @param studyId  study id to assign to each variant's first study
 * @return the same list instance with annotations set
 * @throws IOException if the CellBase query fails
 */
private List<Variant> annotatedVariants(List<Variant> variants, String studyId) throws IOException {
    CellBaseClient cellBaseClient = new CellBaseClient(variantStorageEngine.getConfiguration().getCellbase().toClientConfiguration());
    QueryResponse<VariantAnnotation> queryResponse = cellBaseClient.getVariantClient().getAnnotationByVariantIds(variants.stream().map(Variant::toString).collect(Collectors.toList()), QueryOptions.empty());
    // Set annotations
    // NOTE(review): assumes CellBase returns exactly one response per variant,
    // in request order, and that every variant has at least one study --
    // confirm against the callers; an index/NPE failure here would be cryptic.
    for (int i = 0; i < variants.size(); i++) {
        variants.get(i).setAnnotation(queryResponse.getResponse().get(i).first());
        variants.get(i).getStudies().get(0).setStudyId(studyId);
    }
    return variants;
}
}
| |
package synergynet3.tracking.applications;
import java.lang.management.ManagementFactory;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.UUID;
import multiplicity3.appsystem.IQueueOwner;
import multiplicity3.config.display.DisplayPrefsItem;
import multiplicity3.csys.factory.ContentTypeNotBoundException;
import multiplicity3.csys.items.border.IRoundedBorder;
import multiplicity3.input.IMultiTouchEventListener;
import multiplicity3.input.MultiTouchInputComponent;
import multiplicity3.input.events.MultiTouchCursorEvent;
import multiplicity3.input.events.MultiTouchObjectEvent;
import synergynet3.SynergyNetApp;
import synergynet3.cluster.SynergyNetCluster;
import synergynet3.positioning.SynergyNetPositioning;
import synergynet3.tracking.network.TrackedAppSync;
import synergynet3.tracking.network.core.TrackingControlComms;
import synergynet3.tracking.network.core.TrackingDeviceControl;
import synergynet3.tracking.network.shared.CombinedUserEntity;
import synergynet3.tracking.network.shared.PointDirection;
import synergynet3.tracking.network.shared.UserColourUtils;
import synergynet3.tracking.network.shared.UserLocation;
import synergynet3.tracking.network.shared.UserLocation.USERSTATE;
import com.jme3.collision.CollisionResults;
import com.jme3.math.ColorRGBA;
import com.jme3.math.Ray;
import com.jme3.math.Vector2f;
import com.jme3.math.Vector3f;
import com.jme3.scene.Geometry;
import com.jme3.scene.Spatial;
import com.jme3.scene.shape.Box;
/**
 * The Class TrackedApp.
 *
 * <p>Base class for SynergyNet applications that consume user-tracking data
 * (body and hand positions) so that touches on the table surface can be
 * attributed to the nearest tracked user. It also implements a teacher-driven
 * "gesture mode" in which tables can be selected/deselected either by
 * touching them or by pointing at them, with a visual border showing the
 * selection state.
 *
 * <p>NOTE(review): the TABLE_* and *_THRESHOLD_DISTANCE statics are shared
 * mutable state across all instances; this appears intentional (one table per
 * JVM) but is worth confirming.
 */
abstract public class TrackedApp extends SynergyNetApp implements IMultiTouchEventListener
{

	/** The body threshold distance. Set in metres here; converted to pixels
	 * by {@link #initialiseTrackingAppArgs(String[])}. */
	public static float BODY_THRESHOLD_DISTANCE = 1;

	/** The hand threshold distance. Set in metres here; converted to pixels
	 * by {@link #initialiseTrackingAppArgs(String[])}. */
	public static float HAND_THRESHOLD_DISTANCE = 0.5f;

	/** The table height (converted to pixel units in shouldStart). */
	public static float TABLE_HEIGHT = 1f;

	/** The table location (metres) and orientation. */
	public static float TABLE_LOCATION_X, TABLE_LOCATION_Y, TABLE_ORIENTATION = 0;

	/** The Constant SLEEP_TIME (ms): cool-down between selection actions. */
	private static final int SLEEP_TIME = 2000;

	/** The tracking table identity (cluster identity of this device). */
	private static String trackingTableIdentity;

	/** The gesture mode. */
	private boolean gestureMode = false;

	/** The individual mode. */
	private boolean individualMode = false;

	/** The is selected. */
	private boolean isSelected = false;

	/** The selection action made recently (cool-down flag). */
	private boolean selectionActionMadeRecently = false;

	/**
	 * The selection action made timer: clears the cool-down flag after
	 * SLEEP_TIME ms.
	 *
	 * <p>NOTE(review): the InterruptedException is swallowed without
	 * re-interrupting the thread, and if the sleep is interrupted the
	 * cool-down flag is never reset -- consider
	 * Thread.currentThread().interrupt() and clearing the flag in a finally.
	 */
	private Runnable selectionActionMadeTimer = new Runnable()
	{
		@Override
		public void run()
		{
			try
			{
				Thread.sleep(SLEEP_TIME);
				selectionActionMadeRecently = false;
			}
			catch (InterruptedException ie)
			{
			}
		}
	};

	/** The selection border (null when hidden). */
	private IRoundedBorder selectionBorder;

	/** The table rep: 3D box standing in for this table in collision tests. */
	private Spatial tableRep;

	/** The tracking sync. */
	private TrackedAppSync trackingSync;

	/** The display height. */
	protected int displayHeight = 768;

	/** The display width. */
	protected int displayWidth = 1024;

	/** The input. */
	protected MultiTouchInputComponent input;

	/** The touches: maps an active cursor id to the unique id of the tracked
	 * user the touch was attributed to. */
	protected HashMap<Long, Integer> touches = new HashMap<Long, Integer>();

	/** The user locations, already localised to this table's coordinates. */
	protected ArrayList<CombinedUserEntity> userLocations = new ArrayList<CombinedUserEntity>();

	/**
	 * Gets the real value.
	 *
	 * <p>Converts a pixel value to a real-world value using the display's
	 * real-width-to-pixel-width ratio.
	 *
	 * @param value
	 *            the value
	 * @return the real value
	 */
	public static float getRealValue(float value)
	{
		DisplayPrefsItem displayPrefs = new DisplayPrefsItem();
		return value * (displayPrefs.getRealWidth() / displayPrefs.getWidth());
	}

	/**
	 * Initialise tracking app args.
	 *
	 * <p>Reads the optional "bodythreshold" and "handthreshold" system
	 * properties (metres), falling back to the defaults, then converts both
	 * thresholds to pixel units.
	 *
	 * @param args
	 *            the args (not read directly; values come from system properties)
	 */
	public static void initialiseTrackingAppArgs(String[] args)
	{
		try
		{
			BODY_THRESHOLD_DISTANCE = Float.parseFloat(ManagementFactory.getRuntimeMXBean().getSystemProperties().get("bodythreshold"));
		}
		catch (Exception e)
		{
			// Property absent or unparsable: keep the default.
			System.out.println("No body threshold argument given, using default.");
		}
		BODY_THRESHOLD_DISTANCE = SynergyNetPositioning.getPixelValue(BODY_THRESHOLD_DISTANCE);
		System.out.println("Body Threshold: " + BODY_THRESHOLD_DISTANCE);
		try
		{
			HAND_THRESHOLD_DISTANCE = Float.parseFloat(ManagementFactory.getRuntimeMXBean().getSystemProperties().get("handthreshold"));
		}
		catch (Exception e)
		{
			// Property absent or unparsable: keep the default.
			System.out.println("No hand threshold argument given, using default.");
		}
		HAND_THRESHOLD_DISTANCE = SynergyNetPositioning.getPixelValue(HAND_THRESHOLD_DISTANCE);
		System.out.println("Hand Threshold: " + HAND_THRESHOLD_DISTANCE);
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#cursorChanged(multiplicity3
	 * .input.events.MultiTouchCursorEvent)
	 */
	@Override
	public void cursorChanged(MultiTouchCursorEvent event)
	{
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#cursorClicked(multiplicity3
	 * .input.events.MultiTouchCursorEvent)
	 */
	@Override
	public void cursorClicked(MultiTouchCursorEvent event)
	{
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#cursorPressed(multiplicity3
	 * .input.events.MultiTouchCursorEvent)
	 *
	 * Attributes the touch to the nearest tracked user within the relevant
	 * threshold (body vs hand distance), records the attribution in
	 * {@link #touches}, and -- when the user is a teacher and gesture mode is
	 * active -- toggles this table's selection.
	 */
	@Override
	public void cursorPressed(MultiTouchCursorEvent event)
	{
		CombinedUserEntity userToStore = null;
		// distance == -1 means "no candidate found yet".
		float distance = -1;
		for (CombinedUserEntity user : userLocations)
		{
			UserLocation userLocation = user.getUserLocation();
			float[] userLocs = userLocation.getLocationOfUser();
			// Event position is normalised [0,1]; scale to display pixels.
			// NOTE(review): assumes userLocations are already in the same
			// localised pixel space (see setUserLocations) -- confirm.
			Vector3f touchLoc = new Vector3f(event.getPosition().x * displayWidth, event.getPosition().y * displayHeight, TABLE_HEIGHT);
			if (userLocation.getUserState() == USERSTATE.BODY)
			{
				// Body positions are 2D; compare in the table plane only.
				Vector2f userLoc = new Vector2f(userLocs[0], userLocs[1]);
				float distanceTemp = userLoc.distance(new Vector2f(touchLoc.x, touchLoc.y));
				if (distanceTemp < BODY_THRESHOLD_DISTANCE)
				{
					if ((distance == -1) || (distanceTemp < distance))
					{
						distance = distanceTemp;
						userToStore = user;
					}
				}
			}
			else if (userLocation.getUserState() == USERSTATE.ONE_HAND)
			{
				Vector3f userLoc = new Vector3f(userLocs[0], userLocs[1], userLocs[2]);
				float distanceTemp = userLoc.distance(touchLoc);
				if (distanceTemp < HAND_THRESHOLD_DISTANCE)
				{
					if ((distance == -1) || (distanceTemp < distance))
					{
						distance = distanceTemp;
						userToStore = user;
					}
				}
			}
			else if (userLocation.getUserState() == USERSTATE.TWO_HANDS)
			{
				// Use whichever of the two hands is closer to the touch.
				Vector3f handOneLoc = new Vector3f(userLocs[0], userLocs[1], userLocs[2]);
				Vector3f handTwoLoc = new Vector3f(userLocs[3], userLocs[4], userLocs[5]);
				float distanceTemp = handOneLoc.distance(touchLoc);
				float distanceTempTwo = handTwoLoc.distance(touchLoc);
				if (distanceTempTwo < distanceTemp)
				{
					distanceTemp = distanceTempTwo;
				}
				if (distanceTemp < HAND_THRESHOLD_DISTANCE)
				{
					if ((distance == -1) || (distanceTemp < distance))
					{
						distance = distanceTemp;
						userToStore = user;
					}
				}
			}
		}
		boolean isTeacher = false;
		if (userToStore != null)
		{
			isTeacher = userToStore.isTeacher();
			// Teacher touches in gesture mode toggle this table's selection.
			if (gestureMode && isTeacher)
			{
				if (individualMode)
				{
					if (isSelected)
					{
						deselected();
					}
					else
					{
						selected();
					}
				}
				else
				{
					gestureIndividualModeEnabled();
					selected();
				}
			}
			touches.put(event.getCursorID(), userToStore.getUniqueID());
		}
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#cursorReleased(multiplicity3
	 * .input.events.MultiTouchCursorEvent)
	 */
	@Override
	public void cursorReleased(MultiTouchCursorEvent event)
	{
		if (touches.containsKey(event.getCursorID()))
		{
			touches.remove(event.getCursorID());
		}
	}

	/**
	 * Gesturea all mode enabled.
	 *
	 * <p>Enters gesture mode with ALL tables treated as selected (grey
	 * border). NOTE(review): the method name carries a typo ("gesturea") but
	 * is public API -- renaming would break external callers.
	 */
	public void gestureaAllModeEnabled()
	{
		gestureMode = true;
		isSelected = true;
		individualMode = false;
		showSelectionBorder(new ColorRGBA(0.5f, 0.5f, 0.5f, 0.5f));
	}

	/**
	 * Gesture individual mode enabled.
	 *
	 * <p>Switches gesture mode to per-table selection; any "all selected"
	 * border is hidden until this table is explicitly selected.
	 */
	public void gestureIndividualModeEnabled()
	{
		if (!individualMode)
		{
			gestureMode = true;
			individualMode = true;
			hideSelectionBorder();
		}
	}

	/**
	 * Gesture mode disabled.
	 *
	 * <p>Leaves gesture mode entirely and clears any selection state/border.
	 */
	public void gestureModeDisabled()
	{
		gestureMode = false;
		isSelected = false;
		individualMode = false;
		hideSelectionBorder();
	}

	/**
	 * Localise vector.
	 *
	 * <p>Transforms a world position (metres) into this table's pixel
	 * coordinate space: translate relative to the table, convert metres to
	 * pixels, then rotate about the display centre by the table's
	 * orientation.
	 *
	 * @param vec
	 *            the vec (modified in place)
	 * @return the vector2f (the same instance, localised)
	 */
	public Vector2f localiseVector(Vector2f vec)
	{
		vec.setX(vec.x - TABLE_LOCATION_X);
		vec.setY(vec.y - TABLE_LOCATION_Y);
		vec.setX(SynergyNetPositioning.getPixelValue(vec.x));
		vec.setY(SynergyNetPositioning.getPixelValue(vec.y));
		// Rotate about the display centre, not the origin.
		vec.setX(vec.x - (displayWidth / 2));
		vec.setY(vec.y - (displayHeight / 2));
		vec.rotateAroundOrigin(-TABLE_ORIENTATION, true);
		vec.setX(vec.x + (displayWidth / 2));
		vec.setY(vec.y + (displayHeight / 2));
		return vec;
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#objectAdded(multiplicity3
	 * .input.events.MultiTouchObjectEvent)
	 */
	@Override
	public void objectAdded(MultiTouchObjectEvent event)
	{
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#objectChanged(multiplicity3
	 * .input.events.MultiTouchObjectEvent)
	 */
	@Override
	public void objectChanged(MultiTouchObjectEvent event)
	{
	}

	/*
	 * (non-Javadoc)
	 * @see
	 * multiplicity3.input.IMultiTouchEventListener#objectRemoved(multiplicity3
	 * .input.events.MultiTouchObjectEvent)
	 */
	@Override
	public void objectRemoved(MultiTouchObjectEvent event)
	{
	}

	/*
	 * (non-Javadoc)
	 * @see synergynet3.SynergyNetApp#onDestroy()
	 */
	@Override
	public void onDestroy()
	{
		// Stop the tracking sync before tearing down the app.
		if (trackingSync != null)
		{
			trackingSync.stop();
		}
		super.onDestroy();
	}

	/**
	 * Sets the user locations.
	 *
	 * <p>Replaces the current user list with the remote one, localising each
	 * user's position into this table's pixel coordinate space first.
	 *
	 * @param remoteUserLocations
	 *            the new user locations (world coordinates, metres)
	 */
	public void setUserLocations(ArrayList<CombinedUserEntity> remoteUserLocations)
	{
		userLocations = new ArrayList<CombinedUserEntity>();
		for (CombinedUserEntity user : remoteUserLocations)
		{
			UserLocation userLocation = localiseUserLocation(user.getUserLocation());
			user.setUserLocation(userLocation);
			userLocations.add(user);
		}
	}

	/*
	 * (non-Javadoc)
	 * @see synergynet3.SynergyNetApp#shouldStart(multiplicity3.input.
	 * MultiTouchInputComponent, multiplicity3.appsystem.IQueueOwner)
	 *
	 * Initialises display dimensions, cluster identity, tracking sync, table
	 * pose statics, and the collision-box stand-in for this table.
	 */
	@Override
	public void shouldStart(MultiTouchInputComponent input, IQueueOwner iqo)
	{
		super.shouldStart(input, iqo);
		this.input = input;
		// NOTE(review): assumes the stage's world location is the display
		// centre, so doubling it yields the full display size -- confirm.
		displayWidth = (int) (stage.getWorldLocation().x * 2);
		displayHeight = (int) (stage.getWorldLocation().y * 2);
		trackingTableIdentity = SynergyNetCluster.get().getIdentity();
		TrackingDeviceControl trackingDeviceController = new TrackingDeviceControl(trackingTableIdentity);
		trackingSync = new TrackedAppSync(trackingDeviceController, this);
		TABLE_LOCATION_X = localDevicePosition.getXinMetres();
		TABLE_LOCATION_Y = localDevicePosition.getYinMetres();
		TABLE_ORIENTATION = localDevicePosition.getOrientation();
		TABLE_HEIGHT = SynergyNetPositioning.getPixelValue(localDevicePosition.getInterfaceHeightFromFloorinMetres());
		// Box used only as a collision target for pointing rays.
		Box box = new Box(new Vector3f(), displayWidth / 2, displayHeight / 2, 100);
		tableRep = new Geometry("localTable", box);
		tableRep.setLocalTranslation(displayWidth / 2, displayHeight / 2, TABLE_HEIGHT);
	}

	/**
	 * User pointing.
	 *
	 * <p>Casts a ray along the pointing direction (localised to this table's
	 * coordinate space); when the ray hits this table's collision box and
	 * gesture mode is active, toggles the table's selection.
	 *
	 * @param pointDirection
	 *            the point direction (world coordinates)
	 */
	public void userPointing(PointDirection pointDirection)
	{
		if (!selectionActionMadeRecently)
		{
			Vector3f startPoint = generateLocalised3dVector(pointDirection.getStartPoint());
			Vector3f endPoint = generateLocalised3dVector(pointDirection.getEndPoint());
			// Shift both points so the ray is centred on the display origin.
			startPoint.setX(startPoint.getX() - (displayWidth / 2));
			startPoint.setY(startPoint.getY() - (displayHeight / 2));
			endPoint.setX(endPoint.getX() - (displayWidth / 2));
			endPoint.setY(endPoint.getY() - (displayHeight / 2));
			Vector3f direction = generatorVectorBetween(startPoint, endPoint).normalize();
			Ray ray = new Ray(endPoint, direction);
			CollisionResults results = new CollisionResults();
			ray.collideWith(tableRep.getWorldBound(), results);
			if (results.size() > 0)
			{
				if (gestureMode)
				{
					if (individualMode)
					{
						if (isSelected)
						{
							deselected();
						}
						else
						{
							selected();
						}
					}
					else
					{
						gestureIndividualModeEnabled();
						selected();
					}
				}
			}
		}
	}

	/**
	 * Deselected.
	 *
	 * <p>Clears the selection, hides the border, announces the deselection to
	 * the cluster, and starts the selection cool-down.
	 */
	private void deselected()
	{
		if (!selectionActionMadeRecently)
		{
			isSelected = false;
			hideSelectionBorder();
			TrackingControlComms.get().announceDeSelection(trackingTableIdentity);
			selectionActionMade();
		}
	}

	/**
	 * Generate localised3d vector.
	 *
	 * <p>Localises the (x, y) components into table pixel space and converts
	 * the z component from metres to pixels.
	 *
	 * @param vectorRep
	 *            the vector rep ([x, y, z] in metres)
	 * @return the vector3f in table pixel space
	 */
	private Vector3f generateLocalised3dVector(float[] vectorRep)
	{
		Vector2f originTemp = new Vector2f(vectorRep[0], vectorRep[1]);
		originTemp = localiseVector(originTemp);
		return new Vector3f(originTemp.x, originTemp.y, SynergyNetPositioning.getPixelValue(vectorRep[2]));
	}

	/**
	 * Generator vector between.
	 *
	 * @param from
	 *            the from
	 * @param to
	 *            the to
	 * @return the vector3f pointing from {@code from} to {@code to}
	 */
	private Vector3f generatorVectorBetween(Vector3f from, Vector3f to)
	{
		return new Vector3f(to.getX() - from.getX(), to.getY() - from.getY(), to.getZ() - from.getZ());
	}

	/**
	 * Hide selection border.
	 *
	 * <p>Removes the border from the stage if present; safe to call when no
	 * border is showing.
	 */
	private void hideSelectionBorder()
	{
		if (selectionBorder != null)
		{
			stage.removeItem(selectionBorder);
			selectionBorder = null;
		}
	}

	/**
	 * Localise user location.
	 *
	 * <p>Converts a user's tracked position(s) from world coordinates into
	 * this table's pixel coordinate space, handling the three tracking states
	 * (body only, one hand, two hands).
	 *
	 * @param userLocation
	 *            the user location (modified in place)
	 * @return the same user location, localised
	 */
	private UserLocation localiseUserLocation(UserLocation userLocation)
	{
		if (userLocation.getUserState() == USERSTATE.BODY)
		{
			Vector2f bodyLoc = new Vector2f(userLocation.getLocationOfUser()[0], userLocation.getLocationOfUser()[1]);
			bodyLoc = localiseVector(bodyLoc);
			userLocation.setUserBodyLocation(bodyLoc.x, bodyLoc.y);
		}
		else if (userLocation.getUserState() == USERSTATE.ONE_HAND)
		{
			Vector2f handLoc = new Vector2f(userLocation.getLocationOfUser()[0], userLocation.getLocationOfUser()[1]);
			handLoc = localiseVector(handLoc);
			userLocation.setSingleUserHandLocation(handLoc.x, handLoc.y, SynergyNetPositioning.getPixelValue(userLocation.getLocationOfUser()[2]));
		}
		else if (userLocation.getUserState() == USERSTATE.TWO_HANDS)
		{
			// Indices 0-2 are hand one (x, y, z); 3-5 are hand two.
			Vector2f handOneLoc = new Vector2f(userLocation.getLocationOfUser()[0], userLocation.getLocationOfUser()[1]);
			handOneLoc = localiseVector(handOneLoc);
			Vector2f handTwoLoc = new Vector2f(userLocation.getLocationOfUser()[3], userLocation.getLocationOfUser()[4]);
			handTwoLoc = localiseVector(handTwoLoc);
			userLocation.setBothUserHandLocations(handOneLoc.x, handOneLoc.y, SynergyNetPositioning.getPixelValue(userLocation.getLocationOfUser()[2]), handTwoLoc.x, handTwoLoc.y, SynergyNetPositioning.getPixelValue(userLocation.getLocationOfUser()[5]));
		}
		return userLocation;
	}

	/**
	 * Selected.
	 *
	 * <p>Marks this table selected, shows a white border, announces the
	 * selection to the cluster, and starts the selection cool-down.
	 */
	private void selected()
	{
		if (!selectionActionMadeRecently)
		{
			isSelected = true;
			showSelectionBorder(new ColorRGBA(1f, 1f, 1f, 1f));
			TrackingControlComms.get().announceSelection(trackingTableIdentity);
			selectionActionMade();
		}
	}

	/**
	 * Selection action made.
	 *
	 * <p>Starts the cool-down that suppresses further selection toggles for
	 * SLEEP_TIME ms (debounces repeated touch/pointing events).
	 */
	private void selectionActionMade()
	{
		if (!selectionActionMadeRecently)
		{
			selectionActionMadeRecently = true;
			new Thread(selectionActionMadeTimer).start();
		}
	}

	/**
	 * Show selection border.
	 *
	 * <p>Replaces any existing border with a new non-interactive rounded
	 * border in the given colour.
	 *
	 * @param colour
	 *            the colour
	 */
	private void showSelectionBorder(ColorRGBA colour)
	{
		if (selectionBorder != null)
		{
			hideSelectionBorder();
		}
		try
		{
			selectionBorder = stage.getContentFactory().create(IRoundedBorder.class, "selectionBorder", UUID.randomUUID());
			selectionBorder.setBorderWidth(30f);
			selectionBorder.setSize(displayWidth - 30, displayHeight - 30);
			selectionBorder.setColor(colour);
			stage.addItem(selectionBorder);
			selectionBorder.setInteractionEnabled(false);
		}
		catch (ContentTypeNotBoundException e)
		{
			e.printStackTrace();
		}
	}

	/**
	 * Gets the user.
	 *
	 * @param userID
	 *            the user id
	 * @return the user, or null when no tracked user has that id
	 */
	protected CombinedUserEntity getUser(int userID)
	{
		for (CombinedUserEntity user : userLocations)
		{
			if (user.getUniqueID() == userID)
			{
				return user;
			}
		}
		return null;
	}

	/**
	 * Gets the user colour.
	 *
	 * @param userID
	 *            the user id
	 * @return the user colour
	 */
	protected ColorRGBA getUserColour(int userID)
	{
		return UserColourUtils.getRGBAColour(userID);
	}

	/**
	 * Checks if is teacher.
	 *
	 * <p>NOTE(review): throws NullPointerException when the id is unknown,
	 * since {@link #getUser(int)} can return null -- confirm callers only
	 * pass ids present in {@link #touches}.
	 *
	 * @param userID
	 *            the user id
	 * @return true, if is teacher
	 */
	protected boolean isTeacher(int userID)
	{
		return getUser(userID).isTeacher();
	}
}
| |
/*
* Copyright (c) 2010, 2011, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
*******************************************************************************
* Copyright (C) 2009-2010, International Business Machines Corporation and *
* others. All Rights Reserved. *
*******************************************************************************
*/
package sun.util.locale;
/**
 * Immutable holder for the four base components of a locale (language,
 * script, region, variant).
 *
 * <p>All field values are interned strings (see the four-argument
 * constructor and {@code Key.normalize}), which is what makes the identity
 * ({@code ==}) comparisons in {@link #equals(Object)} valid. Instances are
 * canonicalised through {@code CACHE}, so equal components yield the same
 * instance.
 */
public final class BaseLocale {

    public static final String SEP = "_";

    private static final Cache CACHE = new Cache();

    private final String language;
    private final String script;
    private final String region;
    private final String variant;

    // Lazily computed hash; 0 means "not yet computed". The benign data race
    // on this field is safe because the computation is deterministic.
    private volatile int hash = 0;

    // This method must be called only when creating the Locale.* constants.
    // Arguments are assumed to be already normalized and interned.
    private BaseLocale(String language, String region) {
        this.language = language;
        this.script = "";
        this.region = region;
        this.variant = "";
    }

    // Normalizes case and interns every component so equals() can use
    // reference comparison. Null components become "".
    private BaseLocale(String language, String script, String region, String variant) {
        this.language = (language != null) ? LocaleUtils.toLowerString(language).intern() : "";
        this.script = (script != null) ? LocaleUtils.toTitleString(script).intern() : "";
        this.region = (region != null) ? LocaleUtils.toUpperString(region).intern() : "";
        this.variant = (variant != null) ? variant.intern() : "";
    }

    // Called for creating the Locale.* constants. No argument
    // validation is performed.
    public static BaseLocale createInstance(String language, String region) {
        BaseLocale base = new BaseLocale(language, region);
        CACHE.put(new Key(language, region), base);
        return base;
    }

    // Returns the canonical (cached) instance for the given components,
    // mapping the new ISO 639-1 codes back to the legacy codes the JDK uses.
    public static BaseLocale getInstance(String language, String script,
                                         String region, String variant) {
        // JDK uses deprecated ISO639.1 language codes for he, yi and id
        if (language != null) {
            if (LocaleUtils.caseIgnoreMatch(language, "he")) {
                language = "iw";
            } else if (LocaleUtils.caseIgnoreMatch(language, "yi")) {
                language = "ji";
            } else if (LocaleUtils.caseIgnoreMatch(language, "id")) {
                language = "in";
            }
        }
        Key key = new Key(language, script, region, variant);
        BaseLocale baseLocale = CACHE.get(key);
        return baseLocale;
    }

    public String getLanguage() {
        return language;
    }

    public String getScript() {
        return script;
    }

    public String getRegion() {
        return region;
    }

    public String getVariant() {
        return variant;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof BaseLocale)) {
            return false;
        }
        BaseLocale other = (BaseLocale)obj;
        // Identity comparison is intentional: all fields are interned.
        return language == other.language
               && script == other.script
               && region == other.region
               && variant == other.variant;
    }

    // Debug-style representation listing only the non-empty components.
    @Override
    public String toString() {
        StringBuilder buf = new StringBuilder();
        if (language.length() > 0) {
            buf.append("language=");
            buf.append(language);
        }
        if (script.length() > 0) {
            if (buf.length() > 0) {
                buf.append(", ");
            }
            buf.append("script=");
            buf.append(script);
        }
        if (region.length() > 0) {
            if (buf.length() > 0) {
                buf.append(", ");
            }
            buf.append("region=");
            buf.append(region);
        }
        if (variant.length() > 0) {
            if (buf.length() > 0) {
                buf.append(", ");
            }
            buf.append("variant=");
            buf.append(variant);
        }
        return buf.toString();
    }

    @Override
    public int hashCode() {
        int h = hash;
        if (h == 0) {
            // Generating a hash value from language, script, region and variant
            h = language.hashCode();
            h = 31 * h + script.hashCode();
            h = 31 * h + region.hashCode();
            h = 31 * h + variant.hashCode();
            hash = h;
        }
        return h;
    }

    /**
     * Cache key. Compares components case-insensitively (except variant,
     * which is case sensitive) and carries a precomputed case-folded hash.
     * The {@code normalized} flag records whether the components are already
     * normalized/interned, letting {@link #normalize(Key)} short-circuit.
     */
    private static final class Key implements Comparable<Key> {
        private final String lang;
        private final String scrt;
        private final String regn;
        private final String vart;
        private final boolean normalized;
        private final int hash;

        /**
         * Creates a Key. language and region must be normalized
         * (intern'ed in the proper case).
         */
        private Key(String language, String region) {
            assert language.intern() == language
                   && region.intern() == region;
            lang = language;
            scrt = "";
            regn = region;
            vart = "";
            this.normalized = true;
            int h = language.hashCode();
            // Identity comparison with "" is safe here: the assert above
            // guarantees region is interned, and "" is always interned.
            if (region != "") {
                int len = region.length();
                for (int i = 0; i < len; i++) {
                    h = 31 * h + LocaleUtils.toLower(region.charAt(i));
                }
            }
            hash = h;
        }

        public Key(String language, String script, String region, String variant) {
            this(language, script, region, variant, false);
        }

        // Builds the case-folded hash incrementally; variant chars are used
        // as-is because variant comparison is case sensitive.
        private Key(String language, String script, String region,
                    String variant, boolean normalized) {
            int h = 0;
            if (language != null) {
                lang = language;
                int len = language.length();
                for (int i = 0; i < len; i++) {
                    h = 31*h + LocaleUtils.toLower(language.charAt(i));
                }
            } else {
                lang = "";
            }
            if (script != null) {
                scrt = script;
                int len = script.length();
                for (int i = 0; i < len; i++) {
                    h = 31*h + LocaleUtils.toLower(script.charAt(i));
                }
            } else {
                scrt = "";
            }
            if (region != null) {
                regn = region;
                int len = region.length();
                for (int i = 0; i < len; i++) {
                    h = 31*h + LocaleUtils.toLower(region.charAt(i));
                }
            } else {
                regn = "";
            }
            if (variant != null) {
                vart = variant;
                int len = variant.length();
                for (int i = 0; i < len; i++) {
                    h = 31*h + variant.charAt(i);
                }
            } else {
                vart = "";
            }
            hash = h;
            this.normalized = normalized;
        }

        @Override
        public boolean equals(Object obj) {
            return (this == obj) ||
                    (obj instanceof Key)
                    && this.hash == ((Key)obj).hash
                    && LocaleUtils.caseIgnoreMatch(((Key)obj).lang, this.lang)
                    && LocaleUtils.caseIgnoreMatch(((Key)obj).scrt, this.scrt)
                    && LocaleUtils.caseIgnoreMatch(((Key)obj).regn, this.regn)
                    && ((Key)obj).vart.equals(vart); // variant is case sensitive in JDK!
        }

        @Override
        public int compareTo(Key other) {
            int res = LocaleUtils.caseIgnoreCompare(this.lang, other.lang);
            if (res == 0) {
                res = LocaleUtils.caseIgnoreCompare(this.scrt, other.scrt);
                if (res == 0) {
                    res = LocaleUtils.caseIgnoreCompare(this.regn, other.regn);
                    if (res == 0) {
                        res = this.vart.compareTo(other.vart);
                    }
                }
            }
            return res;
        }

        @Override
        public int hashCode() {
            return hash;
        }

        // Returns a Key with case-normalized, interned components; hash is
        // unchanged because it is computed case-insensitively.
        public static Key normalize(Key key) {
            if (key.normalized) {
                return key;
            }
            String lang = LocaleUtils.toLowerString(key.lang).intern();
            String scrt = LocaleUtils.toTitleString(key.scrt).intern();
            String regn = LocaleUtils.toUpperString(key.regn).intern();
            String vart = key.vart.intern(); // preserve upper/lower cases
            return new Key(lang, scrt, regn, vart, true);
        }
    }

    // Canonicalising cache: normalizes keys and lazily creates BaseLocale
    // instances from the normalized key components.
    private static class Cache extends LocaleObjectCache<Key, BaseLocale> {
        public Cache() {
        }

        @Override
        protected Key normalizeKey(Key key) {
            return Key.normalize(key);
        }

        @Override
        protected BaseLocale createObject(Key key) {
            return new BaseLocale(key.lang, key.scrt, key.regn, key.vart);
        }
    }
}
| |
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/*
* Copyright (C) 2002-2013 Sebastiano Vigna
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.unimi.dsi.fastutil.objects;
import it.unimi.dsi.fastutil.BigArrays;
import it.unimi.dsi.fastutil.Hash;
import it.unimi.dsi.fastutil.Size64;
import it.unimi.dsi.fastutil.HashCommon;
import it.unimi.dsi.fastutil.booleans.BooleanBigArrays;
import static it.unimi.dsi.fastutil.HashCommon.bigArraySize;
import static it.unimi.dsi.fastutil.HashCommon.maxFill;
import java.util.Collection;
import java.util.NoSuchElementException;
/** A type-specific hash big set with with a fast, small-footprint implementation.
*
* <P>Instances of this class use a hash table to represent a big set: the number
* of elements in the set is limited only by the amount of core memory. The table is
* backed by a {@linkplain it.unimi.dsi.fastutil.BigArrays big array} and is
* enlarged as needed by doubling its size when new entries are created, but it is <em>never</em> made
* smaller (even on a {@link #clear()}). A family of {@linkplain #trim(long) trimming
* method} lets you control the size of the table; this is particularly useful
* if you reuse instances of this class.
*
* <p>The methods of this class are about 30% slower than those of the corresponding non-big set.
*
* @see Hash
* @see HashCommon
*/
public class ObjectOpenHashBigSet <K> extends AbstractObjectSet <K> implements java.io.Serializable, Cloneable, Hash, Size64 {
private static final long serialVersionUID = 0L;
private static final boolean ASSERTS = false;
/** The big array of keys. */
protected transient K[][] key;
/** The big array telling whether a position is used. */
protected transient boolean[][] used;
/** The acceptable load factor. */
protected final float f;
/** The current table size (always a power of 2). */
protected transient long n;
/** Threshold after which we rehash. It must be the table size times {@link #f}. */
protected transient long maxFill;
/** The mask for wrapping a position counter. */
protected transient long mask;
/** The mask for wrapping a segment counter. */
protected transient int segmentMask;
/** The mask for wrapping a base counter. */
protected transient int baseMask;
/** Number of entries in the set. */
protected long size;
/** Initialises the mask values from the current table size and the shape of
 * the backing big array. Must be called after {@code key} has been
 * (re)allocated with {@code n} slots. */
private void initMasks() {
 mask = n - 1; // n is a power of two, so this is a valid bit mask.
 /* Note that either we have more than one segment, and in this case all segments
		 * are BigArrays.SEGMENT_SIZE long, or we have exactly one segment whose length
		 * is a power of two. */
 segmentMask = key[ 0 ].length - 1;
 baseMask = key.length - 1;
}
/** Creates a new hash big set.
	 *
	 * <p>The actual table size will be the least power of two greater than <code>expected</code>/<code>f</code>.
	 *
	 * @param expected the expected number of elements in the set.
	 * @param f the load factor.
	 * @throws IllegalArgumentException if <code>f</code> is not in (0,1] or <code>expected</code> is negative.
	 */
@SuppressWarnings("unchecked")
public ObjectOpenHashBigSet( final long expected, final float f ) {
 if ( f <= 0 || f > 1 ) throw new IllegalArgumentException( "Load factor must be greater than 0 and smaller than or equal to 1" );
 // Bug fix: the guard previously tested the field `n` (still 0 at this
 // point), so a negative `expected` was never rejected and flowed into
 // bigArraySize(). Test the parameter, as the message describes.
 if ( expected < 0 ) throw new IllegalArgumentException( "The expected number of elements must be nonnegative" );
 this.f = f;
 n = bigArraySize( expected, f );
 maxFill = maxFill( n, f );
 key = (K[][]) ObjectBigArrays.newBigArray( n );
 used = BooleanBigArrays.newBigArray( n );
 initMasks();
}
/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
	 *
	 * @param expected the expected number of elements in the hash big set.
	 * @see #ObjectOpenHashBigSet(long, float)
	 */
public ObjectOpenHashBigSet( final long expected ) {
 this( expected, DEFAULT_LOAD_FACTOR );
}
/** Creates a new hash big set with initial expected {@link Hash#DEFAULT_INITIAL_SIZE} elements
	 * and {@link Hash#DEFAULT_LOAD_FACTOR} as load factor.
	 */
public ObjectOpenHashBigSet() {
 this( DEFAULT_INITIAL_SIZE, DEFAULT_LOAD_FACTOR );
}
/** Creates a new hash big set copying a given collection.
	 *
	 * <p>The table is sized for {@code c.size()} elements before copying.
	 *
	 * @param c a {@link Collection} to be copied into the new hash big set.
	 * @param f the load factor.
	 */
public ObjectOpenHashBigSet( final Collection<? extends K> c, final float f ) {
 this( c.size(), f );
 addAll( c );
}
/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
	 * copying a given collection.
	 *
	 * @param c a {@link Collection} to be copied into the new hash big set.
	 */
public ObjectOpenHashBigSet( final Collection<? extends K> c ) {
 this( c, DEFAULT_LOAD_FACTOR );
}
/** Creates a new hash big set copying a given type-specific collection.
	 *
	 * <p>The table is sized for {@code c.size()} elements before copying.
	 *
	 * @param c a type-specific collection to be copied into the new hash big set.
	 * @param f the load factor.
	 */
public ObjectOpenHashBigSet( final ObjectCollection <? extends K> c, final float f ) {
 this( c.size(), f );
 addAll( c );
}
/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
	 * copying a given type-specific collection.
	 *
	 * @param c a type-specific collection to be copied into the new hash big set.
	 */
public ObjectOpenHashBigSet( final ObjectCollection <? extends K> c ) {
 this( c, DEFAULT_LOAD_FACTOR );
}
/** Creates a new hash big set using elements provided by a type-specific iterator.
	 *
	 * <p>The iterator is fully consumed; the table starts at
	 * {@link Hash#DEFAULT_INITIAL_SIZE} and grows as needed.
	 *
	 * @param i a type-specific iterator whose elements will fill the new hash big set.
	 * @param f the load factor.
	 */
public ObjectOpenHashBigSet( final ObjectIterator <K> i, final float f ) {
 this( DEFAULT_INITIAL_SIZE, f );
 while( i.hasNext() ) add( i.next() );
}
/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor using elements provided by a type-specific iterator.
	 *
	 * @param i a type-specific iterator whose elements will fill the new hash big set.
	 */
public ObjectOpenHashBigSet( final ObjectIterator <K> i ) {
 this( i, DEFAULT_LOAD_FACTOR );
}
/** Creates a new hash big set and fills it with the elements of a given array.
*
* @param a an array whose elements will be used to fill the new hash big set.
* @param offset the first element to use.
* @param length the number of elements to use.
* @param f the load factor.
*/
public ObjectOpenHashBigSet( final K[] a, final int offset, final int length, final float f ) {
this( length < 0 ? 0 : length, f );
ObjectArrays.ensureOffsetLength( a, offset, length );
for( int i = 0; i < length; i++ ) add( a[ offset + i ] );
}
/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor and fills it with the elements of a given array.
*
* @param a an array whose elements will be used to fill the new hash big set.
* @param offset the first element to use.
* @param length the number of elements to use.
*/
public ObjectOpenHashBigSet( final K[] a, final int offset, final int length ) {
this( a, offset, length, DEFAULT_LOAD_FACTOR );
}
/** Creates a new hash big set copying the elements of an array.
*
* @param a an array to be copied into the new hash big set.
* @param f the load factor.
*/
public ObjectOpenHashBigSet( final K[] a, final float f ) {
this( a, 0, a.length, f );
}
/** Creates a new hash big set with {@link Hash#DEFAULT_LOAD_FACTOR} as load factor
* copying the elements of an array.
*
* @param a an array to be copied into the new hash big set.
*/
public ObjectOpenHashBigSet( final K[] a ) {
this( a, DEFAULT_LOAD_FACTOR );
}
/** Adds the given element to this set, if it is not already present.
 *
 * @param k the element to add ({@code null} is a legal element and hashes to a fixed value).
 * @return true if this set changed as a result of the call.
 */
public boolean add( final K k ) {
  final long h = ( (k) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(k).hashCode() ) );
  // The starting point.
  // The table is a big array split into segments: base indexes the segment,
  // displ is the displacement inside it.
  int displ = (int)( h & segmentMask );
  int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
  // There's always an unused entry.
  // Linear probing: when displ wraps past the end of a segment, advance base.
  while( used[ base ][ displ ] ) {
    if ( ( (key[ base ][ displ ]) == null ? (k) == null : (key[ base ][ displ ]).equals(k) ) ) return false;
    base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
  }
  used[ base ][ displ ] = true;
  key[ base ][ displ ] = k;
  // Grow (doubling the table) once the load factor threshold is crossed.
  if ( ++size >= maxFill ) rehash( 2 * n );
  if ( ASSERTS ) checkTable();
  return true;
}
/** Shifts left entries with the specified hash code, starting at the specified position,
 * and empties the resulting free entry.
 *
 * <p>This is the standard open-addressing deletion fix-up: entries displaced by linear
 * probing are moved back so that lookups never cross an empty slot they should not.
 *
 * @param pos a starting position.
 * @return the position cleared by the shifting process.
 */
protected final long shiftKeys( long pos ) {
  // Shift entries with the same hash.
  long last, slot;
  /*
  for( int i = 0; i < 10; i++ ) System.err.print( key[ ( t + i ) & mask ] + "(" + (avalanche( (long)KEY2INT( key[ ( t + i ) & mask ] ) ) & mask) + "; " + used[ ( t + i ) & mask ] + ") ");
  System.err.println();
  */
  for(;;) {
    pos = ( ( last = pos ) + 1 ) & mask;
    // Scan forward for an entry that may legally be moved into the hole at last.
    while( BooleanBigArrays.get( used, pos ) ) {
      slot = ( (ObjectBigArrays.get( key, pos )) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(ObjectBigArrays.get( key, pos )).hashCode() ) ) & mask;
      // Move only if the entry's home slot does not lie in the circular range (last, pos].
      if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
      pos = ( pos + 1 ) & mask;
    }
    if ( ! BooleanBigArrays.get( used, pos ) ) break;
    ObjectBigArrays.set( key, last, ObjectBigArrays.get( key, pos ) );
  }
  // Empty the final hole and null the key so the object can be garbage collected.
  BooleanBigArrays.set( used, last, false );
  ObjectBigArrays.set( key, last, null );
  return last;
}
/** Removes the given element from this set, if present.
 *
 * @param k the element to remove (may be {@code null}).
 * @return true if this set contained the element.
 */
@SuppressWarnings("unchecked")
public boolean remove( final Object k ) {
  final long h = ( (k) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(k).hashCode() ) );
  // The starting point.
  int displ = (int)( h & segmentMask );
  int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
  // There's always an unused entry.
  while( used[ base ][ displ ] ) {
    if ( ( (key[ base ][ displ ]) == null ? (k) == null : (key[ base ][ displ ]).equals(k) ) ) {
      size--;
      // Repair the probe sequence after the deletion; the argument is the flat
      // (linearized) index of the removed slot.
      shiftKeys( base * (long)BigArrays.SEGMENT_SIZE + displ );
      if ( ASSERTS ) checkTable();
      return true;
    }
    base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
  }
  return false;
}
/** Returns true if this set contains the given element.
 *
 * @param k the element to look for (may be {@code null}).
 * @return true if this set contains {@code k}.
 */
@SuppressWarnings("unchecked")
public boolean contains( final Object k ) {
  final long h = ( (k) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(k).hashCode() ) );
  // The starting point.
  int displ = (int)( h & segmentMask );
  int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
  // There's always an unused entry.
  while( used[ base ][ displ ] ) {
    if ( ( (key[ base ][ displ ]) == null ? (k) == null : (key[ base ][ displ ]).equals(k) ) ) return true;
    base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
  }
  return false;
}
/** Returns the element of this set that is equal to the given key, or <code>null</code>.
 *
 * <p>Useful for interning: retrieves the canonical instance stored in the set
 * rather than the probe object. Note that a <code>null</code> return is ambiguous
 * if the set may contain <code>null</code>.
 *
 * @param k the key to look up (may be {@code null}).
 * @return the element of this set that is equal to the given key, or <code>null</code>.
 */
public K get( final Object k ) {
  final long h = ( (k) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(k).hashCode() ) );
  // The starting point.
  int displ = (int)( h & segmentMask );
  int base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
  // There's always an unused entry.
  while( used[ base ][ displ ] ) {
    if ( ( (key[ base ][ displ ]) == null ? (k) == null : (key[ base ][ displ ]).equals(k) ) ) return key[ base ][ displ ];
    base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
  }
  return null;
}
/** Removes all elements from this set.
 *
 * <P>To increase object reuse, this method does not change the table size.
 * If you want to reduce the table size, you must use {@link #trim(long)}.
 */
public void clear() {
  // Nothing to do for an already-empty set; otherwise reset the element count
  // and blank both backing big arrays (nulling keys lets them be collected).
  if ( size != 0 ) {
    size = 0;
    BooleanBigArrays.fill( used, false );
    ObjectBigArrays.fill( key, null );
  }
}
/** An iterator over a hash big set.
 *
 * <p>Scans the table backwards (from the last slot towards slot 0). Entries that
 * wrap around the table because of concurrent {@link #remove()} calls are parked
 * in {@link #wrapped} and enumerated after the scan completes.
 */
private class SetIterator extends AbstractObjectIterator <K> {
  /** The base of the next entry to be returned, if positive or zero. If negative, the next entry to be
  returned, if any, is that of index -base -2 from the {@link #wrapped} list. */
  int base;
  /** The displacement of the next entry to be returned. */
  int displ;
  /** The base of the last entry that has been returned. It is -1 if either
  we did not return an entry yet, or the last returned entry has been removed. */
  int lastBase;
  /** The displacement of the last entry that has been returned. It is undefined if either
  we did not return an entry yet, or the last returned entry has been removed. */
  int lastDispl;
  /** A downward counter measuring how many entries must still be returned. */
  long c = size;
  /** A lazily allocated list containing elements that have wrapped around the table because of removals; such elements
  would not be enumerated (other elements would be usually enumerated twice in their place). */
  ObjectArrayList <K> wrapped;
  {
    // Position (base, displ) just past the end of the table, then walk backwards
    // to the first used slot (there is one, since c != 0 implies size != 0).
    base = key.length;
    lastBase = -1;
    final boolean used[][] = ObjectOpenHashBigSet.this.used;
    if ( c != 0 ) do
      if ( displ-- == 0 ) {
        base--;
        // NOTE(review): (int)mask is used to reset the in-segment displacement;
        // this equals segmentMask only for single-segment tables (n <= SEGMENT_SIZE).
        // For multi-segment tables it looks out of range — confirm against upstream.
        displ = (int)mask;
      }
    while( ! used[ base ][ displ ] );
  }
  public boolean hasNext() {
    return c != 0;
  }
  public K next() {
    if ( ! hasNext() ) throw new NoSuchElementException();
    c--;
    // We are just enumerating elements from the wrapped list.
    if ( base < 0 ) return wrapped.get( - ( lastBase = --base ) - 2 );
    final K retVal = key[ lastBase = base ][ lastDispl = displ ];
    if ( c != 0 ) {
      // Advance backwards to the next used slot.
      final boolean used[][] = ObjectOpenHashBigSet.this.used;
      do
        if ( displ-- == 0 ) {
          if ( base-- == 0 ) break;
          // NOTE(review): same (int)mask vs. segmentMask concern as in the initializer.
          displ = (int)mask;
        }
      while( ! used[ base ][ displ ] );
      // When here base < 0 there are no more elements to be enumerated by scanning, but wrapped might be nonempty.
    }
    return retVal;
  }
  /** Shifts left entries with the specified hash code, starting at the specified position,
   * and empties the resulting free entry. If any entry wraps around the table, instantiates
   * lazily {@link #wrapped} and stores the entry.
   *
   * @param pos a starting position.
   * @return the position cleared by the shifting process.
   */
  protected final long shiftKeys( long pos ) {
    // Shift entries with the same hash.
    long last, slot;
    /*
    for( int i = 0; i < 10; i++ ) System.err.print( key[ ( t + i ) & mask ] + "(" + (avalanche( (long)KEY2INT( key[ ( t + i ) & mask ] ) ) & mask) + "; " + used[ ( t + i ) & mask ] + ") ");
    System.err.println();
    */
    for(;;) {
      pos = ( ( last = pos ) + 1 ) & mask;
      while( BooleanBigArrays.get( used, pos ) ) {
        slot = ( (ObjectBigArrays.get( key, pos )) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(ObjectBigArrays.get( key, pos )).hashCode() ) ) & mask;
        if ( last <= pos ? last >= slot || slot > pos : last >= slot && slot > pos ) break;
        pos = ( pos + 1 ) & mask;
      }
      if ( ! BooleanBigArrays.get( used, pos ) ) break;
      if ( pos < last ) {
        // Wrapped entry: it moved past the iterator's scan point, so remember it
        // in the wrapped list or it would be skipped by the backward scan.
        if ( wrapped == null ) wrapped = new ObjectArrayList <K>();
        wrapped.add( ObjectBigArrays.get( key, pos ) );
      }
      ObjectBigArrays.set( key, last, ObjectBigArrays.get( key, pos ) );
    }
    BooleanBigArrays.set( used, last, false );
    ObjectBigArrays.set( key, last, null );
    return last;
  }
  @SuppressWarnings("unchecked")
  public void remove() {
    if ( lastBase == -1 ) throw new IllegalStateException();
    if ( base < -1 ) {
      // We're removing wrapped entries.
      ObjectOpenHashBigSet.this.remove( wrapped.set( - base - 2, null ) );
      lastBase = -1;
      return;
    }
    size--;
    // If the shift moved an entry into the slot the iterator is about to visit,
    // re-run next() so that entry is not skipped.
    if ( shiftKeys( lastBase * (long)BigArrays.SEGMENT_SIZE + lastDispl ) == base * (long)BigArrays.SEGMENT_SIZE + displ && c > 0 ) {
      c++;
      next();
    }
    lastBase = -1; // You can no longer remove this entry.
    if ( ASSERTS ) checkTable();
  }
}
/** Returns a type-specific iterator over the elements of this set.
 *
 * @return a type-specific iterator over the elements of this set.
 */
public ObjectIterator <K> iterator() {
  return new SetIterator();
}
/** A no-op for backward compatibility. The kind of tables implemented by
 * this class never need rehashing.
 *
 * <P>If you need to reduce the table size to fit exactly
 * this set, use {@link #trim()}.
 *
 * @return true.
 * @see #trim()
 * @deprecated A no-op.
 */
@Deprecated
public boolean rehash() {
  // Intentionally does nothing; kept only for source compatibility with older releases.
  return true;
}
/** Rehashes this set, making the table as small as possible.
 *
 * <P>This method rehashes the table to the smallest size satisfying the
 * load factor. It can be used when the set will not be changed anymore, so
 * to optimize access speed and size.
 *
 * <P>If the table size is already the minimum possible, this method
 * does nothing.
 *
 * @return true if there was enough memory to trim the set.
 * @see #trim(long)
 */
public boolean trim() {
  final long l = bigArraySize( size, f );
  // Already at (or below) the minimum size for the current content: nothing to do.
  if ( l >= n ) return true;
  try {
    rehash( l );
  }
  // Rehashing needs a second table while copying; report failure instead of propagating.
  catch(OutOfMemoryError cantDoIt) { return false; }
  return true;
}
/** Rehashes this set if the table is too large.
 *
 * <P>Let <var>N</var> be the smallest table size that can hold
 * <code>max(n,{@link #size64()})</code> entries, still satisfying the load factor. If the current
 * table size is smaller than or equal to <var>N</var>, this method does
 * nothing. Otherwise, it rehashes this set in a table of size
 * <var>N</var>.
 *
 * <P>This method is useful when reusing sets. {@linkplain #clear() Clearing a
 * set} leaves the table size untouched. If you are reusing a set
 * many times, you can call this method with a typical
 * size to avoid keeping around a very large table just
 * because of a few large transient sets.
 *
 * @param n the threshold for the trimming.
 * @return true if there was enough memory to trim the set.
 * @see #trim()
 */
public boolean trim( final long n ) {
  final long l = bigArraySize( n, f );
  // Current table is already no larger than the target: nothing to do.
  if ( this.n <= l ) return true;
  try {
    rehash( l );
  }
  // Rehashing needs a second table while copying; report failure instead of propagating.
  catch( OutOfMemoryError cantDoIt ) { return false; }
  return true;
}
/** Resizes the set.
 *
 * <P>This method implements the basic rehashing strategy, and may be
 * overridden by subclasses implementing different rehashing strategies (e.g.,
 * disk-based rehashing). However, you should not override this method
 * unless you understand the internal workings of this class.
 *
 * @param newN the new size
 */
@SuppressWarnings("unchecked")
protected void rehash( final long newN ) {
  final boolean used[][] = this.used;
  final K key[][] = this.key;
  final boolean newUsed[][] = BooleanBigArrays.newBigArray( newN );
  final K newKey[][] = (K[][]) ObjectBigArrays.newBigArray( newN );
  // Masks for the new table; the instance fields are refreshed by initMasks() below.
  final long newMask = newN - 1;
  final int newSegmentMask = newKey[ 0 ].length - 1;
  final int newBaseMask = newKey.length - 1;
  int base = 0, displ = 0;
  long h;
  K k;
  // Walk the old table once per stored element, re-inserting each into the new table.
  for( long i = size; i-- != 0; ) {
    // Skip empty slots; no base mask needed since exactly size used slots remain ahead.
    while( ! used[ base ][ displ ] ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
    k = key[ base ][ displ ];
    h = ( (k) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(k).hashCode() ) );
    // The starting point.
    int d = (int)( h & newSegmentMask );
    int b = (int)( ( h & newMask ) >>> BigArrays.SEGMENT_SHIFT );
    // Linear probing in the new table; it is below the load factor, so a free slot exists.
    while( newUsed[ b ][ d ] ) b = ( b + ( ( d = ( d + 1 ) & newSegmentMask ) == 0 ? 1 : 0 ) ) & newBaseMask;
    newUsed[ b ][ d ] = true;
    newKey[ b ][ d ] = k;
    base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
  }
  this.n = newN;
  this.key = newKey;
  this.used = newUsed;
  initMasks();
  maxFill = maxFill( n, f );
}
/** Returns the number of elements, saturated at {@link Integer#MAX_VALUE}.
 * @deprecated Use {@link #size64()}; a big set may hold more than {@link Integer#MAX_VALUE} elements.
 */
@Deprecated
public int size() {
  return (int)Math.min( Integer.MAX_VALUE, size );
}
/** Returns the exact number of elements in this set. */
public long size64() {
  return size;
}
/** Returns true if this set contains no elements. */
public boolean isEmpty() {
  return size == 0;
}
/** Returns a deep copy of this big set.
 *
 * <P>This method performs a deep copy of this big hash set; the data stored in the
 * set, however, is not cloned. Note that this makes a difference only for object keys.
 *
 * @return a deep copy of this big set.
 */
@SuppressWarnings("unchecked")
public ObjectOpenHashBigSet <K> clone() {
  ObjectOpenHashBigSet <K> c;
  try {
    c = (ObjectOpenHashBigSet <K>)super.clone();
  }
  catch(CloneNotSupportedException cantHappen) {
    // This class is Cloneable, so super.clone() cannot fail.
    throw new InternalError();
  }
  // Shallow field copy done by super.clone(); replace the shared tables with copies.
  c.key = ObjectBigArrays.copy( key );
  c.used = BooleanBigArrays.copy( used );
  return c;
}
/** Returns a hash code for this set.
 *
 * This method overrides the generic method provided by the superclass.
 * Since <code>equals()</code> is not overridden, it is important
 * that the value returned by this method is the same value as
 * the one returned by the overridden method.
 *
 * @return a hash code for this set.
 */
public int hashCode() {
  final boolean used[][] = this.used;
  final K key[][] = this.key;
  int h = 0;
  int base = 0, displ = 0;
  // Sum the hash codes of all stored elements (order-independent, per Set contract).
  for( long j = size; j-- != 0; ) {
    while( ! used[ base ][ displ ] ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
    // Skip the set itself to avoid infinite recursion if the set contains itself.
    if ( this != key[ base ][ displ ] )
      h += ( (key[ base ][ displ ]) == null ? 0 : (key[ base ][ displ ]).hashCode() );
    base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) );
  }
  return h;
}
/** Serializes this set: default fields first, then the elements in iteration order.
 * The table itself is not written; {@link #readObject} rebuilds it from size and f. */
private void writeObject(java.io.ObjectOutputStream s) throws java.io.IOException {
  final ObjectIterator <K> i = iterator();
  s.defaultWriteObject();
  for( long j = size; j-- != 0; ) s.writeObject( i.next() );
}
/** Deserializes this set: restores default fields, recomputes the table geometry
 * from the deserialized size and load factor, then re-inserts each stored element. */
@SuppressWarnings("unchecked")
private void readObject(java.io.ObjectInputStream s) throws java.io.IOException, ClassNotFoundException {
  s.defaultReadObject();
  n = bigArraySize( size, f );
  maxFill = maxFill( n, f );
  final K[][] key = this.key = (K[][]) ObjectBigArrays.newBigArray( n );
  final boolean used[][] = this.used = BooleanBigArrays.newBigArray( n );
  initMasks();
  long h;
  K k;
  int base, displ;
  // Re-insert without duplicate checks: the serialized stream came from a set,
  // so elements are known to be distinct.
  for( long i = size; i-- != 0; ) {
    k = (K) s.readObject();
    h = ( (k) == null ? 0x810879608e4259ccL : it.unimi.dsi.fastutil.HashCommon.murmurHash3( (long)(k).hashCode() ) );
    base = (int)( ( h & mask ) >>> BigArrays.SEGMENT_SHIFT );
    displ = (int)( h & segmentMask );
    while( used[ base ][ displ ] ) base = ( base + ( ( displ = ( displ + 1 ) & segmentMask ) == 0 ? 1 : 0 ) ) & baseMask;
    used[ base ][ displ ] = true;
    key[ base ][ displ ] = k;
  }
  if ( ASSERTS ) checkTable();
}
// Debug hook: intentionally empty in production builds (guarded by ASSERTS at call sites).
private void checkTable() {}
}
| |
/**
* Copyright 2010 Wealthfront Inc. Licensed under the Apache License,
* Version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law
* or agreed to in writing, software distributed under the License is
* distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package com.kaching.platform.converters;
import static com.kaching.platform.converters.Instantiators.createConverter;
import static com.kaching.platform.converters.Instantiators.createInstantiator;
import static java.lang.String.format;
import static java.util.Arrays.asList;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.junit.Test;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.inject.TypeLiteral;
import com.kaching.platform.common.Option;
import com.kaching.platform.converters.someotherpackage.ValueWithConverterAsInnerClass;
/**
 * Unit tests for {@link Instantiators}: constructor-based instantiation from string
 * arguments (positional and by name), optional parameters and default values,
 * {@code Option} typed parameters, converter registration via
 * {@code AbstractInstantiatorModule}, converters declared with {@code @ConvertedBy},
 * and round-tripping via {@code fromInstance}.
 */
public class InstantiatorsTest {
  static class ConstructMe1 {}
  @Test
  public void constructMe1() {
    assertNotNull(Instantiators
        .createInstantiator(ConstructMe1.class)
        .newInstance());
  }
  static class ConstructMe2 {
    private final String name;
    ConstructMe2(String name) {
      this.name = name;
    }
  }
  @Test
  public void constructMe2() {
    ConstructMe2 instance = Instantiators
        .createInstantiator(ConstructMe2.class)
        .newInstance("Jack Bauer");
    assertNotNull(instance);
    assertEquals("Jack Bauer", instance.name);
  }
  @Test
  public void constructMe2ByName() {
    ConstructMe2 instance = Instantiators
        .createInstantiator(ConstructMe2.class)
        .newInstance(ImmutableMap.of("name", "Jack Bauer"));
    assertNotNull(instance);
    assertEquals("Jack Bauer", instance.name);
  }
  static class ConstructMe3 {
    private final WrappedString name;
    private final ConvertedPair pair;
    ConstructMe3(WrappedString name, ConvertedPair pair) {
      this.name = name;
      this.pair = pair;
    }
  }
  @Test
  public void constructMe3() {
    Instantiator<ConstructMe3> instantiator = Instantiators
        .createInstantiator(ConstructMe3.class);
    ConstructMe3 instance = instantiator
        .newInstance("Jack Bauer", "First:Last");
    assertNotNull(instance);
    assertEquals("Jack Bauer", instance.name.content);
    assertEquals("First", instance.pair.first);
    assertEquals("Last", instance.pair.last);
    assertEquals(
        asList("Jack Bauer", "First:Last"),
        instantiator.fromInstance(instance));
  }
  @Test
  public void constructMe3ByName() {
    Instantiator<ConstructMe3> instantiator = Instantiators
        .createInstantiator(ConstructMe3.class);
    ConstructMe3 instance = instantiator
        .newInstance(ImmutableMap.of(
            "name", "Jack Bauer",
            "pair", "First:Last"));
    assertNotNull(instance);
    assertEquals("Jack Bauer", instance.name.content);
    assertEquals("First", instance.pair.first);
    assertEquals("Last", instance.pair.last);
  }
  static class ConstructMe4Optionality {
    private final String name;
    ConstructMe4Optionality(@Optional String name) {
      this.name = name;
    }
  }
  @Test
  public void constructMe4() {
    ConstructMe4Optionality instance = Instantiators
        .createInstantiator(ConstructMe4Optionality.class)
        .newInstance((String) null);
    assertNotNull(instance);
    assertNull(instance.name);
  }
  static class ConstructMe5OptionalityWithDefaultValue {
    private final Integer number;
    ConstructMe5OptionalityWithDefaultValue(
        @Optional("90") Integer number) {
      this.number = number;
    }
  }
  @Test
  public void constructMe5() {
    ConstructMe5OptionalityWithDefaultValue instance = Instantiators
        .createInstantiator(ConstructMe5OptionalityWithDefaultValue.class)
        .newInstance((String) null);
    assertNotNull(instance);
    assertEquals((Integer) 90, instance.number);
  }
  static class ConstructMe6OptionalType {
    private final Option<String> hero;
    private final Option<String> sidekick;
    ConstructMe6OptionalType(Option<String> hero, Option<String> sidekick) {
      this.hero = hero;
      this.sidekick = sidekick;
    }
  }
  @Test
  public void constructMe6() {
    Instantiator<ConstructMe6OptionalType> instantiator = Instantiators
        .createInstantiator(ConstructMe6OptionalType.class);
    ConstructMe6OptionalType instance = instantiator
        .newInstance((String) null, (String) null);
    assertNotNull(instance);
    assertTrue(instance.hero.isEmpty());
    assertTrue(instance.sidekick.isEmpty());
    assertEquals(
        asList(null, null),
        instantiator.fromInstance(instance));
  }
  @Test
  public void constructMe6WithValue() {
    Instantiator<ConstructMe6OptionalType> instantiator = Instantiators
        .createInstantiator(ConstructMe6OptionalType.class);
    ConstructMe6OptionalType instance = instantiator
        .newInstance("Jack Bauer", "Chloe O'Brian");
    assertNotNull(instance);
    assertEquals("Jack Bauer", instance.hero.getOrThrow());
    assertEquals("Chloe O'Brian", instance.sidekick.getOrThrow());
    assertEquals(
        asList("Jack Bauer", "Chloe O'Brian"),
        instantiator.fromInstance(instance));
  }
  static class ArgumentAreNotSavedToFields {
    private final int is;
    private final int isToo;
    // Note: the middle parameter is deliberately NOT stored in a field.
    ArgumentAreNotSavedToFields(int is, int isNot, int isToo) {
      this.is = is;
      this.isToo = isToo;
    }
  }
  @Test
  public void argumentAreNotSavedToFields() {
    Instantiator<ArgumentAreNotSavedToFields> instantiator = Instantiators
        .createInstantiator(ArgumentAreNotSavedToFields.class);
    ArgumentAreNotSavedToFields instance = instantiator
        .newInstance("1", "2", "3");
    assertNotNull(instance);
    assertEquals(1, instance.is);
    assertEquals(3, instance.isToo);
    assertEquals(
        asList("1", null, "3"),
        instantiator.fromInstance(instance));
  }
  static class ObjectWithMapOfIntToString {
    ObjectWithMapOfIntToString(Map<Integer, String> numbers) {
    }
  }
  @Test(expected = RuntimeException.class)
  public void objectWithMapOfIntToStringNoSpecificBindingWillFail() {
    Instantiators.createInstantiator(ObjectWithMapOfIntToString.class);
  }
  static class ObjectWithListOfInt {
    final List<Integer> numbers;
    ObjectWithListOfInt(List<Integer> numbers) {
      this.numbers = numbers;
    }
  }
  @Test
  public void objectWithListOfIntUsingInstances() {
    Instantiator<ObjectWithListOfInt> instantiator = createInstantiator(
        ObjectWithListOfInt.class,
        new AbstractInstantiatorModule() {
          @Override
          protected void configure() {
            registerFor(new TypeLiteral<List<Integer>>() {})
                .converter(new ListOfIntConverter());
          }
        });
    checkObjectWithListOfInt(instantiator);
  }
  @Test
  public void objectWithListOfIntUsingInstancesAndTypeScheme() {
    Instantiator<ObjectWithListOfInt> instantiator = createInstantiator(
        ObjectWithListOfInt.class,
        new AbstractInstantiatorModule() {
          @Override
          protected void configure() {
            registerFor(new TypeLiteral<List<? extends Integer>>() {})
                .converter(new ListOfIntConverter());
          }
        });
    checkObjectWithListOfInt(instantiator);
  }
  @Test
  public void objectWithListOfIntUsingBindings() {
    Instantiator<ObjectWithListOfInt> instantiator = createInstantiator(
        ObjectWithListOfInt.class,
        new AbstractInstantiatorModule() {
          @Override
          protected void configure() {
            registerFor(new TypeLiteral<List<Integer>>() {})
                .converter(ListOfIntConverter.class);
          }
        });
    checkObjectWithListOfInt(instantiator);
  }
  @Test
  public void objectWithListOfIntUsingBindingsAndTypeScheme() {
    Instantiator<ObjectWithListOfInt> instantiator = createInstantiator(
        ObjectWithListOfInt.class,
        new AbstractInstantiatorModule() {
          @Override
          protected void configure() {
            registerFor(new TypeLiteral<List<? extends Integer>>() {})
                .converter(ListOfIntConverter.class);
          }
        });
    checkObjectWithListOfInt(instantiator);
  }
  private void checkObjectWithListOfInt(
      Instantiator<ObjectWithListOfInt> instantiator) {
    ObjectWithListOfInt instance = instantiator.newInstance("1|2|3");
    assertEquals(asList(1, 2, 3), instance.numbers);
    assertEquals(
        asList("1|2|3"),
        instantiator.fromInstance(instance));
  }
  static class ObjectWithListOfIntAndListOfBoolean {
    final List<Integer> numbers;
    final List<Boolean> booleans;
    ObjectWithListOfIntAndListOfBoolean(
        List<Integer> numbers, List<Boolean> booleans) {
      this.numbers = numbers;
      this.booleans = booleans;
    }
  }
  @Test
  public void objectWithListOfIntAndListOfBooleanViaBindings() {
    Instantiator<ObjectWithListOfIntAndListOfBoolean> instantiator = createInstantiator(
        ObjectWithListOfIntAndListOfBoolean.class,
        new AbstractInstantiatorModule() {
          @Override
          protected void configure() {
            registerFor(new TypeLiteral<List<Integer>>() {})
                .converter(ListOfIntConverter.class);
            registerFor(new TypeLiteral<List<Boolean>>() {})
                .converter(ListOfBooleanConverter.class);
          }
        });
    checkObjectWithListOfIntAndListOfBoolean(instantiator);
  }
  @Test
  public void objectWithListOfIntAndListOfBooleanViaInstances() {
    Instantiator<ObjectWithListOfIntAndListOfBoolean> instantiator = createInstantiator(
        ObjectWithListOfIntAndListOfBoolean.class,
        new AbstractInstantiatorModule() {
          @Override
          protected void configure() {
            registerFor(new TypeLiteral<List<Integer>>() {})
                .converter(new ListOfIntConverter());
            registerFor(new TypeLiteral<List<Boolean>>() {})
                .converter(new ListOfBooleanConverter());
          }
        });
    checkObjectWithListOfIntAndListOfBoolean(instantiator);
  }
  static class ObjectUsesValueWithConverterAsInnerClass {
    private final ValueWithConverterAsInnerClass value;
    ObjectUsesValueWithConverterAsInnerClass(
        ValueWithConverterAsInnerClass value) {
      this.value = value;
    }
  }
  @Test
  public void objectUsesValueWithConverterAsInnerClass() {
    Instantiator<ObjectUsesValueWithConverterAsInnerClass> instantiator = Instantiators
        .createInstantiator(ObjectUsesValueWithConverterAsInnerClass.class);
    ObjectUsesValueWithConverterAsInnerClass instance = instantiator
        .newInstance("5");
    assertNotNull(instance);
    assertEquals((Integer) 5, instance.value.getId());
  }
  private void checkObjectWithListOfIntAndListOfBoolean(
      Instantiator<ObjectWithListOfIntAndListOfBoolean> instantiator) {
    ObjectWithListOfIntAndListOfBoolean instance = instantiator.newInstance(
        "1|2|3", "true|false|true");
    assertEquals(asList(1, 2, 3), instance.numbers);
    assertEquals(asList(true, false, true), instance.booleans);
    assertEquals(
        asList("1|2|3", "true|false|true"),
        instantiator.fromInstance(instance));
  }
  static class LocalConstant {
    static final String MY_CONSTANT = "this text is long and great for a test";
    final String message;
    LocalConstant(@Optional(constant = "MY_CONSTANT") String message) {
      this.message = message;
    }
  }
  @Test
  public void localConstant() {
    Instantiator<LocalConstant> instantiator = Instantiators
        .createInstantiator(LocalConstant.class);
    LocalConstant instance = instantiator
        .newInstance((String) null);
    assertNotNull(instance);
    assertEquals(LocalConstant.MY_CONSTANT, instance.message);
  }
  static class FullyQualifiedConstant {
    final String message;
    FullyQualifiedConstant(@Optional(constant = "com.kaching.platform.converters.InstantiatorsTest$LocalConstant#MY_CONSTANT") String message) {
      this.message = message;
    }
  }
  @Test
  public void fullyQualifiedConstant() {
    Instantiator<FullyQualifiedConstant> instantiator = Instantiators
        .createInstantiator(FullyQualifiedConstant.class);
    FullyQualifiedConstant instance = instantiator
        .newInstance((String) null);
    assertNotNull(instance);
    assertEquals(LocalConstant.MY_CONSTANT, instance.message);
  }
  @Test
  public void createUriConverter() throws URISyntaxException {
    assertEquals(
        new URI("www.kaching.com"),
        createConverter(URI.class).fromString("www.kaching.com"));
  }
  static class OptionalStringWithEmptyStringAsDefault {
    private final String name;
    OptionalStringWithEmptyStringAsDefault(@Optional("") String name) {
      this.name = name;
    }
  }
  @Test
  public void optionalStringWithEmptyStringAsDefault() {
    assertEquals(
        "",
        createInstantiator(OptionalStringWithEmptyStringAsDefault.class).newInstance((String) null).name);
  }
  @Test
  public void createConverterPairConverter() throws URISyntaxException {
    Converter<ConvertedPair> converter = createConverter(ConvertedPair.class);
    assertEquals(
        "1:2",
        converter.toString(converter.fromString("1:2")));
  }
  static class WrappedString {
    private final String content;
    WrappedString(String content) {
      this.content = content;
    }
    @Override
    public String toString() {
      return content;
    }
  }
  @ConvertedBy(ConvertedPairConverter.class)
  static class ConvertedPair {
    private final String first;
    private final String last;
    ConvertedPair(String first, String last) {
      this.first = first;
      this.last = last;
    }
  }
  static class ConvertedPairConverter implements Converter<ConvertedPair> {
    @Override
    public String toString(ConvertedPair value) {
      return format("%s:%s", value.first, value.last);
    }
    @Override
    public ConvertedPair fromString(String representation) {
      String[] parts = representation.split(":");
      return new ConvertedPair(parts[0], parts[1]);
    }
  }
  /** Pipe-separated list converter parameterized by a per-element converter. */
  abstract static class CsvValuesListConverter<T> implements Converter<List<T>> {
    private final Converter<T> elementConverter;
    CsvValuesListConverter(Converter<T> elementConverter) {
      this.elementConverter = elementConverter;
    }
    @Override
    public String toString(List<T> value) {
      // NOTE using element.toString instead of elementConverter.toString(element)
      // which is equivalent in the context of this test but certainly not for
      // production code.
      return Joiner.on("|").join(value);
    }
    @Override
    public List<T> fromString(String representation) {
      ArrayList<T> fromString = Lists.newArrayList();
      for (String single : representation.split("\\|")) {
        fromString.add(elementConverter.fromString(single));
      }
      return fromString;
    }
  }
  static class ListOfIntConverter extends CsvValuesListConverter<Integer> {
    ListOfIntConverter() { super(NativeConverters.C_INT); }
  }
  static class ListOfBooleanConverter extends CsvValuesListConverter<Boolean> {
    ListOfBooleanConverter() { super(NativeConverters.C_BOOLEAN); }
  }
}
| |
package tundra;
// -----( IS Java Code Template v1.2
// -----( CREATED: 2021-11-10 05:52:42 EST
// -----( ON-HOST: -
import com.wm.data.*;
import com.wm.util.Values;
import com.wm.app.b2b.server.Service;
import com.wm.app.b2b.server.ServiceException;
// --- <<IS-START-IMPORTS>> ---
import com.wm.app.b2b.server.Package;
import java.io.IOException;
import java.nio.charset.Charset;
import java.text.MessageFormat;
import java.util.EnumSet;
import permafrost.tundra.content.ValidationResult;
import permafrost.tundra.data.IDataHelper;
import permafrost.tundra.data.IDataJSONParser;
import permafrost.tundra.data.IDataXMLParser;
import permafrost.tundra.data.IDataYAMLParser;
import permafrost.tundra.data.transform.string.Squeezer;
import permafrost.tundra.data.transform.string.Trimmer;
import permafrost.tundra.data.transform.Transformer;
import permafrost.tundra.flow.PipelineHelper;
import permafrost.tundra.flow.InputOutputSignature;
import permafrost.tundra.flow.variable.SubstitutionHelper;
import permafrost.tundra.flow.variable.SubstitutionType;
import permafrost.tundra.io.InputStreamHelper;
import permafrost.tundra.lang.BooleanHelper;
import permafrost.tundra.lang.CharsetHelper;
import permafrost.tundra.lang.ExceptionHelper;
import permafrost.tundra.lang.ObjectConvertMode;
import permafrost.tundra.lang.ObjectHelper;
import permafrost.tundra.math.IntegerHelper;
import permafrost.tundra.server.PackageHelper;
import permafrost.tundra.server.ServerLogHelper;
import permafrost.tundra.server.ServerLogLevel;
// --- <<IS-END-IMPORTS>> ---
public final class pipeline
{
// ---( internal utility methods )---
// Presumably required scaffolding for the webMethods Integration Server Java
// service framework (see the IS Java Code Template header above) — do not remove.
final static pipeline _instance = new pipeline();
static pipeline _newInstance() { return new pipeline(); }
static pipeline _cast(Object o) { return (pipeline)o; }
// ---( server methods )---
/** Stores a shallow copy of the current pipeline into the pipeline itself
 * under the key {@code $pipeline}, so it can be inspected or logged later. */
public static final void capture (IData pipeline)
    throws ServiceException
{
  // --- <<IS-START(capture)>> ---
  // @subtype unknown
  // @sigtype java 3.5
  // [o] record:0:required $pipeline
  IDataCursor cursor = pipeline.getCursor();
  try {
    // duplicate(pipeline, false): non-recursive copy, so nested documents are shared.
    IDataHelper.put(cursor, "$pipeline", IDataHelper.duplicate(pipeline, false));
  } finally {
    cursor.destroy();
  }
  // --- <<IS-END>> ---
}
/** Removes all variables from the pipeline, except those whose keys are listed
 * in the optional {@code $preserve} input. */
public static final void clear (IData pipeline)
    throws ServiceException
{
  // --- <<IS-START(clear)>> ---
  // @subtype unknown
  // @sigtype java 3.5
  // [i] field:1:optional $preserve
  IDataCursor cursor = pipeline.getCursor();
  try {
    String[] keys = IDataHelper.get(cursor, "$preserve", String[].class);
    IDataHelper.clear(pipeline, keys);
  } finally {
    cursor.destroy();
  }
  // --- <<IS-END>> ---
}
public static final void copy (IData pipeline)
throws ServiceException
{
// --- <<IS-START(copy)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:required $key.source
// [i] field:0:required $key.target
// [i] field:0:optional $key.literal? {"false","true"}
IDataCursor cursor = pipeline.getCursor();
try {
String source = IDataHelper.get(cursor, "$key.source", String.class);
String target = IDataHelper.get(cursor, "$key.target", String.class);
boolean literal = IDataHelper.getOrDefault(cursor, "$key.literal?", Boolean.class, false);
IDataHelper.copy(pipeline, source, target, literal);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void denormalize (IData pipeline)
throws ServiceException
{
// --- <<IS-START(denormalize)>> ---
// @subtype unknown
// @sigtype java 3.5
IDataCursor cursor = pipeline.getCursor();
try {
IData copy = IDataHelper.denormalize(pipeline);
IDataHelper.clear(pipeline);
merge(pipeline, copy);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void drop (IData pipeline)
throws ServiceException
{
// --- <<IS-START(drop)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:optional $key
// [i] field:0:optional $key.literal? {"false","true"}
IDataCursor cursor = pipeline.getCursor();
try {
String key = IDataHelper.get(cursor, "$key", String.class);
boolean literal = IDataHelper.getOrDefault(cursor, "$key.literal?", Boolean.class, false);
IDataHelper.drop(pipeline, key, literal);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void emit (IData pipeline)
throws ServiceException
{
// --- <<IS-START(emit)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:optional $content.class {"xml","json","yaml"}
// [i] field:0:optional $content.encoding
// [i] field:0:optional $content.mode {"stream","bytes","string"}
// [o] object:0:optional $content
IDataCursor cursor = pipeline.getCursor();
try {
// remove input arguments so that they are not included in serialization of the pipeline
String contentClass = IDataHelper.remove(cursor, "$content.class", String.class);
Charset charset = IDataHelper.remove(cursor, "$content.encoding", Charset.class);
ObjectConvertMode mode = IDataHelper.remove(cursor, "$content.mode", ObjectConvertMode.class);
Object content;
if (contentClass == null || contentClass.equals("xml")) {
content = ObjectHelper.convert(new IDataXMLParser().emit(pipeline, charset), charset, mode);
} else if (contentClass.equals("json")) {
content = ObjectHelper.convert(new IDataJSONParser().emit(pipeline, charset), charset, mode);
} else if (contentClass.equals("yaml")) {
content = ObjectHelper.convert(new IDataYAMLParser().emit(pipeline, charset), charset, mode);
} else {
throw new IllegalArgumentException(MessageFormat.format("$content.class must be either \"xml\", \"json\", or \"yaml\": {0}", contentClass));
}
IDataHelper.put(cursor, "$content", content);
} catch(IOException ex) {
ExceptionHelper.raise(ex);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void first (IData pipeline)
throws ServiceException
{
// --- <<IS-START(first)>> ---
// @subtype unknown
// @sigtype java 3.5
// [o] field:0:optional $key
// [o] object:0:optional $value
IDataCursor cursor = pipeline.getCursor();
try {
if (cursor.first()) {
IDataHelper.put(cursor, "$key", cursor.getKey());
IDataHelper.put(cursor, "$value", cursor.getValue());
}
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void flatten (IData pipeline)
throws ServiceException
{
// --- <<IS-START(flatten)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:1:optional $keys
// [i] field:0:optional $nulls? {"false","true"}
// [o] object:1:optional $values
IDataCursor cursor = pipeline.getCursor();
try {
String[] keys = IDataHelper.get(cursor, "$keys", String[].class);
boolean includeNulls = IDataHelper.getOrDefault(cursor, "$nulls?", Boolean.class, false);
Object[] values = IDataHelper.flatten(pipeline, includeNulls, keys);
IDataHelper.put(cursor, "$values", values, false);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void get (IData pipeline)
throws ServiceException
{
// --- <<IS-START(get)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:optional $key
// [i] field:0:optional $key.literal? {"false","true"}
// [i] object:0:optional $default.object
// [i] field:0:optional $default.string
// [o] object:0:optional $value
IDataCursor cursor = pipeline.getCursor();
try {
String key = IDataHelper.get(cursor, "$key", String.class);
Object defaultObject = IDataHelper.get(cursor, "$default.object");
if (defaultObject == null) defaultObject = IDataHelper.get(cursor, "$default.string", String.class);
boolean literal = IDataHelper.getOrDefault(cursor, "$key.literal?", Boolean.class, false);
Object value = IDataHelper.get(pipeline, key, defaultObject, literal);
IDataHelper.put(cursor, "$value", value, false);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void last (IData pipeline)
throws ServiceException
{
// --- <<IS-START(last)>> ---
// @subtype unknown
// @sigtype java 3.5
// [o] field:0:optional $key
// [o] object:0:optional $value
IDataCursor cursor = pipeline.getCursor();
try {
if (cursor.last()) {
IDataHelper.put(cursor, "$key", cursor.getKey());
IDataHelper.put(cursor, "$value", cursor.getValue());
}
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void length (IData pipeline)
throws ServiceException
{
// --- <<IS-START(length)>> ---
// @subtype unknown
// @sigtype java 3.5
// [o] field:0:required $length
IDataCursor cursor = pipeline.getCursor();
try {
IDataHelper.put(cursor, "$length", IDataHelper.size(pipeline), String.class);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void listify (IData pipeline)
throws ServiceException
{
// --- <<IS-START(listify)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:required $key
IDataCursor cursor = pipeline.getCursor();
try {
String key = IDataHelper.get(cursor, "$key", String.class);
IDataHelper.arrayify(pipeline, key);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void log (IData pipeline)
throws ServiceException
{
// --- <<IS-START(log)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:optional $log.level {"Fatal","Error","Warn","Info","Debug","Trace","Off"}
// [i] field:0:optional $log.message
// [i] field:0:optional $log.prefix? {"true","false"}
// [i] field:0:optional $log.name
IDataCursor cursor = pipeline.getCursor();
try {
ServerLogLevel level = IDataHelper.remove(cursor, "$log.level", ServerLogLevel.class);
if (level == null) level = IDataHelper.get(cursor, "$level", ServerLogLevel.class);
String message = IDataHelper.remove(cursor, "$log.message", String.class);
boolean addPrefix = IDataHelper.removeOrDefault(cursor, "$log.prefix?", Boolean.class, true);
String name = IDataHelper.remove(cursor, "$log.name", String.class);
if (name == null) {
// infer log name as the invoking service's package
Package invokingPackage = PackageHelper.self();
if (invokingPackage != null) name = invokingPackage.getName();
}
ServerLogHelper.log(name, level, message, pipeline, addPrefix);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void merge (IData pipeline)
throws ServiceException
{
// --- <<IS-START(merge)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] record:0:optional $document
IDataCursor cursor = pipeline.getCursor();
try {
merge(pipeline, IDataHelper.get(cursor, "$document", IData.class));
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void normalize (IData pipeline)
throws ServiceException
{
// --- <<IS-START(normalize)>> ---
// @subtype unknown
// @sigtype java 3.5
IDataCursor cursor = pipeline.getCursor();
try {
IData copy = IDataHelper.normalize(pipeline);
IDataHelper.clear(pipeline);
merge(pipeline, copy);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void parse (IData pipeline)
throws ServiceException
{
// --- <<IS-START(parse)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] object:0:optional $content
// [i] field:0:optional $content.class {"xml","json","yaml"}
// [i] field:0:optional $content.encoding
IDataCursor cursor = pipeline.getCursor();
try {
Object content = IDataHelper.get(cursor, "$content");
String contentClass = IDataHelper.get(cursor, "$content.class", String.class);
Charset charset = IDataHelper.get(cursor, "$content.encoding", Charset.class);
if (contentClass == null || contentClass.equals("xml")) {
merge(pipeline, new IDataXMLParser().parse(InputStreamHelper.normalize(content, charset)));
} else if (contentClass.equals("json")) {
merge(pipeline, new IDataJSONParser().parse(InputStreamHelper.normalize(content, charset)));
} else if (contentClass.equals("yaml")) {
merge(pipeline, new IDataYAMLParser().parse(InputStreamHelper.normalize(content, charset)));
} else {
throw new IllegalArgumentException(MessageFormat.format("$content.class must be either \"xml\", \"json\", or \"yaml\": {0}", contentClass));
}
} catch(IOException ex) {
ExceptionHelper.raise(ex);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void put (IData pipeline)
throws ServiceException
{
// --- <<IS-START(put)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:optional $key
// [i] field:0:optional $key.literal? {"false","true"}
// [i] object:0:optional $value
IDataCursor cursor = pipeline.getCursor();
try {
String key = IDataHelper.get(cursor, "$key", String.class);
boolean literal = IDataHelper.getOrDefault(cursor, "$key.literal?", Boolean.class, false);
Object value = IDataHelper.get(cursor, "$value");
IDataHelper.put(pipeline, key, value, literal);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void rename (IData pipeline)
throws ServiceException
{
// --- <<IS-START(rename)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:required $key.source
// [i] field:0:required $key.target
// [i] field:0:optional $key.literal? {"false","true"}
IDataCursor cursor = pipeline.getCursor();
try {
String source = IDataHelper.get(cursor, "$key.source", String.class);
String target = IDataHelper.get(cursor, "$key.target", String.class);
boolean literal = IDataHelper.getOrDefault(cursor, "$key.literal?", Boolean.class, false);
IDataHelper.rename(pipeline, source, target, literal);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void sanitize (IData pipeline)
throws ServiceException
{
// --- <<IS-START(sanitize)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:required $service.signature.direction {"input","output"}
// [i] field:0:optional $pipeline.sanitize.recurse? {"false","true"}
IDataCursor cursor = pipeline.getCursor();
try {
InputOutputSignature direction = IDataHelper.remove(cursor, "$service.signature.direction", InputOutputSignature.class);
boolean recurse = IDataHelper.removeOrDefault(cursor, "$pipeline.sanitize.recurse?", Boolean.class, false);
PipelineHelper.sanitize(pipeline, direction, recurse);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void sort (IData pipeline)
throws ServiceException
{
// --- <<IS-START(sort)>> ---
// @subtype unknown
// @sigtype java 3.5
sort(pipeline, false);
// --- <<IS-END>> ---
}
public static final void squeeze (IData pipeline)
throws ServiceException
{
// --- <<IS-START(squeeze)>> ---
// @subtype unknown
// @sigtype java 3.5
IDataCursor cursor = pipeline.getCursor();
try {
IData copy = Transformer.transform(pipeline, new Squeezer(true));
IDataHelper.clear(pipeline);
merge(pipeline, copy);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void substitute (IData pipeline)
throws ServiceException
{
// --- <<IS-START(substitute)>> ---
// @subtype unknown
// @sigtype java 3.5
IDataCursor cursor = pipeline.getCursor();
try {
IData copy = SubstitutionHelper.substitute(pipeline, null, true, true, null, pipeline);
IDataHelper.clear(pipeline);
merge(pipeline, copy);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void trim (IData pipeline)
throws ServiceException
{
// --- <<IS-START(trim)>> ---
// @subtype unknown
// @sigtype java 3.5
IDataCursor cursor = pipeline.getCursor();
try {
IData copy = Transformer.transform(pipeline, new Trimmer(true));
IDataHelper.clear(pipeline);
merge(pipeline, copy);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
public static final void validate (IData pipeline)
throws ServiceException
{
// --- <<IS-START(validate)>> ---
// @subtype unknown
// @sigtype java 3.5
// [i] field:0:required $service.signature.direction {"input","output"}
// [i] field:0:optional $validation.raise? {"false","true"}
// [o] field:0:required $validation.result?
// [o] field:0:optional $validation.message
// [o] record:1:optional $validation.errors
// [o] - field:0:optional code
// [o] - field:0:optional message
// [o] - field:0:optional key
// [o] - object:0:optional value
IDataCursor cursor = pipeline.getCursor();
try {
InputOutputSignature direction = IDataHelper.remove(cursor, "$service.signature.direction", InputOutputSignature.class);
boolean raise = IDataHelper.removeOrDefault(cursor, "$validation.raise?", Boolean.class, false);
ValidationResult result = PipelineHelper.validate(pipeline, direction);
result.raiseIfInvalid(raise);
IDataHelper.put(cursor, "$validation.result?", result.isValid(), String.class);
IDataHelper.put(cursor, "$validation.message", result.getMessage(), false);
IDataHelper.put(cursor, "$validation.errors", result.getErrors(), false);
} finally {
cursor.destroy();
}
// --- <<IS-END>> ---
}
// --- <<IS-START-SHARED>> ---
// merges the contents of the given document into the given pipeline
public static void merge(IData target, IData source) {
if (target != null && source != null) IDataUtil.merge(source, target);
}
// sorts the elements in the pipeline by its keys in natural ascending order
public static void sort(IData pipeline, boolean recurse) {
IData sorted = IDataHelper.sort(pipeline, recurse);
IDataHelper.clear(pipeline);
IDataUtil.append(sorted, pipeline);
}
// --- <<IS-END-SHARED>> ---
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.move.moveInstanceMethod;
import com.intellij.codeInsight.ChangeContextUtil;
import com.intellij.codeInsight.generation.OverrideImplementUtil;
import com.intellij.ide.util.EditorHelper;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.javadoc.PsiDocTagValue;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ClassInheritorsSearch;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.MethodSignature;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.RefactoringBundle;
import com.intellij.refactoring.move.MoveInstanceMembersUtil;
import com.intellij.refactoring.util.*;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import java.util.HashSet;
import com.intellij.util.containers.MultiMap;
import com.siyeh.ig.psiutils.ExpressionUtils;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import java.util.*;
/**
* @author ven
*/
public class MoveInstanceMethodProcessor extends BaseRefactoringProcessor{
private static final Logger LOG = Logger.getInstance("#com.intellij.refactoring.move.moveInstanceMethod.MoveInstanceMethodProcessor");
/** @return the instance method being moved by this refactoring. */
public PsiMethod getMethod() {
    return myMethod;
}
/** @return the parameter or field whose class type will receive the method. */
public PsiVariable getTargetVariable() {
    return myTargetVariable;
}
// The instance method being moved; reassigned in refreshElements().
private PsiMethod myMethod;
// The parameter or field (asserted in the constructor) whose class type is the move target.
private PsiVariable myTargetVariable;
// Resolved class of myTargetVariable's type; may be null if the type does not resolve.
private PsiClass myTargetClass;
// Visibility to apply to the moved method (passed to VisibilityUtil.fixVisibility).
private final String myNewVisibility;
// Whether to open the moved method in an editor after the refactoring completes.
private final boolean myOpenInEditor;
// Maps classes referenced via 'this' in the method body to replacement parameter names
// -- presumably populated by the dialog; consulted via containsKey in correctMethodCall.
private final Map<PsiClass, String> myOldClassParameterNames;
/**
 * Convenience constructor; equivalent to the full constructor with
 * {@code openInEditor} set to {@code false}.
 */
public MoveInstanceMethodProcessor(final Project project,
                                   final PsiMethod method,
                                   final PsiVariable targetVariable,
                                   final String newVisibility,
                                   final Map<PsiClass, String> oldClassParameterNames) {
    this(project, method, targetVariable, newVisibility, false, oldClassParameterNames);
}
/**
 * Creates a processor that moves {@code method} into the class of
 * {@code targetVariable}'s type.
 *
 * @param project                the current project
 * @param method                 the instance method to move
 * @param targetVariable         a parameter or field of a class type; its class becomes the new home of the method
 * @param newVisibility          visibility to apply to the moved method (passed to VisibilityUtil.fixVisibility)
 * @param openInEditor           whether to open the moved method in an editor afterwards
 * @param oldClassParameterNames replacement parameter names for classes referenced via 'this' in the body
 */
public MoveInstanceMethodProcessor(final Project project,
                                   final PsiMethod method,
                                   final PsiVariable targetVariable,
                                   final String newVisibility,
                                   boolean openInEditor,
                                   final Map<PsiClass, String> oldClassParameterNames) {
    super(project);
    myMethod = method;
    myTargetVariable = targetVariable;
    myOpenInEditor = openInEditor;
    myOldClassParameterNames = oldClassParameterNames;
    LOG.assertTrue(myTargetVariable instanceof PsiParameter || myTargetVariable instanceof PsiField);
    // The original asserted getType() instanceof PsiClassType twice (once on a fresh
    // getType() call, once on the local); assert once and reuse the local.
    final PsiType type = myTargetVariable.getType();
    LOG.assertTrue(type instanceof PsiClassType);
    myTargetClass = ((PsiClassType) type).resolve();
    myNewVisibility = newVisibility;
}
/** Describes the moved method, target variable and target class in the usage view. */
@NotNull
protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
    return new MoveInstanceMethodViewDescriptor(myMethod, myTargetVariable, myTargetClass);
}
/**
 * Collects conflicts before the refactoring runs: accessibility problems of the
 * moved members in the target class (or, for interface targets, in each inheritor
 * that receives a copy), calls that pass a null argument for the target parameter,
 * method references that would have to be expanded to lambdas, and signature
 * clashes with existing methods of the target class.
 *
 * @return true if there are no conflicts or the user chose to continue anyway
 */
protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    final UsageInfo[] usages = refUsages.get();
    MultiMap<PsiElement, String> conflicts = new MultiMap<>();
    final Set<PsiMember> members = new HashSet<>();
    members.add(myMethod);
    if (myTargetVariable instanceof PsiField) members.add((PsiMember)myTargetVariable);
    if (!myTargetClass.isInterface()) {
        RefactoringConflictsUtil.analyzeAccessibilityConflicts(members, myTargetClass, conflicts, myNewVisibility);
    }
    else {
        // Interface target: the method is copied into each concrete inheritor,
        // so check accessibility in each of them instead.
        for (final UsageInfo usage : usages) {
            if (usage instanceof InheritorUsageInfo) {
                RefactoringConflictsUtil.analyzeAccessibilityConflicts(
                    members, ((InheritorUsageInfo)usage).getInheritor(), conflicts, myNewVisibility);
            }
        }
    }
    if (myTargetVariable instanceof PsiParameter) {
        // The argument at this position becomes the call qualifier after the move;
        // a null literal there cannot serve as a receiver.
        PsiParameter parameter = (PsiParameter)myTargetVariable;
        final int index = myMethod.getParameterList().getParameterIndex(parameter);
        for (final UsageInfo usageInfo : usages) {
            if (usageInfo instanceof MethodCallUsageInfo) {
                final PsiElement methodCall = ((MethodCallUsageInfo)usageInfo).getMethodCallExpression();
                if (methodCall instanceof PsiMethodCallExpression) {
                    final PsiExpression[] expressions = ((PsiMethodCallExpression)methodCall).getArgumentList().getExpressions();
                    if (index < expressions.length) {
                        PsiExpression instanceValue = expressions[index];
                        instanceValue = RefactoringUtil.unparenthesizeExpression(instanceValue);
                        if (instanceValue instanceof PsiLiteralExpression && ((PsiLiteralExpression)instanceValue).getValue() == null) {
                            String message = RefactoringBundle.message("0.contains.call.with.null.argument.for.parameter.1",
                                                                       RefactoringUIUtil.getDescription(ConflictsUtil.getContainer(methodCall), true),
                                                                       CommonRefactoringUtil.htmlEmphasize(parameter.getName()));
                            conflicts.putValue(instanceValue, message);
                        }
                    }
                } else if (methodCall instanceof PsiMethodReferenceExpression && shouldBeExpandedToLambda((PsiMethodReferenceExpression)methodCall, index)) {
                    conflicts.putValue(methodCall, RefactoringBundle.message("expand.method.reference.warning"));
                }
            }
        }
    }
    try {
        // Check the would-be signature against existing methods of the target class.
        ConflictsUtil.checkMethodConflicts(myTargetClass, myMethod, getPatternMethod(), conflicts);
    }
    catch (IncorrectOperationException ignored) {}
    return showConflicts(conflicts, usages);
}
/**
 * Decides whether a method reference to the moved method must be expanded to a
 * lambda. If the functional interface's first parameter can act as the receiver
 * after the move (collapse by "second search" is possible), expansion is not
 * needed and this returns false; otherwise true.
 */
private boolean shouldBeExpandedToLambda(PsiMethodReferenceExpression referenceExpression, int index) {
    final PsiClassType.ClassResolveResult resolveResult =
        PsiUtil.resolveGenericsClassInType(referenceExpression.getFunctionalInterfaceType());
    final PsiMethod samMethod = LambdaUtil.getFunctionalInterfaceMethod(resolveResult);
    if (samMethod == null) return true;
    final MethodSignature samSignature = samMethod.getSignature(LambdaUtil.getSubstitutor(samMethod, resolveResult));
    final PsiType[] samParameterTypes = samSignature.getParameterTypes();
    if (index != 0 || samParameterTypes.length == 0) return true;
    final PsiType firstOwnParameterType = myMethod.getParameterList().getParameters()[0].getType();
    return !samParameterTypes[0].isAssignableFrom(firstOwnParameterType);
}
/**
 * Finds everything the refactoring must touch: external calls and javadoc
 * references to the method, inheritor classes that need their own copy when the
 * target is a pre-Java-8 interface, and internal usages inside the method body
 * ('this'-qualified expressions and unqualified references to the target
 * variable) that must be rewritten when the method moves.
 *
 * @throws UnknownReferenceTypeException for a reference that is neither a
 *         reference expression nor a javadoc tag value
 */
@NotNull
protected UsageInfo[] findUsages() {
    final PsiManager manager = myMethod.getManager();
    final GlobalSearchScope searchScope = GlobalSearchScope.allScope(manager.getProject());
    final List<UsageInfo> usages = new ArrayList<>();
    for (PsiReference ref : ReferencesSearch.search(myMethod, searchScope, false)) {
        final PsiElement element = ref.getElement();
        if (element instanceof PsiReferenceExpression) {
            // A call inside the moved method itself is "internal" and is rewritten differently.
            boolean isInternal = PsiTreeUtil.isAncestor(myMethod, element, true);
            usages.add(new MethodCallUsageInfo((PsiReferenceExpression)element, isInternal));
        }
        else if (element instanceof PsiDocTagValue) {
            usages.add(new JavadocUsageInfo((PsiDocTagValue)element));
        }
        else {
            throw new UnknownReferenceTypeException(element.getLanguage());
        }
    }
    // Pre-Java-8 interfaces cannot hold the implementation; concrete inheritors get a copy.
    if (myTargetClass.isInterface() && !PsiUtil.isLanguageLevel8OrHigher(myTargetClass)) {
        addInheritorUsages(myTargetClass, searchScope, usages);
    }
    final PsiCodeBlock body = myMethod.getBody();
    if (body != null) {
        body.accept(new JavaRecursiveElementWalkingVisitor() {
            @Override public void visitNewExpression(PsiNewExpression expression) {
                // 'new' of an inner class implicitly referencing an enclosing instance.
                if (MoveInstanceMembersUtil.getClassReferencedByThis(expression) != null) {
                    usages.add(new InternalUsageInfo(expression));
                }
                super.visitNewExpression(expression);
            }
            @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
                if (MoveInstanceMembersUtil.getClassReferencedByThis(expression) != null) {
                    usages.add(new InternalUsageInfo(expression));
                } else if (!expression.isQualified()) {
                    // Unqualified reference to the target variable becomes 'this' after the move.
                    final PsiElement resolved = expression.resolve();
                    if (myTargetVariable.equals(resolved)) {
                        usages.add(new InternalUsageInfo(expression));
                    }
                }
                super.visitReferenceExpression(expression);
            }
        });
    }
    return usages.toArray(UsageInfo.EMPTY_ARRAY);
}
/**
 * Collects concrete (non-interface) inheritors of the given class as
 * InheritorUsageInfo entries, descending recursively through interface
 * inheritors.
 */
private static void addInheritorUsages(PsiClass aClass, final GlobalSearchScope searchScope, final List<UsageInfo> usages) {
    for (PsiClass subClass : ClassInheritorsSearch.search(aClass, searchScope, false).findAll()) {
        if (subClass.isInterface()) {
            addInheritorUsages(subClass, searchScope, usages);
        }
        else {
            usages.add(new InheritorUsageInfo(subClass));
        }
    }
}
/**
 * Re-binds the processor to refreshed PSI elements, in the fixed order
 * [method, target variable, target class].
 */
@Override
protected void refreshElements(@NotNull PsiElement[] elements) {
    LOG.assertTrue(elements.length == 3);
    myMethod = (PsiMethod) elements[0];
    myTargetVariable = (PsiVariable) elements[1];
    myTargetClass = (PsiClass) elements[2];
}
/** @return the undoable-command name shown for this refactoring. */
@NotNull
protected String getCommandName() {
    return RefactoringBundle.message("move.instance.method.command");
}
/** @return the class the method is being moved into; may be null if the target type did not resolve. */
public PsiClass getTargetClass() {
    return myTargetClass;
}
/**
 * Performs the move: builds the pattern method, copies it into pre-Java-8
 * interface inheritors, rewrites external calls and method references, installs
 * the method in the target class (abstract/default handling for interfaces),
 * deletes the original, re-binds javadoc references, and fixes visibility.
 */
protected void performRefactoring(@NotNull UsageInfo[] usages) {
    PsiMethod patternMethod = createMethodToAdd();
    final List<PsiReference> docRefs = new ArrayList<>();
    for (UsageInfo usage : usages) {
        if (usage instanceof InheritorUsageInfo) {
            // Concrete inheritor of a pre-Java-8 interface target gets its own copy.
            final PsiClass inheritor = ((InheritorUsageInfo)usage).getInheritor();
            addMethodToClass(inheritor, patternMethod, true);
        }
        else if (usage instanceof MethodCallUsageInfo && !((MethodCallUsageInfo)usage).isInternal()) {
            final PsiElement expression = ((MethodCallUsageInfo)usage).getMethodCallExpression();
            if (expression instanceof PsiMethodCallExpression) {
                correctMethodCall((PsiMethodCallExpression)expression, false);
            }
            else if (expression instanceof PsiMethodReferenceExpression) {
                PsiMethodReferenceExpression methodReferenceExpression = (PsiMethodReferenceExpression)expression;
                PsiExpression qualifierExpression = methodReferenceExpression.getQualifierExpression();
                if (myTargetVariable instanceof PsiParameter && shouldBeExpandedToLambda(methodReferenceExpression, myMethod.getParameterList().getParameterIndex((PsiParameter)myTargetVariable))) {
                    // Reference cannot stay a reference: expand to a lambda, then fix the call inside it.
                    PsiLambdaExpression lambdaExpression = LambdaRefactoringUtil.convertMethodReferenceToLambda(methodReferenceExpression, false, true);
                    if (lambdaExpression != null) {
                        List<PsiExpression> returnExpressions = LambdaUtil.getReturnExpressions(lambdaExpression);
                        if (!returnExpressions.isEmpty()) {
                            correctMethodCall((PsiMethodCallExpression)returnExpressions.get(0), false);
                        }
                    }
                }
                else {
                    // Keep the method reference; re-qualify it for the target class/variable.
                    String exprText;
                    if (myTargetVariable instanceof PsiParameter ||
                        qualifierExpression instanceof PsiReferenceExpression && ((PsiReferenceExpression)qualifierExpression).resolve() == myMethod.getContainingClass()) {
                        exprText = myTargetVariable.getType().getCanonicalText();
                    }
                    else if (qualifierExpression instanceof PsiReferenceExpression) {
                        exprText = qualifierExpression.getText() + "." + myTargetVariable.getName();
                    }
                    else {
                        exprText = myTargetVariable.getName();
                    }
                    PsiExpression newQualifier = JavaPsiFacade.getInstance(myProject).getElementFactory().createExpressionFromText(exprText, null);
                    ((PsiMethodReferenceExpression)expression).setQualifierExpression(newQualifier);
                    JavaCodeStyleManager.getInstance(myProject).shortenClassReferences(expression);
                }
            }
        }
        else if (usage instanceof JavadocUsageInfo) {
            docRefs.add(usage.getElement().getReference());
        }
    }
    try {
        if (myTargetClass.isInterface()) {
            final PsiModifierList modifierList = patternMethod.getModifierList();
            if (!PsiUtil.isLanguageLevel8OrHigher(myTargetClass)) {
                // Pre-Java-8 interface: strip the body; inheritors already received copies.
                patternMethod.getBody().delete();
                modifierList.setModifierProperty(PsiModifier.DEFAULT, false);
            }
            else {
                modifierList.setModifierProperty(PsiModifier.DEFAULT, true);
            }
            RefactoringUtil.makeMethodAbstract(myTargetClass, patternMethod);
        }
        final PsiMethod method = addMethodToClass(myTargetClass, patternMethod, false);
        myMethod.delete();
        for (PsiReference reference : docRefs) {
            reference.bindToElement(method);
        }
        VisibilityUtil.fixVisibility(UsageViewUtil.toElements(usages), method, myNewVisibility);
        if (myOpenInEditor) {
            EditorHelper.openInEditor(method);
        }
    }
    catch (IncorrectOperationException e) {
        LOG.error(e);
    }
}
/**
 * Rewrites a call to the moved method so it targets the method's new home:
 * the qualifier becomes the target variable (for a field target) or the
 * argument at the target parameter's position (which is removed from the
 * argument list), and where the old receiver is still needed it is appended
 * as a new trailing argument.
 *
 * @param isInternalCall true when the call is inside the moved method itself
 */
private void correctMethodCall(final PsiMethodCallExpression expression, final boolean isInternalCall) {
    try {
        final PsiManager manager = myMethod.getManager();
        PsiReferenceExpression methodExpression = expression.getMethodExpression();
        if (!methodExpression.isReferenceTo(myMethod)) return;
        final PsiExpression oldQualifier = methodExpression.getQualifierExpression();
        PsiExpression newQualifier = null;
        final PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(methodExpression);
        if (myTargetVariable instanceof PsiParameter) {
            // The argument at the parameter's position becomes the new receiver.
            final int index = myMethod.getParameterList().getParameterIndex((PsiParameter)myTargetVariable);
            final PsiExpression[] arguments = expression.getArgumentList().getExpressions();
            if (index < arguments.length) {
                newQualifier = (PsiExpression)arguments[index].copy();
                arguments[index].delete();
            }
        }
        else {
            // Field target: qualify the call with the field (possibly via the old qualifier).
            VisibilityUtil.escalateVisibility((PsiField)myTargetVariable, expression);
            String newQualifierName = myTargetVariable.getName();
            if (myTargetVariable instanceof PsiField && oldQualifier != null) {
                final PsiClass aClass = PsiUtil.resolveClassInClassTypeOnly(oldQualifier.getType());
                if (aClass == ((PsiField)myTargetVariable).getContainingClass()) {
                    newQualifierName = oldQualifier.getText() + "." + newQualifierName;
                }
            }
            newQualifier = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory().createExpressionFromText(newQualifierName, null);
        }
        PsiExpression newArgument = null;
        if (classReferencedByThis != null) {
            // The body referenced an enclosing instance via 'this'; pass it as an argument.
            @NonNls String thisArgumentText = null;
            if (manager.areElementsEquivalent(myMethod.getContainingClass(), classReferencedByThis)) {
                if (myOldClassParameterNames.containsKey(myMethod.getContainingClass())) {
                    thisArgumentText = "this";
                }
            }
            else {
                final String name = classReferencedByThis.getName();
                if (name != null) {
                    thisArgumentText = name + ".this";
                }
                else {
                    thisArgumentText = "this";
                }
            }
            if (thisArgumentText != null) {
                newArgument = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory().createExpressionFromText(thisArgumentText, null);
            }
        } else {
            if (!isInternalCall && oldQualifier != null) {
                final PsiType type = oldQualifier.getType();
                if (type instanceof PsiClassType) {
                    final PsiClass resolved = ((PsiClassType)type).resolve();
                    if (resolved != null && getParameterNameToCreate(resolved) != null) {
                        newArgument = replaceRefsToTargetVariable(oldQualifier); //replace is needed in case old qualifier is e.g. the same as field as target variable
                    }
                }
            }
        }
        if (newArgument != null) {
            expression.getArgumentList().add(newArgument);
        }
        if (newQualifier != null) {
            if (newQualifier instanceof PsiThisExpression && ((PsiThisExpression)newQualifier).getQualifier() == null) {
                //Remove now redundant 'this' qualifier
                if (oldQualifier != null) oldQualifier.delete();
            }
            else {
                final PsiReferenceExpression refExpr = (PsiReferenceExpression)JavaPsiFacade.getInstance(manager.getProject()).getElementFactory()
                    .createExpressionFromText("q." + myMethod.getName(), null);
                refExpr.getQualifierExpression().replace(newQualifier);
                methodExpression.replace(refExpr);
            }
        }
    }
    catch (IncorrectOperationException e) {
        LOG.error(e);
    }
}
/**
 * Replaces references to the target variable inside the given expression with
 * 'this' (the expression will live in the target class after the move). When
 * the whole expression is such a reference, a new 'this' expression is
 * returned instead; otherwise the (mutated) expression itself is returned.
 */
private PsiExpression replaceRefsToTargetVariable(final PsiExpression expression) {
    final PsiManager manager = expression.getManager();
    if (ExpressionUtils.isReferenceTo(expression, myTargetVariable)) {
        return createThisExpr(manager);
    }
    expression.accept(new JavaRecursiveElementVisitor() {
        @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
            super.visitReferenceExpression(expression);
            if (expression.isReferenceTo(myTargetVariable)) {
                try {
                    expression.replace(createThisExpr(manager));
                }
                catch (IncorrectOperationException e) {
                    LOG.error(e);
                }
            }
        }
    });
    return expression;
}
/**
 * Builds a bare 'this' expression; logs and returns null when the element
 * factory rejects it.
 */
private static PsiExpression createThisExpr(final PsiManager manager) {
    try {
        final PsiElementFactory factory = JavaPsiFacade.getInstance(manager.getProject()).getElementFactory();
        return factory.createExpressionFromText("this", null);
    }
    catch (IncorrectOperationException e) {
        LOG.error(e);
        return null;
    }
}
/**
 * Adds a copy of the pattern method to the given class, decodes the context
 * info encoded in createMethodToAdd, and optionally adds an @Override
 * annotation when appropriate. Logs and returns null on failure.
 */
private static PsiMethod addMethodToClass(final PsiClass aClass, final PsiMethod patternMethod, boolean canAddOverride) {
    try {
        final PsiMethod method = (PsiMethod)aClass.add(patternMethod);
        ChangeContextUtil.decodeContextInfo(method, null, null);
        if (canAddOverride && OverrideImplementUtil.isInsertOverride(method, aClass)) {
            method.getModifierList().addAnnotation(CommonClassNames.JAVA_LANG_OVERRIDE);
        }
        return method;
    }
    catch (IncorrectOperationException e) {
        LOG.error(e);
        return null;
    }
}
/**
 * Produces the method that will be inserted into the target class: rewrites all references
 * inside the body of {@code myMethod} to make sense in the new location (target variable
 * becomes {@code this}, implicit/explicit {@code this} of the old class becomes a new
 * parameter), then copies the method via {@link #getPatternMethod()}.
 *
 * <p>Replacements discovered during the visitor pass are collected in {@code replaceMap}
 * and applied afterwards, because replacing nodes while the recursive visitor is still
 * walking them would invalidate the traversal.</p>
 *
 * @return the rewritten method copy, or the original {@code myMethod} on failure (logged)
 */
private PsiMethod createMethodToAdd () {
  ChangeContextUtil.encodeContextInfo(myMethod, true);
  try {
    final PsiManager manager = myMethod.getManager();
    JavaPsiFacade facade = JavaPsiFacade.getInstance(manager.getProject());
    final PsiElementFactory factory = facade.getElementFactory();
    //correct internal references
    final PsiCodeBlock body = myMethod.getBody();
    if (body != null) {
      // Deferred node -> replacement pairs; applied after the traversal completes.
      final Map<PsiElement, PsiElement> replaceMap = new HashMap<>();
      body.accept(new JavaRecursiveElementVisitor() {
        @Override public void visitThisExpression(PsiThisExpression expression) {
          final PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(expression);
          // "this" (or Outer.this) referring to the source class must become the new parameter.
          if (classReferencedByThis != null && !PsiTreeUtil.isAncestor(myMethod, classReferencedByThis, false)) {
            final PsiElementFactory factory = JavaPsiFacade.getInstance(myProject).getElementFactory();
            String paramName = getParameterNameToCreate(classReferencedByThis);
            try {
              final PsiExpression refExpression = factory.createExpressionFromText(paramName, null);
              replaceMap.put(expression, refExpression);
            }
            catch (IncorrectOperationException e) {
              LOG.error(e);
            }
          }
        }
        @Override public void visitReferenceExpression(PsiReferenceExpression expression) {
          try {
            final PsiExpression qualifier = expression.getQualifierExpression();
            final PsiElement resolved = expression.resolve();
            if (ExpressionUtils.isReferenceTo(qualifier, myTargetVariable)) {
              if (resolved instanceof PsiField) {
                String fieldName = ((PsiField)resolved).getName();
                LOG.assertTrue(fieldName != null);
                // If the bare field name would be shadowed by a parameter or a local in
                // scope, keep an explicit "this." qualifier instead of dropping it.
                for (PsiParameter parameter : myMethod.getParameterList().getParameters()) {
                  if (Comparing.strEqual(parameter.getName(), fieldName) ||
                      facade.getResolveHelper().resolveReferencedVariable(fieldName, expression) != null) {
                    qualifier.replace(factory.createExpressionFromText("this", null));
                    return;
                  }
                }
              }
              if (expression instanceof PsiMethodReferenceExpression) {
                // Method references need an explicit receiver: target::m -> this::m.
                qualifier.replace(factory.createExpressionFromText("this", null));
              }
              else {
                //Target is a field, replace target.m -> m
                qualifier.delete();
              }
              return;
            }
            if (myTargetVariable.equals(resolved)) {
              // A plain reference to the target variable becomes (possibly qualified) "this".
              PsiThisExpression thisExpression = RefactoringChangeUtil.createThisExpression(manager, PsiTreeUtil.isAncestor(myMethod, PsiTreeUtil.getParentOfType(expression, PsiClass.class), true) ? myTargetClass : null);
              replaceMap.put(expression, thisExpression);
              return;
            }
            else if (myMethod.equals(resolved)) {
              // Recursive self-call: name stays valid after the move; nothing to rewrite.
            }
            else {
              // Implicit member access on the old class: qualify with the new parameter.
              PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(expression);
              if (classReferencedByThis != null) {
                final String paramName = getParameterNameToCreate(classReferencedByThis);
                if (paramName != null) {
                  PsiReferenceExpression newQualifier = (PsiReferenceExpression)factory.createExpressionFromText(paramName, null);
                  expression.setQualifierExpression(newQualifier);
                  return;
                }
              }
            }
            super.visitReferenceExpression(expression);
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
        @Override public void visitNewExpression(PsiNewExpression expression) {
          try {
            final PsiExpression qualifier = expression.getQualifier();
            if (ExpressionUtils.isReferenceTo(qualifier, myTargetVariable)) {
              //Target is a field, replace target.new A() -> new A()
              qualifier.delete();
            } else {
              // Inner-class instantiation implicitly qualified by the old class:
              // requalify with the new parameter, e.g. new A() -> param.new A().
              final PsiClass classReferencedByThis = MoveInstanceMembersUtil.getClassReferencedByThis(expression);
              if (classReferencedByThis != null) {
                if (qualifier != null) qualifier.delete();
                final String paramName = getParameterNameToCreate(classReferencedByThis);
                final PsiExpression newExpression = factory.createExpressionFromText(paramName + "." + expression.getText(), null);
                replaceMap.put(expression, newExpression);
              }
            }
            super.visitNewExpression(expression);
          }
          catch (IncorrectOperationException e) {
            LOG.error(e);
          }
        }
        @Override public void visitMethodCallExpression(PsiMethodCallExpression expression) {
          correctMethodCall(expression, true);
          super.visitMethodCallExpression(expression);
        }
      });
      // Apply the deferred replacements now that the traversal is finished.
      for (PsiElement element : replaceMap.keySet()) {
        final PsiElement replacement = replaceMap.get(element);
        element.replace(replacement);
      }
    }
    final PsiMethod methodCopy = getPatternMethod();
    final List<PsiParameter> newParameters = Arrays.asList(methodCopy.getParameterList().getParameters());
    RefactoringUtil.fixJavadocsForParams(methodCopy, new HashSet<>(newParameters));
    return methodCopy;
  }
  catch (IncorrectOperationException e) {
    LOG.error(e);
    return myMethod;
  }
}
/**
 * Copies {@code myMethod} and adapts its signature for the target class: forces the
 * required visibility, drops the parameter that became the receiver (when the target is a
 * parameter), and appends parameters for the old enclosing classes.
 *
 * @return the adapted method copy, ready to be inserted into the target class
 * @throws IncorrectOperationException if PSI manipulation fails
 */
private PsiMethod getPatternMethod() throws IncorrectOperationException {
  final PsiMethod methodCopy = (PsiMethod)myMethod.copy();
  // Interfaces require public members; otherwise honor the explicitly chosen visibility.
  // ESCALATE_VISIBILITY is resolved later, so no modifier is forced here in that case.
  final String visibility;
  if (myTargetClass.isInterface()) {
    visibility = PsiModifier.PUBLIC;
  }
  else if (!Comparing.strEqual(myNewVisibility, VisibilityUtil.ESCALATE_VISIBILITY)) {
    visibility = myNewVisibility;
  }
  else {
    visibility = null;
  }
  if (visibility != null) {
    PsiUtil.setModifierProperty(methodCopy, visibility, true);
  }
  // When moving onto a parameter, that parameter turns into "this" and must be removed.
  if (myTargetVariable instanceof PsiParameter) {
    final int index = myMethod.getParameterList().getParameterIndex((PsiParameter)myTargetVariable);
    methodCopy.getParameterList().getParameters()[index].delete();
  }
  addParameters(JavaPsiFacade.getInstance(myProject).getElementFactory(), methodCopy, myTargetClass.isInterface());
  return methodCopy;
}
/**
 * Appends one parameter per old enclosing class so the moved method can still reach the
 * instance it used to access implicitly via {@code this}.
 *
 * @param factory     element factory used to build types and parameters
 * @param methodCopy  the method being prepared for insertion
 * @param isInterface whether the destination is an interface ({@code final} is stripped there)
 * @throws IncorrectOperationException if PSI manipulation fails
 */
private void addParameters(final PsiElementFactory factory, final PsiMethod methodCopy, final boolean isInterface) throws IncorrectOperationException {
  for (final Map.Entry<PsiClass, String> classAndName : myOldClassParameterNames.entrySet()) {
    final PsiClassType parameterType = factory.createType(classAndName.getKey());
    final PsiParameter parameter = factory.createParameter(classAndName.getValue(), parameterType);
    if (isInterface) {
      // Interface method parameters must not carry the "final" modifier.
      PsiUtil.setModifierProperty(parameter, PsiModifier.FINAL, false);
    }
    methodCopy.getParameterList().add(parameter);
  }
}
/**
 * Returns the name of the parameter that stands in for the given old enclosing class,
 * or {@code null} if no parameter was planned for it.
 */
private String getParameterNameToCreate(@NotNull PsiClass aClass) {
  return myOldClassParameterNames.get(aClass);
}
}
| |
package com.swfarm.biz.ebay.bo;
import java.io.Serializable;
import java.util.Date;
import org.apache.log4j.Logger;
import org.springframework.beans.BeanUtils;
import org.springframework.util.ClassUtils;
import com.swfarm.biz.chain.bo.PaymentCommissionFormula;
import com.swfarm.biz.ebay.bo.strategy.AdjustPriceStrategy;
import com.swfarm.biz.product.bo.StockKeepingUnit;
import com.swfarm.pub.utils.DateUtils;
/**
 * Per-item sale configuration for an eBay listing: shipping codes, commission formula,
 * price-adjustment strategy and quantity-recovery settings.
 *
 * <p>Fixes over the previous revision:</p>
 * <ul>
 *   <li>{@link #setPriceAfterAdjustment(Double)} compared boxed {@code Double}s with
 *       {@code !=} (reference identity), so two equal prices in distinct boxes spuriously
 *       rolled {@code priceBeforeAdjustment} and {@code lastAdjustPriceDate}. It now uses
 *       {@code equals}.</li>
 *   <li>The log4j {@code Logger} was a non-static, non-transient instance field; since
 *       {@code Logger} is not serializable, serializing this class would fail. It is now
 *       {@code static final}.</li>
 * </ul>
 */
public class EbayItemSaleConfig implements Serializable {
    private static final long serialVersionUID = 1L;
    // static so the logger is shared and excluded from serialization (Logger is not Serializable).
    private static final Logger logger = Logger.getLogger(EbayItemSaleConfig.class);
    private Long id;
    private EbayItem ebayItem;
    private Long ebayItemId;
    private String itemId;
    private StockKeepingUnit stockKeepingUnit;
    private Long stockKeepingUnitId;
    private String articleNumber;
    private String category;
    private PaymentCommissionFormula paymentCommissionFormula;
    private Long paymentCommissionFormulaId;
    private Double handlingCost;
    private String localShippingService;
    private String intlShippingService;
    private String localSfcCode;
    private String localSfmCode;
    private String intlSfcCode;
    private String intlSfmCode;
    private Double adjustPriceStep;
    private Double grossProfitMarginThreshold;
    private Boolean isAdjustPrice;
    private Boolean isForceAjustPrice;
    private Integer saleDuration;
    private Integer adjustCycleDuration;
    private Double firstIncreaseFluctuationQuantity;
    private Double firstDecreaseFluctuationQuantity;
    private Double increaseFluctuationRate;
    private Double increaseFluctuationQuantity;
    private Double decreaseFluctuationRate;
    private Double decreaseFluctuationQuantity;
    private Date lastAdjustPriceDate;
    private Double priceBeforeAdjustment;
    private Double priceAfterAdjustment;
    // Fully-qualified class name of the AdjustPriceStrategy implementation to run.
    private String adjustPriceStrategyClass;
    private Boolean isAutoRecoverQuantity;
    private Integer recoverQuantity;
    private Date lastRecoverDate;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public EbayItem getEbayItem() {
        return ebayItem;
    }

    public void setEbayItem(EbayItem ebayItem) {
        this.ebayItem = ebayItem;
    }

    public Long getEbayItemId() {
        return ebayItemId;
    }

    public void setEbayItemId(Long ebayItemId) {
        this.ebayItemId = ebayItemId;
    }

    public String getItemId() {
        return itemId;
    }

    public void setItemId(String itemId) {
        this.itemId = itemId;
    }

    public StockKeepingUnit getStockKeepingUnit() {
        return stockKeepingUnit;
    }

    public void setStockKeepingUnit(StockKeepingUnit stockKeepingUnit) {
        this.stockKeepingUnit = stockKeepingUnit;
    }

    public Long getStockKeepingUnitId() {
        return stockKeepingUnitId;
    }

    public void setStockKeepingUnitId(Long stockKeepingUnitId) {
        this.stockKeepingUnitId = stockKeepingUnitId;
    }

    public String getArticleNumber() {
        return articleNumber;
    }

    public void setArticleNumber(String articleNumber) {
        this.articleNumber = articleNumber;
    }

    public String getCategory() {
        return category;
    }

    public void setCategory(String category) {
        this.category = category;
    }

    public PaymentCommissionFormula getPaymentCommissionFormula() {
        return paymentCommissionFormula;
    }

    public void setPaymentCommissionFormula(
            PaymentCommissionFormula paymentCommissionFormula) {
        this.paymentCommissionFormula = paymentCommissionFormula;
    }

    public Long getPaymentCommissionFormulaId() {
        return paymentCommissionFormulaId;
    }

    public void setPaymentCommissionFormulaId(Long paymentCommissionFormulaId) {
        this.paymentCommissionFormulaId = paymentCommissionFormulaId;
    }

    public Double getHandlingCost() {
        return handlingCost;
    }

    public void setHandlingCost(Double handlingCost) {
        this.handlingCost = handlingCost;
    }

    public String getLocalShippingService() {
        return localShippingService;
    }

    public void setLocalShippingService(String localShippingService) {
        this.localShippingService = localShippingService;
    }

    public String getIntlShippingService() {
        return intlShippingService;
    }

    public void setIntlShippingService(String intlShippingService) {
        this.intlShippingService = intlShippingService;
    }

    public String getLocalSfcCode() {
        return localSfcCode;
    }

    public void setLocalSfcCode(String localSfcCode) {
        this.localSfcCode = localSfcCode;
    }

    public String getLocalSfmCode() {
        return localSfmCode;
    }

    public void setLocalSfmCode(String usShippingMethodCode) {
        this.localSfmCode = usShippingMethodCode;
    }

    public String getIntlSfcCode() {
        return intlSfcCode;
    }

    public void setIntlSfcCode(String intlSfcCode) {
        this.intlSfcCode = intlSfcCode;
    }

    public String getIntlSfmCode() {
        return intlSfmCode;
    }

    public void setIntlSfmCode(String shippingMethodCode) {
        this.intlSfmCode = shippingMethodCode;
    }

    public Double getAdjustPriceStep() {
        return adjustPriceStep;
    }

    public void setAdjustPriceStep(Double adjustPriceStep) {
        this.adjustPriceStep = adjustPriceStep;
    }

    public Double getGrossProfitMarginThreshold() {
        return grossProfitMarginThreshold;
    }

    public void setGrossProfitMarginThreshold(Double grossProfitMarginThreshold) {
        this.grossProfitMarginThreshold = grossProfitMarginThreshold;
    }

    /** Lazily defaults to {@code false} when unset. */
    public Boolean getIsAdjustPrice() {
        if (isAdjustPrice == null) {
            isAdjustPrice = false;
        }
        return isAdjustPrice;
    }

    public void setIsAdjustPrice(Boolean isAdjustPrice) {
        this.isAdjustPrice = isAdjustPrice;
    }

    /** Lazily defaults to {@code false} when unset. */
    public Boolean getIsForceAjustPrice() {
        if (isForceAjustPrice == null) {
            isForceAjustPrice = false;
        }
        return isForceAjustPrice;
    }

    public void setIsForceAjustPrice(Boolean isForceAjustPrice) {
        this.isForceAjustPrice = isForceAjustPrice;
    }

    public Integer getSaleDuration() {
        return saleDuration;
    }

    public void setSaleDuration(Integer duration) {
        this.saleDuration = duration;
    }

    /** Lazily defaults to 30 (days) when unset. */
    public Integer getAdjustCycleDuration() {
        if (adjustCycleDuration == null) {
            adjustCycleDuration = 30;
        }
        return adjustCycleDuration;
    }

    public void setAdjustCycleDuration(Integer adjustCycleDuration) {
        this.adjustCycleDuration = adjustCycleDuration;
    }

    public Double getFirstIncreaseFluctuationQuantity() {
        return firstIncreaseFluctuationQuantity;
    }

    public void setFirstIncreaseFluctuationQuantity(
            Double firstIncreaseFluctuationRate) {
        this.firstIncreaseFluctuationQuantity = firstIncreaseFluctuationRate;
    }

    public Double getFirstDecreaseFluctuationQuantity() {
        return firstDecreaseFluctuationQuantity;
    }

    public void setFirstDecreaseFluctuationQuantity(
            Double firstDecreaseFluctuationRate) {
        this.firstDecreaseFluctuationQuantity = firstDecreaseFluctuationRate;
    }

    public Double getIncreaseFluctuationRate() {
        return increaseFluctuationRate;
    }

    public void setIncreaseFluctuationRate(Double fluctuationRate) {
        this.increaseFluctuationRate = fluctuationRate;
    }

    public Double getIncreaseFluctuationQuantity() {
        return increaseFluctuationQuantity;
    }

    public void setIncreaseFluctuationQuantity(Double fluctuationQuantity) {
        this.increaseFluctuationQuantity = fluctuationQuantity;
    }

    public Double getDecreaseFluctuationRate() {
        return decreaseFluctuationRate;
    }

    public void setDecreaseFluctuationRate(Double decreaseFluctuationRate) {
        this.decreaseFluctuationRate = decreaseFluctuationRate;
    }

    public Double getDecreaseFluctuationQuantity() {
        return decreaseFluctuationQuantity;
    }

    public void setDecreaseFluctuationQuantity(
            Double decreaseFluctuationQuantity) {
        this.decreaseFluctuationQuantity = decreaseFluctuationQuantity;
    }

    public Date getLastAdjustPriceDate() {
        return lastAdjustPriceDate;
    }

    /**
     * Whether a price adjustment may run now: always when forced, otherwise only when the
     * last adjustment is absent or older than the adjust cycle duration (in days).
     */
    public Boolean getIsAdjustPriceApplicable() {
        if (getIsForceAjustPrice()) {
            return true;
        }
        Integer adjustCycleDuration = this.getAdjustCycleDuration();
        return this.lastAdjustPriceDate == null
                || this.lastAdjustPriceDate.before(DateUtils.getBeforeDate(
                        new Date(), adjustCycleDuration));
    }

    public void setLastAdjustPriceDate(Date lastAdjustPriceDate) {
        this.lastAdjustPriceDate = lastAdjustPriceDate;
    }

    public Double getPriceBeforeAdjustment() {
        return priceBeforeAdjustment;
    }

    public void setPriceBeforeAdjustment(Double priceBeforeAdjustment) {
        this.priceBeforeAdjustment = priceBeforeAdjustment;
    }

    public Double getPriceAfterAdjustment() {
        return priceAfterAdjustment;
    }

    /**
     * Records the new adjusted price. When the price actually changes, the previous value is
     * preserved in {@code priceBeforeAdjustment} and the adjustment timestamp is refreshed.
     */
    public void setPriceAfterAdjustment(Double priceAfterAdjustment) {
        // Value comparison via equals(): "!=" on boxed Doubles compares references and
        // treated equal prices in distinct boxes as a change.
        if (this.priceAfterAdjustment != null
                && !this.priceAfterAdjustment.equals(priceAfterAdjustment)) {
            this.setPriceBeforeAdjustment(this.priceAfterAdjustment);
            this.setLastAdjustPriceDate(new Date());
        }
        this.priceAfterAdjustment = priceAfterAdjustment;
    }

    public String getAdjustPriceStrategyClass() {
        return adjustPriceStrategyClass;
    }

    public void setAdjustPriceStrategyClass(String adjustPriceStrategyClass) {
        this.adjustPriceStrategyClass = adjustPriceStrategyClass;
    }

    /** Lazily defaults to {@code false} when unset. */
    public Boolean getIsAutoRecoverQuantity() {
        if (isAutoRecoverQuantity == null) {
            isAutoRecoverQuantity = false;
        }
        return isAutoRecoverQuantity;
    }

    public void setIsAutoRecoverQuantity(Boolean isAutoRecoverQuantity) {
        this.isAutoRecoverQuantity = isAutoRecoverQuantity;
    }

    /** Lazily defaults to 1 when unset. */
    public Integer getRecoverQuantity() {
        if (recoverQuantity == null) {
            recoverQuantity = 1;
        }
        return recoverQuantity;
    }

    public void setRecoverQuantity(Integer recoverQuantity) {
        this.recoverQuantity = recoverQuantity;
    }

    public Date getLastRecoverDate() {
        return lastRecoverDate;
    }

    public void setLastRecoverDate(Date lastRecoverDate) {
        this.lastRecoverDate = lastRecoverDate;
    }

    /**
     * Instantiates {@link #adjustPriceStrategyClass} reflectively and runs it against this
     * configuration.
     *
     * @return the strategy's result, or {@code null} when the class cannot be loaded,
     *         instantiated, or executed (best-effort: failures are logged, not rethrown)
     */
    public Boolean executeAdjustPriceStrategy() {
        try {
            Class<?> strategyClass = ClassUtils
                    .forName(this.adjustPriceStrategyClass);
            AdjustPriceStrategy adjustPriceStrategy = (AdjustPriceStrategy) BeanUtils
                    .instantiateClass(strategyClass);
            return adjustPriceStrategy.execute(this);
        } catch (Exception e) {
            logger.warn(e.getMessage(), e);
            return null;
        }
    }
}
| |
package org.aikodi.chameleon.core.language;
import org.aikodi.chameleon.core.element.Element;
import org.aikodi.chameleon.core.lookup.LookupContextFactory;
import org.aikodi.chameleon.core.namespace.LazyRootNamespace;
import org.aikodi.chameleon.core.namespace.RootNamespace;
import org.aikodi.chameleon.core.property.ChameleonProperty;
import org.aikodi.chameleon.core.property.PropertyRule;
import org.aikodi.chameleon.core.property.StaticChameleonProperty;
import org.aikodi.chameleon.core.validation.Verification;
import org.aikodi.chameleon.core.validation.VerificationRule;
import org.aikodi.chameleon.exception.ChameleonProgrammerException;
import org.aikodi.chameleon.plugin.LanguagePlugin;
import org.aikodi.chameleon.plugin.LanguageProcessor;
import org.aikodi.chameleon.plugin.PluginContainer;
import org.aikodi.chameleon.plugin.ProcessorContainer;
import org.aikodi.chameleon.workspace.View;
import org.aikodi.rejuse.association.MultiAssociation;
import org.aikodi.rejuse.junit.Revision;
import org.aikodi.rejuse.property.PropertyMutex;
import org.aikodi.rejuse.property.PropertySet;
import org.aikodi.rejuse.property.PropertyUniverse;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
* <p>An object that represents a language. The main responsibility of
* a language object is determine <i>some</i> of the properties of its language
* constructs.</p>
*
* <p>By extracting certain parts of the language semantics from the
* language constructs themselves, we try to avoid an explosion of classes.
* For example, elements such as methods can have many different properties that are determined
* by modifiers. A language that has methods can specify the default properties of
* methods. These are the properties that a method has if it has no relevant
* modifier with respect to that property. In Java, methods are overridable by default,
* whereas they are not in C#. Implementing new classes for each language just to fix the
* default properties would make it too hard to create a new language. Therefore, we use delegation
* instead.</p>
*
* @author Marko van Dooren
*/
public interface Language extends PropertyUniverse<ChameleonProperty>, PluginContainer<LanguagePlugin>, ProcessorContainer<LanguageProcessor> {
  /**
   * Return the version of this language.
   *
   * @return the revision of this language; never null.
   */
  Revision version();
  /**
   * Return the name of this language.
   */
 /*@
   @ public behavior
   @
   @ post \result != null;
   @*/
  String name();
  /**
   * Return the default properties of the given element: the properties the element has
   * when no explicit modifier overrides them.
   *
   * @return the property set computed by applying all property rules to the element.
   */
 /*@
   @ public behavior
   @
   @ pre element != null;
   @
   @ (* The properties of all rules are added to the result.*)
   @ post (\forall PropertyRule rule; propertyRules().contains(rule);
   @         \result.containsAll(rule.properties(element)));
   @ (* Only the properties given by the property rules are in the result *);
   @ post (\forall Property<Element> p; \result.contains(p);
   @        \exists(PropertyRule rule; propertyRules().contains(rule);
   @                 rule.properties(element).contains(p)));
   @*/
  PropertySet<Element,ChameleonProperty> defaultProperties(Element element, PropertySet<Element,ChameleonProperty> explicit);
  /**
   * Return the list of rules that determine the default properties of an element.
   *
   * @return the property rules of this language; never null.
   */
 /*@
   @ public behavior
   @
   @ post \result != null;
   @*/
  public List<PropertyRule> propertyRules();
  /**
   * Add a property rule to this language object.
   *
   * @param rule the rule to add.
   */
 /*@
   @ public behavior
   @
   @ pre rule != null;
   @
   @ post propertyRules().contains(rule);
   @*/
  public void addPropertyRule(PropertyRule rule);
  /**
   * Remove a property rule from this language object.
   *
   * @param rule the rule to remove.
   */
 /*@
   @ public behavior
   @
   @ pre rule != null;
   @
   @ post ! propertyRules().contains(rule);
   @*/
  public void removePropertyRule(PropertyRule rule);
  /**
   * Set the name of this language.
   *
   * @param name
   *        The new name of this language
   */
 /*@
   @ public behavior
   @
   @ pre name != null;
   @
   @ post getName() == name;
   @*/
  public void setName(String name);
  /**
   * Return the default namespace attached to this language. A language is always attached to a default namespace because a language
   * may need access to predefined elements, which are somewhere in the model.
   */
//  public RootNamespace defaultNamespace();
  /**
   * A property mutex for the scope property.
   */
  public PropertyMutex<ChameleonProperty> SCOPE_MUTEX();
//  /**
//   * Return the connector corresponding to the given connector interface.
//   */
// /*@
//   @ public behavior
//   @
//   @ pre connectorInterface != null;
//   @*/
//  public <T extends LanguagePlugin> T plugin(Class<T> pluginInterface);
//  /**
//   * Remove the plugin corresponding to the given plugin interface. The
//   * bidirectional relation is kept in a consistent state.
//   *
//   * @param <T>
//   * @param pluginInterface
//   */
// /*@
//   @ public behavior
//   @
//   @ pre pluginInterface != null;
//   @
//   @ post plugin(pluginInterface) == null;
//   @*/
//  public <T extends LanguagePlugin> void removePlugin(Class<T> pluginInterface);
//  /**
//   * Set the plugin corresponding to the given plugin interface. The bidirectional relation is
//   * kept in a consistent state.
//   *
//   * @param <T>
//   * @param pluginInterface
//   * @param plugin
//   */
// /*@
//   @ public behavior
//   @
//   @ pre pluginInterface != null;
//   @ pre plugin != null;
//   @
//   @ post plugin(pluginInterface) == plugin;
//   @*/
//  public <T extends LanguagePlugin> void setPlugin(Class<T> pluginInterface, T plugin);
  /**************
   * PROCESSORS *
   **************/
//  /**
//   * Return the processors corresponding to the given processor interface.
//   */
// /*@
//   @ public behavior
//   @
//   @ post \result.equals(processorMap().get(connectorInterface));
//   @*/
//  public <T extends LanguageProcessor> List<T> processors(Class<T> connectorInterface);
//
//  /**
//   * Remove the given processor. The
//   * bidirectional relation is kept in a consistent state.
//   *
//   * @param <T>
//   * @param connectorInterface
//   */
// /*@
//   @ public behavior
//   @
//   @ pre connectorInterface != null;
//   @ pre processor != null;
//   @
//   @ post !processor(connectorInterface).contains(processor);
//   @*/
//  public <T extends LanguageProcessor> void removeProcessor(Class<T> connectorInterface, T processor);
//
//  /**
//   * Add the given processor to the list of processors corresponding to the given connector interface.
//   * The bidirectional relation is kept in a consistent state.
//   *
//   * @param <T>
//   * @param connectorInterface
//   * @param connector
//   */
// /*@
//   @ public behavior
//   @
//   @ pre connectorInterface != null;
//   @ pre processor != null;
//   @
//   @ post processor(connectorInterface).contains(processor);
//   @*/
//  public <T extends LanguageProcessor> void addProcessor(Class<T> connectorInterface, T processor);
//
//  /**
//   * Copy the processor mapping from the given language to this language.
//   */
// /*@
//   @ public behavior
//   @
//   @ post (\forall Class<? extends Processor> cls; from.processorMap().containsKey(cls);
//   @        processors(cls).containsAll(from.processorMap().valueSet());
//   @*/
//  public <S extends Processor> void cloneProcessorsFrom(Language from);
  /**
   * Return the mapping of classes/interfaces to the processors of that kind.
   */
 /*@
   @ public behavior
   @
   @ post \result != null;
   @*/
  @Override
  public Map<Class<? extends LanguageProcessor>, List<? extends LanguageProcessor>> processorMap();
  /**************************************************************************
   *                               PROPERTIES                               *
   **************************************************************************/
  /**
   * Return the properties that can be used for elements in this model.
   *
   * For every class of properties, one object is in the set.
   *
   * @return the set of usable properties; never null.
   */
  @Override
  public Set<ChameleonProperty> properties();
  /**
   * Return the object representing the association between this language and the
   * properties to which it is attached.
   *
   * DO NOT MODIFY THE RESULTING OBJECT. IT IS ACCESSIBLE ONLY BECAUSE OF THE
   * VERY DUMB ACCESS CONTROL IN JAVA.
   */
  @Override
  public MultiAssociation<Language,ChameleonProperty> propertyLink();
  /**
   * Return the property of this language with the given name.
   *
   * @param name the name of the requested property.
   * @return the property with the given name.
   * @throws ChameleonProgrammerException
   *         There is no property with the given name.
   */
  public ChameleonProperty property(String name) throws ChameleonProgrammerException;
  /**************************************************************************
   *                           DEFAULT NAMESPACE                            *
   **************************************************************************/
  /**
   * Set the default namespace.
   */
//  public void setDefaultNamespace(RootNamespace defaultNamespace);
  /**
   * Return the association object that represents that association with the
   * default (root) namespace.
   */
//  public Association<Language, View> viewLink();
//  public View view();
//  public Project project();
  /**
   * Return the factory for creating lookup strategies.
   */
  public LookupContextFactory lookupFactory();
  /**
   * Returns true if the given character is a valid character
   * for an identifier.
   */
  public boolean isValidIdentifierCharacter(char character);
  /**
   * Return the list of rules that determine the language specific validity conditions of an element.
   *
   * @return the validity rules of this language; never null.
   */
 /*@
   @ public behavior
   @
   @ post \result != null;
   @*/
  public List<VerificationRule> validityRules();
  /**
   * Add a validity rule to this language object.
   *
   * @param rule the rule to add.
   */
 /*@
   @ public behavior
   @
   @ pre rule != null;
   @
   @ post validityRules().contains(rule);
   @*/
  public void addValidityRule(VerificationRule rule);
  /**
   * Remove a validity rule from this language object.
   *
   * @param rule the rule to remove.
   */
 /*@
   @ public behavior
   @
   @ pre rule != null;
   @
   @ post ! validityRules().contains(rule);
   @*/
  public void removeValidityRule(VerificationRule rule);
  /**
   * Verify the given element.
   *
   * This method verifies constraints on the element that are specific for the language.
   * One example is the validity of the name of an element. Different languages may have different
   * rules with respect to the validity of a name.
   *
   * @param element the element to verify.
   * @return the verification result for the given element.
   */
  public Verification verify(Element element);
  /**
   * Flush the caches kept by this language. Caches of model elements are flushed separately.
   * The default behavior is to do nothing.
   */
  @Override
  public void flushCache();
  /**
   * Create a new view for this language, rooted at a freshly created root namespace.
   */
  public default View createView() {
    return new View(createRootNamespace(),this);
  }
  /**
   * Create the root namespace for a new view. By default a lazily-populated namespace.
   */
  public default RootNamespace createRootNamespace() {
    return new LazyRootNamespace();
  }
  /**
   * A property that specifies that a declaration can be overridden.
   *
   * OVERRIDABLE implies REFINABLE.
   */
  StaticChameleonProperty OVERRIDABLE();
  /**
   * A property that specifies that a declaration can be refined.
   * Refinement can be seen as overriding by purely augmenting
   * the refined declaration.
   */
  ChameleonProperty REFINABLE();
  /**
   * A property that specifies that a declaration can be inherited.
   */
  StaticChameleonProperty INHERITABLE();
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.android.xml;
import static com.google.common.base.Predicates.equalTo;
import static com.google.common.base.Predicates.not;
import com.android.resources.ResourceType;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.base.Preconditions;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableMap.Builder;
import com.google.common.collect.Ordering;
import com.google.devtools.build.android.AndroidDataWritingVisitor;
import com.google.devtools.build.android.AndroidDataWritingVisitor.StartTag;
import com.google.devtools.build.android.AndroidDataWritingVisitor.ValuesResourceDefinition;
import com.google.devtools.build.android.AndroidResourceSymbolSink;
import com.google.devtools.build.android.DataSource;
import com.google.devtools.build.android.FullyQualifiedName;
import com.google.devtools.build.android.XmlResourceValue;
import com.google.devtools.build.android.XmlResourceValues;
import com.google.devtools.build.android.proto.SerializeFormat;
import com.google.protobuf.InvalidProtocolBufferException;
import java.io.IOException;
import java.io.OutputStream;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import javax.annotation.CheckReturnValue;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import javax.xml.namespace.QName;
import javax.xml.stream.XMLEventReader;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.events.StartElement;
import javax.xml.stream.events.XMLEvent;
/**
* Represents an Android Resource custom attribute.
*
* <p>Attribute are the most complicated Android resource, and therefore the least documented. Most
* of the information about them is found by reading the android compatibility library source. An
* Attribute defines a parameter that can be passed into a view class -- as such you can think of
* attributes as creating slots for other resources to fit into. Each slot will have at least one
* format, and can have multiples. Simple attributes (color, boolean, reference, dimension, float,
* integer, string, and fraction) are defined as <attr name="<em>name</em>" format=
* "<em>format</em>" /> while the complex ones, flag and enum, have sub parentTags: <attr
* name= "<em>name</em>" ><flag name="<em>name</em>" value="<em>value</em>"> </attr>.
*
* <p>Attributes also have a double duty as defining validation logic for layout resources -- each
* layout attribute *must* have a corresponding attribute which will be used to validate the
* value/resource reference defined in it.
*
* <p>AttrXmlValue, due to the multiple types of attributes is actually a composite class that
* contains multiple {@link XmlResourceValue} instances for each resource.
*/
@Immutable
public class AttrXmlResourceValue implements XmlResourceValue {
  // Format keywords that may appear in an <attr format="..."> declaration.
  private static final String FRACTION = "fraction";
  private static final String STRING = "string";
  private static final String INTEGER = "integer";
  private static final String FLOAT = "float";
  private static final String DIMENSION = "dimension";
  private static final String BOOLEAN = "boolean";
  private static final String COLOR = "color";
  private static final String REFERENCE = "reference";
  private static final String ENUM = "enum";
  private static final String FLAGS = "flags";
  // Sub-tag names of a complex <attr>: <enum .../> and <flag .../> children.
  private static final QName TAG_ENUM = QName.valueOf(ENUM);
  private static final QName TAG_FLAG = QName.valueOf("flag");
  // Map from format keyword to the value object representing that format; an attr may
  // declare several formats at once.
  private final ImmutableMap<String, ResourceXmlAttrValue> formats;

  // Instances are created through the static factories; the map is taken as-is.
  private AttrXmlResourceValue(ImmutableMap<String, ResourceXmlAttrValue> formats) {
    this.formats = formats;
  }
private static Map<String, String> readSubValues(XMLEventReader reader, QName subTagType)
throws XMLStreamException {
Builder<String, String> builder = ImmutableMap.builder();
while (reader.hasNext()
&& XmlResourceValues.isTag(XmlResourceValues.peekNextTag(reader), subTagType)) {
StartElement element = reader.nextEvent().asStartElement();
builder.put(
XmlResourceValues.getElementName(element), XmlResourceValues.getElementValue(element));
XMLEvent endTag = reader.nextEvent();
if (!XmlResourceValues.isEndTag(endTag, subTagType)) {
throw new XMLStreamException(
String.format("Unexpected [%s]; Expected %s", endTag, "</enum>"), endTag.getLocation());
}
}
return builder.build();
}
private static void endAttrElement(XMLEventReader reader) throws XMLStreamException {
XMLEvent endTag = reader.nextTag();
if (!endTag.isEndElement() || !QName.valueOf("attr").equals(endTag.asEndElement().getName())) {
throw new XMLStreamException("Unexpected ParentTag:" + endTag, endTag.getLocation());
}
}
@VisibleForTesting
private static final class BuilderEntry implements Entry<String, ResourceXmlAttrValue> {
private final String name;
private final ResourceXmlAttrValue value;
BuilderEntry(String name, ResourceXmlAttrValue value) {
this.name = name;
this.value = value;
}
@Override
public String getKey() {
return name;
}
@Override
public ResourceXmlAttrValue getValue() {
return value;
}
@Override
public ResourceXmlAttrValue setValue(ResourceXmlAttrValue value) {
throw new UnsupportedOperationException();
}
}
@SafeVarargs
@VisibleForTesting
public static XmlResourceValue fromFormatEntries(Entry<String, ResourceXmlAttrValue>... entries) {
return of(ImmutableMap.copyOf(Arrays.asList(entries)));
}
  /**
   * Deserializes an attr from its protobuf cache representation.
   *
   * Flag and enum formats carry their name/value pairs in the mapped string values; every other
   * format is a stateless marker reconstructed from its name alone.
   *
   * @throws InvalidProtocolBufferException when the proto contains an unknown format name.
   */
  @SuppressWarnings("deprecation")
  public static XmlResourceValue from(SerializeFormat.DataValueXml proto)
      throws InvalidProtocolBufferException {
    Builder<String, ResourceXmlAttrValue> formats =
        ImmutableMap.<String, AttrXmlResourceValue.ResourceXmlAttrValue>builder();
    for (Entry<String, SerializeFormat.DataValueXml> entry : proto.getMappedXmlValue().entrySet()) {
      switch (entry.getKey()) {
        case FLAGS:
          formats.put(
              entry.getKey(), FlagResourceXmlAttrValue.of(entry.getValue().getMappedStringValue()));
          break;
        case ENUM:
          formats.put(
              entry.getKey(), EnumResourceXmlAttrValue.of(entry.getValue().getMappedStringValue()));
          break;
        case REFERENCE:
          formats.put(entry.getKey(), ReferenceResourceXmlAttrValue.of());
          break;
        case COLOR:
          formats.put(entry.getKey(), ColorResourceXmlAttrValue.of());
          break;
        case BOOLEAN:
          formats.put(entry.getKey(), BooleanResourceXmlAttrValue.of());
          break;
        case DIMENSION:
          formats.put(entry.getKey(), DimensionResourceXmlAttrValue.of());
          break;
        case FLOAT:
          formats.put(entry.getKey(), FloatResourceXmlAttrValue.of());
          break;
        case INTEGER:
          formats.put(entry.getKey(), IntegerResourceXmlAttrValue.of());
          break;
        case STRING:
          formats.put(entry.getKey(), StringResourceXmlAttrValue.of());
          break;
        case FRACTION:
          formats.put(entry.getKey(), FractionResourceXmlAttrValue.of());
          break;
        default:
          throw new InvalidProtocolBufferException("Unexpected format: " + entry.getKey());
      }
    }
    return of(formats.build());
  }
  /**
   * Creates a new {@link AttrXmlResourceValue}. Returns null if there are no formats.
   *
   * <p>The set of formats is the union of the {@code format} attribute (a {@code |}-separated
   * list) and the tag type of the first child element: a leading {@code <flag>} or {@code <enum>}
   * (or any other) sub-tag implies its format even when the attribute omits it.
   *
   * <p>NOTE(review): despite the doc above and the {@code @Nullable} annotation, this method as
   * written always returns a (possibly empty-format) value — confirm whether a null return was
   * ever intended.
   *
   * @param attr the {@code <attr>} start element, used only for error locations.
   * @param format the raw {@code format} attribute value, or null when absent.
   * @param eventReader reader positioned just after the attr start element.
   * @throws XMLStreamException on malformed child tags or an unknown format name.
   */
  @Nullable
  public static XmlResourceValue from(
      StartElement attr, @Nullable String format, XMLEventReader eventReader)
      throws XMLStreamException {
    Set<String> formatNames = new HashSet<>();
    if (format != null) {
      Collections.addAll(formatNames, format.split("\\|"));
    }
    XMLEvent nextTag = XmlResourceValues.peekNextTag(eventReader);
    if (nextTag != null && nextTag.isStartElement()) {
      QName tagName = nextTag.asStartElement().getName();
      // The <flag> tag maps to the "flags" format name; any other tag name is its own format.
      if (TAG_FLAG.equals(tagName)) {
        formatNames.add(FLAGS);
      } else {
        formatNames.add(tagName.getLocalPart().toLowerCase());
      }
    }
    // NOTE(review): formatNames is a HashSet, so if both flag and enum formats were present the
    // order in which their sub-tags are consumed from the reader would be nondeterministic —
    // presumably the two formats never co-occur; verify against real attr definitions.
    Builder<String, ResourceXmlAttrValue> formats = ImmutableMap.builder();
    for (String formatName : formatNames) {
      switch (formatName) {
        case FLAGS:
          Map<String, String> flags = readSubValues(eventReader, TAG_FLAG);
          endAttrElement(eventReader);
          formats.put(formatName, FlagResourceXmlAttrValue.of(flags));
          break;
        case ENUM:
          Map<String, String> enums = readSubValues(eventReader, TAG_ENUM);
          endAttrElement(eventReader);
          formats.put(formatName, EnumResourceXmlAttrValue.of(enums));
          break;
        case REFERENCE:
          formats.put(formatName, ReferenceResourceXmlAttrValue.of());
          break;
        case COLOR:
          formats.put(formatName, ColorResourceXmlAttrValue.of());
          break;
        case BOOLEAN:
          formats.put(formatName, BooleanResourceXmlAttrValue.of());
          break;
        case DIMENSION:
          formats.put(formatName, DimensionResourceXmlAttrValue.of());
          break;
        case FLOAT:
          formats.put(formatName, FloatResourceXmlAttrValue.of());
          break;
        case INTEGER:
          formats.put(formatName, IntegerResourceXmlAttrValue.of());
          break;
        case STRING:
          formats.put(formatName, StringResourceXmlAttrValue.of());
          break;
        case FRACTION:
          formats.put(formatName, FractionResourceXmlAttrValue.of());
          break;
        default:
          throw new XMLStreamException(
              String.format("Unexpected attr format: %S", formatName), attr.getLocation());
      }
    }
    return of(formats.build());
  }
  /** Creates an attr value directly from a pre-built format map. */
  public static XmlResourceValue of(ImmutableMap<String, ResourceXmlAttrValue> formats) {
    return new AttrXmlResourceValue(formats);
  }
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    AttrXmlResourceValue other = (AttrXmlResourceValue) o;
    // Two attrs are equal when they declare the same formats with the same sub-values.
    return Objects.equals(formats, other.formats);
  }
  @Override
  public int hashCode() {
    // Consistent with equals: both delegate to the formats map.
    return formats.hashCode();
  }
  @Override
  public String toString() {
    return MoreObjects.toStringHelper(this).add("formats", formats).toString();
  }
  /**
   * Writes this attr definition back out as xml. A formatless attr becomes a unary
   * {@code <attr name=.../>} tag. Otherwise the {@code format} attribute lists every format
   * except flag/enum, which are instead expressed as nested {@code <flag>}/{@code <enum>}
   * child tags.
   */
  @Override
  public void write(
      FullyQualifiedName key, DataSource source, AndroidDataWritingVisitor mergedDataWriter) {
    if (formats.isEmpty()) {
      mergedDataWriter
          .define(key)
          .derivedFrom(source)
          .startTag("attr")
          .named(key)
          .closeUnaryTag()
          .save();
    } else {
      // flag/enum never appear in the format attribute; they are written as child tags below.
      ImmutableList<String> formatKeys =
          FluentIterable.from(formats.keySet())
              .filter(not(equalTo(FLAGS)))
              .filter(not(equalTo(ENUM)))
              .toSortedList(Ordering.natural());
      StartTag startTag =
          mergedDataWriter
              .define(key)
              .derivedFrom(source)
              .startTag("attr")
              .named(key)
              .optional()
              .attribute("format")
              .setFrom(formatKeys)
              .joinedBy("|");
      ValuesResourceDefinition definition;
      if (formats.keySet().contains(FLAGS) || formats.keySet().contains(ENUM)) {
        // Only flag/enum emit nested tags; the other formats' writeTo calls are no-ops.
        definition = startTag.closeTag();
        for (ResourceXmlAttrValue value : formats.values()) {
          definition = value.writeTo(definition);
        }
        definition = definition.addCharactersOf("\n").endTag();
      } else {
        definition = startTag.closeUnaryTag();
      }
      definition.save();
    }
  }
@Override
public void writeResourceToClass(FullyQualifiedName key, AndroidResourceSymbolSink sink) {
sink.acceptSimpleResource(key.type(), key.name());
// Flags and enums generate ID fields.
if (formats.keySet().contains(FLAGS) || formats.keySet().contains(ENUM)) {
for (ResourceXmlAttrValue value : formats.values()) {
value.writeToClass(sink);
}
}
}
  /**
   * Serializes this attr into the proto cache format, mirroring {@link #from(SerializeFormat.DataValueXml)}.
   *
   * @return the number of bytes written to {@code output}.
   */
  @SuppressWarnings("deprecation")
  @Override
  public int serializeTo(int sourceId, Namespaces namespaces, OutputStream output)
      throws IOException {
    SerializeFormat.DataValue.Builder builder =
        XmlResourceValues.newSerializableDataValueBuilder(sourceId);
    SerializeFormat.DataValueXml.Builder xmlValueBuilder =
        SerializeFormat.DataValueXml.newBuilder();
    xmlValueBuilder
        .setType(SerializeFormat.DataValueXml.XmlType.ATTR)
        .putAllNamespace(namespaces.asMap());
    // Each format serializes itself under its format name.
    for (Entry<String, ResourceXmlAttrValue> entry : formats.entrySet()) {
      xmlValueBuilder.putMappedXmlValue(
          entry.getKey(), entry.getValue().appendTo(builder.getXmlValueBuilder()));
    }
    builder.setXmlValue(xmlValueBuilder);
    return XmlResourceValues.serializeProtoDataValue(output, builder);
  }
  // Two attr definitions for the same fully qualified name cannot be merged.
  @Override
  public XmlResourceValue combineWith(XmlResourceValue value) {
    throw new IllegalArgumentException(this + " is not a combinable resource.");
  }
  /** Represents the xml value for an attr definition. */
  @CheckReturnValue
  public interface ResourceXmlAttrValue {
    // Writes this format's xml representation (e.g. nested <enum> tags) into the definition.
    ValuesResourceDefinition writeTo(ValuesResourceDefinition writer);
    // Appends this format's data to the protobuf builder for caching.
    SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder);
    // Emits any R-class symbols (e.g. ID fields for enum/flag values) to the sink.
    void writeToClass(AndroidResourceSymbolSink writer);
  }
// TODO(corysmith): The ResourceXmlAttrValue implementors, other than enum and flag, share a
// lot of boilerplate. Determine how to reduce it.
/** Represents an Android Enum Attribute resource. */
@VisibleForTesting
public static class EnumResourceXmlAttrValue implements ResourceXmlAttrValue {
private Map<String, String> values;
private EnumResourceXmlAttrValue(Map<String, String> values) {
this.values = values;
}
@VisibleForTesting
public static Entry<String, ResourceXmlAttrValue> asEntryOf(String... keyThenValue) {
Preconditions.checkArgument(keyThenValue.length > 0);
Preconditions.checkArgument(keyThenValue.length % 2 == 0);
Builder<String, String> builder = ImmutableMap.builder();
for (int i = 0; i < keyThenValue.length; i += 2) {
builder.put(keyThenValue[i], keyThenValue[i + 1]);
}
return new BuilderEntry(ENUM, of(builder.build()));
}
public static ResourceXmlAttrValue of(Map<String, String> values) {
return new EnumResourceXmlAttrValue(values);
}
@Override
public int hashCode() {
return values.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof EnumResourceXmlAttrValue)) {
return false;
}
EnumResourceXmlAttrValue other = (EnumResourceXmlAttrValue) obj;
return Objects.equals(values, other.values);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass()).add("values", values).toString();
}
@Override
public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
return builder.putAllMappedStringValue(values).build();
}
@Override
public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
for (Entry<String, String> entry : values.entrySet()) {
writer =
writer
.startTag("enum")
.attribute("name")
.setTo(entry.getKey())
.attribute("value")
.setTo(entry.getValue())
.closeUnaryTag()
.addCharactersOf("\n");
}
return writer;
}
@Override
public void writeToClass(AndroidResourceSymbolSink writer) {
for (Map.Entry<String, String> entry : values.entrySet()) {
writer.acceptSimpleResource(ResourceType.ID, entry.getKey());
}
}
}
/** Represents an Android Flag Attribute resource. */
@VisibleForTesting
public static class FlagResourceXmlAttrValue implements ResourceXmlAttrValue {
private Map<String, String> values;
private FlagResourceXmlAttrValue(Map<String, String> values) {
this.values = values;
}
public static ResourceXmlAttrValue of(Map<String, String> values) {
// ImmutableMap guarantees a stable order.
return new FlagResourceXmlAttrValue(ImmutableMap.copyOf(values));
}
@VisibleForTesting
public static Entry<String, ResourceXmlAttrValue> asEntryOf(String... keyThenValue) {
Builder<String, String> builder = ImmutableMap.builder();
Preconditions.checkArgument(keyThenValue.length > 0);
Preconditions.checkArgument(keyThenValue.length % 2 == 0);
for (int i = 0; i < keyThenValue.length; i += 2) {
builder.put(keyThenValue[i], keyThenValue[i + 1]);
}
return new BuilderEntry(FLAGS, of(builder.build()));
}
@Override
public int hashCode() {
return values.hashCode();
}
@Override
public boolean equals(Object obj) {
if (!(obj instanceof FlagResourceXmlAttrValue)) {
return false;
}
FlagResourceXmlAttrValue other = (FlagResourceXmlAttrValue) obj;
return Objects.equals(values, other.values);
}
@Override
public String toString() {
return MoreObjects.toStringHelper(getClass()).add("values", values).toString();
}
@Override
public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
return builder.putAllMappedStringValue(values).build();
}
@Override
public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
for (Entry<String, String> entry : values.entrySet()) {
writer =
writer
.startTag("flag")
.attribute("name")
.setTo(entry.getKey())
.attribute("value")
.setTo(entry.getValue())
.closeUnaryTag();
}
return writer;
}
@Override
public void writeToClass(AndroidResourceSymbolSink writer) {
for (Map.Entry<String, String> entry : values.entrySet()) {
writer.acceptSimpleResource(ResourceType.ID, entry.getKey());
}
}
}
  /** Represents an Android Reference Attribute resource. */
  @VisibleForTesting
  public static class ReferenceResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final ReferenceResourceXmlAttrValue INSTANCE =
        new ReferenceResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(REFERENCE, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android Color Attribute resource. */
  @VisibleForTesting
  public static class ColorResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final ColorResourceXmlAttrValue INSTANCE = new ColorResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(COLOR, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android Boolean Attribute resource. */
  @VisibleForTesting
  public static class BooleanResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final BooleanResourceXmlAttrValue INSTANCE = new BooleanResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(BOOLEAN, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android Float Attribute resource. */
  @VisibleForTesting
  public static class FloatResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final FloatResourceXmlAttrValue INSTANCE = new FloatResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(FLOAT, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android Dimension Attribute resource. */
  @VisibleForTesting
  public static class DimensionResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final DimensionResourceXmlAttrValue INSTANCE =
        new DimensionResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(DIMENSION, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android Integer Attribute resource. */
  @VisibleForTesting
  public static class IntegerResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final IntegerResourceXmlAttrValue INSTANCE = new IntegerResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(INTEGER, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android String Attribute resource. */
  @VisibleForTesting
  public static class StringResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final StringResourceXmlAttrValue INSTANCE = new StringResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(STRING, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  /** Represents an Android Fraction Attribute resource. */
  @VisibleForTesting
  public static class FractionResourceXmlAttrValue implements ResourceXmlAttrValue {
    // Stateless, so a single shared instance suffices.
    private static final FractionResourceXmlAttrValue INSTANCE = new FractionResourceXmlAttrValue();
    public static ResourceXmlAttrValue of() {
      return INSTANCE;
    }
    @VisibleForTesting
    public static BuilderEntry asEntry() {
      return new BuilderEntry(FRACTION, of());
    }
    // No sub-values to serialize; the format name alone carries the information.
    @Override
    public SerializeFormat.DataValueXml appendTo(SerializeFormat.DataValueXml.Builder builder) {
      return builder.build();
    }
    @Override
    public ValuesResourceDefinition writeTo(ValuesResourceDefinition writer) {
      return writer;
    }
    // No nested values, so no R-class symbols are generated.
    @Override
    public void writeToClass(AndroidResourceSymbolSink writer) {}
  }
  // Conflict reporting delegates entirely to the data source's own description.
  @Override
  public String asConflictStringWith(DataSource source) {
    return source.asConflictString();
  }
}
| |
/*
* Copyright 2012 Ian D. Bollinger
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package org.rustlang.oxide.common;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
import com.google.inject.Inject;
import org.eclipse.core.runtime.ILog;
import org.eclipse.core.runtime.IStatus;
import org.slf4j.helpers.FormattingTuple;
import org.slf4j.helpers.MarkerIgnoringBase;
import org.slf4j.helpers.MessageFormatter;
/**
 * An SLF4J logger that forwards log requests to an Eclipse platform {@link ILog}.
 *
 * <p>Severity mapping: error &rarr; {@link IStatus#ERROR}, warn &rarr;
 * {@link IStatus#WARNING}, info &rarr; {@link IStatus#INFO}. Debug messages are
 * discarded, consistent with {@link #isDebugEnabled()} returning {@code false}.
 * Markers are ignored (see {@link MarkerIgnoringBase}).
 */
@Immutable
public class EclipseLogger extends MarkerIgnoringBase {
    private static final long serialVersionUID = 1L;

    /** Eclipse log that receives every emitted status. */
    private final ILog wrappedLog;

    /** Builds {@link IStatus} objects from a severity, message, and optional throwable. */
    private final StatusFactory statusFactory;

    @Inject
    EclipseLogger(final ILog wrappedLog, final StatusFactory statusFactory) {
        this.wrappedLog = wrappedLog;
        this.statusFactory = statusFactory;
    }

    // Debug logging is disabled (isDebugEnabled() returns false), so every debug
    // overload is intentionally a no-op.
    @Override
    public void debug(@Nullable final String message) {
    }

    @Override
    public void debug(@Nullable final String message,
            @Nullable final Object arg) {
    }

    @Override
    public void debug(@Nullable final String message,
            @Nullable final Object[] args) {
    }

    @Override
    public void debug(@Nullable final String message,
            @Nullable final Throwable throwable) {
    }

    @Override
    public void debug(@Nullable final String message,
            @Nullable final Object arg1, @Nullable final Object arg2) {
    }

    @Override
    public void error(@Nullable final String message) {
        log(IStatus.ERROR, message, null);
    }

    @Override
    public void error(@Nullable final String message,
            @Nullable final Object arg) {
        formatAndLog(IStatus.ERROR, message, arg, null);
    }

    @Override
    public void error(@Nullable final String message,
            @Nullable final Object[] args) {
        formatAndLog(IStatus.ERROR, message, args);
    }

    @Override
    public void error(@Nullable final String message,
            @Nullable final Throwable throwable) {
        log(IStatus.ERROR, message, throwable);
    }

    @Override
    public void error(@Nullable final String message,
            @Nullable final Object arg1, @Nullable final Object arg2) {
        formatAndLog(IStatus.ERROR, message, arg1, arg2);
    }

    @Override
    public void info(@Nullable final String message) {
        log(IStatus.INFO, message, null);
    }

    @Override
    public void info(@Nullable final String message,
            @Nullable final Object arg) {
        formatAndLog(IStatus.INFO, message, arg, null);
    }

    @Override
    public void info(@Nullable final String message,
            @Nullable final Object[] args) {
        formatAndLog(IStatus.INFO, message, args);
    }

    @Override
    public void info(@Nullable final String message,
            @Nullable final Throwable throwable) {
        log(IStatus.INFO, message, throwable);
    }

    @Override
    public void info(@Nullable final String message,
            @Nullable final Object arg1, @Nullable final Object arg2) {
        formatAndLog(IStatus.INFO, message, arg1, arg2);
    }

    @Override
    public boolean isDebugEnabled() {
        return false;
    }

    @Override
    public boolean isErrorEnabled() {
        return true;
    }

    @Override
    public boolean isInfoEnabled() {
        return true;
    }

    @Override
    public boolean isTraceEnabled() {
        return false;
    }

    @Override
    public boolean isWarnEnabled() {
        return true;
    }

    // NOTE(review): this overload logs at INFO even though isTraceEnabled() returns
    // false and the other trace overloads are no-ops — confirm the asymmetry is intended.
    @Override
    public void trace(@Nullable final String message) {
        log(IStatus.INFO, message, null);
    }

    @Override
    public void trace(@Nullable final String message,
            @Nullable final Object arg) {
    }

    @Override
    public void trace(@Nullable final String message,
            @Nullable final Object[] args) {
    }

    @Override
    public void trace(@Nullable final String message,
            @Nullable final Throwable throwable) {
    }

    @Override
    public void trace(@Nullable final String message,
            @Nullable final Object arg1, @Nullable final Object arg2) {
    }

    @Override
    public void warn(@Nullable final String message) {
        log(IStatus.WARNING, message, null);
    }

    @Override
    public void warn(@Nullable final String message,
            @Nullable final Object arg) {
        formatAndLog(IStatus.WARNING, message, arg, null);
    }

    @Override
    public void warn(@Nullable final String message,
            @Nullable final Object[] args) {
        formatAndLog(IStatus.WARNING, message, args);
    }

    @Override
    public void warn(@Nullable final String message,
            @Nullable final Throwable throwable) {
        log(IStatus.WARNING, message, throwable);
    }

    @Override
    public void warn(@Nullable final String message,
            @Nullable final Object arg1, @Nullable final Object arg2) {
        // BUG FIX: previously logged at IStatus.INFO; every other warn overload
        // uses IStatus.WARNING.
        formatAndLog(IStatus.WARNING, message, arg1, arg2);
    }

    /** Builds a status via the factory and writes it to the wrapped log. */
    private void log(final int severity, @Nullable final String message,
            @Nullable final Throwable throwable) {
        wrappedLog.log(statusFactory.create(severity, message, throwable));
    }

    /** Applies SLF4J {@code {}} substitution for one or two arguments, then logs. */
    private void formatAndLog(final int severity, @Nullable final String format,
            @Nullable final Object arg1, @Nullable final Object arg2) {
        final FormattingTuple tp = MessageFormatter.format(format, arg1, arg2);
        log(severity, tp.getMessage(), tp.getThrowable());
    }

    /** Applies SLF4J {@code {}} substitution for an argument array, then logs. */
    private void formatAndLog(final int severity, @Nullable final String format,
            @Nullable final Object[] args) {
        final FormattingTuple tp = MessageFormatter.arrayFormat(format, args);
        log(severity, tp.getMessage(), tp.getThrowable());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.MessageDigest;
import java.util.Collection;
import org.apache.log4j.Logger;
import org.apache.commons.lang.ArrayUtils;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.io.util.DataOutputBuffer;
import org.apache.cassandra.utils.FBUtilities;
/**
 * Column is immutable, which prevents all kinds of confusion in a multithreaded environment.
 * (TODO: look at making SuperColumn immutable too. This is trickier but is probably doable
 * with something like PCollections -- http://code.google.com
 */
public class Column implements IColumn
{
    private static Logger logger_ = Logger.getLogger(Column.class);
    private static ColumnSerializer serializer_ = new ColumnSerializer();

    /** Shared serializer instance used to read and write columns. */
    public static ColumnSerializer serializer()
    {
        return serializer_;
    }

    private final byte[] name;
    // When this column is a tombstone, `value` holds the local deletion time
    // as a serialized int (see getLocalDeletionTime).
    private final byte[] value;
    private final long timestamp;
    private final boolean isMarkedForDelete;

    Column(byte[] name)
    {
        this(name, ArrayUtils.EMPTY_BYTE_ARRAY);
    }

    Column(byte[] name, byte[] value)
    {
        this(name, value, 0);
    }

    public Column(byte[] name, byte[] value, long timestamp)
    {
        this(name, value, timestamp, false);
    }

    public Column(byte[] name, byte[] value, long timestamp, boolean isDeleted)
    {
        assert name != null;
        assert value != null;
        assert name.length <= IColumn.MAX_NAME_LENGTH;
        this.name = name;
        this.value = value;
        this.timestamp = timestamp;
        isMarkedForDelete = isDeleted;
    }

    public byte[] name()
    {
        return name;
    }

    public Column getSubColumn(byte[] columnName)
    {
        throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
    }

    public byte[] value()
    {
        return value;
    }

    public Collection<IColumn> getSubColumns()
    {
        throw new UnsupportedOperationException("This operation is unsupported on simple columns.");
    }

    /** A simple column always counts as a single object. */
    public int getObjectCount()
    {
        return 1;
    }

    public long timestamp()
    {
        return timestamp;
    }

    public boolean isMarkedForDelete()
    {
        return isMarkedForDelete;
    }

    /**
     * @return the timestamp at which this column was deleted.
     * @throws IllegalStateException if this column is not a tombstone.
     */
    public long getMarkedForDeleteAt()
    {
        if (!isMarkedForDelete())
        {
            throw new IllegalStateException("column is not marked for delete");
        }
        return timestamp;
    }

    public long mostRecentLiveChangeAt()
    {
        return timestamp;
    }

    public int size()
    {
        /*
         * Size of a column is =
         * size of a name (UtfPrefix + length of the string)
         * + 1 byte to indicate if the column has been deleted
         * + 8 bytes for timestamp
         * + 4 bytes which basically indicates the size of the byte array
         * + entire byte array.
         */
        /*
         * We store the string as UTF-8 encoded, so when we calculate the length, it
         * should be converted to UTF-8.
         */
        return IColumn.UtfPrefix_ + name.length + DBConstants.boolSize_ + DBConstants.tsSize_ + DBConstants.intSize_ + value.length;
    }

    /*
     * This returns the size of the column when serialized.
     * @see com.facebook.infrastructure.db.IColumn#serializedSize()
     */
    public int serializedSize()
    {
        return size();
    }

    public void addColumn(IColumn column)
    {
        throw new UnsupportedOperationException("This operation is not supported for simple columns.");
    }

    /** @return the given column if it is strictly newer than this one, else null. */
    public IColumn diff(IColumn column)
    {
        if (timestamp() < column.timestamp())
        {
            return column;
        }
        return null;
    }

    /** Folds name, value, timestamp, and the tombstone flag into the digest. */
    public void updateDigest(MessageDigest digest)
    {
        digest.update(name);
        digest.update(value);
        DataOutputBuffer buffer = new DataOutputBuffer();
        try
        {
            buffer.writeLong(timestamp);
            buffer.writeBoolean(isMarkedForDelete);
        }
        catch (IOException e)
        {
            throw new RuntimeException(e);
        }
        digest.update(buffer.getData(), 0, buffer.getLength());
    }

    /** For tombstones, the value bytes encode the local deletion time as an int. */
    public int getLocalDeletionTime()
    {
        assert isMarkedForDelete;
        return ByteBuffer.wrap(value).getInt();
    }

    // note that we do not call this simply compareTo since it also makes sense to compare Columns by name
    public long comparePriority(Column o)
    {
        // tombstone always wins ties.
        if (isMarkedForDelete)
            return timestamp < o.timestamp ? -1 : 1;
        if (o.isMarkedForDelete)
            return timestamp > o.timestamp ? 1 : -1;
        // compare value as tie-breaker for equal timestamps
        if (timestamp == o.timestamp)
            return FBUtilities.compareByteArrays(value, o.value);
        // neither is tombstoned and timestamps are different.
        // BUG FIX: return a sign instead of the raw difference (timestamp - o.timestamp),
        // which could overflow for extreme timestamp values; callers only inspect the sign.
        return timestamp < o.timestamp ? -1 : 1;
    }

    /** Human-readable form: name:isDeleted:valueLength@timestamp. */
    public String getString(AbstractType comparator)
    {
        StringBuilder sb = new StringBuilder();
        sb.append(comparator.getString(name));
        sb.append(":");
        sb.append(isMarkedForDelete());
        sb.append(":");
        sb.append(value.length);
        sb.append("@");
        sb.append(timestamp());
        return sb.toString();
    }

    public boolean isLive()
    {
        return !isMarkedForDelete;
    }
}
| |
package com.taskadapter.redmineapi.internal;
import com.taskadapter.redmineapi.RedmineInternalError;
import com.taskadapter.redmineapi.bean.Attachment;
import com.taskadapter.redmineapi.bean.CustomField;
import com.taskadapter.redmineapi.bean.Group;
import com.taskadapter.redmineapi.bean.Issue;
import com.taskadapter.redmineapi.bean.IssueCategory;
import com.taskadapter.redmineapi.bean.IssueRelation;
import com.taskadapter.redmineapi.bean.Membership;
import com.taskadapter.redmineapi.bean.Project;
import com.taskadapter.redmineapi.bean.Role;
import com.taskadapter.redmineapi.bean.TimeEntry;
import com.taskadapter.redmineapi.bean.Tracker;
import com.taskadapter.redmineapi.bean.User;
import com.taskadapter.redmineapi.bean.Version;
import com.taskadapter.redmineapi.bean.Watcher;
import com.taskadapter.redmineapi.internal.json.JsonObjectWriter;
import com.taskadapter.redmineapi.internal.json.JsonOutput;
import org.json.JSONException;
import org.json.JSONWriter;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
/**
* Converts Redmine objects to JSon format.
*/
public class RedmineJSONBuilder {
	// Adapters that let JsonOutput serialize each Redmine bean type; each writer
	// simply delegates to the corresponding static write* method in this class.
	private static final JsonObjectWriter<Tracker> TRACKER_WRITER = new JsonObjectWriter<Tracker>() {
		@Override
		public void write(JSONWriter writer, Tracker object)
				throws JSONException {
			writeTracker(writer, object);
		}
	};
	public static final JsonObjectWriter<Project> PROJECT_WRITER = new JsonObjectWriter<Project>() {
		@Override
		public void write(JSONWriter writer, Project object)
				throws JSONException {
			writeProject(writer, object);
		}
	};
	public static final JsonObjectWriter<Issue> ISSUE_WRITER = new JsonObjectWriter<Issue>() {
		@Override
		public void write(JSONWriter writer, Issue object) throws JSONException {
			writeIssue(object, writer);
		}
	};
	public static final JsonObjectWriter<User> USER_WRITER = new JsonObjectWriter<User>() {
		@Override
		public void write(JSONWriter writer, User object) throws JSONException {
			writeUser(object, writer);
		}
	};
	public static final JsonObjectWriter<Group> GROUP_WRITER = new JsonObjectWriter<Group>() {
		@Override
		public void write(JSONWriter writer, Group object) throws JSONException {
			writeGroup(object, writer);
		}
	};
	public static final JsonObjectWriter<IssueRelation> RELATION_WRITER = new JsonObjectWriter<IssueRelation>() {
		@Override
		public void write(JSONWriter writer, IssueRelation object)
				throws JSONException {
			writeRelation(writer, object);
		}
	};
	public static final JsonObjectWriter<IssueCategory> CATEGORY_WRITER = new JsonObjectWriter<IssueCategory>() {
		@Override
		public void write(JSONWriter writer, IssueCategory object)
				throws JSONException {
			writeCategory(object, writer);
		}
	};
	public static final JsonObjectWriter<Version> VERSION_WRITER = new JsonObjectWriter<Version>() {
		@Override
		public void write(JSONWriter writer, Version object)
				throws JSONException {
			writeVersion(writer, object);
		}
	};
	public static final JsonObjectWriter<TimeEntry> TIME_ENTRY_WRITER = new JsonObjectWriter<TimeEntry>() {
		@Override
		public void write(JSONWriter writer, TimeEntry object)
				throws JSONException {
			writeTimeEntry(writer, object);
		}
	};
	public static final JsonObjectWriter<Attachment> UPLOAD_WRITER = new JsonObjectWriter<Attachment>() {
		@Override
		public void write(JSONWriter writer, Attachment object)
				throws JSONException {
			writeUpload(writer, object);
		}
	};
	public static final JsonObjectWriter<Membership> MEMBERSHIP_WRITER = new JsonObjectWriter<Membership>() {
		@Override
		public void write(JSONWriter writer, Membership object)
				throws JSONException {
			writeMembership(writer, object);
		}
	};
/**
* Writes a "create project" request.
*
* @param writer
* project writer.
* @param project
* project to create.
* @throws IllegalArgumentException
* if some project fields are not configured.
* @throws JSONException
* if IO error occurs.
*/
public static void writeProject(JSONWriter writer, Project project)
throws IllegalArgumentException, JSONException {
/* Validate project */
if (project.getName() == null)
throw new IllegalArgumentException(
"Project name must be set to create a new project");
if (project.getIdentifier() == null)
throw new IllegalArgumentException(
"Project identifier must be set to create a new project");
writeProject(project, writer);
}
/**
 * Serializes a time entry. Null fields are skipped; "spent_on" uses the short
 * (v2) date format while the audit timestamps use the full date-time format.
 *
 * @param writer    target JSON writer (an object must already be open).
 * @param timeEntry time entry to serialize.
 * @throws JSONException if an IO error occurs.
 */
static void writeTimeEntry(JSONWriter writer, TimeEntry timeEntry)
        throws JSONException {
    JsonOutput.addIfNotNull(writer, "id", timeEntry.getId());
    JsonOutput.addIfNotNull(writer, "project_id", timeEntry.getProjectId());
    JsonOutput.addIfNotNull(writer, "issue_id", timeEntry.getIssueId());
    JsonOutput.addIfNotNull(writer, "user_id", timeEntry.getUserId());
    JsonOutput.addIfNotNull(writer, "activity_id", timeEntry.getActivityId());
    JsonOutput.addIfNotNull(writer, "hours", timeEntry.getHours());
    JsonOutput.addIfNotNull(writer, "comments", timeEntry.getComment());
    addIfNotNullShort2(writer, "spent_on", timeEntry.getSpentOn());
    // BUG FIX: "created_on" and "updated_on" previously serialized
    // timeEntry.getSpentOn() (copy-paste error); they must use the entry's own
    // audit timestamps.
    addIfNotNullFull(writer, "created_on", timeEntry.getCreatedOn());
    addIfNotNullFull(writer, "updated_on", timeEntry.getUpdatedOn());
}
/**
 * Writes a tracker.
 *
 * @param writer
 *            used writer.
 * @param tracker
 *            tracker to writer.
 * @throws JSONException
 *             if error occurs.
 */
static void writeTracker(JSONWriter writer, Tracker tracker)
        throws JSONException {
    // Unlike most writers in this class, id/name are emitted unconditionally
    // (even when null). JSONWriter returns itself, so the calls can be chained.
    writer.key("id").value(tracker.getId())
          .key("name").value(tracker.getName());
}
/**
 * Serializes an issue relation: target issue id, relation type and delay.
 * Null fields are skipped by JsonOutput.addIfNotNull.
 */
static void writeRelation(JSONWriter writer, IssueRelation relation)
        throws JSONException {
    JsonOutput.addIfNotNull(writer, "issue_to_id", relation.getIssueToId());
    JsonOutput.addIfNotNull(writer, "relation_type", relation.getType());
    JsonOutput.addIfNotNull(writer, "delay", relation.getDelay());
}
/**
 * Serializes a version. The nested project reference is flattened to
 * "project_id"; "due_date" uses the short (v2) date format; timestamps use
 * the full date-time format. Null fields are skipped.
 */
static void writeVersion(JSONWriter writer, Version version)
        throws JSONException {
    JsonOutput.addIfNotNull(writer, "id", version.getId());
    if (version.getProject() != null)
        JsonOutput.addIfNotNull(writer, "project_id", version.getProject()
                .getId());
    JsonOutput.addIfNotNull(writer, "name", version.getName());
    JsonOutput
            .addIfNotNull(writer, "description", version.getDescription());
    JsonOutput.addIfNotNull(writer, "sharing", version.getSharing());
    JsonOutput.addIfNotNull(writer, "status", version.getStatus());
    addIfNotNullShort2(writer, "due_date", version.getDueDate());
    addIfNotNullFull(writer, "created_on", version.getCreatedOn());
    addIfNotNullFull(writer, "updated_on", version.getUpdatedOn());
    writeCustomFields(writer, version.getCustomFields());
}
/**
 * Converts object to a "simple" json.
 *
 * @param tag
 *            object tag.
 * @param object
 *            object to convert.
 * @param writer
 *            object writer.
 * @return object String representation.
 * @throws RedmineInternalError
 *             if conversion fails.
 */
public static <T> String toSimpleJSON(String tag, T object,
        JsonObjectWriter<T> writer) throws RedmineInternalError {
    // Produces {"<tag>": { ...object fields... }}.
    final StringWriter stringWriter = new StringWriter();
    final JSONWriter jsonWriter = new JSONWriter(stringWriter);
    try {
        jsonWriter.object();      // outer wrapper
        jsonWriter.key(tag);
        jsonWriter.object();      // tagged payload, filled by the delegate
        writer.write(jsonWriter, object);
        jsonWriter.endObject();
        jsonWriter.endObject();
    } catch (JSONException e) {
        // In-memory writing should never fail; surface as an internal error.
        throw new RedmineInternalError("Unexpected JSONException", e);
    }
    return stringWriter.toString();
}
/**
 * Serializes a project's fields. Null fields are skipped; trackers are
 * written as a nested array via TRACKER_WRITER.
 */
public static void writeProject(Project project, final JSONWriter writer)
        throws JSONException {
    JsonOutput.addIfNotNull(writer, "id", project.getId());
    JsonOutput.addIfNotNull(writer, "identifier", project.getIdentifier());
    JsonOutput.addIfNotNull(writer, "name", project.getName());
    JsonOutput
            .addIfNotNull(writer, "description", project.getDescription());
    JsonOutput.addIfNotNull(writer, "homepage", project.getHomepage());
    addIfNotNullFull(writer, "created_on", project.getCreatedOn());
    addIfNotNullFull(writer, "updated_on", project.getUpdatedOn());
    writeCustomFields(writer, project.getCustomFields());
    JsonOutput.addIfNotNull(writer, "parent_id", project.getParentId());
    JsonOutput.addIfNotNull(writer, "is_public", project.getProjectPublic());
    JsonOutput.addArrayIfNotNull(writer, "trackers", project.getTrackers(),
            TRACKER_WRITER);
}
/**
 * Serializes an issue category. The "id" key is always written (even when
 * null); nested project/assignee references are flattened to their ids.
 */
public static void writeCategory(IssueCategory category,
        final JSONWriter writer) throws JSONException {
    writer.key("id").value(category.getId());
    JsonOutput.addIfNotNull(writer, "name", category.getName());
    if (category.getProject() != null) {
        JsonOutput.addIfNotNull(writer, "project_id",
                category.getProject().getId());
    }
    if (category.getAssignee() != null) {
        JsonOutput.addIfNotNull(writer, "assigned_to_id",
                category.getAssignee().getId());
    }
}
/**
 * Serializes a user. Null fields are skipped. Note that the (possibly
 * plain-text) password is included when present.
 */
public static void writeUser(User user, final JSONWriter writer)
        throws JSONException {
    JsonOutput.addIfNotNull(writer, "id", user.getId());
    JsonOutput.addIfNotNull(writer, "login", user.getLogin());
    JsonOutput.addIfNotNull(writer, "password", user.getPassword());
    JsonOutput.addIfNotNull(writer, "firstname", user.getFirstName());
    JsonOutput.addIfNotNull(writer, "lastname", user.getLastName());
    JsonOutput.addIfNotNull(writer, "name", user.getFullName());
    JsonOutput.addIfNotNull(writer, "mail", user.getMail());
    JsonOutput.addIfNotNull(writer, "auth_source_id", user.getAuthSourceId());
    JsonOutput.addIfNotNull(writer, "status", user.getStatus());
    addIfNotNullFull(writer, "created_on", user.getCreatedOn());
    addIfNotNullFull(writer, "last_login_on", user.getLastLoginOn());
    writeCustomFields(writer, user.getCustomFields());
}
/** Serializes a group (id and name only); null fields are skipped. */
public static void writeGroup(Group group, final JSONWriter writer) throws JSONException {
    JsonOutput.addIfNotNull(writer, "id", group.getId());
    JsonOutput.addIfNotNull(writer, "name", group.getName());
}
/**
 * Serializes an issue for create/update requests. Nested references
 * (assignee, project, author, tracker, target version, category) are
 * flattened to their ids; watchers and token-bearing attachments are written
 * as nested arrays. Journals and relations are intentionally never written.
 *
 * @throws IllegalArgumentException if a project is set without a numeric id.
 * @throws JSONException            if an IO error occurs.
 */
public static void writeIssue(Issue issue, final JSONWriter writer) throws JSONException {
    JsonOutput.addIfNotNull(writer, "id", issue.getId());
    JsonOutput.addIfNotNull(writer, "subject", issue.getSubject());
    JsonOutput.addIfNotNull(writer, "parent_issue_id", issue.getParentId());
    JsonOutput.addIfNotNull(writer, "estimated_hours",
            issue.getEstimatedHours());
    JsonOutput.addIfNotNull(writer, "spent_hours", issue.getSpentHours());
    if (issue.getAssignee() != null)
        JsonOutput.addIfNotNull(writer, "assigned_to_id", issue
                .getAssignee().getId());
    JsonOutput.addIfNotNull(writer, "priority_id", issue.getPriorityId());
    JsonOutput.addIfNotNull(writer, "done_ratio", issue.getDoneRatio());
    if (issue.getProject() != null) {
        // Checked in Redmine 2.6.0: updating issues based on
        // identifier fails and only using the project id works.
        // As the identifier usage is used in several places, this
        // case selection is introduced. The identifier is
        // used, if no real ID is provided
        if (issue.getProject().getId() != null) {
            JsonOutput.addIfNotNull(writer, "project_id", issue.getProject()
                    .getId());
        } else {
            throw new IllegalArgumentException("Project ID must be set on issue. " +
                    "You can use a factory method to create Issue object in memory: IssueFactory.create(projectId, subject)");
        }
    }
    if (issue.getAuthor() != null)
        JsonOutput.addIfNotNull(writer, "author_id", issue.getAuthor().getId());
    // "start_date" is written even when null (server-side clearing);
    // "due_date" is only written when present.
    addShort2(writer, "start_date", issue.getStartDate());
    addIfNotNullShort2(writer, "due_date", issue.getDueDate());
    if (issue.getTracker() != null)
        JsonOutput.addIfNotNull(writer, "tracker_id", issue.getTracker().getId());
    JsonOutput.addIfNotNull(writer, "description", issue.getDescription());
    addIfNotNullFull(writer, "created_on", issue.getCreatedOn());
    addIfNotNullFull(writer, "updated_on", issue.getUpdatedOn());
    JsonOutput.addIfNotNull(writer, "status_id", issue.getStatusId());
    if (issue.getTargetVersion() != null)
        JsonOutput.addIfNotNull(writer, "fixed_version_id", issue
                .getTargetVersion().getId());
    if (issue.getCategory() != null)
        JsonOutput.addIfNotNull(writer, "category_id", issue.getCategory().getId());
    JsonOutput.addIfNotNull(writer, "notes", issue.getNotes());
    writeCustomFields(writer, issue.getCustomFields());
    Collection<Watcher> issueWatchers = issue.getWatchers();
    if (issueWatchers != null && !issueWatchers.isEmpty()) {
        writeWatchers(writer, issueWatchers);
    }
    // Only attachments that carry an upload token are sent as "uploads".
    final List<Attachment> uploads = new ArrayList<Attachment>();
    // ROBUSTNESS: guard against a null attachments collection instead of
    // failing with an NPE in the for-each loop.
    final Collection<Attachment> attachments = issue.getAttachments();
    if (attachments != null) {
        for (Attachment attachment : attachments) {
            if (attachment.getToken() != null) {
                uploads.add(attachment);
            }
        }
    }
    JsonOutput.addArrayIfNotEmpty(writer, "uploads", uploads,
            UPLOAD_WRITER);
    /*
     * Journals and Relations cannot be set for an issue during creation or
     * updates.
     */
}
/**
 * Serializes an attachment as an "upload" element: the server-issued token
 * plus file metadata. Null fields are skipped.
 */
public static void writeUpload(JSONWriter writer, Attachment attachment)
        throws JSONException {
    JsonOutput.addIfNotNull(writer, "token", attachment.getToken());
    JsonOutput.addIfNotNull(writer, "filename", attachment.getFileName());
    JsonOutput.addIfNotNull(writer, "content_type",
            attachment.getContentType());
    JsonOutput.addIfNotNull(writer, "description", attachment.getDescription());
}
/**
 * Serializes a membership: user or group id plus the assigned role ids as a
 * "role_ids" array.
 */
public static void writeMembership(JSONWriter writer, Membership membership)
        throws JSONException {
    if (membership.getUser() != null) {
        JsonOutput.addIfNotNull(writer, "user_id", membership.getUser().getId());
    }
    if (membership.getGroup() != null) {
        JsonOutput.addIfNotNull(writer, "group_id", membership.getGroup().getId());
    }
    if (membership.getRoles() != null) {
        writer.key("role_ids");
        writer.array();
        for (Role role : membership.getRoles()) {
            // NOTE(review): assumes every role has a non-null id; getId()
            // returning null would throw an NPE here — confirm upstream.
            writer.value(role.getId().longValue());
        }
        writer.endArray();
    }
}
/**
 * Serializes custom fields as a "custom_field_values" object keyed by the
 * field id. Multi-valued fields write their value list, single-valued fields
 * write the scalar value. A null or empty collection writes nothing.
 */
private static void writeCustomFields(JSONWriter writer,
        Collection<CustomField> customFields) throws JSONException {
    if (customFields == null || customFields.isEmpty()) {
        return;
    }
    writer.key("custom_field_values").object();
    for (CustomField field : customFields) {
        // see https://github.com/taskadapter/redmine-java-api/issues/54
        Object valueToWrite;
        if (field.isMultiple()) {
            valueToWrite = field.getValues();
        } else {
            valueToWrite = field.getValue();
        }
        writer.key(Integer.toString(field.getId())).value(valueToWrite);
    }
    writer.endObject();
}
/**
 * Serializes watcher ids as a "watcher_user_ids" array. A null or empty
 * collection writes nothing; watchers without an id are silently skipped.
 */
public static void writeWatchers(JSONWriter writer, Collection<Watcher> watchers)
        throws JSONException {
    if (watchers == null || watchers.isEmpty()) {
        return; // nothing to serialize
    }
    writer.key("watcher_user_ids").array();
    for (Watcher watcher : watchers) {
        if (watcher.getId() != null) {
            writer.value(watcher.getId().longValue());
        }
    }
    writer.endArray();
}
/**
 * Adds a value to a writer if value is not <code>null</code>.
 *
 * @param writer
 *            writer to add object to.
 * @param field
 *            field name to set.
 * @param value
 *            field value.
 * @throws JSONException
 *             if io error occurs.
 */
public static void addIfNotNullFull(JSONWriter writer, String field,
        Date value) throws JSONException {
    // Full date-time format; FULL_DATE_FORMAT.get() yields a per-thread
    // SimpleDateFormat (SimpleDateFormat is not thread-safe).
    JsonOutput.addIfNotNull(writer, field, value,
            RedmineDateParser.FULL_DATE_FORMAT.get());
}
/**
 * Adds a value to a writer.
 *
 * @param writer
 *            writer to add object to.
 * @param field
 *            field name to set.
 * @param value
 *            field value.
 * @throws JSONException
 *             if io error occurs.
 */
public static void addFull(JSONWriter writer, String field, Date value)
        throws JSONException {
    // Unconditional variant: the field is written even when value is null.
    JsonOutput.add(writer, field, value,
            RedmineDateParser.FULL_DATE_FORMAT.get());
}
/**
 * Adds a value to a writer if value is not <code>null</code>.
 *
 * @param writer
 *            writer to add object to.
 * @param field
 *            field name to set.
 * @param value
 *            field value.
 * @throws JSONException
 *             if io error occurs.
 */
public static void addIfNotNullShort(JSONWriter writer, String field,
        Date value) throws JSONException {
    // Short (date-only) format, per-thread formatter.
    JsonOutput.addIfNotNull(writer, field, value,
            RedmineDateParser.SHORT_DATE_FORMAT.get());
}
/**
 * Adds a value to a writer if value is not <code>null</code>.
 *
 * @param writer
 *            writer to add object to.
 * @param field
 *            field name to set.
 * @param value
 *            field value.
 * @throws JSONException
 *             if io error occurs.
 */
public static void addIfNotNullShort2(JSONWriter writer, String field,
        Date value) throws JSONException {
    // Short (date-only) format, v2 variant; per-thread formatter.
    JsonOutput.addIfNotNull(writer, field, value,
            RedmineDateParser.SHORT_DATE_FORMAT_V2.get());
}
/**
 * Adds a value to a writer.
 *
 * @param writer
 *            writer to add object to.
 * @param field
 *            field name to set.
 * @param value
 *            field value.
 * @throws JSONException
 *             if io error occurs.
 */
public static void addShort2(JSONWriter writer, String field, Date value)
        throws JSONException {
    // Unconditional short (v2) date: the field is written even when null.
    JsonOutput.add(writer, field, value,
            RedmineDateParser.SHORT_DATE_FORMAT_V2.get());
}
}
| |
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
package com.facebook.imagepipeline.request;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_DATA;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_LOCAL_ASSET;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_LOCAL_CONTENT;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_LOCAL_IMAGE_FILE;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_LOCAL_RESOURCE;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_LOCAL_VIDEO_FILE;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_NETWORK;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_QUALIFIED_RESOURCE;
import static com.facebook.imagepipeline.common.SourceUriType.SOURCE_TYPE_UNKNOWN;
import android.net.Uri;
import androidx.annotation.IntDef;
import com.facebook.cache.common.CacheKey;
import com.facebook.common.internal.Fn;
import com.facebook.common.internal.Objects;
import com.facebook.common.media.MediaUtils;
import com.facebook.common.util.UriUtil;
import com.facebook.imagepipeline.common.BytesRange;
import com.facebook.imagepipeline.common.ImageDecodeOptions;
import com.facebook.imagepipeline.common.Priority;
import com.facebook.imagepipeline.common.ResizeOptions;
import com.facebook.imagepipeline.common.RotationOptions;
import com.facebook.imagepipeline.common.SourceUriType;
import com.facebook.imagepipeline.listener.RequestListener;
import com.facebook.imageutils.BitmapUtil;
import java.io.File;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;
/**
 * Immutable object encapsulating everything pipeline has to know about requested image to proceed.
 */
@Immutable
public class ImageRequest {

  // Process-wide switches controlling hashcode caching; see the static setters at the bottom.
  private static boolean sUseCachedHashcodeInEquals;
  private static boolean sCacheHashcode;

  // Lazily computed hashcode; 0 means "not computed yet" (see hashCode()).
  private int mHashcode;

  /** Cache choice */
  private final CacheChoice mCacheChoice;

  /** Source Uri */
  private final Uri mSourceUri;
  private final @SourceUriType int mSourceUriType;

  /** Source File - for local fetches only, lazily initialized */
  @Nullable private File mSourceFile;

  /** If set - the client will receive intermediate results */
  private final boolean mProgressiveRenderingEnabled;

  /** If set the client will receive thumbnail previews for local images, before the whole image */
  private final boolean mLocalThumbnailPreviewsEnabled;

  /** If set, only the image thumbnail will be loaded, not the full image */
  private final boolean mLoadThumbnailOnly;

  private final ImageDecodeOptions mImageDecodeOptions;

  /** resize options */
  private final @Nullable ResizeOptions mResizeOptions;

  /** rotation options */
  private final RotationOptions mRotationOptions;

  /** Range of bytes to request from the network */
  private final @Nullable BytesRange mBytesRange;

  /** Priority levels of this request. */
  private final Priority mRequestPriority;

  /** Lowest level that is permitted to fetch an image from */
  private final RequestLevel mLowestPermittedRequestLevel;

  /**
   * int in which each bit represents read or write permission of each cache from bitmap read bit
   * (rightest) to disk write bit
   */
  protected int mCachesDisabled;

  /** Whether the disk cache should be used for this request */
  private final boolean mIsDiskCacheEnabled;

  /** Whether the memory cache should be used for this request */
  private final boolean mIsMemoryCacheEnabled;

  /**
   * Whether to decode prefetched images. true -> Cache both encoded image and bitmap. false ->
   * Cache only encoded image and do not decode until image is needed to be shown. null -> Use
   * pipeline's default
   */
  private final @Nullable Boolean mDecodePrefetches;

  /** Postprocessor to run on the output bitmap. */
  private final @Nullable Postprocessor mPostprocessor;

  /** Request listener to use for this image request */
  private final @Nullable RequestListener mRequestListener;

  /**
   * Controls whether resizing is allowed for this request. true -> allow for this request. false ->
   * disallow for this request. null -> use default pipeline's setting.
   */
  private final @Nullable Boolean mResizingAllowedOverride;

  private final int mDelayMs;

  /** Builds a request for the given file, or returns null when the file is null. */
  public static @Nullable ImageRequest fromFile(@Nullable File file) {
    return (file == null) ? null : ImageRequest.fromUri(UriUtil.getUriForFile(file));
  }

  /** Builds a default request for the given Uri, or returns null when the Uri is null. */
  public static @Nullable ImageRequest fromUri(@Nullable Uri uri) {
    return (uri == null) ? null : ImageRequestBuilder.newBuilderWithSource(uri).build();
  }

  /** Builds a default request from a Uri string, or returns null when it is null/empty. */
  public static @Nullable ImageRequest fromUri(@Nullable String uriString) {
    return (uriString == null || uriString.length() == 0) ? null : fromUri(Uri.parse(uriString));
  }

  /** Copies all settings from the builder; intended to be called via ImageRequestBuilder.build(). */
  protected ImageRequest(ImageRequestBuilder builder) {
    mCacheChoice = builder.getCacheChoice();
    mSourceUri = builder.getSourceUri();
    mSourceUriType = getSourceUriType(mSourceUri);
    mProgressiveRenderingEnabled = builder.isProgressiveRenderingEnabled();
    mLocalThumbnailPreviewsEnabled = builder.isLocalThumbnailPreviewsEnabled();
    mLoadThumbnailOnly = builder.getLoadThumbnailOnly();
    mImageDecodeOptions = builder.getImageDecodeOptions();
    mResizeOptions = builder.getResizeOptions();
    // Auto-rotation is the default when no rotation options were specified.
    mRotationOptions =
        builder.getRotationOptions() == null
            ? RotationOptions.autoRotate()
            : builder.getRotationOptions();
    mBytesRange = builder.getBytesRange();
    mRequestPriority = builder.getRequestPriority();
    mLowestPermittedRequestLevel = builder.getLowestPermittedRequestLevel();
    mIsDiskCacheEnabled = builder.isDiskCacheEnabled();
    int cachesDisabledFlags = builder.getCachesDisabled();
    if (!mIsDiskCacheEnabled) {
      // If disk cache is disabled we must make sure mCachesDisabled reflects it
      cachesDisabledFlags |= CachesLocationsMasks.DISK_READ | CachesLocationsMasks.DISK_WRITE;
    }
    mCachesDisabled = cachesDisabledFlags;
    mIsMemoryCacheEnabled = builder.isMemoryCacheEnabled();
    mDecodePrefetches = builder.shouldDecodePrefetches();
    mPostprocessor = builder.getPostprocessor();
    mRequestListener = builder.getRequestListener();
    mResizingAllowedOverride = builder.getResizingAllowedOverride();
    mDelayMs = builder.getDelayMs();
  }

  public CacheChoice getCacheChoice() {
    return mCacheChoice;
  }

  public Uri getSourceUri() {
    return mSourceUri;
  }

  public @SourceUriType int getSourceUriType() {
    return mSourceUriType;
  }

  /** Preferred width from the resize options, or MAX_BITMAP_SIZE when none were set. */
  public int getPreferredWidth() {
    return (mResizeOptions != null) ? mResizeOptions.width : (int) BitmapUtil.MAX_BITMAP_SIZE;
  }

  /** Preferred height from the resize options, or MAX_BITMAP_SIZE when none were set. */
  public int getPreferredHeight() {
    return (mResizeOptions != null) ? mResizeOptions.height : (int) BitmapUtil.MAX_BITMAP_SIZE;
  }

  public @Nullable ResizeOptions getResizeOptions() {
    return mResizeOptions;
  }

  public RotationOptions getRotationOptions() {
    return mRotationOptions;
  }

  /** @deprecated Use {@link #getRotationOptions()} */
  @Deprecated
  public boolean getAutoRotateEnabled() {
    return mRotationOptions.useImageMetadata();
  }

  @Nullable
  public BytesRange getBytesRange() {
    return mBytesRange;
  }

  public ImageDecodeOptions getImageDecodeOptions() {
    return mImageDecodeOptions;
  }

  public boolean getProgressiveRenderingEnabled() {
    return mProgressiveRenderingEnabled;
  }

  public boolean getLocalThumbnailPreviewsEnabled() {
    return mLocalThumbnailPreviewsEnabled;
  }

  public boolean getLoadThumbnailOnly() {
    return mLoadThumbnailOnly;
  }

  public Priority getPriority() {
    return mRequestPriority;
  }

  public RequestLevel getLowestPermittedRequestLevel() {
    return mLowestPermittedRequestLevel;
  }

  public int getCachesDisabled() {
    return mCachesDisabled;
  }

  public boolean isDiskCacheEnabled() {
    return mIsDiskCacheEnabled;
  }

  /** Returns whether the use of the cache is enabled for read or write according to given mask. */
  public boolean isCacheEnabled(int cacheMask) {
    return (getCachesDisabled() & cacheMask) == 0;
  }

  public boolean isMemoryCacheEnabled() {
    return mIsMemoryCacheEnabled;
  }

  public @Nullable Boolean shouldDecodePrefetches() {
    return mDecodePrefetches;
  }

  public @Nullable Boolean getResizingAllowedOverride() {
    return mResizingAllowedOverride;
  }

  public int getDelayMs() {
    return mDelayMs;
  }

  /**
   * Lazily resolves the source Uri to a File; synchronized to guard the lazy init.
   * NOTE(review): mSourceUri.getPath() may be null for non-file Uris, which would
   * make new File(null) throw — presumably callers only invoke this for local
   * fetches (see the field doc); confirm.
   */
  public synchronized File getSourceFile() {
    if (mSourceFile == null) {
      mSourceFile = new File(mSourceUri.getPath());
    }
    return mSourceFile;
  }

  public @Nullable Postprocessor getPostprocessor() {
    return mPostprocessor;
  }

  public @Nullable RequestListener getRequestListener() {
    return mRequestListener;
  }

  @Override
  public boolean equals(@Nullable Object o) {
    if (!(o instanceof ImageRequest)) {
      return false;
    }
    ImageRequest request = (ImageRequest) o;
    if (sUseCachedHashcodeInEquals) {
      // Fast path: two already-computed, differing hashcodes cannot be equal.
      int a = mHashcode;
      int b = request.mHashcode;
      if (a != 0 && b != 0 && a != b) {
        return false;
      }
    }
    if (mLocalThumbnailPreviewsEnabled != request.mLocalThumbnailPreviewsEnabled) return false;
    if (mIsDiskCacheEnabled != request.mIsDiskCacheEnabled) return false;
    if (mIsMemoryCacheEnabled != request.mIsMemoryCacheEnabled) return false;
    if (!Objects.equal(mSourceUri, request.mSourceUri)
        || !Objects.equal(mCacheChoice, request.mCacheChoice)
        || !Objects.equal(mSourceFile, request.mSourceFile)
        || !Objects.equal(mBytesRange, request.mBytesRange)
        || !Objects.equal(mImageDecodeOptions, request.mImageDecodeOptions)
        || !Objects.equal(mResizeOptions, request.mResizeOptions)
        || !Objects.equal(mRequestPriority, request.mRequestPriority)
        || !Objects.equal(mLowestPermittedRequestLevel, request.mLowestPermittedRequestLevel)
        || !Objects.equal(mCachesDisabled, request.mCachesDisabled)
        || !Objects.equal(mDecodePrefetches, request.mDecodePrefetches)
        || !Objects.equal(mResizingAllowedOverride, request.mResizingAllowedOverride)
        || !Objects.equal(mRotationOptions, request.mRotationOptions)
        || mLoadThumbnailOnly != request.mLoadThumbnailOnly) {
      return false;
    }
    // Postprocessors are compared by their cache keys, not by identity.
    final CacheKey thisPostprocessorKey =
        mPostprocessor != null ? mPostprocessor.getPostprocessorCacheKey() : null;
    final CacheKey thatPostprocessorKey =
        request.mPostprocessor != null ? request.mPostprocessor.getPostprocessorCacheKey() : null;
    if (!Objects.equal(thisPostprocessorKey, thatPostprocessorKey)) return false;
    return mDelayMs == request.mDelayMs;
  }

  @Override
  public int hashCode() {
    final boolean cacheHashcode = sCacheHashcode;
    int result = 0;
    if (cacheHashcode) {
      result = mHashcode;
    }
    if (result == 0) {
      final CacheKey postprocessorCacheKey =
          mPostprocessor != null ? mPostprocessor.getPostprocessorCacheKey() : null;
      // Must stay consistent with equals(): same fields, postprocessor via its cache key.
      result =
          Objects.hashCode(
              mCacheChoice,
              mSourceUri,
              mLocalThumbnailPreviewsEnabled,
              mBytesRange,
              mRequestPriority,
              mLowestPermittedRequestLevel,
              mCachesDisabled,
              mIsDiskCacheEnabled,
              mIsMemoryCacheEnabled,
              mImageDecodeOptions,
              mDecodePrefetches,
              mResizeOptions,
              mRotationOptions,
              postprocessorCacheKey,
              mResizingAllowedOverride,
              mDelayMs,
              mLoadThumbnailOnly);
      // Primitives are autoboxed into the Object varargs; boxing is deterministic,
      // so the resulting hash is stable for equal requests.
      if (cacheHashcode) {
        mHashcode = result;
      }
    }
    return result;
  }

  @Override
  public String toString() {
    return Objects.toStringHelper(this)
        .add("uri", mSourceUri)
        .add("cacheChoice", mCacheChoice)
        .add("decodeOptions", mImageDecodeOptions)
        .add("postprocessor", mPostprocessor)
        .add("priority", mRequestPriority)
        .add("resizeOptions", mResizeOptions)
        .add("rotationOptions", mRotationOptions)
        .add("bytesRange", mBytesRange)
        .add("resizingAllowedOverride", mResizingAllowedOverride)
        .add("progressiveRenderingEnabled", mProgressiveRenderingEnabled)
        .add("localThumbnailPreviewsEnabled", mLocalThumbnailPreviewsEnabled)
        .add("loadThumbnailOnly", mLoadThumbnailOnly)
        .add("lowestPermittedRequestLevel", mLowestPermittedRequestLevel)
        .add("cachesDisabled", mCachesDisabled)
        .add("isDiskCacheEnabled", mIsDiskCacheEnabled)
        .add("isMemoryCacheEnabled", mIsMemoryCacheEnabled)
        .add("decodePrefetches", mDecodePrefetches)
        .add("delayMs", mDelayMs)
        .toString();
  }

  /** An enum describing the cache choice. */
  public enum CacheChoice {
    /* Indicates that this image should go in the small disk cache, if one is being used */
    SMALL,
    /* Default */
    DEFAULT
  }

  /**
   * Level down to we are willing to go in order to find an image. E.g., we might only want to go
   * down to bitmap memory cache, and not check the disk cache or do a full fetch.
   */
  public enum RequestLevel {
    /* Fetch (from the network or local storage) */
    FULL_FETCH(1),

    /* Disk caching */
    DISK_CACHE(2),

    /* Encoded memory caching */
    ENCODED_MEMORY_CACHE(3),

    /* Bitmap caching */
    BITMAP_MEMORY_CACHE(4);

    private int mValue;

    private RequestLevel(int value) {
      mValue = value;
    }

    public int getValue() {
      return mValue;
    }

    /** Returns the level with the larger numeric value (deeper in the pipeline). */
    public static RequestLevel getMax(RequestLevel requestLevel1, RequestLevel requestLevel2) {
      return requestLevel1.getValue() > requestLevel2.getValue() ? requestLevel1 : requestLevel2;
    }
  }

  /**
   * Caches bit locations in cachesDisabled from bitmap read bit (rightest bit, 00000001) to disk
   * write bit (00100000). Uses for creating mask when performing bitwise operation with
   * cachesDisabled in order to turn on (disable cache) or turn off (enable cache) the right bit.
   */
  @IntDef({
    CachesLocationsMasks.BITMAP_READ,
    CachesLocationsMasks.BITMAP_WRITE,
    CachesLocationsMasks.ENCODED_READ,
    CachesLocationsMasks.ENCODED_WRITE,
    CachesLocationsMasks.DISK_READ,
    CachesLocationsMasks.DISK_WRITE
  })
  public @interface CachesLocationsMasks {
    /* bitmap cache read bit location- 00000001 */
    final int BITMAP_READ = 1;
    /* bitmap cache write bit location- 00000010 */
    final int BITMAP_WRITE = 2;
    /* encoded cache read bit location- 00000100 */
    final int ENCODED_READ = 4;
    /* encoded cache write bit location- 00001000 */
    final int ENCODED_WRITE = 8;
    /* disk cache read bit location- 00010000 */
    final int DISK_READ = 16;
    /* disk cache write bit location- 00100000 */
    final int DISK_WRITE = 32;
  }

  /**
   * This is a utility method which returns the type of Uri
   *
   * @param uri The Uri to test
   * @return The type of the given Uri if available or SOURCE_TYPE_UNKNOWN if not
   */
  private static @SourceUriType int getSourceUriType(final Uri uri) {
    if (uri == null) {
      return SOURCE_TYPE_UNKNOWN;
    }
    if (UriUtil.isNetworkUri(uri)) {
      return SOURCE_TYPE_NETWORK;
    } else if (UriUtil.isLocalFileUri(uri)) {
      // Local files are further split by their MIME type (video vs. image).
      if (MediaUtils.isVideo(MediaUtils.extractMime(uri.getPath()))) {
        return SOURCE_TYPE_LOCAL_VIDEO_FILE;
      } else {
        return SOURCE_TYPE_LOCAL_IMAGE_FILE;
      }
    } else if (UriUtil.isLocalContentUri(uri)) {
      return SOURCE_TYPE_LOCAL_CONTENT;
    } else if (UriUtil.isLocalAssetUri(uri)) {
      return SOURCE_TYPE_LOCAL_ASSET;
    } else if (UriUtil.isLocalResourceUri(uri)) {
      return SOURCE_TYPE_LOCAL_RESOURCE;
    } else if (UriUtil.isDataUri(uri)) {
      return SOURCE_TYPE_DATA;
    } else if (UriUtil.isQualifiedResourceUri(uri)) {
      return SOURCE_TYPE_QUALIFIED_RESOURCE;
    } else {
      return SOURCE_TYPE_UNKNOWN;
    }
  }

  /** Null-safe projection of a request onto its source Uri. */
  public static final Fn<ImageRequest, Uri> REQUEST_TO_URI_FN =
      new Fn<ImageRequest, Uri>() {
        @Override
        public @Nullable Uri apply(@Nullable ImageRequest arg) {
          return arg != null ? arg.getSourceUri() : null;
        }
      };

  /** Enables the cached-hashcode short-circuit in equals(); see equals(). */
  public static void setUseCachedHashcodeInEquals(boolean useCachedHashcodeInEquals) {
    sUseCachedHashcodeInEquals = useCachedHashcodeInEquals;
  }

  /** Enables caching of the computed hashcode in mHashcode; see hashCode(). */
  public static void setCacheHashcode(boolean cacheHashcode) {
    sCacheHashcode = cacheHashcode;
  }
}
| |
package edu.gemini.spModel.target.obsComp;
import edu.gemini.shared.util.immutable.*;
import edu.gemini.spModel.config.ConfigPostProcessor;
import edu.gemini.spModel.config2.Config;
import edu.gemini.spModel.config2.ConfigSequence;
import edu.gemini.spModel.config2.ItemKey;
import edu.gemini.spModel.gemini.gmos.GmosOiwfsGuideProbe;
import edu.gemini.spModel.guide.*;
import edu.gemini.spModel.obs.context.ObsContext;
import edu.gemini.spModel.seqcomp.SeqConfigNames;
import edu.gemini.spModel.target.env.GuideProbeTargets;
import edu.gemini.spModel.target.env.TargetEnvironment;
import java.util.*;
import java.util.stream.Collectors;
public final class GuideSequence implements ConfigPostProcessor {
// Config-system name/keys under which the temporary guide state is stored per step.
private static final String SYSTEM_NAME = TargetObsCompConstants.CONFIG_NAME;
private static final ItemKey PARENT_KEY = new ItemKey(SYSTEM_NAME);

public static final String GUIDE_STATE_PARAM = "tmpGuideState";
public static final ItemKey GUIDE_STATE_KEY = new ItemKey(PARENT_KEY, GUIDE_STATE_PARAM);
/**
 * A per-probe override of the step's default guide setting. Normally the
 * default applies to every guide probe at a step; this value pair records a
 * user-requested exception for one particular probe.
 */
public static final class ExplicitGuideSetting {
    public final GuideProbe probe;
    public final GuideOption option;

    public ExplicitGuideSetting(GuideProbe probe, GuideOption option) {
        if (probe == null) throw new IllegalArgumentException("probe == null");
        if (option == null) throw new IllegalArgumentException("option == null");
        this.probe = probe;
        this.option = option;
    }

    @Override public boolean equals(Object o) {
        if (this == o) return true;
        if ((o == null) || (getClass() != o.getClass())) return false;
        final ExplicitGuideSetting that = (ExplicitGuideSetting) o;
        return option.equals(that.option) && probe.equals(that.probe);
    }

    @Override public int hashCode() {
        // 31 * probe + option, matching the field order used in equals.
        return 31 * probe.hashCode() + option.hashCode();
    }

    @Override public String toString() {
        return String.format("(%s, %s)", probe.getKey(), option.name());
    }
}
/**
 * The guide state for a single step: the default on/off value applied to
 * every probe not mentioned explicitly, together with any per-probe
 * overrides.
 */
public static final class GuideState {
    public static final GuideState DEFAULT_ON = new GuideState(DefaultGuideOptions.Value.on);
    public static final GuideState DEFAULT_OFF = new GuideState(DefaultGuideOptions.Value.off);

    /** Returns the shared override-free instance matching the given default value. */
    public static GuideState forDefaultOption(DefaultGuideOptions.Value val) {
        return (val == DEFAULT_ON.defaultState) ? DEFAULT_ON : DEFAULT_OFF;
    }

    public final DefaultGuideOptions.Value defaultState;
    public final ImList<ExplicitGuideSetting> overrides;

    public GuideState(DefaultGuideOptions.Value defaultState) {
        this(defaultState, ImCollections.emptyList());
    }

    public GuideState(DefaultGuideOptions.Value defaultState, ImList<ExplicitGuideSetting> overrides) {
        if (defaultState == null) throw new IllegalArgumentException("defaultState == null");
        if (overrides == null) throw new IllegalArgumentException("overrides == null");
        this.defaultState = defaultState;
        this.overrides = overrides;
    }

    @Override public boolean equals(Object o) {
        if (this == o) return true;
        if ((o == null) || (getClass() != o.getClass())) return false;
        final GuideState that = (GuideState) o;
        return (defaultState == that.defaultState) && overrides.equals(that.overrides);
    }

    @Override public int hashCode() {
        // 31 * defaultState + overrides, matching equals.
        return 31 * defaultState.hashCode() + overrides.hashCode();
    }

    @Override public String toString() {
        return String.format("(%s, %s)", defaultState, overrides.mkString("(", ", ", ")"));
    }
}
/**
 * Gets the collection of guiders that are required in order to perform the
 * observation. A guide probe is required if there is a guide star marked
 * "primary" assigned to it in the primary guide group. An empty environment
 * yields an empty list.
 */
public static ImList<GuideProbe> getRequiredGuiders(Option<TargetEnvironment> envOpt) {
    return envOpt.map(env -> env.getOrCreatePrimaryGuideGroup().getAll().
            filter(gpt -> gpt.getPrimary().isDefined()).   // keep only probes with a primary star
            map(GuideProbeTargets::getGuider)).getOrElse(ImCollections.emptyList());
}
/**
 * Gets all the guiders that should be parked for the duration of this
 * observation. Those would be the guiders available in the current
 * context and yet not assigned to any guide star. This is a hack-around
 * for the brain dead seqexec which should be handling this itself.
 */
public static ImList<GuideProbe> getPermanentlyParkedGuiders(Option<ObsContext> ctxOpt, ImList<GuideProbe> req) {
    final Set<GuideProbe> required = new TreeSet<>(GuideProbe.KeyComparator.instance);
    for (GuideProbe g : req) required.add(g);

    // What's available in this environment?
    final Set<GuideProbe> available = new TreeSet<>(GuideProbe.KeyComparator.instance);
    ctxOpt.foreach(c -> available.addAll(GuideProbeUtil.instance.getAvailableGuiders(c)));

    // But we only care about parkable guiders.
    // BUG FIX: this previously used available.stream().filter(...).forEach(available::remove),
    // which removes elements from the set while streaming over it and can throw
    // ConcurrentModificationException; removeIf is the safe equivalent.
    available.removeIf(g -> !(g.getGuideOptions() instanceof StandardGuideOptions));

    // Always have to park the PWFS? even if not available.
    available.addAll(Arrays.asList(PwfsGuideProbe.values()));

    // Finally, if available but unused, park it.
    final Collection<GuideProbe> parked = new ArrayList<>(available.size());
    parked.addAll(available.stream().filter(g -> !required.contains(g)).collect(Collectors.toList()));
    return DefaultImList.create(parked);
}
/**
 * Computes the sequence items for parked guiders: any parked guider whose
 * sequence property is not already claimed by a required guider is mapped
 * to its default "off" option.
 */
public static Map<ItemKey, GuideOption> getGuideWithParked(Option<ObsContext> ctx, ImList<GuideProbe> requiredGuiders) {
    final ImList<GuideProbe> parked = getPermanentlyParkedGuiders(ctx, requiredGuiders);
    // Sequence props whose values will be set elsewhere because they belong
    // to required guiders.
    final Set<ItemKey> handled = new HashSet<>();
    for (GuideProbe probe : requiredGuiders) {
        handled.add(new ItemKey(PARENT_KEY, probe.getSequenceProp()));
    }
    // Any parked guider whose sequence property isn't handled above is
    // recorded as parked / off.
    final Map<ItemKey, GuideOption> res = new HashMap<>();
    for (GuideProbe probe : parked) {
        final ItemKey key = new ItemKey(PARENT_KEY, probe.getSequenceProp());
        if (handled.contains(key)) continue;
        res.put(key, probe.getGuideOptions().getDefaultOff());
    }
    return res;
}
// Another seqexec hack here -- it cannot use the Altair+PWFS1 setting and
// figure out that it needs to have the AOWFS guide on a LGS so we have to
// hack this into the sequence for it. So, when we find that something has
// already set a guideWith* parameter, we'll respect it (since the Altair
// component will have to do this for the seqexec).
private static Map<ItemKey, GuideOption> seqexecHack(Map<ItemKey, GuideOption> parked, Config step) {
    // Keys that already appear in the config step win over our parked values.
    final Set<ItemKey> alreadySet = parked.keySet().stream()
            .filter(step::containsItem)
            .collect(Collectors.toSet());
    if (alreadySet.isEmpty()) return parked;
    final Map<ItemKey, GuideOption> updated = new HashMap<>(parked);
    updated.keySet().removeAll(alreadySet);
    return updated;
}
/**
 * Computes the guideWith value for each required guider: the default taken
 * from the given state, unless an explicit override for that probe exists.
 */
public static Map<ItemKey, GuideOption> getGuideWith(GuideState state, ImList<GuideProbe> requiredGuiders) {
    final Map<ItemKey, GuideOption> res = new HashMap<>();
    // Start every required guider at the state's default value.
    for (GuideProbe probe : requiredGuiders) {
        res.put(new ItemKey(PARENT_KEY, probe.getSequenceProp()),
                probe.getGuideOptions().fromDefaultGuideOption(state.defaultState));
    }
    // Explicit overrides (for required guiders only) replace the default.
    for (ExplicitGuideSetting setting : state.overrides) {
        if (requiredGuiders.contains(setting.probe)) {
            res.put(new ItemKey(PARENT_KEY, setting.probe.getSequenceProp()), setting.option);
        }
    }
    return res;
}
// Observing context used to determine which guiders are available/required;
// may be None when no context applies.
private final Option<ObsContext> ctx;
public GuideSequence(Option<ObsContext> ctx) {
    this.ctx = ctx;
}
/**
 * Post-processes the sequence: in the first step, parks any unused guiders
 * (seqexec workarounds); then in every step, removes the internal guide
 * state item and replaces it with concrete guideWith* parameters.
 */
@Override public ConfigSequence postProcessSequence(ConfigSequence in) {
    final ImList<GuideProbe> requiredGuiders = getRequiredGuiders(ctx.flatMap(c -> ImOption.apply(c.getTargets())));
    final Config[] steps = in.getAllSteps();
    // Handle parking unused probes for the brain-dead seqexec.
    if (steps.length > 0) {
        Map<ItemKey, GuideOption> parked = seqexecHack(getGuideWithParked(ctx, requiredGuiders), steps[0]);
        applyGuideWith(parked, steps[0]);
        gmosNsHack(steps[0], requiredGuiders);
    }
    // Now go through each step, remove the guide state value and replace it
    // with the appropriate settings for guideWith.
    for (final Config step : steps) {
        final GuideState guideState = (GuideState) step.remove(GUIDE_STATE_KEY);
        if (guideState == null) continue; // step carried no guide state item
        applyGuideWith(getGuideWith(guideState, requiredGuiders), step);
    }
    return new ConfigSequence(steps);
}
// Writes each guideWith entry into the given config step.
private static void applyGuideWith(Map<ItemKey, GuideOption> guideWith, Config step) {
    guideWith.forEach((key, option) -> step.putItem(key, option));
}
// FR 30444: GMOS N&S offset position guideWith hack.
private static final ItemKey GMOS_NS_A = new ItemKey(SeqConfigNames.INSTRUMENT_KEY, "nsBeamA-guideWithOIWFS");
private static final ItemKey GMOS_NS_B = new ItemKey(SeqConfigNames.INSTRUMENT_KEY, "nsBeamB-guideWithOIWFS");
// If the step carries the GMOS N&S beam guideWith items but the OIWFS is not
// a required guider, force both beams to "park"; otherwise rewrite the
// existing values unchanged.
private void gmosNsHack(Config step, ImList<GuideProbe> guiders) {
    // First, if we aren't dealing with GMOS N&S there is nothing to do.
    final Object beamA = step.getItemValue(GMOS_NS_A);
    if (beamA == null) return;
    final Object beamB = step.getItemValue(GMOS_NS_B);
    if (guiders.contains(GmosOiwfsGuideProbe.instance)) {
        step.putItem(GMOS_NS_A, beamA);
        step.putItem(GMOS_NS_B, beamB);
    } else {
        step.putItem(GMOS_NS_A, StandardGuideOptions.Value.park);
        step.putItem(GMOS_NS_B, StandardGuideOptions.Value.park);
    }
}
}
| |
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package libcore.javax.net.ssl;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.security.KeyStore;
import java.security.Principal;
import java.security.SecureRandom;
import libcore.java.security.StandardNames;
import libcore.java.security.TestKeyStore;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import java.util.Collections;
import javax.net.ssl.KeyManager;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLServerSocket;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509ExtendedKeyManager;
import javax.net.ssl.X509TrustManager;
import junit.framework.Assert;
/**
* TestSSLContext is a convenience class for other tests that
* want a canned SSLContext and related state for testing so they
* don't have to duplicate the logic.
*/
public final class TestSSLContext extends Assert {
    /*
     * The RI and Android have very different default SSLSession cache behaviors.
     * The RI keeps an unlimited number of SSLSessions around for 1 day.
     * Android keeps 10 SSLSessions forever.
     */
    private static final boolean IS_RI = StandardNames.IS_RI;
    public static final int EXPECTED_DEFAULT_CLIENT_SSL_SESSION_CACHE_SIZE = (IS_RI) ? 0 : 10;
    public static final int EXPECTED_DEFAULT_SERVER_SSL_SESSION_CACHE_SIZE = (IS_RI) ? 0 : 100;
    public static final int EXPECTED_DEFAULT_SSL_SESSION_CACHE_TIMEOUT = (IS_RI) ? 86400 : 0;
    /**
     * The Android SSLSocket and SSLServerSocket implementations are
     * based on a version of OpenSSL which includes support for RFC
     * 4507 session tickets. When using session tickets, the server
     * does not need to keep a cache mapping session IDs to SSL
     * sessions for reuse. Instead, the client presents the server
     * with a session ticket it received from the server earlier,
     * which is an SSL session encrypted by the server's secret
     * key. Since in this case the server does not need to keep a
     * cache, some tests may find different results depending on
     * whether or not the session tickets are in use. These tests can
     * use this function to determine if loopback SSL connections are
     * expected to use session tickets and conditionalize their
     * results appropriately.
     */
    public static boolean sslServerSocketSupportsSessionTickets () {
        // Disabled session tickets for better compatibility b/2682876
        // return !IS_RI;
        return false;
    }
    // Client-side key material and the key/trust managers and SSLContext
    // derived from it.
    public final KeyStore clientKeyStore;
    public final char[] clientStorePassword;
    // Server-side key material and the key/trust managers and SSLContext
    // derived from it.
    public final KeyStore serverKeyStore;
    public final char[] serverStorePassword;
    public final X509ExtendedKeyManager clientKeyManager;
    public final X509ExtendedKeyManager serverKeyManager;
    public final X509TrustManager clientTrustManager;
    public final X509TrustManager serverTrustManager;
    public final SSLContext clientContext;
    public final SSLContext serverContext;
    // Server socket bound to an ephemeral local port, plus the address and
    // port it ended up listening on.
    public final SSLServerSocket serverSocket;
    public final InetAddress host;
    public final int port;
    // Field-assignment constructor; instances are built through the static
    // create(...) factory methods below.
    private TestSSLContext(KeyStore clientKeyStore,
                           char[] clientStorePassword,
                           KeyStore serverKeyStore,
                           char[] serverStorePassword,
                           X509ExtendedKeyManager clientKeyManager,
                           X509ExtendedKeyManager serverKeyManager,
                           X509TrustManager clientTrustManager,
                           X509TrustManager serverTrustManager,
                           SSLContext clientContext,
                           SSLContext serverContext,
                           SSLServerSocket serverSocket,
                           InetAddress host,
                           int port) {
        this.clientKeyStore = clientKeyStore;
        this.clientStorePassword = clientStorePassword;
        this.serverKeyStore = serverKeyStore;
        this.serverStorePassword = serverStorePassword;
        this.clientKeyManager = clientKeyManager;
        this.serverKeyManager = serverKeyManager;
        this.clientTrustManager = clientTrustManager;
        this.serverTrustManager = serverTrustManager;
        this.clientContext = clientContext;
        this.serverContext = serverContext;
        this.serverSocket = serverSocket;
        this.host = host;
        this.port = port;
    }
    /**
     * Usual TestSSLContext creation method, creates underlying
     * SSLContext with certificate and key as well as SSLServerSocket
     * listening provided host and port.
     */
    public static TestSSLContext create() {
        return create(TestKeyStore.getClient(),
                      TestKeyStore.getServer());
    }
    /**
     * TestSSLContext creation method that allows separate creation of server key store
     */
    public static TestSSLContext create(TestKeyStore client, TestKeyStore server) {
        String provider = StandardNames.JSSE_PROVIDER_NAME;
        return create(client, server, provider, provider);
    }
    /**
     * TestSSLContext creation method that additionally allows specifying the
     * JSSE providers used to build the client and server SSLContexts.
     */
    public static TestSSLContext create(TestKeyStore client, TestKeyStore server,
                                        String clientProvider, String serverProvider) {
        String protocol = "TLS";
        SSLContext clientContext = createSSLContext(protocol, clientProvider,
                                                    client.keyManagers, client.trustManagers);
        SSLContext serverContext = createSSLContext(protocol, serverProvider,
                                                    server.keyManagers, server.trustManagers);
        return create(client.keyStore, client.storePassword,
                      server.keyStore, server.storePassword,
                      client.keyManagers[0],
                      server.keyManagers[0],
                      client.trustManagers[0],
                      server.trustManagers[0],
                      clientContext,
                      serverContext);
    }
    /**
     * TestSSLContext creation method that allows separate creation of client and server key store
     */
    public static TestSSLContext create(KeyStore clientKeyStore, char[] clientStorePassword,
                                        KeyStore serverKeyStore, char[] serverStorePassword,
                                        KeyManager clientKeyManagers,
                                        KeyManager serverKeyManagers,
                                        TrustManager clientTrustManagers,
                                        TrustManager serverTrustManagers,
                                        SSLContext clientContext,
                                        SSLContext serverContext) {
        try {
            // Bind to an ephemeral port (0) and record the address/port that
            // were actually chosen so tests can connect to the server socket.
            SSLServerSocket serverSocket = (SSLServerSocket)
                serverContext.getServerSocketFactory().createServerSocket(0);
            InetSocketAddress sa = (InetSocketAddress) serverSocket.getLocalSocketAddress();
            InetAddress host = sa.getAddress();
            int port = sa.getPort();
            return new TestSSLContext(clientKeyStore, clientStorePassword,
                                      serverKeyStore, serverStorePassword,
                                      (X509ExtendedKeyManager) clientKeyManagers,
                                      (X509ExtendedKeyManager) serverKeyManagers,
                                      (X509TrustManager) clientTrustManagers,
                                      (X509TrustManager) serverTrustManagers,
                                      clientContext, serverContext,
                                      serverSocket, host, port);
        } catch (RuntimeException e) {
            throw e;
        } catch (Exception e) {
            // Wrap checked exceptions so callers don't have to declare them.
            throw new RuntimeException(e);
        }
    }
    /**
     * Create a SSLContext with a KeyManager using the private key and
     * certificate chain from the given KeyStore and a TrustManager
     * using the certificates authorities from the same KeyStore.
     */
    public static final SSLContext createSSLContext(final String protocol,
                                                    final String provider,
                                                    final KeyManager[] keyManagers,
                                                    final TrustManager[] trustManagers)
    {
        try {
            SSLContext context = SSLContext.getInstance(protocol, provider);
            context.init(keyManagers, trustManagers, new SecureRandom());
            return context;
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Asserts that the key store contains a certificate entry whose subject
     * DN matches the given principal's name.
     */
    public static void assertCertificateInKeyStore(Principal principal,
                                                   KeyStore keyStore) throws Exception {
        String subjectName = principal.getName();
        boolean found = false;
        for (String alias: Collections.list(keyStore.aliases())) {
            // Only certificate entries are considered; key entries are skipped.
            if (!keyStore.isCertificateEntry(alias)) {
                continue;
            }
            X509Certificate keyStoreCertificate = (X509Certificate) keyStore.getCertificate(alias);
            if (subjectName.equals(keyStoreCertificate.getSubjectDN().getName())) {
                found = true;
                break;
            }
        }
        assertTrue(found);
    }
    /**
     * Asserts that the given certificate is among the key store's
     * certificate entries (compared with Certificate.equals).
     */
    public static void assertCertificateInKeyStore(Certificate certificate,
                                                   KeyStore keyStore) throws Exception {
        boolean found = false;
        for (String alias: Collections.list(keyStore.aliases())) {
            if (!keyStore.isCertificateEntry(alias)) {
                continue;
            }
            Certificate keyStoreCertificate = keyStore.getCertificate(alias);
            if (certificate.equals(keyStoreCertificate)) {
                found = true;
                break;
            }
        }
        assertTrue(found);
    }
    /**
     * Asserts that the trust manager accepts the given server chain.  The
     * leaf certificate's public key algorithm is passed as the authType.
     */
    public static void assertServerCertificateChain(X509TrustManager trustManager,
                                                    Certificate[] serverChain)
            throws CertificateException {
        X509Certificate[] chain = (X509Certificate[]) serverChain;
        trustManager.checkServerTrusted(chain, chain[0].getPublicKey().getAlgorithm());
    }
    /**
     * Asserts that the trust manager accepts the given client chain.  The
     * leaf certificate's public key algorithm is passed as the authType.
     */
    public static void assertClientCertificateChain(X509TrustManager trustManager,
                                                    Certificate[] clientChain)
            throws CertificateException {
        X509Certificate[] chain = (X509Certificate[]) clientChain;
        trustManager.checkClientTrusted(chain, chain[0].getPublicKey().getAlgorithm());
    }
}
| |
package com.abhi.datastructure.linkedlist;
import java.util.HashSet;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;
public class LinkedList {
public LinkedListNode head;// firstNode
public static boolean containsCycle(LinkedListNode head) {
Set<LinkedListNode> set = new HashSet<LinkedListNode>();
while (head != null) {
if(!set.add(head)) {
return true;
}
head = head.next;
}
return false;
}
public static int size(LinkedListNode head) {
int size = 0;
while (head != null && size != Integer.MAX_VALUE) {
size++;
head = head.next;
}
return size;
}
public static void print(LinkedListNode head) {
if (head == null) {
System.out.println("end");
return;
}
if (head != null) {
System.out.print(head.value);
System.out.print("-->");
print(head.next);
}
}
@Test
public void testContainsCycle() {
LinkedListNode head = new LinkedListNode(0);
head.next = new LinkedListNode(1);
head.next.next = new LinkedListNode(2);
print(head);
Assert.assertEquals(3, size(head));
Assert.assertFalse(containsCycle(head));
head.next.next.next = head.next;
// print(head);
Assert.assertTrue(containsCycle(head));
Assert.assertEquals(Integer.MAX_VALUE, size(head));
}
// Reverse a linked list from position m to n. Do it in-place and in
// one-pass.
// For example:
// Given 1->2->3->4->5->NULL, m = 2 and n = 4,
// return 1->4->3->2->5->NULL.
public static LinkedListNode reverseLinkList(LinkedListNode head) {
if (head == null) {
return head;
}
LinkedListNode previous = null;
LinkedListNode current = head;
LinkedListNode next = null;
while (current != null) {
next = current.next;
current.next = previous;
previous = current;
current = next;
}
head = previous;
return head;
}
// Reverse a linked list from position m to n. Do it in-place and in
// one-pass.
// For example:
// Given 1->2->3->4->5->NULL, m = 2 and n = 4,
// return 1->4->3->2->5->NULL.
public static LinkedListNode reverseLinkList(LinkedListNode head, int m, int n) {
if (head == null) {
return head;
}
if (n <= m) {
return head;// nothing to do//what will happen when m =1
}
LinkedListNode previous = null;
LinkedListNode current = head;
LinkedListNode next = null;
LinkedListNode headToTrack = null;
int index = 1;
while (current != null && index < m) {
index++;
previous = current;
current = current.next;
}
if (m == 1) {
head = reverseLinkList(current, n);
} else {
previous.next = reverseLinkList(current, n);
}
// head = previous;
return head;
}
public static LinkedListNode reverseLinkList(LinkedListNode head, int n) {
if (head == null) {
return head;
}
LinkedListNode previous = null;
LinkedListNode current = head;
LinkedListNode next = null;
int index = 1;
while (current != null && index < n) {
index++;
next = current.next;
current.next = previous;
previous = current;
current = next;
}
head.next = current;
head = previous;
return head;
}
// A simple and tail recursive function to reverse
// a linked list. prev is passed as NULL initially.
static void reverseUtil(LinkedListNode curr, LinkedListNode prev) {
if (curr.next == null) {
curr.next = prev;
return;
}
LinkedListNode next = curr.next;
curr.next = prev;
reverseUtil(next, curr);
}
// you are given two linked lists representing two non-negative numbers. The
// digits are stored in reverse order and each of their nodes contain a
// single digit. Add the two numbers and return it as a linked list.
// Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
// Output: 7 -> 0 -> 8
// 342 + 465 = 807
// Make sure there are no trailing zeros in the output list
// So, 7 -> 0 -> 8 -> 0 is not a valid response even though the value is
// still 807.
public LinkedListNode addTwoNumbers(LinkedListNode a, LinkedListNode b) {
LinkedListNode current1 = a;
LinkedListNode current2 = b;
LinkedListNode next1 = null;
LinkedListNode next2 = null;
int headVal = current1.value + current2.value;
LinkedListNode current = new LinkedListNode(headVal % 10);
LinkedListNode next = null;
LinkedListNode head = current;
int nextSum = headVal / 10;
while (current1.next != null || current2.next != null) {
next1 = current1.next;
int value1 = 0;
if (next1 != null) {
current1 = next1;
value1 = next1.value;
}
next2 = current2.next;
int value2 = 0;
if (next2 != null) {
current2 = next2;
value2 = next2.value;
}
int value = value1 + value2 + nextSum;
next = new LinkedListNode(value % 10);
nextSum = value / 10;
current.next = next;
current = next;
}
if (nextSum != 0) {
next = new LinkedListNode(nextSum);
current.next = next;
}
return head;
}
@Test
public void testAdd() {
LinkedList list1 = new LinkedList();
LinkedList list2 = new LinkedList();
LinkedListNode head = new LinkedListNode(2);
head.next = new LinkedListNode(4);
head.next.next = new LinkedListNode(3);
list1.head = head;
LinkedListNode head2 = new LinkedListNode(5);
head2.next = new LinkedListNode(6);
// head2.next.next = new LinkedListNode(4);
list2.head = head2;
print(head);
print(head2);
print(addTwoNumbers(head, head2));
}
// prints content of double linked list
static void printList(LinkedListNode node) {
while (node != null) {
System.out.print(node.value + "->>");
node = node.next;
}
}
@Test
public void testReverseLinkListWithBound() {
LinkedListNode head = new LinkedListNode(1);
head.next = new LinkedListNode(2);
head.next.next = new LinkedListNode(3);
// head.next.next.next = new LinkedListNode(4);
// head.next.next.next.next = new LinkedListNode(5);
System.out.println("testReverseLinkListWithBound-->");
print(head);
head = reverseLinkList(head, 1, 2);
print(head);
System.out.println("testReverseLinkListWithBound-->");
}
@Test
public void testReverseLinkList() {
LinkedListNode head = new LinkedListNode(1);
head.next = new LinkedListNode(2);
head.next.next = new LinkedListNode(3);
head.next.next.next = new LinkedListNode(4);
head.next.next.next.next = new LinkedListNode(5);
print(head);
head = reverseLinkList(head);
print(head);
}
static LinkedListNode deleteNode(LinkedListNode head, int value) {
if(head == null){
return head;
}
if(head.value == value){
return head.next;//moved head to next and deleted head
}
LinkedListNode current = head;
while(current.next != null){
if(current.next.value == value){
current.next = current.next.next;
return head;//no need to change head
}
current = current.next;
}
return head;//value not found nothing deleted
}
static void appendToEnd(LinkedListNode head, int data){
LinkedListNode newEndNode = new LinkedListNode(data);
if(head == null){
head = newEndNode;
return;
}
LinkedListNode current = head;
while(current.next != null){
current = current.next;
}
current.next = newEndNode;
}
}
| |
package com.ctrip.zeus.dao.entity;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
/**
 * Entity for a row of the slb group table, with a fluent Builder and a
 * Column enum describing the table's columns.
 * NOTE(review): the Builder/Column pattern looks like code-generator output
 * (MyBatis-generator style) — confirm before hand-editing, as manual changes
 * may be overwritten by regeneration.
 */
public class SlbGroup {
    private Long id;
    private String name;
    private String appId;
    private Integer version;
    private Boolean ssl;
    private Date createdTime;
    private Date datachangeLasttime;
    public Long getId() {
        return id;
    }
    public void setId(Long id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    // Stores the trimmed value; null stays null.
    public void setName(String name) {
        this.name = name == null ? null : name.trim();
    }
    public String getAppId() {
        return appId;
    }
    // Stores the trimmed value; null stays null.
    public void setAppId(String appId) {
        this.appId = appId == null ? null : appId.trim();
    }
    public Integer getVersion() {
        return version;
    }
    public void setVersion(Integer version) {
        this.version = version;
    }
    public Boolean getSsl() {
        return ssl;
    }
    public void setSsl(Boolean ssl) {
        this.ssl = ssl;
    }
    public Date getCreatedTime() {
        return createdTime;
    }
    public void setCreatedTime(Date createdTime) {
        this.createdTime = createdTime;
    }
    public Date getDatachangeLasttime() {
        return datachangeLasttime;
    }
    public void setDatachangeLasttime(Date datachangeLasttime) {
        this.datachangeLasttime = datachangeLasttime;
    }
    // Debug representation listing every field (includes identity hash).
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" [");
        sb.append("Hash = ").append(hashCode());
        sb.append(", id=").append(id);
        sb.append(", name=").append(name);
        sb.append(", appId=").append(appId);
        sb.append(", version=").append(version);
        sb.append(", ssl=").append(ssl);
        sb.append(", createdTime=").append(createdTime);
        sb.append(", datachangeLasttime=").append(datachangeLasttime);
        sb.append("]");
        return sb.toString();
    }
    public static SlbGroup.Builder builder() {
        return new SlbGroup.Builder();
    }
    /**
     * Fluent builder for SlbGroup; each setter returns this builder and
     * build() hands back the accumulated instance.
     */
    public static class Builder {
        private SlbGroup obj;
        public Builder() {
            this.obj = new SlbGroup();
        }
        public Builder id(Long id) {
            obj.setId(id);
            return this;
        }
        public Builder name(String name) {
            obj.setName(name);
            return this;
        }
        public Builder appId(String appId) {
            obj.setAppId(appId);
            return this;
        }
        public Builder version(Integer version) {
            obj.setVersion(version);
            return this;
        }
        public Builder ssl(Boolean ssl) {
            obj.setSsl(ssl);
            return this;
        }
        public Builder createdTime(Date createdTime) {
            obj.setCreatedTime(createdTime);
            return this;
        }
        public Builder datachangeLasttime(Date datachangeLasttime) {
            obj.setDatachangeLasttime(datachangeLasttime);
            return this;
        }
        public SlbGroup build() {
            return this.obj;
        }
    }
    /**
     * Column metadata: SQL column name, Java property name, JDBC type, and
     * whether the column name must be back-quoted in generated SQL.
     */
    public enum Column {
        id("id", "id", "BIGINT", false),
        name("name", "name", "VARCHAR", true),
        appId("app_id", "appId", "VARCHAR", false),
        version("version", "version", "INTEGER", false),
        ssl("ssl", "ssl", "BIT", true),
        createdTime("created_time", "createdTime", "TIMESTAMP", false),
        datachangeLasttime("DataChange_LastTime", "datachangeLasttime", "TIMESTAMP", false);
        private static final String BEGINNING_DELIMITER = "`";
        private static final String ENDING_DELIMITER = "`";
        private final String column;
        private final boolean isColumnNameDelimited;
        private final String javaProperty;
        private final String jdbcType;
        public String value() {
            return this.column;
        }
        public String getValue() {
            return this.column;
        }
        public String getJavaProperty() {
            return this.javaProperty;
        }
        public String getJdbcType() {
            return this.jdbcType;
        }
        Column(String column, String javaProperty, String jdbcType, boolean isColumnNameDelimited) {
            this.column = column;
            this.javaProperty = javaProperty;
            this.jdbcType = jdbcType;
            this.isColumnNameDelimited = isColumnNameDelimited;
        }
        // ORDER BY fragments for this column.
        public String desc() {
            return this.getEscapedColumnName() + " DESC";
        }
        public String asc() {
            return this.getEscapedColumnName() + " ASC";
        }
        /** Returns all columns except the given ones. */
        public static Column[] excludes(Column ... excludes) {
            ArrayList<Column> columns = new ArrayList<>(Arrays.asList(Column.values()));
            if (excludes != null && excludes.length > 0) {
                columns.removeAll(new ArrayList<>(Arrays.asList(excludes)));
            }
            return columns.toArray(new Column[]{});
        }
        // Back-quotes the name when it collides with an SQL keyword or needs
        // exact casing (isColumnNameDelimited).
        public String getEscapedColumnName() {
            if (this.isColumnNameDelimited) {
                return new StringBuilder().append(BEGINNING_DELIMITER).append(this.column).append(ENDING_DELIMITER).toString();
            } else {
                return this.column;
            }
        }
        public String getAliasedEscapedColumnName() {
            return this.getEscapedColumnName();
        }
    }
}
| |
/*
* $Header: /home/jerenkrantz/tmp/commons/commons-convert/cvs/home/cvs/jakarta-commons//httpclient/src/contrib/org/apache/commons/httpclient/contrib/ssl/EasySSLProtocolSocketFactory.java,v 1.7 2004/06/11 19:26:27 olegk Exp $
* $Revision$
* $Date$
*
* ====================================================================
*
* Copyright 2002-2004 The Apache Software Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.codehaus.xfire.transport.http;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.UnknownHostException;
import org.apache.commons.httpclient.ConnectTimeoutException;
import org.apache.commons.httpclient.HttpClientError;
import org.apache.commons.httpclient.params.HttpConnectionParams;
import org.apache.commons.httpclient.protocol.SecureProtocolSocketFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.net.SocketFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
/**
* <p>
* EasySSLProtocolSocketFactory can be used to creats SSL {@link Socket}s
* that accept self-signed certificates.
* </p>
* <p>
* This socket factory SHOULD NOT be used for productive systems
* due to security reasons, unless it is a concious decision and
* you are perfectly aware of security implications of accepting
* self-signed certificates
* </p>
*
* <p>
* Example of using custom protocol socket factory for a specific host:
* <pre>
* Protocol easyhttps = new Protocol("https", new EasySSLProtocolSocketFactory(), 443);
*
* HttpClient client = new HttpClient();
* client.getHostConfiguration().setHost("localhost", 443, easyhttps);
* // use relative url only
* GetMethod httpget = new GetMethod("/");
* client.executeMethod(httpget);
* </pre>
* </p>
* <p>
* Example of using custom protocol socket factory per default instead of the standard one:
* <pre>
* Protocol easyhttps = new Protocol("https", new EasySSLProtocolSocketFactory(), 443);
* Protocol.registerProtocol("https", easyhttps);
*
* HttpClient client = new HttpClient();
* GetMethod httpget = new GetMethod("https://localhost/");
* client.executeMethod(httpget);
* </pre>
* </p>
*
* @author <a href="mailto:oleg -at- ural.ru">Oleg Kalnichevski</a>
*
* <p>
* DISCLAIMER: HttpClient developers DO NOT actively support this component.
* The component is provided as a reference material, which may be inappropriate
* for use without additional customization.
* </p>
*/
public class EasySSLProtocolSocketFactory implements SecureProtocolSocketFactory {
    /** Log object for this class. */
    private static final Log LOG = LogFactory.getLog(EasySSLProtocolSocketFactory.class);
    // Lazily-created SSL context; see getSSLContext().
    private SSLContext sslcontext = null;
    /**
     * Constructor for EasySSLProtocolSocketFactory.
     */
    public EasySSLProtocolSocketFactory() {
        super();
    }
    // Builds an SSLContext whose trust manager (EasyX509TrustManager) accepts
    // self-signed certificates.  Uses the "SSL" protocol string and default
    // key managers / secure random.
    private static SSLContext createEasySSLContext() {
        try {
            SSLContext context = SSLContext.getInstance("SSL");
            context.init(
                null,
                new TrustManager[] {new EasyX509TrustManager(null)},
                null);
            return context;
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
            throw new HttpClientError(e.toString());
        }
    }
    // Lazily creates the context on first use.
    // NOTE(review): not synchronized — concurrent first calls may each build
    // a context.  Appears benign since creation has no external side effects,
    // but confirm if instances are shared across threads.
    private SSLContext getSSLContext() {
        if (this.sslcontext == null) {
            this.sslcontext = createEasySSLContext();
        }
        return this.sslcontext;
    }
    /**
     * @see SecureProtocolSocketFactory#createSocket(java.lang.String,int,java.net.InetAddress,int)
     */
    public Socket createSocket(
        String host,
        int port,
        InetAddress clientHost,
        int clientPort)
        throws IOException, UnknownHostException {
        return getSSLContext().getSocketFactory().createSocket(
            host,
            port,
            clientHost,
            clientPort
        );
    }
    /**
     * Attempts to get a new socket connection to the given host within the given time limit.
     * <p>
     * To circumvent the limitations of older JREs that do not support connect timeout a
     * controller thread is executed. The controller thread attempts to create a new socket
     * within the given limit of time. If socket constructor does not return until the
     * timeout expires, the controller terminates and throws an {@link ConnectTimeoutException}
     * </p>
     *
     * @param host the host name/IP
     * @param port the port on the host
     * @param clientHost the local host name/IP to bind the socket to
     * @param clientPort the port on the local machine
     * @param params {@link HttpConnectionParams Http connection parameters}
     *
     * @return Socket a new socket
     *
     * @throws IOException if an I/O error occurs while creating the socket
     * @throws UnknownHostException if the IP address of the host cannot be
     * determined
     */
    public Socket createSocket(
        final String host,
        final int port,
        final InetAddress localAddress,
        final int localPort,
        final HttpConnectionParams params
    ) throws IOException, UnknownHostException, ConnectTimeoutException {
        if (params == null) {
            throw new IllegalArgumentException("Parameters may not be null");
        }
        int timeout = params.getConnectionTimeout();
        SocketFactory socketfactory = getSSLContext().getSocketFactory();
        if (timeout == 0) {
            // No timeout configured: use the blocking factory call directly.
            return socketfactory.createSocket(host, port, localAddress, localPort);
        } else {
            // Bind explicitly, then connect with the configured timeout.
            Socket socket = socketfactory.createSocket();
            SocketAddress localaddr = new InetSocketAddress(localAddress, localPort);
            SocketAddress remoteaddr = new InetSocketAddress(host, port);
            socket.bind(localaddr);
            socket.connect(remoteaddr, timeout);
            return socket;
        }
    }
    /**
     * @see SecureProtocolSocketFactory#createSocket(java.lang.String,int)
     */
    public Socket createSocket(String host, int port)
        throws IOException, UnknownHostException {
        return getSSLContext().getSocketFactory().createSocket(
            host,
            port
        );
    }
    /**
     * @see SecureProtocolSocketFactory#createSocket(java.net.Socket,java.lang.String,int,boolean)
     */
    public Socket createSocket(
        Socket socket,
        String host,
        int port,
        boolean autoClose)
        throws IOException, UnknownHostException {
        return getSSLContext().getSocketFactory().createSocket(
            socket,
            host,
            port,
            autoClose
        );
    }
    // All instances of this stateless-by-contract factory compare equal.
    public boolean equals(Object obj) {
        return ((obj != null) && obj.getClass().equals(EasySSLProtocolSocketFactory.class));
    }
    public int hashCode() {
        return EasySSLProtocolSocketFactory.class.hashCode();
    }
}
| |
/*
* #%L
* Wisdom-Framework
* %%
* Copyright (C) 2013 - 2014 Wisdom Framework
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package org.wisdom.router;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.util.Types;
import org.junit.Test;
import org.wisdom.api.content.ParameterConverter;
import org.wisdom.api.content.ParameterFactories;
import org.wisdom.api.content.ParameterFactory;
import org.wisdom.api.cookies.Cookie;
import org.wisdom.api.cookies.FlashCookie;
import org.wisdom.api.cookies.SessionCookie;
import org.wisdom.api.http.Context;
import org.wisdom.api.http.Request;
import org.wisdom.api.router.Route;
import org.wisdom.api.router.parameters.ActionParameter;
import org.wisdom.api.router.parameters.Source;
import org.wisdom.content.converters.ParamConverterEngine;
import org.wisdom.router.parameter.Bindings;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.Reader;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
 * Checks the binding of HTTP-scoped action parameters — context, request, route,
 * cookies, headers, reader and request-scope data — to controller arguments via
 * {@link Bindings#create}.
 */
public class HttpParameterTest {

    // Converter engine with no custom converters or factories; only built-in
    // conversions (String -> Integer, etc.) are available to the bindings.
    private ParameterFactories engine =
            new ParamConverterEngine(
                    Collections.<ParameterConverter>emptyList(),
                    Collections.<ParameterFactory>emptyList());

    @Test
    public void testContext() {
        Context ctx = mock(Context.class);
        ActionParameter argument = new ActionParameter(null, Source.HTTP, Context.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(ctx);
    }

    @Test
    public void testRequest() {
        Context ctx = mock(Context.class);
        Request request = mock(Request.class);
        when(ctx.request()).thenReturn(request);
        ActionParameter argument = new ActionParameter(null, Source.HTTP, Request.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(request);
    }

    @Test
    public void testRoute() {
        Context ctx = mock(Context.class);
        Route route = mock(Route.class);
        when(ctx.route()).thenReturn(route);
        ActionParameter argument = new ActionParameter(null, Source.HTTP, Route.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(route);
    }

    @Test
    public void testSessionCookie() {
        Context ctx = mock(Context.class);
        SessionCookie cookie = mock(SessionCookie.class);
        when(ctx.session()).thenReturn(cookie);
        ActionParameter argument = new ActionParameter(null, Source.HTTP, SessionCookie.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(cookie);
    }

    @Test
    public void testFlashCookie() {
        Context ctx = mock(Context.class);
        FlashCookie cookie = mock(FlashCookie.class);
        when(ctx.flash()).thenReturn(cookie);
        ActionParameter argument = new ActionParameter(null, Source.HTTP, FlashCookie.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(cookie);
    }

    @Test
    public void testCookie() {
        Context ctx = mock(Context.class);
        Cookie cookie = mock(Cookie.class);
        when(ctx.cookie("cookie")).thenReturn(cookie);
        ActionParameter argument = new ActionParameter("cookie", Source.HTTP, Cookie.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(cookie);
    }

    @Test
    public void testReader() throws IOException {
        Context ctx = mock(Context.class);
        BufferedReader reader = mock(BufferedReader.class);
        when(ctx.reader()).thenReturn(reader);
        // Both Reader and BufferedReader parameter types receive the body reader.
        ActionParameter argument = new ActionParameter(null, Source.HTTP, Reader.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(reader);
        argument = new ActionParameter(null, Source.HTTP, BufferedReader.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(reader);
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCookieWithoutName() {
        Context ctx = mock(Context.class);
        Cookie cookie = mock(Cookie.class);
        when(ctx.cookie("cookie")).thenReturn(cookie);
        ActionParameter argument = new ActionParameter(null, Source.HTTP, Cookie.class);
        Bindings.create(argument, ctx, engine);
        fail("Unexpected creation of object");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testCookieWithEmptyName() {
        Context ctx = mock(Context.class);
        Cookie cookie = mock(Cookie.class);
        when(ctx.cookie("cookie")).thenReturn(cookie);
        ActionParameter argument = new ActionParameter("", Source.HTTP, Cookie.class);
        Bindings.create(argument, ctx, engine);
        fail("Unexpected creation of object");
    }

    @Test
    public void testMissingCookie() {
        Context ctx = mock(Context.class);
        Cookie cookie = mock(Cookie.class);
        when(ctx.cookie("cookie")).thenReturn(cookie);
        ActionParameter argument = new ActionParameter("missing", Source.HTTP, Cookie.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(null);
    }

    @Test
    public void testHeader() {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        when(request.data()).thenReturn(Collections.<String, Object>emptyMap());
        when(ctx.headers("header")).thenReturn(ImmutableList.of("value"));
        when(ctx.header("header")).thenReturn("value");
        when(ctx.headers("count")).thenReturn(ImmutableList.of("1"));
        when(ctx.header("count")).thenReturn("1");
        ActionParameter argument = new ActionParameter("header", Source.HTTP, String.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo("value");
        // Header values convert to boxed and primitive numeric types alike.
        argument = new ActionParameter("count", Source.HTTP, Integer.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(1);
        argument = new ActionParameter("count", Source.HTTP, Integer.TYPE);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(1);
        argument = new ActionParameter("count", Source.HTTP, Long.TYPE);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(1L);
    }

    @Test
    public void testHeaderWithMultipleValues() {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        when(request.data()).thenReturn(Collections.<String, Object>emptyMap());
        when(ctx.headers("header")).thenReturn(ImmutableList.of("value1", "value2"));
        when(ctx.header("header")).thenReturn("value1");
        when(ctx.headers("count")).thenReturn(ImmutableList.of("1"));
        when(ctx.header("count")).thenReturn("1");
        // A List parameter receives all values; a scalar receives the first.
        ActionParameter argument = new ActionParameter("header", Source.HTTP, List.class, Types.listOf(String.class));
        assertThat((List) Bindings.create(argument, ctx, engine)).contains("value1", "value2");
        argument = new ActionParameter("header", Source.HTTP, String.class, null);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo("value1");
        argument = new ActionParameter("count", Source.HTTP, Integer.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(1);
        argument = new ActionParameter("count", Source.HTTP, List.class, Types.listOf(Integer.class));
        assertThat((List) Bindings.create(argument, ctx, engine)).containsExactly(1);
    }

    @Test
    public void testMissingHeader() {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        when(request.data()).thenReturn(Collections.<String, Object>emptyMap());
        when(ctx.headers("header")).thenReturn(ImmutableList.of("value1", "value2"));
        when(ctx.header("header")).thenReturn("value1");
        when(ctx.headers("count")).thenReturn(ImmutableList.of("1"));
        when(ctx.header("count")).thenReturn("1");
        // Missing multi-valued header -> empty list; missing scalar header -> null.
        ActionParameter argument = new ActionParameter("missing", Source.HTTP, List.class, Types.listOf(String.class));
        assertThat((List) Bindings.create(argument, ctx, engine)).isEmpty();
        argument = new ActionParameter("missing", Source.HTTP, String.class, null);
        assertThat(Bindings.create(argument, ctx, engine)).isNull();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHeaderWithoutName() {
        Context ctx = mock(Context.class);
        when(ctx.headers("header")).thenReturn(ImmutableList.of("value1", "value2"));
        when(ctx.header("header")).thenReturn("value1");
        when(ctx.headers("count")).thenReturn(ImmutableList.of("1"));
        when(ctx.header("count")).thenReturn("1");
        ActionParameter argument = new ActionParameter(null, Source.HTTP, List.class, Types.listOf(String.class));
        Bindings.create(argument, ctx, engine);
        fail("Should have failed");
    }

    @Test(expected = IllegalArgumentException.class)
    public void testHeaderWithEmptyName() {
        Context ctx = mock(Context.class);
        when(ctx.headers("header")).thenReturn(ImmutableList.of("value1", "value2"));
        when(ctx.header("header")).thenReturn("value1");
        when(ctx.headers("count")).thenReturn(ImmutableList.of("1"));
        when(ctx.header("count")).thenReturn("1");
        ActionParameter argument = new ActionParameter("", Source.HTTP, List.class, Types.listOf(String.class));
        Bindings.create(argument, ctx, engine);
        fail("Should have failed");
    }

    @Test
    public void testRequestScopeInjection() throws MalformedURLException {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        final URL url = new URL("http://perdu.com");
        when(request.data()).thenReturn(ImmutableMap.<String, Object>of(
                "data", url,
                "key", "value",
                "count", 1
        ));
        // Values stored in the request scope are injected as-is by name.
        ActionParameter argument = new ActionParameter("data", Source.HTTP, URL.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(url);
        argument = new ActionParameter("key", Source.HTTP, String.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo("value");
        argument = new ActionParameter("count", Source.HTTP, Integer.class);
        assertThat(Bindings.create(argument, ctx, engine)).isEqualTo(1);
    }

    @Test
    public void testRequestScopeInjectionWithMultipleValues() {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        when(request.data()).thenReturn(ImmutableMap.<String, Object>of(
                "data", ImmutableList.of("value1", "value2"),
                "key", "value",
                "count", 1
        ));
        ActionParameter argument = new ActionParameter("data", Source.HTTP, List.class, Types.listOf(String.class));
        assertThat((List) Bindings.create(argument, ctx, engine)).contains("value1", "value2");
    }

    @Test
    public void testMissingRequestScopeValue() {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        when(request.data()).thenReturn(ImmutableMap.<String, Object>of(
                "data", ImmutableList.of("value1", "value2"),
                "key", "value",
                "count", 1
        ));
        ActionParameter argument = new ActionParameter("missing", Source.HTTP, List.class, Types.listOf(String.class));
        assertThat((List) Bindings.create(argument, ctx, engine)).isEmpty();
        argument = new ActionParameter("missing", Source.HTTP, String.class, null);
        assertThat(Bindings.create(argument, ctx, engine)).isNull();
    }

    @Test(expected = IllegalArgumentException.class)
    public void testRequestScopeInjectionWithoutName() {
        Request request = mock(Request.class);
        Context ctx = mock(Context.class);
        when(ctx.request()).thenReturn(request);
        when(request.data()).thenReturn(ImmutableMap.<String, Object>of(
                "data", ImmutableList.of("value1", "value2"),
                "key", "value",
                "count", 1
        ));
        ActionParameter argument = new ActionParameter(null, Source.HTTP, List.class, Types.listOf(String.class));
        Bindings.create(argument, ctx, engine);
        fail("Should have failed");
    }
}
| |
/*
* Copyright 1999,2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.webdav.locking;
import java.util.UUID;
import org.modeshape.common.i18n.TextI18n;
import org.modeshape.common.logging.Logger;
/**
 * A helper class for {@link ResourceLocks}; represents a single lock node in the
 * lock tree.
 *
 * @author re
 */
public class LockedObject {

    // Retained for diagnostics; the defensive ArrayIndexOutOfBoundsException
    // logging it used to serve has been replaced by correct bounds handling.
    private static final Logger LOGGER = Logger.getLogger(LockedObject.class);

    private ResourceLocks resourceLocks;

    private String path;

    private String id;

    /**
     * Depth of a locked collection. If the locked resource is not a collection,
     * depth is 0 / doesn't matter.
     */
    protected int lockDepth;

    /**
     * Absolute expiry time of the lock, in milliseconds since the epoch
     * (set by {@link #refreshTimeout(int)}); not a duration.
     */
    protected long expiresAt;

    /**
     * Owner(s) of the lock. Shared locks can have multiple owners; null if no
     * owner is present.
     */
    protected String[] owner = null;

    /**
     * Children of this lock.
     */
    protected LockedObject[] children = null;

    protected LockedObject parent = null;

    /**
     * Whether the lock is exclusive or not. If owner == null the exclusive
     * value doesn't matter.
     */
    protected boolean exclusive = false;

    /**
     * Whether the lock is a write or read lock.
     */
    protected String type = null;

    /**
     * Creates the lock and registers it (by path and by generated ID) in the
     * given {@link ResourceLocks}, either in the permanent or temporary maps.
     *
     * @param resLocks the resourceLocks where locks are stored
     * @param path the path to the locked object
     * @param temporary indicates if the LockedObject should be temporary or not
     */
    public LockedObject( ResourceLocks resLocks,
                         String path,
                         boolean temporary ) {
        this.path = path;
        id = UUID.randomUUID().toString();
        resourceLocks = resLocks;

        if (!temporary) {
            resourceLocks.locks.put(path, this);
            resourceLocks.locksByID.put(id, this);
        } else {
            resourceLocks.tempLocks.put(path, this);
            resourceLocks.tempLocksByID.put(id, this);
        }
        resourceLocks.cleanupCounter++;
    }

    /**
     * Adds a new owner to this lock.
     *
     * @param owner string that represents the owner
     * @return true if the owner was added, false if it was already present
     */
    public boolean addLockedObjectOwner( String owner ) {
        if (this.owner == null) {
            this.owner = new String[1];
        } else {
            int size = this.owner.length;
            // Refuse duplicate owners (that should actually not happen).
            for (int i = 0; i < size; i++) {
                if (this.owner[i].equals(owner)) {
                    return false;
                }
            }
            String[] newLockObjectOwner = new String[size + 1];
            System.arraycopy(this.owner, 0, newLockObjectOwner, 0, size);
            this.owner = newLockObjectOwner;
        }
        this.owner[this.owner.length - 1] = owner;
        return true;
    }

    /**
     * Removes an owner from this lock, if present. The owner array becomes null
     * once the last owner has been removed.
     *
     * @param owner string that represents the owner
     */
    public void removeLockedObjectOwner( String owner ) {
        if (this.owner == null) {
            return;
        }
        for (int i = 0; i < this.owner.length; i++) {
            if (this.owner[i].equals(owner)) {
                // Copy everything except element i. Bounds are derived from the
                // array itself, so no ArrayIndexOutOfBoundsException is possible
                // (the old defensive catch is no longer needed).
                String[] remaining = new String[this.owner.length - 1];
                System.arraycopy(this.owner, 0, remaining, 0, i);
                System.arraycopy(this.owner, i + 1, remaining, i, remaining.length - i);
                this.owner = remaining;
                i--; // re-examine the element shifted into slot i
            }
        }
        if (this.owner.length == 0) {
            this.owner = null;
        }
    }

    /**
     * Adds a new child lock to this lock.
     *
     * @param newChild new child
     */
    public void addChild( LockedObject newChild ) {
        if (children == null) {
            children = new LockedObject[0];
        }
        int size = children.length;
        LockedObject[] newChildren = new LockedObject[size + 1];
        System.arraycopy(children, 0, newChildren, 0, size);
        newChildren[size] = newChild;
        children = newChildren;
    }

    /**
     * Deletes this lock object. Assumes that it has no children and no owners
     * (does not check this itself).
     */
    public void removeLockedObject() {
        if (this != resourceLocks.root && !this.getPath().equals("/")) {
            // Unlink from the parent's children array; guard against a missing
            // parent for symmetry with removeTempLockedObject().
            if (parent != null && parent.children != null) {
                int size = parent.children.length;
                for (int i = 0; i < size; i++) {
                    if (parent.children[i].equals(this)) {
                        LockedObject[] newChildren = new LockedObject[size - 1];
                        System.arraycopy(parent.children, 0, newChildren, 0, i);
                        System.arraycopy(parent.children, i + 1, newChildren, i, size - 1 - i);
                        parent.children = (newChildren.length != 0) ? newChildren : null;
                        break;
                    }
                }
            }
            // removing from hashtable
            resourceLocks.locksByID.remove(getID());
            resourceLocks.locks.remove(getPath());
            // now the garbage collector has some work to do
        }
    }

    /**
     * Deletes this temporary lock object. Assumes that it has no children and no
     * owners (does not check this itself).
     */
    public void removeTempLockedObject() {
        if (this != resourceLocks.tempRoot) {
            // removing from tree
            if (parent != null && parent.children != null) {
                int size = parent.children.length;
                for (int i = 0; i < size; i++) {
                    if (parent.children[i].equals(this)) {
                        LockedObject[] newChildren = new LockedObject[size - 1];
                        System.arraycopy(parent.children, 0, newChildren, 0, i);
                        System.arraycopy(parent.children, i + 1, newChildren, i, size - 1 - i);
                        parent.children = (newChildren.length != 0) ? newChildren : null;
                        break;
                    }
                }
            }
            // Removing from hashtables happens even when the node was already
            // detached from the tree, consistent with removeLockedObject().
            resourceLocks.tempLocksByID.remove(getID());
            resourceLocks.tempLocks.remove(getPath());
            // now the garbage collector has some work to do
        }
    }

    /**
     * Checks if a lock of the given exclusivity can be placed, only considering
     * children up to "depth".
     *
     * @param exclusive whether the new lock should be exclusive
     * @param depth the depth to which should be checked
     * @return true if the lock can be placed
     */
    public boolean checkLocks( boolean exclusive,
                               int depth ) {
        return checkParents(exclusive) && checkChildren(exclusive, depth);
    }

    /**
     * Helper of checkLocks(). Looks if the parents are locked.
     *
     * @param exclusive whether the new lock should be exclusive
     * @return true if no locks at the parent path are forbidding a new lock
     */
    private boolean checkParents( boolean exclusive ) {
        if (path.equals("/")) {
            return true;
        }
        if (owner == null) {
            // no owner, checking parents
            return parent != null && parent.checkParents(exclusive);
        }
        // there already is an owner; only two shared locks may coexist
        return !(this.exclusive || exclusive) && parent.checkParents(exclusive);
    }

    /**
     * Helper of checkLocks(). Looks if the children are locked.
     *
     * @param exclusive whether the new lock should be exclusive
     * @param depth depth
     * @return true if no locks at the children paths are forbidding a new lock
     */
    private boolean checkChildren( boolean exclusive,
                                   int depth ) {
        if (children == null) {
            // a file
            return owner == null || !(this.exclusive || exclusive);
        }
        // a folder
        if (owner == null) {
            // no owner, checking children
            if (depth != 0) {
                boolean canLock = true;
                int limit = children.length;
                for (int i = 0; i < limit; i++) {
                    if (!children[i].checkChildren(exclusive, depth - 1)) {
                        canLock = false;
                    }
                }
                return canLock;
            }
            // depth == 0 -> we don't care for children
            return true;
        }
        // there already is an owner
        return !(this.exclusive || exclusive);
    }

    /**
     * Sets a new timeout for the LockedObject.
     *
     * @param timeout timeout in seconds, measured from now
     */
    public void refreshTimeout( int timeout ) {
        // Multiply as long to avoid int overflow for large timeouts.
        expiresAt = System.currentTimeMillis() + (timeout * 1000L);
    }

    /**
     * Gets the remaining lifetime of the LockedObject.
     *
     * @return milliseconds until expiry (negative if already expired)
     */
    public long getTimeoutMillis() {
        return (expiresAt - System.currentTimeMillis());
    }

    /**
     * Return true if the lock has expired.
     *
     * @return true if timeout has passed
     */
    public boolean hasExpired() {
        if (expiresAt != 0) {
            return (System.currentTimeMillis() > expiresAt);
        }
        return true;
    }

    /**
     * Gets the LockID (locktoken) for the LockedObject.
     *
     * @return locktoken
     */
    public String getID() {
        return id;
    }

    /**
     * Gets the owners for the LockedObject.
     *
     * @return owners
     */
    public String[] getOwner() {
        return owner;
    }

    /**
     * Gets the path for the LockedObject.
     *
     * @return path
     */
    public String getPath() {
        return path;
    }

    /**
     * Sets the exclusivity for the LockedObject.
     *
     * @param exclusive true for an exclusive lock
     */
    public void setExclusive( boolean exclusive ) {
        this.exclusive = exclusive;
    }

    /**
     * Gets the exclusivity for the LockedObject.
     *
     * @return exclusivity
     */
    public boolean isExclusive() {
        return exclusive;
    }

    /**
     * Gets whether the lock is shared (i.e. not exclusive).
     *
     * @return true if the lock is shared
     */
    public boolean isShared() {
        return !exclusive;
    }

    /**
     * Gets the type of the lock.
     *
     * @return type
     */
    public String getType() {
        return type;
    }

    /**
     * Gets the depth of the lock.
     *
     * @return depth
     */
    public int getLockDepth() {
        return lockDepth;
    }
}
| |
// This software code is made available "AS IS" without warranties of any
// kind. You may copy, display, modify and redistribute the software
// code either by itself or as incorporated into your code; provided that
// you do not remove any proprietary notices. Your use of this software
// code is at your own risk and you waive any claim against Amazon
// Digital Services, Inc. or its affiliates with respect to your use of
// this software code. (c) 2006 Amazon Digital Services, Inc. or its
// affiliates.
package com.amazon.s3;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.XMLReaderFactory;
import sun.misc.BASE64Encoder;
/**
 * Helpers for the S3 REST protocol: canonicalization, HMAC-SHA1 request
 * signing, URL encoding and SAX reader creation.
 */
public class Utils {
    static final String METADATA_PREFIX = "x-amz-meta-";
    static final String AMAZON_HEADER_PREFIX = "x-amz-";
    static final String ALTERNATIVE_DATE_HEADER = "x-amz-date";
    static final String DEFAULT_HOST = "s3.amazonaws.com";

    static final int SECURE_PORT = 443;
    static final int INSECURE_PORT = 80;

    /**
     * HMAC/SHA1 Algorithm per RFC 2104.
     */
    private static final String HMAC_SHA1_ALGORITHM = "HmacSHA1";

    static String makeCanonicalString(String method, String resource, Map headers) {
        return makeCanonicalString(method, resource, headers, null);
    }

    /**
     * Calculate the canonical string used for signing. When expires is
     * non-null, it will be used instead of the Date header.
     *
     * @param method HTTP method (GET, PUT, ...)
     * @param resource resource path, possibly with a query string
     * @param headers request headers mapping name to a List of values (may be null)
     * @param expires expiration timestamp for query-string auth, or null
     * @return the canonical string to sign
     */
    static String makeCanonicalString(String method, String resource,
                                      Map headers, String expires)
    {
        StringBuilder buf = new StringBuilder();
        buf.append(method).append("\n");

        // Collect the headers that participate in the signature —
        // Content-MD5, Content-Type, Date, and x-amz-* — keyed by their
        // lower-cased names; TreeMap keeps them sorted as required.
        SortedMap interestingHeaders = new TreeMap();
        if (headers != null) {
            for (Iterator i = headers.keySet().iterator(); i.hasNext(); ) {
                String key = (String)i.next();
                if (key == null) continue;
                String lk = key.toLowerCase();

                // Ignore any headers that are not particularly interesting.
                if (lk.equals("content-type") || lk.equals("content-md5") || lk.equals("date") ||
                    lk.startsWith(AMAZON_HEADER_PREFIX))
                {
                    List s = (List)headers.get(key);
                    interestingHeaders.put(lk, concatenateList(s));
                }
            }
        }

        // x-amz-date takes the place of the Date header, which must be empty.
        if (interestingHeaders.containsKey(ALTERNATIVE_DATE_HEADER)) {
            interestingHeaders.put("date", "");
        }

        // if the expires is non-null, use that for the date field. this
        // trumps the x-amz-date behavior.
        if (expires != null) {
            interestingHeaders.put("date", expires);
        }

        // these headers require that we still put a new line in after them,
        // even if they don't exist.
        if (! interestingHeaders.containsKey("content-type")) {
            interestingHeaders.put("content-type", "");
        }
        if (! interestingHeaders.containsKey("content-md5")) {
            interestingHeaders.put("content-md5", "");
        }

        // Finally, add all the interesting headers; x-amz-* ones keep their
        // "name:" prefix, the positional ones contribute only their value.
        for (Iterator i = interestingHeaders.keySet().iterator(); i.hasNext(); ) {
            String key = (String)i.next();
            if (key.startsWith(AMAZON_HEADER_PREFIX)) {
                buf.append(key).append(':').append(interestingHeaders.get(key));
            } else {
                buf.append(interestingHeaders.get(key));
            }
            buf.append("\n");
        }

        // don't include the query parameters...
        int queryIndex = resource.indexOf('?');
        if (queryIndex == -1) {
            buf.append("/").append(resource);
        } else {
            buf.append("/").append(resource.substring(0, queryIndex));
        }

        // ...unless there is an acl or torrent parameter
        if (resource.matches(".*[&?]acl($|=|&).*")) {
            buf.append("?acl");
        } else if (resource.matches(".*[&?]torrent($|=|&).*")) {
            buf.append("?torrent");
        }

        return buf.toString();
    }

    /**
     * Calculate the Base64-encoded HMAC/SHA1 signature of a canonical string.
     *
     * @param awsSecretAccessKey the secret key to sign with
     * @param canonicalString the data to sign
     * @param urlencode whether to URL-encode the Base64 result
     * @return the signature
     */
    static String encode(String awsSecretAccessKey, String canonicalString,
                         boolean urlencode)
    {
        // Key and data are encoded explicitly as UTF-8; the no-argument
        // getBytes() would silently depend on the platform default charset.
        SecretKeySpec signingKey =
            new SecretKeySpec(awsSecretAccessKey.getBytes(StandardCharsets.UTF_8),
                              HMAC_SHA1_ALGORITHM);

        // Acquire the MAC instance and initialize with the signing key.
        Mac mac = null;
        try {
            mac = Mac.getInstance(HMAC_SHA1_ALGORITHM);
        } catch (NoSuchAlgorithmException e) {
            // should not happen: every JRE is required to provide HmacSHA1
            throw new RuntimeException("Could not find sha1 algorithm", e);
        }
        try {
            mac.init(signingKey);
        } catch (InvalidKeyException e) {
            // also should not happen
            throw new RuntimeException("Could not initialize the MAC algorithm", e);
        }

        // java.util.Base64 replaces the unsupported sun.misc.BASE64Encoder.
        // A SHA1 MAC is 20 bytes (28 Base64 chars), well under the old
        // encoder's 76-char line wrap, so the output is identical.
        String b64 = Base64.getEncoder()
            .encodeToString(mac.doFinal(canonicalString.getBytes(StandardCharsets.UTF_8)));

        if (urlencode) {
            return urlencode(b64);
        } else {
            return b64;
        }
    }

    /**
     * Builds the request path for a bucket listing with the optional
     * prefix/marker/max-keys query parameters.
     */
    static String pathForListOptions(String bucket, String prefix, String marker, Integer maxKeys) {
        StringBuilder path = new StringBuilder(bucket);
        path.append("?");
        // these two params must be url encoded
        if (prefix != null) path.append("prefix=").append(urlencode(prefix)).append("&");
        if (marker != null) path.append("marker=").append(urlencode(marker)).append("&");
        if (maxKeys != null) path.append("max-keys=").append(maxKeys).append("&");
        path.deleteCharAt(path.length()-1); // we've always added exactly one too many chars
        return path.toString();
    }

    /**
     * URL-encodes a string as UTF-8.
     */
    static String urlencode(String unencoded) {
        try {
            return URLEncoder.encode(unencoded, "UTF-8");
        } catch (UnsupportedEncodingException e) {
            // should never happen: UTF-8 is always supported
            throw new RuntimeException("Could not url encode to UTF-8", e);
        }
    }

    /**
     * Creates a SAX XMLReader, falling back to the Crimson parser for
     * pre-1.5 JREs that do not configure a default driver.
     */
    static XMLReader createXMLReader() {
        try {
            return XMLReaderFactory.createXMLReader();
        } catch (SAXException e) {
            // oops, lets try doing this (needed in 1.4)
            System.setProperty("org.xml.sax.driver", "org.apache.crimson.parser.XMLReaderImpl");
        }
        try {
            // try once more
            return XMLReaderFactory.createXMLReader();
        } catch (SAXException e) {
            throw new RuntimeException("Couldn't initialize a sax driver for the XMLReader", e);
        }
    }

    /**
     * Concatenates a bunch of header values, separating them with a comma.
     * @param values List of header values.
     * @return String of all headers, with commas.
     */
    private static String concatenateList(List values) {
        StringBuilder buf = new StringBuilder();
        for (int i = 0, size = values.size(); i < size; ++ i) {
            buf.append(((String)values.get(i)).replaceAll("\n", "").trim());
            if (i != (size - 1)) {
                buf.append(",");
            }
        }
        return buf.toString();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.