/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.servlet; import java.io.InputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Enumeration; import java.util.EventListener; import java.util.Map; import java.util.Set; import javax.servlet.descriptor.JspConfigDescriptor; /** * Defines a set of methods that a servlet uses to communicate with its servlet * container, for example, to get the MIME type of a file, dispatch requests, or * write to a log file. * <p> * There is one context per "web application" per Java Virtual Machine. (A * "web application" is a collection of servlets and content installed under a * specific subset of the server's URL namespace such as <code>/catalog</code> * and possibly installed via a <code>.war</code> file.) * <p> * In the case of a web application marked "distributed" in its deployment * descriptor, there will be one context instance for each virtual machine. In * this situation, the context cannot be used as a location to share global * information (because the information won't be truly global). Use an external * resource like a database instead. * <p> * The <code>ServletContext</code> object is contained within the * {@link ServletConfig} object, which the Web server provides the servlet when * the servlet is initialized. * * @see Servlet#getServletConfig * @see ServletConfig#getServletContext */ public interface ServletContext { public static final String TEMPDIR = "javax.servlet.context.tempdir"; /** * @since Servlet 3.0 */ public static final String ORDERED_LIBS = "javax.servlet.context.orderedLibs"; /** * Return the main path associated with this context. * * @return The main context path * * @since Servlet 2.5 */ public String getContextPath(); /** * Returns a <code>ServletContext</code> object that corresponds to a * specified URL on the server. * <p> * This method allows servlets to gain access to the context for various * parts of the server, and as needed obtain {@link RequestDispatcher} * objects from the context. The given path must begin with "/", is * interpreted relative to the server's document root and is matched against * the context roots of other web applications hosted on this container. * <p> * In a security conscious environment, the servlet container may return * <code>null</code> for a given URL. * * @param uripath * a <code>String</code> specifying the context path of another * web application in the container. * @return the <code>ServletContext</code> object that corresponds to the * named URL, or null if either none exists or the container wishes * to restrict this access. * @see RequestDispatcher */ public ServletContext getContext(String uripath); /** * Returns the major version of the Java Servlet API that this servlet * container supports. 
All implementations that comply with Version 3.0 must * have this method return the integer 3. * * @return 3 */ public int getMajorVersion(); /** * Returns the minor version of the Servlet API that this servlet container * supports. All implementations that comply with Version 3.0 must have this * method return the integer 0. * * @return 0 */ public int getMinorVersion(); /** * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * * @since Servlet 3.0 TODO SERVLET3 - Add comments */ public int getEffectiveMajorVersion(); /** * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 TODO SERVLET3 - Add comments */ public int getEffectiveMinorVersion(); /** * Returns the MIME type of the specified file, or <code>null</code> if the * MIME type is not known. The MIME type is determined by the configuration * of the servlet container, and may be specified in a web application * deployment descriptor. Common MIME types are <code>"text/html"</code> and * <code>"image/gif"</code>. * * @param file * a <code>String</code> specifying the name of a file * @return a <code>String</code> specifying the file's MIME type */ public String getMimeType(String file); /** * Returns a directory-like listing of all the paths to resources within the * web application whose longest sub-path matches the supplied path * argument. Paths indicating subdirectory paths end with a '/'. The * returned paths are all relative to the root of the web application and * have a leading '/'. For example, for a web application containing<br> * <br> * /welcome.html<br> * /catalog/index.html<br> * /catalog/products.html<br> * /catalog/offers/books.html<br> * /catalog/offers/music.html<br> * /customer/login.jsp<br> * /WEB-INF/web.xml<br> * /WEB-INF/classes/com.acme.OrderServlet.class,<br> * <br> * getResourcePaths("/") returns {"/welcome.html", "/catalog/", * "/customer/", "/WEB-INF/"}<br> * getResourcePaths("/catalog/") returns {"/catalog/index.html", * "/catalog/products.html", "/catalog/offers/"}.<br> * * @param path * the partial path used to match the resources, which must start * with a / * @return a Set containing the directory listing, or null if there are no * resources in the web application whose path begins with the * supplied path. * @since Servlet 2.3 */ public Set<String> getResourcePaths(String path); /** * Returns a URL to the resource that is mapped to a specified path. The * path must begin with a "/" and is interpreted as relative to the current * context root. * <p> * This method allows the servlet container to make a resource available to * servlets from any source. Resources can be located on a local or remote * file system, in a database, or in a <code>.war</code> file. 
* <p> * The servlet container must implement the URL handlers and * <code>URLConnection</code> objects that are necessary to access the * resource. * <p> * This method returns <code>null</code> if no resource is mapped to the * pathname. * <p> * Some containers may allow writing to the URL returned by this method * using the methods of the URL class. * <p> * The resource content is returned directly, so be aware that requesting a * <code>.jsp</code> page returns the JSP source code. Use a * <code>RequestDispatcher</code> instead to include results of an * execution. * <p> * This method has a different purpose than * <code>java.lang.Class.getResource</code>, which looks up resources based * on a class loader. This method does not use class loaders. * * @param path * a <code>String</code> specifying the path to the resource * @return the resource located at the named path, or <code>null</code> if * there is no resource at that path * @exception MalformedURLException * if the pathname is not given in the correct form */ public URL getResource(String path) throws MalformedURLException; /** * Returns the resource located at the named path as an * <code>InputStream</code> object. * <p> * The data in the <code>InputStream</code> can be of any type or length. * The path must be specified according to the rules given in * <code>getResource</code>. This method returns <code>null</code> if no * resource exists at the specified path. * <p> * Meta-information such as content length and content type that is * available via <code>getResource</code> method is lost when using this * method. * <p> * The servlet container must implement the URL handlers and * <code>URLConnection</code> objects necessary to access the resource. * <p> * This method is different from * <code>java.lang.Class.getResourceAsStream</code>, which uses a class * loader. This method allows servlet containers to make a resource * available to a servlet from any location, without using a class loader. * * @param path * a <code>String</code> specifying the path to the resource * @return the <code>InputStream</code> returned to the servlet, or * <code>null</code> if no resource exists at the specified path */ public InputStream getResourceAsStream(String path); /** * Returns a {@link RequestDispatcher} object that acts as a wrapper for the * resource located at the given path. A <code>RequestDispatcher</code> * object can be used to forward a request to the resource or to include the * resource in a response. The resource can be dynamic or static. * <p> * The pathname must begin with a "/" and is interpreted as relative to the * current context root. Use <code>getContext</code> to obtain a * <code>RequestDispatcher</code> for resources in foreign contexts. This * method returns <code>null</code> if the <code>ServletContext</code> * cannot return a <code>RequestDispatcher</code>. * * @param path * a <code>String</code> specifying the pathname to the resource * @return a <code>RequestDispatcher</code> object that acts as a wrapper for * the resource at the specified path, or <code>null</code> if the * <code>ServletContext</code> cannot return a * <code>RequestDispatcher</code> * @see RequestDispatcher * @see ServletContext#getContext */ public RequestDispatcher getRequestDispatcher(String path); /** * Returns a {@link RequestDispatcher} object that acts as a wrapper for the * named servlet. * <p> * Servlets (and JSP pages also) may be given names via server * administration or via a web application deployment descriptor. 
A servlet * instance can determine its name using * {@link ServletConfig#getServletName}. * <p> * This method returns <code>null</code> if the <code>ServletContext</code> * cannot return a <code>RequestDispatcher</code> for any reason. * * @param name * a <code>String</code> specifying the name of a servlet to wrap * @return a <code>RequestDispatcher</code> object that acts as a wrapper for * the named servlet, or <code>null</code> if the * <code>ServletContext</code> cannot return a * <code>RequestDispatcher</code> * @see RequestDispatcher * @see ServletContext#getContext * @see ServletConfig#getServletName */ public RequestDispatcher getNamedDispatcher(String name); /** * Do not use. This method was originally defined to retrieve a servlet from * a <code>ServletContext</code>. In this version, this method always * returns <code>null</code> and remains only to preserve binary * compatibility. This method will be permanently removed in a future * version of the Java Servlet API. * <p> * In lieu of this method, servlets can share information using the * <code>ServletContext</code> class and can perform shared business logic * by invoking methods on common non-servlet classes. * * @param name Not used * * @return Always <code>null</code> * * @throws ServletException never * * @deprecated As of Java Servlet API 2.1, with no direct replacement. */ @SuppressWarnings("dep-ann") // Spec API does not use @Deprecated public Servlet getServlet(String name) throws ServletException; /** * Do not use. This method was originally defined to return an * <code>Enumeration</code> of all the servlets known to this servlet * context. In this version, this method always returns an empty enumeration * and remains only to preserve binary compatibility. This method will be * permanently removed in a future version of the Java Servlet API. * * @return Always an empty Enumeration * * @deprecated As of Java Servlet API 2.0, with no replacement. */ @SuppressWarnings("dep-ann") // Spec API does not use @Deprecated public Enumeration<Servlet> getServlets(); /** * Do not use. This method was originally defined to return an * <code>Enumeration</code> of all the servlet names known to this context. * In this version, this method always returns an empty * <code>Enumeration</code> and remains only to preserve binary * compatibility. This method will be permanently removed in a future * version of the Java Servlet API. * * @return Always an empty Enumeration * * @deprecated As of Java Servlet API 2.1, with no replacement. */ @SuppressWarnings("dep-ann") // Spec API does not use @Deprecated public Enumeration<String> getServletNames(); /** * Writes the specified message to a servlet log file, usually an event log. * The name and type of the servlet log file is specific to the servlet * container. * * @param msg * a <code>String</code> specifying the message to be written to * the log file */ public void log(String msg); /** * Do not use. * @param exception The exception to log * @param msg The message to log with the exception * @deprecated As of Java Servlet API 2.1, use * {@link #log(String message, Throwable throwable)} instead. * <p> * This method was originally defined to write an exception's * stack trace and an explanatory error message to the servlet * log file. */ @SuppressWarnings("dep-ann") // Spec API does not use @Deprecated public void log(Exception exception, String msg); /** * Writes an explanatory message and a stack trace for a given * <code>Throwable</code> exception to the servlet log file. 
The name and * type of the servlet log file is specific to the servlet container, * usually an event log. * * @param message * a <code>String</code> that describes the error or exception * @param throwable * the <code>Throwable</code> error or exception */ public void log(String message, Throwable throwable); /** * Returns a <code>String</code> containing the real path for a given * virtual path. For example, the path "/index.html" returns the absolute * file path on the server's filesystem that would be served by a request for * "http://host/contextPath/index.html", where contextPath is the context * path of this ServletContext. * <p> * The real path returned will be in a form appropriate to the computer and * operating system on which the servlet container is running, including the * proper path separators. This method returns <code>null</code> if the * servlet container cannot translate the virtual path to a real path for * any reason (such as when the content is being made available from a * <code>.war</code> archive). * * @param path * a <code>String</code> specifying a virtual path * @return a <code>String</code> specifying the real path, or null if the * translation cannot be performed */ public String getRealPath(String path); /** * Returns the name and version of the servlet container on which the * servlet is running. * <p> * The form of the returned string is * <i>servername</i>/<i>versionnumber</i>. For example, the JavaServer Web * Development Kit may return the string * <code>JavaServer Web Dev Kit/1.0</code>. * <p> * The servlet container may return other optional information after the * primary string in parentheses, for example, * <code>JavaServer Web Dev Kit/1.0 (JDK 1.1.6; Windows NT 4.0 x86)</code>. * * @return a <code>String</code> containing at least the servlet container * name and version number */ public String getServerInfo(); /** * Returns a <code>String</code> containing the value of the named * context-wide initialization parameter, or <code>null</code> if the * parameter does not exist. * <p> * This method can make available configuration information useful to an * entire "web application". For example, it can provide a webmaster's email * address or the name of a system that holds critical data. * * @param name * a <code>String</code> containing the name of the parameter * whose value is requested * @return a <code>String</code> containing the value of the initialization * parameter * @throws NullPointerException If the provided parameter name is * <code>null</code> * @see ServletConfig#getInitParameter */ public String getInitParameter(String name); /** * Returns the names of the context's initialization parameters as an * <code>Enumeration</code> of <code>String</code> objects, or an empty * <code>Enumeration</code> if the context has no initialization parameters. * * @return an <code>Enumeration</code> of <code>String</code> objects * containing the names of the context's initialization parameters * @see ServletConfig#getInitParameter */ public Enumeration<String> getInitParameterNames(); /** * Set the given initialisation parameter to the given value. 
* @param name Name of initialisation parameter * @param value Value for initialisation parameter * @return <code>true</code> if the call succeeds or <code>false</code> if * the call fails because an initialisation parameter with the same * name has already been set * @throws IllegalStateException If initialisation of this ServletContext * has already completed * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws NullPointerException If the provided parameter name is * <code>null</code> * @since Servlet 3.0 */ public boolean setInitParameter(String name, String value); /** * Returns the servlet container attribute with the given name, or * <code>null</code> if there is no attribute by that name. An attribute * allows a servlet container to give the servlet additional information not * already provided by this interface. See your server documentation for * information about its attributes. A list of supported attributes can be * retrieved using <code>getAttributeNames</code>. * <p> * The attribute is returned as a <code>java.lang.Object</code> or some * subclass. Attribute names should follow the same convention as package * names. The Java Servlet API specification reserves names matching * <code>java.*</code>, <code>javax.*</code>, and <code>sun.*</code>. * * @param name * a <code>String</code> specifying the name of the attribute * @return an <code>Object</code> containing the value of the attribute, or * <code>null</code> if no attribute exists matching the given name * @throws NullPointerException If the provided attribute name is * <code>null</code> * @see ServletContext#getAttributeNames */ public Object getAttribute(String name); /** * Returns an <code>Enumeration</code> containing the attribute names * available within this servlet context. Use the {@link #getAttribute} * method with an attribute name to get the value of an attribute. * * @return an <code>Enumeration</code> of attribute names * @see #getAttribute */ public Enumeration<String> getAttributeNames(); /** * Binds an object to a given attribute name in this servlet context. If the * name specified is already used for an attribute, this method will replace * the attribute with the new attribute. * <p> * If listeners are configured on the <code>ServletContext</code> the * container notifies them accordingly. * <p> * If a null value is passed, the effect is the same as calling * <code>removeAttribute()</code>. * <p> * Attribute names should follow the same convention as package names. The * Java Servlet API specification reserves names matching * <code>java.*</code>, <code>javax.*</code>, and <code>sun.*</code>. * * @param name * a <code>String</code> specifying the name of the attribute * @param object * an <code>Object</code> representing the attribute to be bound * @throws NullPointerException If the provided attribute name is * <code>null</code> */ public void setAttribute(String name, Object object); /** * Removes the attribute with the given name from the servlet context. After * removal, subsequent calls to {@link #getAttribute} to retrieve the * attribute's value will return <code>null</code>. 
* <p> * If listeners are configured on the <code>ServletContext</code> the * container notifies them accordingly. * * @param name * a <code>String</code> specifying the name of the attribute to * be removed */ public void removeAttribute(String name); /** * Returns the name of this web application corresponding to this * ServletContext as specified in the deployment descriptor for this web * application by the display-name element. * * @return The name of the web application or null if no name has been * declared in the deployment descriptor. * @since Servlet 2.3 */ public String getServletContextName(); /** * Register a servlet implementation for use in this ServletContext. * @param servletName The name of the servlet to register * @param className The implementation class for the servlet * @return The registration object that enables further configuration * @throws IllegalStateException * If the context has already been initialised * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public ServletRegistration.Dynamic addServlet(String servletName, String className); /** * Register a servlet instance for use in this ServletContext. * @param servletName The name of the servlet to register * @param servlet The Servlet instance to register * @return The registration object that enables further configuration * @throws IllegalStateException * If the context has already been initialised * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public ServletRegistration.Dynamic addServlet(String servletName, Servlet servlet); /** * Add servlet to context. * @param servletName Name of servlet to add * @param servletClass Class of servlet to add * @return <code>null</code> if the servlet has already been fully defined, * else a {@link javax.servlet.ServletRegistration.Dynamic} object * that can be used to further configure the servlet * @throws IllegalStateException * If the context has already been initialised * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public ServletRegistration.Dynamic addServlet(String servletName, Class<? 
extends Servlet> servletClass); /** * TODO SERVLET3 - Add comments * @param <T> TODO * @param c TODO * @return TODO * @throws ServletException TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public <T extends Servlet> T createServlet(Class<T> c) throws ServletException; /** * Obtain the details of the named servlet. * * @param servletName The name of the Servlet of interest * * @return The registration details for the named Servlet or * <code>null</code> if no Servlet has been registered with the * given name * * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * * @since Servlet 3.0 */ public ServletRegistration getServletRegistration(String servletName); /** * TODO SERVLET3 - Add comments * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public Map<String, ? extends ServletRegistration> getServletRegistrations(); /** * Add filter to context. * @param filterName Name of filter to add * @param className Name of filter class * @return <code>null</code> if the filter has already been fully defined, * else a {@link javax.servlet.FilterRegistration.Dynamic} object * that can be used to further configure the filter * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws IllegalStateException * If the context has already been initialised * @since Servlet 3.0 */ public FilterRegistration.Dynamic addFilter(String filterName, String className); /** * Add filter to context. * @param filterName Name of filter to add * @param filter Filter to add * @return <code>null</code> if the filter has already been fully defined, * else a {@link javax.servlet.FilterRegistration.Dynamic} object * that can be used to further configure the filter * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. 
For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws IllegalStateException * If the context has already been initialised * @since Servlet 3.0 */ public FilterRegistration.Dynamic addFilter(String filterName, Filter filter); /** * Add filter to context. * @param filterName Name of filter to add * @param filterClass Class of filter to add * @return <code>null</code> if the filter has already been fully defined, * else a {@link javax.servlet.FilterRegistration.Dynamic} object * that can be used to further configure the filter * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws IllegalStateException * If the context has already been initialised * @since Servlet 3.0 */ public FilterRegistration.Dynamic addFilter(String filterName, Class<? extends Filter> filterClass); /** * TODO SERVLET3 - Add comments * @param <T> TODO * @param c TODO * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws ServletException TODO * @since Servlet 3.0 */ public <T extends Filter> T createFilter(Class<T> c) throws ServletException; /** * TODO SERVLET3 - Add comments * @param filterName TODO * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public FilterRegistration getFilterRegistration(String filterName); /** * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 TODO SERVLET3 - Add comments */ public Map<String, ? extends FilterRegistration> getFilterRegistrations(); /** * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. 
* @since Servlet 3.0 TODO SERVLET3 - Add comments */ public SessionCookieConfig getSessionCookieConfig(); /** * Configures the available session tracking modes for this web application. * @param sessionTrackingModes The session tracking modes to use for this * web application * @throws IllegalArgumentException * If sessionTrackingModes specifies * {@link SessionTrackingMode#SSL} in combination with any other * {@link SessionTrackingMode} * @throws IllegalStateException * If the context has already been initialised * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public void setSessionTrackingModes( Set<SessionTrackingMode> sessionTrackingModes); /** * Obtains the default session tracking modes for this web application. * By default {@link SessionTrackingMode#URL} is always supported, {@link * SessionTrackingMode#COOKIE} is supported unless the <code>cookies</code> * attribute has been set to <code>false</code> for the context and {@link * SessionTrackingMode#SSL} is supported if at least one of the connectors * used by this context has the attribute <code>secure</code> set to * <code>true</code>. * @return The set of default session tracking modes for this web * application * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public Set<SessionTrackingMode> getDefaultSessionTrackingModes(); /** * Obtains the currently enabled session tracking modes for this web * application. * @return The value supplied via {@link #setSessionTrackingModes(Set)} if * one was previously set, else return the defaults * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public Set<SessionTrackingMode> getEffectiveSessionTrackingModes(); /** * TODO SERVLET3 - Add comments * @param className TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. 
* @since Servlet 3.0 */ public void addListener(String className); /** * TODO SERVLET3 - Add comments * @param <T> TODO * @param t TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public <T extends EventListener> void addListener(T t); /** * TODO SERVLET3 - Add comments * @param listenerClass TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public void addListener(Class<? extends EventListener> listenerClass); /** * TODO SERVLET3 - Add comments * @param <T> TODO * @param c TODO * @return TODO * @throws ServletException TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 */ public <T extends EventListener> T createListener(Class<T> c) throws ServletException; /** * @return TODO * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @since Servlet 3.0 TODO SERVLET3 - Add comments */ public JspConfigDescriptor getJspConfigDescriptor(); /** * Get the web application class loader associated with this ServletContext. * * @return The associated web application class loader * * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws SecurityException if access to the class loader is prevented by a * SecurityManager * @since Servlet 3.0 */ public ClassLoader getClassLoader(); /** * Add to the declared roles for this ServletContext. * @param roleNames The roles to add * @throws UnsupportedOperationException If called from a * {@link ServletContextListener#contextInitialized(ServletContextEvent)} * method of a {@link ServletContextListener} that was not defined in a * web.xml file, a web-fragment.xml file nor annotated with * {@link javax.servlet.annotation.WebListener}. 
For example, a * {@link ServletContextListener} defined in a TLD would not be able to * use this method. * @throws IllegalArgumentException If the list of roleNames is null or * empty * @throws IllegalStateException If the ServletContext has already been * initialised * @since Servlet 3.0 */ public void declareRoles(String... roleNames); }
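A minimal usage sketch for the interface above, assuming a standard servlet container deployment; the servlet class, init-parameter name, and forward target are illustrative assumptions, not part of the spec:

// Illustrative only: class name, "webmaster.email" parameter and "/catalog/index.jsp" are assumptions.
package com.example;

import java.io.IOException;
import javax.servlet.RequestDispatcher;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

public class CatalogServlet extends HttpServlet {

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {
        // The ServletContext is reached through the ServletConfig (see interface Javadoc).
        ServletContext ctx = getServletContext();

        // Context-wide init parameter, e.g. declared with <context-param> in web.xml.
        String supportEmail = ctx.getInitParameter("webmaster.email");
        ctx.log("Handling catalog request, support contact: " + supportEmail);

        // Forward to another resource in the same context; the path must start with "/".
        RequestDispatcher dispatcher = ctx.getRequestDispatcher("/catalog/index.jsp");
        if (dispatcher != null) {
            dispatcher.forward(req, resp);
        } else {
            resp.sendError(HttpServletResponse.SC_NOT_FOUND);
        }
    }
}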
/** * Copyright 2015 Smart Society Services B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 */ package org.opensmartgridplatform.adapter.protocol.oslp.elster.application.mapping; import java.net.InetAddress; import java.net.UnknownHostException; import org.joda.time.DateTime; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.opensmartgridplatform.dto.valueobjects.ConfigurationDto; import org.opensmartgridplatform.oslp.Oslp; import org.opensmartgridplatform.oslp.Oslp.SetConfigurationRequest; import com.google.protobuf.ByteString; import ma.glasnost.orika.CustomConverter; import ma.glasnost.orika.MappingContext; import ma.glasnost.orika.metadata.Type; public class ConfigurationToOslpSetConfigurationRequestConverter extends CustomConverter<ConfigurationDto, Oslp.SetConfigurationRequest> { private static final Logger LOGGER = LoggerFactory .getLogger(ConfigurationToOslpSetConfigurationRequestConverter.class); private static final int SECONDS_PER_MINUTE = 60; @Override public SetConfigurationRequest convert(final ConfigurationDto source, final Type<? extends Oslp.SetConfigurationRequest> destinationType, final MappingContext context) { final Oslp.SetConfigurationRequest.Builder setConfigurationRequest = Oslp.SetConfigurationRequest.newBuilder(); if (source.getLightType() != null) { setConfigurationRequest.setLightType(this.mapperFacade.map(source.getLightType(), Oslp.LightType.class)); } if (source.getDaliConfiguration() != null) { setConfigurationRequest.setDaliConfiguration( this.mapperFacade.map(source.getDaliConfiguration(), Oslp.DaliConfiguration.class)); } if (source.getRelayConfiguration() != null) { setConfigurationRequest.setRelayConfiguration( this.mapperFacade.map(source.getRelayConfiguration(), Oslp.RelayConfiguration.class)); } if (source.getShortTermHistoryIntervalMinutes() != null) { setConfigurationRequest.setShortTermHistoryIntervalMinutes( this.mapperFacade.map(source.getShortTermHistoryIntervalMinutes(), Integer.class)); } if (source.getLongTermHistoryInterval() != null) { setConfigurationRequest.setLongTermHistoryInterval( this.mapperFacade.map(source.getLongTermHistoryInterval(), Integer.class)); } if (source.getLongTermHistoryIntervalType() != null) { setConfigurationRequest.setLongTermHistoryIntervalType( this.mapperFacade.map(source.getLongTermHistoryIntervalType(), Oslp.LongTermIntervalType.class)); } if (source.getPreferredLinkType() != null) { setConfigurationRequest .setPreferredLinkType(this.mapperFacade.map(source.getPreferredLinkType(), Oslp.LinkType.class)); } if (source.getMeterType() != null) { setConfigurationRequest.setMeterType(this.mapperFacade.map(source.getMeterType(), Oslp.MeterType.class)); } if (source.getAstroGateSunRiseOffset() != null) { setConfigurationRequest.setAstroGateSunRiseOffset(source.getAstroGateSunRiseOffset() * SECONDS_PER_MINUTE); } if (source.getAstroGateSunSetOffset() != null) { setConfigurationRequest.setAstroGateSunSetOffset(source.getAstroGateSunSetOffset() * SECONDS_PER_MINUTE); } if (source.isAutomaticSummerTimingEnabled() != null) { setConfigurationRequest.setIsAutomaticSummerTimingEnabled(source.isAutomaticSummerTimingEnabled()); } if (source.getCommunicationNumberOfRetries() != null) { setConfigurationRequest.setCommunicationNumberOfRetries(source.getCommunicationNumberOfRetries()); } if 
(source.getCommunicationPauseTimeBetweenConnectionTrials() != null) { setConfigurationRequest.setCommunicationPauseTimeBetweenConnectionTrials( source.getCommunicationPauseTimeBetweenConnectionTrials()); } if (source.getCommunicationTimeout() != null) { setConfigurationRequest.setCommunicationTimeout(source.getCommunicationTimeout()); } if (source.getDeviceFixedIp() != null) { setConfigurationRequest.setDeviceFixIpValue( this.convertTextualIpAddressToByteString(source.getDeviceFixedIp().getIpAddress())); setConfigurationRequest .setNetMask(this.convertTextualIpAddressToByteString(source.getDeviceFixedIp().getNetMask())); setConfigurationRequest .setGateWay(this.convertTextualIpAddressToByteString(source.getDeviceFixedIp().getGateWay())); } if (source.isDhcpEnabled() != null) { setConfigurationRequest.setIsDhcpEnabled(source.isDhcpEnabled()); } // if (source.isTlsEnabled() != null) { // setConfigurationRequest.setIsTlsEnabled(source.isTlsEnabled()); // } // if (source.getTlsPortNumber() != null) { // setConfigurationRequest.setOslpBindPortNumber(source.getTlsPortNumber()); // } // if (source.getCommonNameString() != null) { // setConfigurationRequest.setCommonNameString(source.getCommonNameString()); // } if (source.getOsgpPortNumber() != null) { setConfigurationRequest.setOsgpPortNumber(source.getOsgpPortNumber()); } if (source.getOsgpIpAddres() != null) { setConfigurationRequest .setOspgIpAddress(this.convertTextualIpAddressToByteString(source.getOsgpIpAddres())); } if (source.isRelayRefreshing() != null) { setConfigurationRequest.setRelayRefreshing(source.isRelayRefreshing()); } if (source.getSummerTimeDetails() != null) { final String summerTimeDetails = this.convertSummerTimeWinterTimeDetails(source.getSummerTimeDetails()); setConfigurationRequest.setSummerTimeDetails(summerTimeDetails); } if (source.isTestButtonEnabled() != null) { setConfigurationRequest.setIsTestButtonEnabled(source.isTestButtonEnabled()); } if (source.getTimeSyncFrequency() != null) { setConfigurationRequest.setTimeSyncFrequency(source.getTimeSyncFrequency()); } if (source.getWinterTimeDetails() != null) { final String winterTimeDetails = this.convertSummerTimeWinterTimeDetails(source.getWinterTimeDetails()); setConfigurationRequest.setWinterTimeDetails(winterTimeDetails); } if (source.getSwitchingDelays() != null) { setConfigurationRequest.addAllSwitchingDelay(source.getSwitchingDelays()); } if (source.getRelayLinking() != null) { setConfigurationRequest .addAllRelayLinking(this.mapperFacade.mapAsList(source.getRelayLinking(), Oslp.RelayMatrix.class)); } return setConfigurationRequest.build(); } private ByteString convertTextualIpAddressToByteString(final String ipAddress) { try { LOGGER.info("textual IP address or netmask: {}", ipAddress); final InetAddress inetAddress = InetAddress.getByName(ipAddress); final byte[] bytes = inetAddress.getAddress(); LOGGER.info("bytes.length: {}", bytes.length); for (final byte b : bytes) { LOGGER.info("byte: {}", b); } return ByteString.copyFrom(bytes); } catch (final UnknownHostException e) { LOGGER.error("UnknownHostException", e); return null; } } // @formatter:off /* * SummerTimeDetails/WinterTimeDetails string: MMWHHmi * * where: (note, northern hemisphere summer begins at the end of March) MM: * month W: day of the week (0- Monday, 6- Sunday) HH: hour of the changing * time mi: minutes of the changing time * * Default value for summer time: 0360100 Default value for winter time: * 1060200 */ // @formatter:on private String convertSummerTimeWinterTimeDetails(final DateTime 
dateTime) { LOGGER.info("dateTime: {}", dateTime); final String formattedTimeDetails = String.format("%02d", dateTime.getMonthOfYear()) + (dateTime.getDayOfWeek() - 1) + String.format("%02d", dateTime.getHourOfDay()) + String.format("%02d", dateTime.getMinuteOfHour()); LOGGER.info("formattedTimeDetails: {}", formattedTimeDetails); return formattedTimeDetails; } }
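To make the MMWHHmi encoding above concrete, here is a standalone sketch (hypothetical class name) that applies the same formatting logic to the 2019 European summer-time changeover, the last Sunday of March at 01:00 UTC:

// Standalone sketch of the MMWHHmi encoding documented above; not part of the converter.
package org.opensmartgridplatform.adapter.protocol.oslp.elster.application.mapping;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;

public class SummerTimeDetailsExample {
    public static void main(String[] args) {
        // 2019-03-31 was a Sunday: month 03, Joda day-of-week 7 (Sunday) encodes as 6.
        // UTC is used so the instant exists regardless of the JVM's default time zone.
        final DateTime changeover = new DateTime(2019, 3, 31, 1, 0, DateTimeZone.UTC);
        final String encoded = String.format("%02d", changeover.getMonthOfYear())
                + (changeover.getDayOfWeek() - 1)
                + String.format("%02d", changeover.getHourOfDay())
                + String.format("%02d", changeover.getMinuteOfHour());
        System.out.println(encoded); // prints 0360100, the documented summer-time default
    }
}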
// // This file was generated by the JavaTM Architecture for XML Binding (JAXB) Reference Implementation, v2.3.2 // See <a href="https://javaee.github.io/jaxb-v2/">https://javaee.github.io/jaxb-v2/</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2019.02.03 at 11:14:53 PM CET // package net.opengis.gml; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlSeeAlso; import javax.xml.bind.annotation.XmlType; /** * The abstract supertype for temporal complexes. * * <p>Java class for AbstractTimeComplexType complex type. * * <p>The following schema fragment specifies the expected content contained within this class. * * <pre> * &lt;complexType name="AbstractTimeComplexType"&gt; * &lt;complexContent&gt; * &lt;extension base="{http://www.opengis.net/gml}AbstractTimeObjectType"&gt; * &lt;/extension&gt; * &lt;/complexContent&gt; * &lt;/complexType&gt; * </pre> * * */ @XmlAccessorType(XmlAccessType.FIELD) @XmlType(name = "AbstractTimeComplexType") @XmlSeeAlso({ TimeTopologyComplexType.class }) public abstract class AbstractTimeComplexType extends AbstractTimeObjectType { }
package com.iptv.ijkplayer; import android.content.Context; import android.support.annotation.NonNull; import android.view.View; import java.lang.ref.WeakReference; import com.iptv.browser.R; public final class MeasureHelper { private WeakReference<View> mWeakView; private int mVideoWidth; private int mVideoHeight; private int mVideoSarNum; private int mVideoSarDen; private int mVideoRotationDegree; private int mMeasuredWidth; private int mMeasuredHeight; private int mCurrentAspectRatio = IRenderView.AR_ASPECT_FIT_PARENT; public MeasureHelper(View view) { mWeakView = new WeakReference<View>(view); } public View getView() { if (mWeakView == null) return null; return mWeakView.get(); } public void setVideoSize(int videoWidth, int videoHeight) { mVideoWidth = videoWidth; mVideoHeight = videoHeight; } public void setVideoSampleAspectRatio(int videoSarNum, int videoSarDen) { mVideoSarNum = videoSarNum; mVideoSarDen = videoSarDen; } public void setVideoRotation(int videoRotationDegree) { mVideoRotationDegree = videoRotationDegree; } /** * Must be called by View.onMeasure(int, int) * * @param widthMeasureSpec * @param heightMeasureSpec */ public void doMeasure(int widthMeasureSpec, int heightMeasureSpec) { //Log.i("@@@@", "onMeasure(" + MeasureSpec.toString(widthMeasureSpec) + ", " // + MeasureSpec.toString(heightMeasureSpec) + ")"); if (mVideoRotationDegree == 90 || mVideoRotationDegree == 270) { int tempSpec = widthMeasureSpec; widthMeasureSpec = heightMeasureSpec; heightMeasureSpec = tempSpec; } int width = View.getDefaultSize(mVideoWidth, widthMeasureSpec); int height = View.getDefaultSize(mVideoHeight, heightMeasureSpec); if (mCurrentAspectRatio == IRenderView.AR_MATCH_PARENT) { width = widthMeasureSpec; height = heightMeasureSpec; } else if (mVideoWidth > 0 && mVideoHeight > 0) { int widthSpecMode = View.MeasureSpec.getMode(widthMeasureSpec); int widthSpecSize = View.MeasureSpec.getSize(widthMeasureSpec); int heightSpecMode = View.MeasureSpec.getMode(heightMeasureSpec); int heightSpecSize = View.MeasureSpec.getSize(heightMeasureSpec); if (widthSpecMode == View.MeasureSpec.AT_MOST && heightSpecMode == View.MeasureSpec.AT_MOST) { float specAspectRatio = (float) widthSpecSize / (float) heightSpecSize; float displayAspectRatio; switch (mCurrentAspectRatio) { case IRenderView.AR_16_9_FIT_PARENT: displayAspectRatio = 16.0f / 9.0f; if (mVideoRotationDegree == 90 || mVideoRotationDegree == 270) displayAspectRatio = 1.0f / displayAspectRatio; break; case IRenderView.AR_4_3_FIT_PARENT: displayAspectRatio = 4.0f / 3.0f; if (mVideoRotationDegree == 90 || mVideoRotationDegree == 270) displayAspectRatio = 1.0f / displayAspectRatio; break; case IRenderView.AR_ASPECT_FIT_PARENT: case IRenderView.AR_ASPECT_FILL_PARENT: case IRenderView.AR_ASPECT_WRAP_CONTENT: default: displayAspectRatio = (float) mVideoWidth / (float) mVideoHeight; if (mVideoSarNum > 0 && mVideoSarDen > 0) displayAspectRatio = displayAspectRatio * mVideoSarNum / mVideoSarDen; break; } boolean shouldBeWider = displayAspectRatio > specAspectRatio; switch (mCurrentAspectRatio) { case IRenderView.AR_ASPECT_FIT_PARENT: case IRenderView.AR_16_9_FIT_PARENT: case IRenderView.AR_4_3_FIT_PARENT: if (shouldBeWider) { // too wide, fix width width = widthSpecSize; height = (int) (width / displayAspectRatio); } else { // too high, fix height height = heightSpecSize; width = (int) (height * displayAspectRatio); } break; case IRenderView.AR_ASPECT_FILL_PARENT: if (shouldBeWider) { // not high enough, fix height height = heightSpecSize; width = (int) 
(height * displayAspectRatio); } else { // not wide enough, fix width width = widthSpecSize; height = (int) (width / displayAspectRatio); } break; case IRenderView.AR_ASPECT_WRAP_CONTENT: default: if (shouldBeWider) { // too wide, fix width width = Math.min(mVideoWidth, widthSpecSize); height = (int) (width / displayAspectRatio); } else { // too high, fix height height = Math.min(mVideoHeight, heightSpecSize); width = (int) (height * displayAspectRatio); } break; } } else if (widthSpecMode == View.MeasureSpec.EXACTLY && heightSpecMode == View.MeasureSpec.EXACTLY) { // the size is fixed width = widthSpecSize; height = heightSpecSize; // for compatibility, we adjust size based on aspect ratio if (mVideoWidth * height < width * mVideoHeight) { //Log.i("@@@", "image too wide, correcting"); width = height * mVideoWidth / mVideoHeight; } else if (mVideoWidth * height > width * mVideoHeight) { //Log.i("@@@", "image too tall, correcting"); height = width * mVideoHeight / mVideoWidth; } } else if (widthSpecMode == View.MeasureSpec.EXACTLY) { // only the width is fixed, adjust the height to match aspect ratio if possible width = widthSpecSize; height = width * mVideoHeight / mVideoWidth; if (heightSpecMode == View.MeasureSpec.AT_MOST && height > heightSpecSize) { // couldn't match aspect ratio within the constraints height = heightSpecSize; } } else if (heightSpecMode == View.MeasureSpec.EXACTLY) { // only the height is fixed, adjust the width to match aspect ratio if possible height = heightSpecSize; width = height * mVideoWidth / mVideoHeight; if (widthSpecMode == View.MeasureSpec.AT_MOST && width > widthSpecSize) { // couldn't match aspect ratio within the constraints width = widthSpecSize; } } else { // neither the width nor the height are fixed, try to use actual video size width = mVideoWidth; height = mVideoHeight; if (heightSpecMode == View.MeasureSpec.AT_MOST && height > heightSpecSize) { // too tall, decrease both width and height height = heightSpecSize; width = height * mVideoWidth / mVideoHeight; } if (widthSpecMode == View.MeasureSpec.AT_MOST && width > widthSpecSize) { // too wide, decrease both width and height width = widthSpecSize; height = width * mVideoHeight / mVideoWidth; } } } else { // no size yet, just adopt the given spec sizes } mMeasuredWidth = width; mMeasuredHeight = height; } public int getMeasuredWidth() { return mMeasuredWidth; } public int getMeasuredHeight() { return mMeasuredHeight; } public void setAspectRatio(int aspectRatio) { mCurrentAspectRatio = aspectRatio; } @NonNull public static String getAspectRatioText(Context context, int aspectRatio) { String text; switch (aspectRatio) { case IRenderView.AR_ASPECT_FIT_PARENT: text = context.getString(R.string.VideoView_ar_aspect_fit_parent); break; case IRenderView.AR_ASPECT_FILL_PARENT: text = context.getString(R.string.VideoView_ar_aspect_fill_parent); break; case IRenderView.AR_ASPECT_WRAP_CONTENT: text = context.getString(R.string.VideoView_ar_aspect_wrap_content); break; case IRenderView.AR_MATCH_PARENT: text = context.getString(R.string.VideoView_ar_match_parent); break; case IRenderView.AR_16_9_FIT_PARENT: text = context.getString(R.string.VideoView_ar_16_9_fit_parent); break; case IRenderView.AR_4_3_FIT_PARENT: text = context.getString(R.string.VideoView_ar_4_3_fit_parent); break; default: text = context.getString(R.string.N_A); break; } return text; } }
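A sketch of how MeasureHelper is intended to be driven from a custom view's onMeasure, per the doMeasure contract above; the view class here is hypothetical:

// Illustrative only: the view class is an assumption, not part of the library.
package com.iptv.ijkplayer;

import android.content.Context;
import android.view.SurfaceView;

public class VideoSurfaceView extends SurfaceView {
    private final MeasureHelper mMeasureHelper = new MeasureHelper(this);

    public VideoSurfaceView(Context context) {
        super(context);
    }

    public void setVideoSize(int videoWidth, int videoHeight) {
        mMeasureHelper.setVideoSize(videoWidth, videoHeight);
        requestLayout(); // trigger a fresh measure pass with the new dimensions
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        // Delegate the aspect-ratio math to MeasureHelper, then report its result.
        mMeasureHelper.doMeasure(widthMeasureSpec, heightMeasureSpec);
        setMeasuredDimension(mMeasureHelper.getMeasuredWidth(),
                mMeasureHelper.getMeasuredHeight());
    }
}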
/* * Copyright 2013-2017 consulo.io * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package consulo.web.gwt.client.ui; import com.google.gwt.event.logical.shared.ResizeEvent; import com.google.gwt.event.logical.shared.ResizeHandler; import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.Widget; import com.vaadin.client.ConnectorHierarchyChangeEvent; import com.vaadin.client.communication.StateChangeEvent; import consulo.web.gwt.client.util.ArrayUtil2; import consulo.web.gwt.client.util.GwtUIUtil; import consulo.web.gwt.shared.ui.state.layout.SplitLayoutState; import javax.annotation.Nullable; import java.util.List; /** * @author VISTALL * @since 13-Sep-17 */ public abstract class GwtSplitLayoutImplConnector extends GwtLayoutConnector implements ResizeHandler { private HandlerRegistration myCloseRegister; @Override public void onConnectorHierarchyChange(ConnectorHierarchyChangeEvent connectorHierarchyChangeEvent) { List<Widget> widgets = GwtUIUtil.remapWidgets(this); setWidget(getWidget(), ArrayUtil2.safeGet(widgets, 0), ArrayUtil2.safeGet(widgets, 1)); } private void setWidget(GwtSplitLayoutImpl panel, @Nullable Widget o1, @Nullable Widget o2) { if (o1 != null) { GwtUIUtil.fill(o1); } panel.setFirstWidget(o1); if (o2 != null) { GwtUIUtil.fill(o2); } panel.setSecondWidget(o2); } @Override public void onStateChanged(StateChangeEvent stateChangeEvent) { super.onStateChanged(stateChangeEvent); getWidget().setSplitPosition(getState().myProportion + "%"); } @Override protected void init() { super.init(); myCloseRegister = Window.addResizeHandler(this); } @Override public void onUnregister() { super.onUnregister(); myCloseRegister.removeHandler(); } @Override public SplitLayoutState getState() { return (SplitLayoutState)super.getState(); } @Override public GwtSplitLayoutImpl getWidget() { return (GwtSplitLayoutImpl)super.getWidget(); } @Override public void onResize(ResizeEvent event) { getWidget().updateOnResize(); } }
/******************************************************************************* * Copyright 2018 IIT-CNR * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package oasis.names.tc.xacml.core.schema.wd_17; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlAccessType; import javax.xml.bind.annotation.XmlAccessorType; import javax.xml.bind.annotation.XmlAttribute; import javax.xml.bind.annotation.XmlElementRef; import javax.xml.bind.annotation.XmlType; @XmlAccessorType( XmlAccessType.FIELD ) @XmlType( name = "VariableDefinitionType", propOrder = { "expression" } ) public class VariableDefinitionType { @XmlElementRef( name = "Expression", namespace = "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17", type = JAXBElement.class ) protected JAXBElement<?> expression; @XmlAttribute( name = "VariableId", required = true ) protected String variableId; public JAXBElement<?> getExpression() { // NOSONAR return expression; } public void setExpression( JAXBElement<?> value ) { this.expression = value; } public String getVariableId() { return variableId; } public void setVariableId( String value ) { this.variableId = value; } }
/* * Copyright 2013-2018 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.elastictranscoder.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.AmazonWebServiceRequest; /** * <p> * The <code>ListJobsByStatusRequest</code> structure. * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListJobsByStatusRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable { /** * <p> * To get information about all of the jobs associated with the current AWS account that have a given status, * specify the following status: <code>Submitted</code>, <code>Progressing</code>, <code>Complete</code>, * <code>Canceled</code>, or <code>Error</code>. * </p> */ private String status; /** * <p> * To list jobs in chronological order by the date and time that they were submitted, enter <code>true</code>. To * list jobs in reverse chronological order, enter <code>false</code>. * </p> */ private String ascending; /** * <p> * When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. * </p> */ private String pageToken; /** * <p> * To get information about all of the jobs associated with the current AWS account that have a given status, * specify the following status: <code>Submitted</code>, <code>Progressing</code>, <code>Complete</code>, * <code>Canceled</code>, or <code>Error</code>. * </p> * * @param status * To get information about all of the jobs associated with the current AWS account that have a given status, * specify the following status: <code>Submitted</code>, <code>Progressing</code>, <code>Complete</code>, * <code>Canceled</code>, or <code>Error</code>. */ public void setStatus(String status) { this.status = status; } /** * <p> * To get information about all of the jobs associated with the current AWS account that have a given status, * specify the following status: <code>Submitted</code>, <code>Progressing</code>, <code>Complete</code>, * <code>Canceled</code>, or <code>Error</code>. * </p> * * @return To get information about all of the jobs associated with the current AWS account that have a given * status, specify the following status: <code>Submitted</code>, <code>Progressing</code>, * <code>Complete</code>, <code>Canceled</code>, or <code>Error</code>. */ public String getStatus() { return this.status; } /** * <p> * To get information about all of the jobs associated with the current AWS account that have a given status, * specify the following status: <code>Submitted</code>, <code>Progressing</code>, <code>Complete</code>, * <code>Canceled</code>, or <code>Error</code>. * </p> * * @param status * To get information about all of the jobs associated with the current AWS account that have a given status, * specify the following status: <code>Submitted</code>, <code>Progressing</code>, <code>Complete</code>, * <code>Canceled</code>, or <code>Error</code>. 
* @return Returns a reference to this object so that method calls can be chained together. */ public ListJobsByStatusRequest withStatus(String status) { setStatus(status); return this; } /** * <p> * To list jobs in chronological order by the date and time that they were submitted, enter <code>true</code>. To * list jobs in reverse chronological order, enter <code>false</code>. * </p> * * @param ascending * To list jobs in chronological order by the date and time that they were submitted, enter <code>true</code> * . To list jobs in reverse chronological order, enter <code>false</code>. */ public void setAscending(String ascending) { this.ascending = ascending; } /** * <p> * To list jobs in chronological order by the date and time that they were submitted, enter <code>true</code>. To * list jobs in reverse chronological order, enter <code>false</code>. * </p> * * @return To list jobs in chronological order by the date and time that they were submitted, enter * <code>true</code>. To list jobs in reverse chronological order, enter <code>false</code>. */ public String getAscending() { return this.ascending; } /** * <p> * To list jobs in chronological order by the date and time that they were submitted, enter <code>true</code>. To * list jobs in reverse chronological order, enter <code>false</code>. * </p> * * @param ascending * To list jobs in chronological order by the date and time that they were submitted, enter <code>true</code> * . To list jobs in reverse chronological order, enter <code>false</code>. * @return Returns a reference to this object so that method calls can be chained together. */ public ListJobsByStatusRequest withAscending(String ascending) { setAscending(ascending); return this; } /** * <p> * When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. * </p> * * @param pageToken * When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. */ public void setPageToken(String pageToken) { this.pageToken = pageToken; } /** * <p> * When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. * </p> * * @return When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. */ public String getPageToken() { return this.pageToken; } /** * <p> * When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. * </p> * * @param pageToken * When Elastic Transcoder returns more than one page of results, use <code>pageToken</code> in subsequent * <code>GET</code> requests to get each successive page of results. * @return Returns a reference to this object so that method calls can be chained together. */ public ListJobsByStatusRequest withPageToken(String pageToken) { setPageToken(pageToken); return this; } /** * Returns a string representation of this object; useful for testing and debugging. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getStatus() != null) sb.append("Status: ").append(getStatus()).append(","); if (getAscending() != null) sb.append("Ascending: ").append(getAscending()).append(","); if (getPageToken() != null) sb.append("PageToken: ").append(getPageToken()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListJobsByStatusRequest == false) return false; ListJobsByStatusRequest other = (ListJobsByStatusRequest) obj; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; if (other.getAscending() == null ^ this.getAscending() == null) return false; if (other.getAscending() != null && other.getAscending().equals(this.getAscending()) == false) return false; if (other.getPageToken() == null ^ this.getPageToken() == null) return false; if (other.getPageToken() != null && other.getPageToken().equals(this.getPageToken()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getStatus() == null) ? 0 : getStatus().hashCode()); hashCode = prime * hashCode + ((getAscending() == null) ? 0 : getAscending().hashCode()); hashCode = prime * hashCode + ((getPageToken() == null) ? 0 : getPageToken().hashCode()); return hashCode; } @Override public ListJobsByStatusRequest clone() { return (ListJobsByStatusRequest) super.clone(); } }
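A short usage sketch for the request above, paginating with pageToken. Client construction via AmazonElasticTranscoderClientBuilder and the result accessors follow standard SDK v1 patterns, but treat the exact calls as assumptions rather than guaranteed API:

import com.amazonaws.services.elastictranscoder.AmazonElasticTranscoder;
import com.amazonaws.services.elastictranscoder.AmazonElasticTranscoderClientBuilder;
import com.amazonaws.services.elastictranscoder.model.ListJobsByStatusRequest;
import com.amazonaws.services.elastictranscoder.model.ListJobsByStatusResult;

public class ListJobsByStatusExample {
    public static void main(String[] args) {
        AmazonElasticTranscoder client = AmazonElasticTranscoderClientBuilder.defaultClient();
        String pageToken = null;
        do {
            ListJobsByStatusResult result = client.listJobsByStatus(
                new ListJobsByStatusRequest()
                    .withStatus("Complete")  // one of Submitted, Progressing, Complete, Canceled, Error
                    .withAscending("false")  // newest jobs first
                    .withPageToken(pageToken));
            System.out.println("Fetched " + result.getJobs().size() + " jobs");
            pageToken = result.getNextPageToken(); // null when there are no more pages
        } while (pageToken != null);
    }
}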
/*
 * Copyright 1999-2019 Seata.io Group.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.seata.integration.dubbo.alibaba;

import com.alibaba.dubbo.common.extension.Activate;
import com.alibaba.dubbo.rpc.Filter;
import com.alibaba.dubbo.rpc.Invocation;
import com.alibaba.dubbo.rpc.Invoker;
import com.alibaba.dubbo.rpc.Result;
import com.alibaba.dubbo.rpc.RpcContext;
import com.alibaba.dubbo.rpc.RpcException;
import io.seata.core.context.RootContext;
import io.seata.core.constants.DubboConstants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * The type Transaction propagation filter.
 *
 * @author sharajava
 */
@Activate(group = {DubboConstants.PROVIDER, DubboConstants.CONSUMER}, order = 100)
public class AlibabaDubboTransactionPropagationFilter implements Filter {

    private static final Logger LOGGER = LoggerFactory.getLogger(AlibabaDubboTransactionPropagationFilter.class);

    @Override
    public Result invoke(Invoker<?> invoker, Invocation invocation) throws RpcException {
        if (!DubboConstants.ALIBABADUBBO) {
            return invoker.invoke(invocation);
        }
        String xid = RootContext.getXID();
        String rpcXid = getRpcXid();
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("xid in RootContext[{}] xid in RpcContext[{}]", xid, rpcXid);
        }
        boolean bind = false;
        if (xid != null) {
            // Consumer side: propagate the local XID to the provider through the RPC attachments.
            RpcContext.getContext().setAttachment(RootContext.KEY_XID, xid);
        } else {
            if (rpcXid != null) {
                // Provider side: bind the XID received from the consumer to the local context.
                RootContext.bind(rpcXid);
                bind = true;
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("bind[{}] to RootContext", rpcXid);
                }
            }
        }
        try {
            return invoker.invoke(invocation);
        } finally {
            if (bind) {
                String unbindXid = RootContext.unbind();
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("unbind[{}] from RootContext", unbindXid);
                }
                if (!rpcXid.equalsIgnoreCase(unbindXid)) {
                    LOGGER.warn("xid changed during RPC from {} to {}", rpcXid, unbindXid);
                    if (unbindXid != null) {
                        RootContext.bind(unbindXid);
                        LOGGER.warn("bind [{}] back to RootContext", unbindXid);
                    }
                }
            }
        }
    }

    /**
     * Gets the XID attached to the current RPC invocation, checking both the
     * original attachment key and its lower-case form.
     *
     * @return the XID from the RPC attachments, or null if none is present
     */
    private String getRpcXid() {
        String rpcXid = RpcContext.getContext().getAttachment(RootContext.KEY_XID);
        if (rpcXid == null) {
            rpcXid = RpcContext.getContext().getAttachment(RootContext.KEY_XID.toLowerCase());
        }
        return rpcXid;
    }
}
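To make the bind/unbind contract above concrete, here is a minimal sketch, outside Dubbo, using only the RootContext calls that appear in the filter, of what happens on the provider side when an XID arrives with a request. The xid value is illustrative:

import io.seata.core.context.RootContext;

public class XidPropagationSketch {
    public static void handleIncomingRequest(String rpcXid) {
        boolean bind = false;
        if (RootContext.getXID() == null && rpcXid != null) {
            RootContext.bind(rpcXid); // join the caller's global transaction
            bind = true;
        }
        try {
            // ... business logic runs inside the propagated transaction ...
        } finally {
            if (bind) {
                RootContext.unbind(); // leave the context as we found it
            }
        }
    }

    public static void main(String[] args) {
        handleIncomingRequest("192.168.0.1:8091:2000042936"); // illustrative XID format
    }
}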
/*    Copyright (c) 2016-2017 Slamtec Co., Ltd. All Rights Reserved.    Licensed under the Apache License, Version 2.0 (the "License");    you may not use this file except in compliance with the License.    You may obtain a copy of the License at        http://www.apache.org/licenses/LICENSE-2.0    Unless required by applicable law or agreed to in writing, software    distributed under the License is distributed on an "AS IS" BASIS,    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.    See the License for the specific language governing permissions and    limitations under the License. */ package com.slamtec.android.uicommander.open.views.controls; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.graphics.Point; import com.slamtec.android.uicommander.open.agent.RPSlamwareSdpAgent; import com.slamtec.slamware.geometry.PointF; import com.slamtec.slamware.robot.LaserPoint; import com.slamtec.slamware.robot.LaserScan; import com.slamtec.slamware.robot.Pose; import java.lang.ref.WeakReference; import java.util.Vector; public class RPLaserScanView extends RPSlamwareBaseView { private final static String TAG = "RPLaserScanView"; private LaserScan laserScan; private Pose pose; private Paint paint; private PointF centerPosition; private Point uiCenter; public RPLaserScanView(Context context, WeakReference<RPSlamwareSdpAgent> agent) { super(context, agent); paint = new Paint(Paint.ANTI_ALIAS_FLAG); centerPosition = new PointF(); uiCenter = new Point(); setBackgroundColor(Color.TRANSPARENT); setWillNotDraw(false); } public void updateLaserScan(LaserScan laserScan, Pose pose) { this.laserScan = laserScan; this.pose = pose; invalidate(); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); if (laserScan != null) { Vector<LaserPoint> scanPoints = laserScan.getLaserPoints(); Pose robotPose = this.pose; paint.setColor(Color.RED); for (LaserPoint scanPoint : scanPoints) { if (!scanPoint.isValid()) { continue; } double phi = scanPoint.getAngle() + robotPose.getYaw(); double r = scanPoint.getDistance(); double physicalX = robotPose.getX() + r * Math.cos(phi); double physicalY = robotPose.getY() + r * Math.sin(phi); centerPosition.setX((float)physicalX); centerPosition.setY((float)physicalY); uiCenter = layoutRotatedCoordinateForPhysicalCoordinate( centerPosition, getLayoutOffset()); canvas.drawRect(uiCenter.x - 1, uiCenter.y - 1, uiCenter.x + 1, uiCenter.y + 1, paint); } } } }
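The onDraw loop above converts each laser sample from the robot's polar frame into world coordinates before plotting. A standalone sketch of that transform, with the same math pulled out for clarity (the sample values are made up):

public class LaserPointTransform {
    /** Converts a range/bearing sample, taken from a robot at (px, py, yaw), to world x/y. */
    public static double[] toWorld(double px, double py, double yaw, double range, double bearing) {
        double phi = bearing + yaw;             // bearing is relative to the robot's heading
        double x = px + range * Math.cos(phi);  // project the range along the absolute angle
        double y = py + range * Math.sin(phi);
        return new double[] { x, y };
    }

    public static void main(String[] args) {
        // Robot at (1, 2) facing 90 degrees; a 3 m return straight ahead lands at (1, 5).
        double[] p = toWorld(1.0, 2.0, Math.PI / 2, 3.0, 0.0);
        System.out.printf("x=%.2f y=%.2f%n", p[0], p[1]);
    }
}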
package org.apache.spark.sql.mlsql.sources.hbase.wal.io;

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import org.apache.spark.sql.mlsql.sources.hbase.wal.RawHBaseWALEvent;

import java.io.IOException;
import java.io.StringWriter;

/**
 * 12/12/2019 WilliamZhu(allwefantasy@gmail.com)
 */
public abstract class AbstractEventWriter {

    private static final JsonFactory JSON_FACTORY = new JsonFactory();
    protected JsonGenerator jsonGenerator;

    protected void startJson(StringWriter outputStream, RawHBaseWALEvent event) throws IOException {
        jsonGenerator = createJsonGenerator(outputStream);
        jsonGenerator.writeStartObject();

        // The event carries either a put or a delete; derive the type from which one is present.
        // (The ternary can never yield null, so no null branch is needed here.)
        String eventType = event.put() != null ? "put" : "delete";
        jsonGenerator.writeStringField("type", eventType);

        Long timestamp = event.time();
        if (timestamp == null) {
            jsonGenerator.writeNullField("timestamp");
        } else {
            jsonGenerator.writeNumberField("timestamp", timestamp);
        }

        if (event.db() != null) {
            jsonGenerator.writeStringField("databaseName", event.db());
            jsonGenerator.writeStringField("tableName", event.table());
            jsonGenerator.writeNullField("schema");
        } else {
            jsonGenerator.writeNullField("databaseName");
            jsonGenerator.writeNullField("tableName");
            jsonGenerator.writeNullField("schema");
        }
    }

    protected void endJson() throws IOException {
        if (jsonGenerator == null) {
            throw new IOException("endJson called without a JsonGenerator");
        }
        jsonGenerator.writeEndObject();
        jsonGenerator.flush();
        jsonGenerator.close();
    }

    private JsonGenerator createJsonGenerator(StringWriter out) throws IOException {
        return JSON_FACTORY.createGenerator(out);
    }

    public abstract java.util.List<String> writeEvent(RawHBaseWALEvent event);
}
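A minimal concrete-subclass sketch showing the intended startJson/endJson lifecycle. Only methods visible in the abstract class above are used; the class name and single-string result shape are assumptions:

// Hypothetical concrete writer: serializes one WAL event to a single JSON string.
package org.apache.spark.sql.mlsql.sources.hbase.wal.io;

import org.apache.spark.sql.mlsql.sources.hbase.wal.RawHBaseWALEvent;

import java.io.IOException;
import java.io.StringWriter;
import java.util.Collections;
import java.util.List;

public class SimpleEventWriter extends AbstractEventWriter {
    @Override
    public List<String> writeEvent(RawHBaseWALEvent event) {
        StringWriter out = new StringWriter();
        try {
            startJson(out, event); // writes the type/timestamp/database/table header fields
            // ... a real writer would append cell-level fields here ...
            endJson();             // closes the object and flushes the generator
        } catch (IOException e) {
            throw new RuntimeException("Failed to serialize WAL event", e);
        }
        return Collections.singletonList(out.toString());
    }
}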
package com.gwong.thereisaplace.data; public class XmlData { private String uid; private String name; private String writer; private String msg; private String reg_date; public XmlData(String uid, String name, String writer, String msg, String reg_date) { super(); this.uid = uid; this.name = name; this.writer = writer; this.msg = msg; this.reg_date = reg_date; } public XmlData(){ } public String getUid() { return uid; } public void setUid(String uid) { this.uid = uid; } public String getName() { return name; } public void setName(String name) { this.name = name; } public String getWriter() { return writer; } public void setWriter(String writer) { this.writer = writer; } public String getMsg() { return msg; } public void setMsg(String msg) { this.msg = msg; } public String getReg_date() { return reg_date; } public void setReg_date(String reg_date) { this.reg_date = reg_date; } }
package seedu.address.logic.parser; /** * Contains Command Line Interface (CLI) syntax definitions common to multiple commands */ public class CliSyntax { /* Prefix definitions */ public static final Prefix PREFIX_NAME = new Prefix("n/"); public static final Prefix PREFIX_QUANTITY = new Prefix("q/"); public static final Prefix PREFIX_EXPIRY = new Prefix("e/"); public static final Prefix PREFIX_COMPANY = new Prefix("c/"); public static final Prefix PREFIX_TAG = new Prefix("t/"); public static final Prefix PREFIX_BATCHNUMBER = new Prefix("b/"); public static final Prefix PREFIX_FILE = new Prefix("f/"); public static final Prefix PREFIX_PROPERTY = new Prefix("p/"); public static final Prefix PREFIX_DIRECTION = new Prefix("d/"); }
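A hedged illustration of how such prefixes are typically consumed. The tokenizer below is a stand-alone stand-in for demonstration (it splits on whitespace, so multi-word values are out of scope); it is not the parser this codebase actually pairs with CliSyntax:

import java.util.HashMap;
import java.util.Map;

public class PrefixSketch {
    /** Very small stand-in tokenizer: splits "n/Aspirin q/100" into {n/=Aspirin, q/=100}. */
    public static Map<String, String> tokenize(String args) {
        Map<String, String> values = new HashMap<String, String>();
        for (String token : args.trim().split("\\s+")) {
            int slash = token.indexOf('/');
            if (slash > 0) {
                values.put(token.substring(0, slash + 1), token.substring(slash + 1));
            }
        }
        return values;
    }

    public static void main(String[] args) {
        System.out.println(tokenize("n/Aspirin q/100 e/2025-01-01"));
        // e.g. {q/=100, n/=Aspirin, e/=2025-01-01}
    }
}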
package classifier.actions;

import java.awt.BorderLayout;
import java.awt.Dialog.ModalityType;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Window;
import java.awt.event.ActionEvent;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.text.NumberFormat;
import java.text.ParseException;

import javax.swing.AbstractAction;
import javax.swing.AbstractButton;
import javax.swing.JCheckBox;
import javax.swing.JDialog;
import javax.swing.JFormattedTextField;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JProgressBar;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.text.NumberFormatter;

import classifier.core.Common;
import classifier.core.Console;
import classifier.workers.TrainModelWorker;

public class TrainModelAction extends AbstractAction {

    private static final long serialVersionUID = -6585711817103433804L;

    protected Console console;
    protected JList<String> list;

    public TrainModelAction(JList<String> list, Console console) {
        this.list = list;
        this.console = console;
    }

    @Override
    public void actionPerformed(ActionEvent evt) {
        try {
            JPanel optionsPanel = new JPanel(new GridBagLayout());
            GridBagConstraints c = new GridBagConstraints();
            c.fill = GridBagConstraints.HORIZONTAL;
            c.weightx = 0.5;
            c.gridx = 0;
            c.gridy = 0;
            optionsPanel.add(new JLabel("Baum-Welch iterations to perform (0 = use pruning set)"), c);

            NumberFormat format = NumberFormat.getInstance();
            NumberFormatter formatter = new NumberFormatter(format) {

                private static final long serialVersionUID = -927292327813026204L;

                @Override
                public Object stringToValue(String string) throws ParseException {
                    // Treat an empty field as "no value" instead of a parse error.
                    if (string == null || string.length() == 0) {
                        return null;
                    }
                    return super.stringToValue(string);
                }
            };
            formatter.setValueClass(Integer.class);
            formatter.setMinimum(0);
            formatter.setMaximum(Integer.MAX_VALUE);
            formatter.setAllowsInvalid(false);
            formatter.setCommitsOnValidEdit(true);

            JFormattedTextField iterations = new JFormattedTextField(formatter) {

                private static final long serialVersionUID = 4365656020045455764L;

                @Override
                public Dimension getPreferredSize() {
                    return new Dimension(50, 25);
                }
            };
            iterations.setText("1");
            iterations.setEditable(true);
            iterations.getDocument().addDocumentListener(new DocumentListener() {
                @Override
                public void removeUpdate(DocumentEvent e) {
                    iterations.selectAll();
                }

                @Override
                public void insertUpdate(DocumentEvent e) {
                    iterations.selectAll();
                }

                @Override
                public void changedUpdate(DocumentEvent e) {
                    // nothing to do..
                }
            });

            c.insets = new Insets(0, 10, 0, 0); // left padding
            c.fill = GridBagConstraints.HORIZONTAL;
            c.weightx = 0.5;
            c.gridx = 1;
            c.gridy = 0;
            optionsPanel.add(iterations, c);

            JCheckBox savePartialModels = new JCheckBox("Save partial models");
            c.fill = GridBagConstraints.HORIZONTAL;
            c.ipady = 0; // reset to default
            c.weighty = 0.5; // request any extra vertical space
            c.insets = new Insets(10, 0, 0, 0); // top padding
            c.gridx = 0; // first column
            c.gridy = 1; // second row
            c.gridwidth = 1; // 1 column wide
            optionsPanel.add(savePartialModels, c);

            // Custom button text
            Object[] options = { "Start", "Abort" };
            int choice = JOptionPane.showOptionDialog(null, optionsPanel, "Train settings", JOptionPane.YES_NO_OPTION,
                    JOptionPane.QUESTION_MESSAGE, null, options, options[0]);

            if (choice == JOptionPane.YES_OPTION) {
                int index = list.getSelectedIndex();
                TrainModelWorker worker = new TrainModelWorker(index, Integer.parseInt(iterations.getText()),
                        savePartialModels.isSelected());
                console.addText("Training " + list.getSelectedValue() + " model, it may take a while...");
                Window win = SwingUtilities.getWindowAncestor((AbstractButton) evt.getSource());
                JDialog dialog = new JDialog(win, "Dialog", ModalityType.APPLICATION_MODAL);
                worker.addPropertyChangeListener(new PropertyChangeListener() {
                    @Override
                    public void propertyChange(PropertyChangeEvent evt) {
                        if (evt.getPropertyName().equals("state")) {
                            if (evt.getNewValue() == SwingWorker.StateValue.DONE) {
                                try {
                                    worker.get();
                                    console.addText("Training session ended successfully!");
                                    synchronized (Common.loaded) {
                                        console.addText(Common.loaded.get(list.getSelectedIndex()).hmm.toString());
                                    }
                                } catch (Exception ex) {
                                    console.addText("There was an error while training this model: "
                                            + ex.getCause().getMessage());
                                    ex.printStackTrace();
                                    JOptionPane.showMessageDialog(null, ex.getCause().getMessage(), "Error",
                                            JOptionPane.ERROR_MESSAGE);
                                } finally {
                                    dialog.dispose();
                                }
                            }
                        }
                    }
                });
                worker.execute();

                JProgressBar progressBar = new JProgressBar();
                progressBar.setIndeterminate(true);
                JPanel panel = new JPanel(new BorderLayout());
                panel.add(progressBar, BorderLayout.CENTER);
                panel.add(new JLabel("Please wait..."), BorderLayout.PAGE_START);
                dialog.add(panel);
                dialog.setUndecorated(true);
                dialog.pack();
                dialog.setLocationRelativeTo(win);
                dialog.setVisible(true);
            }
        } catch (NumberFormatException nex) {
            JOptionPane.showMessageDialog(null, "Please specify a valid number of runs!", "Error",
                    JOptionPane.ERROR_MESSAGE);
        } catch (Exception ex) {
            console.addText("Error while setting up the training session: " + ex.getMessage());
            JOptionPane.showMessageDialog(null, ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
        }
    }
}
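The action above uses a common Swing pattern: start a SwingWorker, listen for its DONE state change, and block input with a modal dialog that the listener disposes. A stripped-down, self-contained sketch of just that pattern:

import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.SwingUtilities;
import javax.swing.SwingWorker;
import java.awt.Dialog.ModalityType;

public class ModalWorkerSketch {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            JDialog dialog = new JDialog((java.awt.Window) null, "Working", ModalityType.APPLICATION_MODAL);
            SwingWorker<String, Void> worker = new SwingWorker<String, Void>() {
                @Override
                protected String doInBackground() throws Exception {
                    Thread.sleep(2000); // stand-in for a long-running training job
                    return "done";
                }
            };
            worker.addPropertyChangeListener(evt -> {
                // The worker fires a "state" change to DONE on the EDT when finished.
                if ("state".equals(evt.getPropertyName())
                        && evt.getNewValue() == SwingWorker.StateValue.DONE) {
                    dialog.dispose(); // unblocks setVisible(true) below
                }
            });
            worker.execute(); // start before showing the modal dialog
            dialog.add(new JLabel("Please wait..."));
            dialog.pack();
            dialog.setLocationRelativeTo(null);
            dialog.setVisible(true); // blocks until dispose()
            JOptionPane.showMessageDialog(null, "Finished");
        });
    }
}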
/* * Copyright (C) 2020 Temporal Technologies, Inc. All Rights Reserved. * * Copyright 2012-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Modifications copyright (C) 2017 Uber Technologies, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not * use this file except in compliance with the License. A copy of the License is * located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package io.temporal.workflow; import io.temporal.client.WorkflowClient; import io.temporal.testing.internal.SDKTestWorkflowRule; import io.temporal.workflow.shared.TestMultiArgWorkflowFunctions.*; import java.util.concurrent.ExecutionException; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; public class ExecuteTest { @Rule public SDKTestWorkflowRule testWorkflowRule = SDKTestWorkflowRule.newBuilder().setWorkflowTypes(TestMultiArgWorkflowImpl.class).build(); @Test public void testExecute() throws ExecutionException, InterruptedException { TestNoArgsWorkflowFunc stubF = testWorkflowRule.newWorkflowStubTimeoutOptions(TestNoArgsWorkflowFunc.class); Assert.assertEquals("func", WorkflowClient.execute(stubF::func).get()); Test1ArgWorkflowFunc stubF1 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test1ArgWorkflowFunc.class); Assert.assertEquals(1, (int) WorkflowClient.execute(stubF1::func1, 1).get()); Assert.assertEquals(1, stubF1.func1(1)); // Check that duplicated start just returns the result. Test2ArgWorkflowFunc stubF2 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test2ArgWorkflowFunc.class); Assert.assertEquals("12", WorkflowClient.execute(stubF2::func2, "1", 2).get()); Test3ArgWorkflowFunc stubF3 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test3ArgWorkflowFunc.class); Assert.assertEquals("123", WorkflowClient.execute(stubF3::func3, "1", 2, 3).get()); Test4ArgWorkflowFunc stubF4 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test4ArgWorkflowFunc.class); Assert.assertEquals("1234", WorkflowClient.execute(stubF4::func4, "1", 2, 3, 4).get()); Test5ArgWorkflowFunc stubF5 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test5ArgWorkflowFunc.class); Assert.assertEquals("12345", WorkflowClient.execute(stubF5::func5, "1", 2, 3, 4, 5).get()); Test6ArgWorkflowFunc stubF6 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test6ArgWorkflowFunc.class); Assert.assertEquals("123456", WorkflowClient.execute(stubF6::func6, "1", 2, 3, 4, 5, 6).get()); TestNoArgsWorkflowProc stubP = testWorkflowRule.newWorkflowStubTimeoutOptions(TestNoArgsWorkflowProc.class); WorkflowClient.execute(stubP::proc).get(); Test1ArgWorkflowProc stubP1 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test1ArgWorkflowProc.class); WorkflowClient.execute(stubP1::proc1, "1").get(); Test2ArgWorkflowProc stubP2 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test2ArgWorkflowProc.class); WorkflowClient.execute(stubP2::proc2, "1", 2).get(); Test3ArgWorkflowProc stubP3 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test3ArgWorkflowProc.class); WorkflowClient.execute(stubP3::proc3, "1", 2, 3).get(); Test4ArgWorkflowProc stubP4 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test4ArgWorkflowProc.class); WorkflowClient.execute(stubP4::proc4, "1", 2, 3, 4).get(); Test5ArgWorkflowProc stubP5 = 
testWorkflowRule.newWorkflowStubTimeoutOptions(Test5ArgWorkflowProc.class); WorkflowClient.execute(stubP5::proc5, "1", 2, 3, 4, 5).get(); Test6ArgWorkflowProc stubP6 = testWorkflowRule.newWorkflowStubTimeoutOptions(Test6ArgWorkflowProc.class); WorkflowClient.execute(stubP6::proc6, "1", 2, 3, 4, 5, 6).get(); Assert.assertEquals("proc", stubP.query()); Assert.assertEquals("1", stubP1.query()); Assert.assertEquals("12", stubP2.query()); Assert.assertEquals("123", stubP3.query()); Assert.assertEquals("1234", stubP4.query()); Assert.assertEquals("12345", stubP5.query()); Assert.assertEquals("123456", stubP6.query()); } }
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.raptor; import com.facebook.presto.spi.BucketFunction; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.ConnectorSplit; import com.facebook.presto.spi.Node; import com.facebook.presto.spi.PrestoException; import com.facebook.presto.spi.connector.ConnectorNodePartitioningProvider; import com.facebook.presto.spi.connector.ConnectorPartitioningHandle; import com.facebook.presto.spi.connector.ConnectorTransactionHandle; import com.facebook.presto.spi.type.Type; import com.google.common.collect.ImmutableMap; import javax.inject.Inject; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.function.ToIntFunction; import static com.facebook.presto.raptor.util.Types.checkType; import static com.facebook.presto.spi.StandardErrorCode.NO_NODES_AVAILABLE; import static com.google.common.collect.Maps.uniqueIndex; import static java.util.Objects.requireNonNull; public class RaptorNodePartitioningProvider implements ConnectorNodePartitioningProvider { private final NodeSupplier nodeSupplier; @Inject public RaptorNodePartitioningProvider(NodeSupplier nodeSupplier) { this.nodeSupplier = requireNonNull(nodeSupplier, "nodeSupplier is null"); } @Override public Map<Integer, Node> getBucketToNode(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorPartitioningHandle partitioning) { RaptorPartitioningHandle handle = checkType(partitioning, RaptorPartitioningHandle.class, "distributionHandle"); Map<String, Node> nodesById = uniqueIndex(nodeSupplier.getWorkerNodes(), Node::getNodeIdentifier); ImmutableMap.Builder<Integer, Node> bucketToNode = ImmutableMap.builder(); for (Entry<Integer, String> entry : handle.getBucketToNode().entrySet()) { Node node = nodesById.get(entry.getValue()); if (node == null) { throw new PrestoException(NO_NODES_AVAILABLE, "Node for bucket is offline: " + entry.getValue()); } bucketToNode.put(entry.getKey(), node); } return bucketToNode.build(); } @Override public ToIntFunction<ConnectorSplit> getSplitBucketFunction(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorPartitioningHandle partitioning) { return value -> checkType(value, RaptorSplit.class, "value").getBucketNumber().getAsInt(); } @Override public BucketFunction getBucketFunction(ConnectorTransactionHandle transaction, ConnectorSession session, ConnectorPartitioningHandle partitioning, List<Type> partitionChannelTypes, int bucketCount) { return new RaptorBucketFunction(bucketCount, partitionChannelTypes); } }
/* * tapi-common,tapi-dsr,tapi-path-computation,tapi-eth,tapi-virtual-network,tapi-topology,tapi-notification,tapi-oam,tapi-photonic-media,tapi-connectivity API * tapi-common,tapi-dsr,tapi-path-computation,tapi-eth,tapi-virtual-network,tapi-topology,tapi-notification,tapi-oam,tapi-photonic-media,tapi-connectivity API generated from yang definitions * * OpenAPI spec version: 1.0 * * * NOTE: This class is auto generated by the swagger code generator program. * https://github.com/swagger-api/swagger-codegen.git * Do not edit the class manually. */ package io.swagger.model; import java.util.Objects; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonCreator; import io.swagger.annotations.ApiModel; import io.swagger.annotations.ApiModelProperty; import io.swagger.model.TapiPathComputationPathObjectiveFunction; import io.swagger.model.TapiPathComputationPathServiceEndPoint; import io.swagger.model.TapiPathComputationRoutingConstraint; import io.swagger.model.TapiPathComputationTopologyConstraint; import java.util.ArrayList; import java.util.List; import javax.validation.constraints.*; /** * TapiPathComputationComputep2ppathInput */ @javax.annotation.Generated(value = "io.swagger.codegen.languages.JavaJerseyServerCodegen", date = "2018-11-14T14:58:12.974+01:00") public class TapiPathComputationComputep2ppathInput { @JsonProperty("topology-constraint") private TapiPathComputationTopologyConstraint topologyConstraint = null; @JsonProperty("routing-constraint") private TapiPathComputationRoutingConstraint routingConstraint = null; @JsonProperty("objective-function") private TapiPathComputationPathObjectiveFunction objectiveFunction = null; @JsonProperty("sep") private List<TapiPathComputationPathServiceEndPoint> sep = null; public TapiPathComputationComputep2ppathInput topologyConstraint(TapiPathComputationTopologyConstraint topologyConstraint) { this.topologyConstraint = topologyConstraint; return this; } /** * none * @return topologyConstraint **/ @JsonProperty("topology-constraint") @ApiModelProperty(value = "none") public TapiPathComputationTopologyConstraint getTopologyConstraint() { return topologyConstraint; } public void setTopologyConstraint(TapiPathComputationTopologyConstraint topologyConstraint) { this.topologyConstraint = topologyConstraint; } public TapiPathComputationComputep2ppathInput routingConstraint(TapiPathComputationRoutingConstraint routingConstraint) { this.routingConstraint = routingConstraint; return this; } /** * none * @return routingConstraint **/ @JsonProperty("routing-constraint") @ApiModelProperty(value = "none") public TapiPathComputationRoutingConstraint getRoutingConstraint() { return routingConstraint; } public void setRoutingConstraint(TapiPathComputationRoutingConstraint routingConstraint) { this.routingConstraint = routingConstraint; } public TapiPathComputationComputep2ppathInput objectiveFunction(TapiPathComputationPathObjectiveFunction objectiveFunction) { this.objectiveFunction = objectiveFunction; return this; } /** * none * @return objectiveFunction **/ @JsonProperty("objective-function") @ApiModelProperty(value = "none") public TapiPathComputationPathObjectiveFunction getObjectiveFunction() { return objectiveFunction; } public void setObjectiveFunction(TapiPathComputationPathObjectiveFunction objectiveFunction) { this.objectiveFunction = objectiveFunction; } public TapiPathComputationComputep2ppathInput sep(List<TapiPathComputationPathServiceEndPoint> sep) { this.sep = sep; return this; } public 
TapiPathComputationComputep2ppathInput addSepItem(TapiPathComputationPathServiceEndPoint sepItem) { if (this.sep == null) { this.sep = new ArrayList<TapiPathComputationPathServiceEndPoint>(); } this.sep.add(sepItem); return this; } /** * none * @return sep **/ @JsonProperty("sep") @ApiModelProperty(value = "none") public List<TapiPathComputationPathServiceEndPoint> getSep() { return sep; } public void setSep(List<TapiPathComputationPathServiceEndPoint> sep) { this.sep = sep; } @Override public boolean equals(java.lang.Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } TapiPathComputationComputep2ppathInput tapiPathComputationComputep2ppathInput = (TapiPathComputationComputep2ppathInput) o; return Objects.equals(this.topologyConstraint, tapiPathComputationComputep2ppathInput.topologyConstraint) && Objects.equals(this.routingConstraint, tapiPathComputationComputep2ppathInput.routingConstraint) && Objects.equals(this.objectiveFunction, tapiPathComputationComputep2ppathInput.objectiveFunction) && Objects.equals(this.sep, tapiPathComputationComputep2ppathInput.sep); } @Override public int hashCode() { return Objects.hash(topologyConstraint, routingConstraint, objectiveFunction, sep); } @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("class TapiPathComputationComputep2ppathInput {\n"); sb.append(" topologyConstraint: ").append(toIndentedString(topologyConstraint)).append("\n"); sb.append(" routingConstraint: ").append(toIndentedString(routingConstraint)).append("\n"); sb.append(" objectiveFunction: ").append(toIndentedString(objectiveFunction)).append("\n"); sb.append(" sep: ").append(toIndentedString(sep)).append("\n"); sb.append("}"); return sb.toString(); } /** * Convert the given object to string with each line indented by 4 spaces * (except the first line). */ private String toIndentedString(java.lang.Object o) { if (o == null) { return "null"; } return o.toString().replace("\n", "\n "); } }
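The generated model supports fluent chaining. A small sketch of building an input, assuming the generated endpoint class has the usual no-arg constructor (endpoint field details are elided since they live in other generated classes):

package io.swagger.model;

public class Computep2ppathInputExample {
    public static TapiPathComputationComputep2ppathInput buildExample() {
        // Chained setters return `this`, so the request body can be assembled in one expression.
        return new TapiPathComputationComputep2ppathInput()
            .addSepItem(new TapiPathComputationPathServiceEndPoint())  // source endpoint (fields omitted)
            .addSepItem(new TapiPathComputationPathServiceEndPoint()); // destination endpoint (fields omitted)
    }
}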
/* * Copyright (C) 2012 ZXing authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.wavesplatform.wallet.ui.zxing.common.executor; import android.annotation.TargetApi; import android.os.AsyncTask; /** * On Honeycomb and later, {@link AsyncTask} returns to serial execution by default which is undesirable. * This calls Honeycomb-only APIs to request parallel execution. */ @TargetApi(11) public final class HoneycombAsyncTaskExecInterface implements AsyncTaskExecInterface { @Override public <T> void execute(AsyncTask<T,?,?> task, T... args) { task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR, args); } }
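A small usage sketch for the interface above; the PrintTask is a made-up AsyncTask used only to demonstrate the parallel-execution request:

package com.wavesplatform.wallet.ui.zxing.common.executor;

import android.os.AsyncTask;

public class ParallelExecutionExample {
    // Made-up task purely to demonstrate the executor interface.
    static final class PrintTask extends AsyncTask<String, Void, Void> {
        @Override
        protected Void doInBackground(String... params) {
            for (String p : params) {
                System.out.println("processing " + p);
            }
            return null;
        }
    }

    public static void run() {
        AsyncTaskExecInterface executor = new HoneycombAsyncTaskExecInterface();
        // Runs on AsyncTask.THREAD_POOL_EXECUTOR instead of the default serial executor.
        executor.execute(new PrintTask(), "a", "b", "c");
    }
}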
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package com.datatorrent.contrib.elasticsearch; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest; import org.elasticsearch.action.admin.indices.delete.DeleteIndexResponse; import org.elasticsearch.action.percolate.PercolateResponse; import org.elasticsearch.action.percolate.PercolateResponse.Match; import org.elasticsearch.client.transport.NoNodeAvailableException; import org.elasticsearch.index.query.TermQueryBuilder; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.datatorrent.lib.testbench.CollectorTestSink; /** * Test class for percolate operator */ public class ElasticSearchPercolateTest { private static final String INDEX_NAME = "mails"; private static final String DOCUMENT_TYPE = "emailText"; private static final String HOST_NAME = "localhost"; private static final int PORT = 9300; private static final String GITHUB_TOPIC = "onGithub"; private static final String MALHAR_TOPIC = "onMalhar"; private static final Logger logger = LoggerFactory.getLogger(ElasticSearchPercolateTest.class); private ElasticSearchPercolatorStore store; @Before public void setup() throws IOException { store = new ElasticSearchPercolatorStore(HOST_NAME, PORT); store.connect(); } @Test public void testPercolate() throws IOException { try{ registerPercolateQueries(); checkPercolateResponse(); } catch(NoNodeAvailableException e){ //This indicates that elasticsearch is not running on a particular machine. //Silently ignore in this case. 
        }
    }

    /**
     * Registers percolate queries on ElasticSearch.
     *
     * @throws IOException
     */
    private void registerPercolateQueries() throws IOException {
        store.registerPercolateQuery(INDEX_NAME, GITHUB_TOPIC, new TermQueryBuilder("content", "github"));
        store.registerPercolateQuery(INDEX_NAME, MALHAR_TOPIC, new TermQueryBuilder("content", "malhar"));
    }

    /**
     * Sends test documents through the percolator operator and verifies that each
     * document is matched against the expected registered topics.
     */
    private void checkPercolateResponse() {
        ElasticSearchPercolatorOperator oper = new ElasticSearchPercolatorOperator();
        oper.hostName = HOST_NAME;
        oper.port = PORT;
        oper.indexName = INDEX_NAME;
        oper.documentType = DOCUMENT_TYPE;
        oper.setup(null);

        String[] messages = {
            "{content:'This will match only with malhar'}",
            "{content:'This will match only with github'}",
            "{content:'This will match with both github and malhar'}",
            "{content:'This will not match with any of them'}" };
        String[][] matches = { { MALHAR_TOPIC }, { GITHUB_TOPIC }, { GITHUB_TOPIC, MALHAR_TOPIC }, {} };

        CollectorTestSink<PercolateResponse> sink = new CollectorTestSink<PercolateResponse>();
        oper.outputPort.setSink((CollectorTestSink) sink);
        for (String message : messages) {
            oper.inputPort.process(message);
        }

        int i = 0;
        for (PercolateResponse response : sink.collectedTuples) {
            List<String> matchIds = new ArrayList<String>();
            for (Match match : response.getMatches()) {
                matchIds.add(match.getId().toString());
            }
            Collections.sort(matchIds);
            // JUnit expects the expected array first, then the actual values.
            Assert.assertArrayEquals(matches[i], matchIds.toArray());
            i++;
        }
    }

    @After
    public void cleanup() throws IOException {
        try {
            DeleteIndexResponse delete = store.client.admin().indices().delete(new DeleteIndexRequest(INDEX_NAME)).actionGet();
            if (!delete.isAcknowledged()) {
                logger.error("Index wasn't deleted");
            }
            store.disconnect();
        } catch (NoNodeAvailableException e) {
            // This indicates that elasticsearch is not running on a particular machine.
            // Silently ignore in this case.
        }
    }
}
/* * Copyright (c) 2018, Bepal * All rights reserved. * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * Neither the name of the University of California, Berkeley nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS "AS IS" AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package bepal.eosio.transaction.message; public class ByteMessageData implements MessageData { public byte[] Data; public ByteMessageData() { } public ByteMessageData(byte[] data) { Data = data; } @Override public byte[] toByte() { return Data; } @Override public void parse(byte[] data) { Data = data; } }
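A short round-trip sketch of the MessageData contract implemented above; the payload bytes are arbitrary:

package bepal.eosio.transaction.message;

import java.util.Arrays;

public class ByteMessageDataExample {
    public static void main(String[] args) {
        byte[] payload = { 0x01, 0x02, 0x03 }; // arbitrary action payload
        ByteMessageData out = new ByteMessageData(payload);

        // parse() is the inverse of toByte(): raw bytes in, raw bytes out.
        ByteMessageData in = new ByteMessageData();
        in.parse(out.toByte());

        System.out.println(Arrays.equals(payload, in.toByte())); // true
    }
}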
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.drill.exec.store.jdbc;

import javax.sql.DataSource;
import java.util.Properties;
import java.util.Set;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.apache.calcite.adapter.jdbc.JdbcSchema;
import org.apache.calcite.plan.RelOptRule;
import org.apache.calcite.schema.SchemaPlus;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlDialectFactoryImpl;
import org.apache.drill.common.AutoCloseables;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.ops.OptimizerRulesContext;
import org.apache.drill.exec.server.DrillbitContext;
import org.apache.drill.exec.store.AbstractStoragePlugin;
import org.apache.drill.exec.store.SchemaConfig;
import org.apache.drill.exec.store.security.UsernamePasswordCredentials;
import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class JdbcStoragePlugin extends AbstractStoragePlugin {
  private static final Logger logger = LoggerFactory.getLogger(JdbcStoragePlugin.class);

  private final JdbcStorageConfig config;
  private final HikariDataSource dataSource;
  private final SqlDialect dialect;
  private final DrillJdbcConvention convention;

  public JdbcStoragePlugin(JdbcStorageConfig config, DrillbitContext context, String name) {
    super(context, name);
    this.config = config;
    this.dataSource = initDataSource(config);
    this.dialect = JdbcSchema.createDialect(SqlDialectFactoryImpl.INSTANCE, dataSource);
    this.convention = new DrillJdbcConvention(dialect, name, this);
  }

  @Override
  public void registerSchemas(SchemaConfig config, SchemaPlus parent) {
    JdbcCatalogSchema schema = new JdbcCatalogSchema(getName(), dataSource, dialect, convention,
        !this.config.areTableNamesCaseInsensitive());
    SchemaPlus holder = parent.add(getName(), schema);
    schema.setHolder(holder);
  }

  @Override
  public JdbcStorageConfig getConfig() {
    return config;
  }

  @Override
  public boolean supportsRead() {
    return true;
  }

  public DataSource getDataSource() {
    return dataSource;
  }

  public SqlDialect getDialect() {
    return dialect;
  }

  @Override
  public Set<RelOptRule> getPhysicalOptimizerRules(OptimizerRulesContext context) {
    return convention.getRules();
  }

  @Override
  public void close() {
    AutoCloseables.closeSilently(dataSource);
  }

  /**
   * Initializes {@link HikariDataSource} instance and configures it based on given
   * storage plugin configuration.
   * Basic parameters such as driver, url, user name and password are set using setters.
   * Other source parameters are set dynamically through the properties. See the list
   * of available Hikari properties:
   * <a href="https://github.com/brettwooldridge/HikariCP">HikariCP</a>.
* * @param config storage plugin config * @return Hikari data source instance * @throws UserException if unable to configure Hikari data source */ @VisibleForTesting static HikariDataSource initDataSource(JdbcStorageConfig config) { try { Properties properties = new Properties(); properties.putAll(config.getSourceParameters()); HikariConfig hikariConfig = new HikariConfig(properties); hikariConfig.setDriverClassName(config.getDriver()); hikariConfig.setJdbcUrl(config.getUrl()); UsernamePasswordCredentials credentials = config.getUsernamePasswordCredentials(); hikariConfig.setUsername(credentials.getUsername()); hikariConfig.setPassword(credentials.getPassword()); return new HikariDataSource(hikariConfig); } catch (RuntimeException e) { throw UserException.connectionError(e) .message("Unable to configure data source: %s", e.getMessage()) .build(logger); } } }
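To illustrate how the source parameters above flow into Hikari, here is a sketch building the same kind of config by hand. The property names are standard HikariCP settings; the driver, JDBC URL, and credentials are illustrative:

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

import java.util.Properties;

public class HikariFromPropertiesSketch {
    public static HikariDataSource build() {
        Properties props = new Properties();
        props.setProperty("maximumPoolSize", "5");       // standard HikariCP property
        props.setProperty("connectionTimeout", "30000"); // milliseconds

        HikariConfig config = new HikariConfig(props); // dynamic properties first
        config.setDriverClassName("org.h2.Driver");    // illustrative driver
        config.setJdbcUrl("jdbc:h2:mem:test");         // illustrative URL
        config.setUsername("sa");
        config.setPassword("");
        return new HikariDataSource(config);
    }
}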
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package io.druid.server.coordinator; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import io.druid.client.indexing.IndexingServiceClient; import io.druid.client.indexing.NoopIndexingServiceClient; import io.druid.common.config.JacksonConfigManager; import io.druid.java.util.common.Intervals; import io.druid.java.util.emitter.service.ServiceEmitter; import io.druid.server.coordinator.helper.DruidCoordinatorSegmentMerger; import io.druid.timeline.DataSegment; import io.druid.timeline.partition.LinearShardSpec; import org.easymock.EasyMock; import org.junit.Assert; import org.junit.Test; import java.util.Collection; import java.util.List; import java.util.concurrent.atomic.AtomicReference; public class DruidCoordinatorSegmentMergerTest { private static final long mergeBytesLimit = 100; private static final int mergeSegmentsLimit = 8; @Test public void testNoMerges() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of(), merge(segments) ); } @Test public void testMergeAtStart() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(90).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(0), segments.get(1)) ), merge(segments) ); } @Test public void testMergeAtEnd() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(20).build() ); Assert.assertEquals( ImmutableList.of( 
ImmutableList.of(segments.get(2), segments.get(3)) ), merge(segments) ); } @Test public void testMergeInMiddle() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(10).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(20).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(1), segments.get(2)) ), merge(segments) ); } @Test public void testMergeNoncontiguous() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(10).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(10).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(10).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(0), segments.get(1), segments.get(2)) ), merge(segments) ); } @Test public void testMergeSeriesByteLimited() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(40).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(40).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(40).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(40).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("2").size(40).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(40).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(0), segments.get(1)), ImmutableList.of(segments.get(2), segments.get(3)), ImmutableList.of(segments.get(4), segments.get(5)) ), merge(segments) ); } @Test public void testMergeSeriesSegmentLimited() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-07/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-08/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-10/P1D")).version("2").size(1).build() ); Assert.assertEquals( ImmutableList.of( 
ImmutableList.of( segments.get(0), segments.get(1), segments.get(2), segments.get(3), segments.get(4), segments.get(5), segments.get(6), segments.get(7) ), ImmutableList.of(segments.get(8), segments.get(9)) ), merge(segments) ); } @Test public void testOverlappingMergeWithBacktracking() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("2").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P4D")).version("2").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("4").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("3").size(20).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-07/P1D")).version("2").size(20).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(0), segments.get(1)), ImmutableList.of(segments.get(2), segments.get(3), segments.get(4), segments.get(5), segments.get(6)) ), merge(segments) ); } @Test public void testOverlappingMergeWithGapsAlignedStart() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P8D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("3").size(8).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(8).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("3").size(8).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(1), segments.get(0), segments.get(2)) ), merge(segments) ); } @Test public void testOverlappingMergeWithGapsNonalignedStart() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P8D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P1D")).version("3").size(8).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("3").size(8).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-09/P1D")).version("3").size(8).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(0), segments.get(1), segments.get(2)) ), merge(segments) ); } @Test public void testOverlappingMerge1() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of(), 
merge(segments) ); } @Test public void testOverlappingMerge2() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(15).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(2), segments.get(3), segments.get(4)) ), merge(segments) ); } @Test public void testOverlappingMerge3() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(1), segments.get(2), segments.get(4)) ), merge(segments) ); } @Test public void testOverlappingMerge4() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(1), segments.get(2), segments.get(3), segments.get(4)) ), merge(segments) ); } @Test public void testOverlappingMerge5() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of(), 
merge(segments) ); } @Test public void testOverlappingMerge6() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(25).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(2), segments.get(3), segments.get(4)) ), merge(segments) ); } @Test public void testOverlappingMerge7() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(120).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("4").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(2), segments.get(3), segments.get(4), segments.get(5)) ), merge(segments) ); } @Test public void testOverlappingMerge8() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-01/P1D")).version("2").size(80).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-02/P4D")).version("2").size(120).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-03/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-04/P1D")).version("1").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-05/P1D")).version("3").size(1).build(), DataSegment.builder().dataSource("foo").interval(Intervals.of("2012-01-06/P1D")).version("2").size(80).build() ); Assert.assertEquals(ImmutableList.of(ImmutableList.of(segments.get(4), segments.get(5))), merge(segments)); } @Test public void testMergeLinearShardSpecs() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") .interval(Intervals.of("2012-01-01/P1D")) .version("1") .shardSpec(new LinearShardSpec(1)) .build(), DataSegment.builder() .dataSource("foo") .interval(Intervals.of("2012-01-02/P1D")) .version("1") .shardSpec(new LinearShardSpec(7)) .build(), DataSegment.builder().dataSource("foo") .interval(Intervals.of("2012-01-03/P1D")) .version("1") .shardSpec(new LinearShardSpec(1500)) .build() ); Assert.assertEquals( ImmutableList.of(), merge(segments) ); } @Test public void testMergeMixedShardSpecs() { final List<DataSegment> segments = ImmutableList.of( DataSegment.builder() .dataSource("foo") .interval(Intervals.of("2012-01-01/P1D")) .version("1") .build(), DataSegment.builder() 
.dataSource("foo") .interval(Intervals.of("2012-01-02/P1D")) .version("1") .build(), DataSegment.builder().dataSource("foo") .interval(Intervals.of("2012-01-03/P1D")) .version("1") .shardSpec(new LinearShardSpec(1500)) .build(), DataSegment.builder().dataSource("foo") .interval(Intervals.of("2012-01-04/P1D")) .version("1") .build(), DataSegment.builder().dataSource("foo") .interval(Intervals.of("2012-01-05/P1D")) .version("1") .build() ); Assert.assertEquals( ImmutableList.of( ImmutableList.of(segments.get(0), segments.get(1)), ImmutableList.of(segments.get(3), segments.get(4)) ), merge(segments) ); } /** * Runs DruidCoordinatorSegmentMerger on a particular set of segments and returns the list of requested merges. */ private static List<List<DataSegment>> merge(final Collection<DataSegment> segments) { final JacksonConfigManager configManager = EasyMock.createMock(JacksonConfigManager.class); EasyMock.expect(configManager.watch(DatasourceWhitelist.CONFIG_KEY, DatasourceWhitelist.class)) .andReturn(new AtomicReference<DatasourceWhitelist>(null)).anyTimes(); EasyMock.replay(configManager); final List<List<DataSegment>> retVal = Lists.newArrayList(); final IndexingServiceClient indexingServiceClient = new NoopIndexingServiceClient() { @Override public void mergeSegments(List<DataSegment> segmentsToMerge) { retVal.add(segmentsToMerge); } }; final DruidCoordinatorSegmentMerger merger = new DruidCoordinatorSegmentMerger( indexingServiceClient, configManager ); final DruidCoordinatorRuntimeParams params = DruidCoordinatorRuntimeParams.newBuilder() .withAvailableSegments(ImmutableSet.copyOf(segments)) .withDynamicConfigs(CoordinatorDynamicConfig.builder().withMergeBytesLimit( mergeBytesLimit).withMergeSegmentsLimit(mergeSegmentsLimit).build()) .withEmitter(EasyMock.createMock(ServiceEmitter.class)) .build(); merger.run(params); return retVal; } }
package ru.job4j.crudservletwebapp.logic; import ru.job4j.crudservletwebapp.models.User; import ru.job4j.crudservletwebapp.persistent.DbStore; import ru.job4j.crudservletwebapp.persistent.Store; import java.util.List; public class ValidateService implements Validate { private final static ValidateService SERVICE = new ValidateService(); private final Store store = DbStore.getInstance(); private ValidateService() { } public static ValidateService getInstance() { return SERVICE; } @Override public boolean add(User user) { return this.store.add(user); } @Override public boolean update(User user) { return this.store.update(user); } @Override public boolean delete(int id) { return this.store.delete(id); } @Override public boolean uploadImg(User user) { return this.store.uploadImg(user); } @Override public List<User> findAll() { return this.store.findAll(); } @Override public User findById(int id) { return this.store.findById(id); } @Override public User isCredentional(String login, String password) { User result = null; List<User> list = this.store.findAll(); for (User u : list) { if (u.getLogin().equals(login) && u.getPassword().equals(password)) { result = u; break; } } return result; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.admin.remote; import java.io.DataInput; import java.io.DataOutput; import java.io.IOException; import org.apache.geode.DataSerializable; import org.apache.geode.DataSerializer; /** * Used to name an object in a region. This class is needed so that the console will not need to * load the user defined classes. */ public class RemoteObjectName implements DataSerializable { private static final long serialVersionUID = 5076319310507575418L; private String className; private String value; private int hashCode; public RemoteObjectName(Object name) { className = name.getClass().getName(); value = name.toString(); hashCode = name.hashCode(); } /** * This constructor is only for use by the DataSerializable mechanism */ public RemoteObjectName() {} @Override public boolean equals(Object o) { if (o == null) { return false; } if (o instanceof RemoteObjectName) { RemoteObjectName n = (RemoteObjectName) o; return (hashCode == n.hashCode) && className.equals(n.className) && value.equals(n.value); } else { // this should only happen on the server side when we are trying // to find the original object if (hashCode != o.hashCode()) { return false; } if (!className.equals(o.getClass().getName())) { return false; } return value.equals(o.toString()); } } @Override public int hashCode() { return hashCode; } @Override public String toString() { return className + " \"" + value + "\""; } public void toData(DataOutput out) throws IOException { DataSerializer.writeString(this.className, out); DataSerializer.writeString(this.value, out); out.writeInt(this.hashCode); } public void fromData(DataInput in) throws IOException, ClassNotFoundException { this.className = DataSerializer.readString(in); this.value = DataSerializer.readString(in); this.hashCode = in.readInt(); } }
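// Illustrative usage sketch (hypothetical class, not part of the Geode sources above, assumed to
// live where RemoteObjectName is importable): RemoteObjectName lets the console name a region key
// without loading the user-defined key class, and equals() is written so the server side can match
// the name back against the original key object. A String key stands in for an arbitrary user class.
class RemoteObjectNameExample {
    public static void main(String[] args) {
        Object key = "customer-42";                          // any region key
        RemoteObjectName remote = new RemoteObjectName(key); // what the console holds
        System.out.println(remote.equals(key));                       // true: hash code, class name, and toString all agree
        System.out.println(remote.equals(new RemoteObjectName(key))); // true via the RemoteObjectName branch of equals
        System.out.println(remote); // java.lang.String "customer-42"
    }
}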
/* Copyright 2014-2016 Intel Corporation Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package apple.foundation.enums; import org.moe.natj.general.ann.Generated; import org.moe.natj.general.ann.NUInt; @Generated public final class NSNumberFormatterPadPosition { @Generated @NUInt public static final long BeforePrefix = 0x0000000000000000L; @Generated @NUInt public static final long AfterPrefix = 0x0000000000000001L; @Generated @NUInt public static final long BeforeSuffix = 0x0000000000000002L; @Generated @NUInt public static final long AfterSuffix = 0x0000000000000003L; @Generated private NSNumberFormatterPadPosition() { } }
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openhubframework.openhub.component; import org.springframework.context.annotation.ComponentScan; import org.springframework.context.annotation.PropertySource; /** * Configuration for tests in "components" module. * * @author Petr Juza * @since 2.0 */ @ComponentScan(basePackages = {"org.openhubframework.openhub.component"}) @PropertySource(value = {"classpath:/config/application-test-default.properties"}) public class ComponentTestConfig { }
package com.hedera.services.bdd.spec.infrastructure.providers.ops.meta;

/*-
 * ‌
 * Hedera Services Test Clients
 * ​
 * Copyright (C) 2018 - 2021 Hedera Hashgraph, LLC
 * ​
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * ‍
 */

import com.hedera.services.bdd.spec.HapiSpecOperation;
import com.hedera.services.bdd.spec.infrastructure.OpProvider;
import com.hedera.services.bdd.spec.queries.meta.HapiGetTxnRecord;
import com.hedera.services.bdd.spec.transactions.TxnFactory;
import com.hederahashgraph.api.proto.java.TransactionID;

import java.util.Collections;
import java.util.List;
import java.util.Optional;

import static com.hedera.services.bdd.spec.queries.QueryVerbs.getTxnRecord;
import static com.hederahashgraph.api.proto.java.ResponseCodeEnum.*;

public class RandomRecord implements OpProvider {
    private final TxnFactory txns;

    public RandomRecord(TxnFactory txns) {
        this.txns = txns;
    }

    @Override
    public List<HapiSpecOperation> suggestedInitializers() {
        return Collections.emptyList();
    }

    @Override
    public Optional<HapiSpecOperation> get() {
        TransactionID txnId = txns.sampleRecentTxnId();
        if (txnId == TransactionID.getDefaultInstance()) {
            return Optional.empty();
        } else {
            HapiGetTxnRecord op = getTxnRecord(txnId)
                    .hasCostAnswerPrecheckFrom(OK, RECORD_NOT_FOUND)
                    .hasAnswerOnlyPrecheckFrom(OK, RECORD_NOT_FOUND);
            return Optional.of(op);
        }
    }
}
package thosakwe.fray.server.shims;

import org.antlr.v4.runtime.tree.ParseTree;
import thosakwe.fray.interpreter.FrayInterpreter;
import thosakwe.fray.lang.FrayDatum;
import thosakwe.fray.lang.FrayDatumUtils;
import thosakwe.fray.lang.FrayString;

public class FrayHttpCookie extends FrayDatum {
    private String name, value, expires, domain;

    public FrayHttpCookie(ParseTree source, FrayInterpreter interpreter) {
        super(source, interpreter);
        initSymbols();
    }

    private void initSymbols() {
        FrayDatumUtils.addField(this, "name", () -> FrayString.parse(getName()), (FrayDatum name) -> {
            setName(name.toString());
        });

        FrayDatumUtils.addField(this, "value", () -> FrayString.parse(getValue()), (FrayDatum value) -> {
            setValue(value.toString());
        });

        FrayDatumUtils.addField(this, "expires", () -> FrayString.parse(getExpires()), (FrayDatum expires) -> {
            setExpires(expires.toString());
        });

        FrayDatumUtils.addField(this, "domain", () -> FrayString.parse(getDomain()), (FrayDatum domain) -> {
            setDomain(domain.toString());
        });
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getValue() {
        return value;
    }

    public void setValue(String value) {
        this.value = value;
    }

    public String getExpires() {
        return expires;
    }

    public void setExpires(String expires) {
        this.expires = expires;
    }

    public String getDomain() {
        return domain;
    }

    public void setDomain(String domain) {
        this.domain = domain;
    }

    @Override
    public String toString() {
        return "[Instance of HttpCookie]";
    }
}
package com.sap.iot.azure.ref.integration.commons.mapping.token; import com.microsoft.azure.functions.HttpStatus; import com.sap.iot.azure.ref.integration.commons.context.InvocationContext; import com.sap.iot.azure.ref.integration.commons.context.InvocationContextTestUtil; import org.asynchttpclient.HttpResponseStatus; import org.asynchttpclient.filter.FilterContext; import org.asynchttpclient.filter.FilterException; import org.junit.BeforeClass; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mock; import org.mockito.junit.MockitoJUnitRunner; import static org.mockito.Mockito.*; @RunWith(MockitoJUnitRunner.class) public class MappingResponseFilterTest { @Mock FilterContext mockContext; @Mock HttpResponseStatus mockResponseStatus; @BeforeClass public static void setupClass() { InvocationContextTestUtil.initInvocationContext(); } @Test public void testAuthorized() throws FilterException { doReturn(HttpStatus.ACCEPTED.value()).when(mockResponseStatus).getStatusCode(); doReturn(mockResponseStatus).when(mockContext).getResponseStatus(); MappingResponseFilter mappingResponseFilter = new MappingResponseFilter(); mappingResponseFilter.filter(mockContext); verify(mockResponseStatus, times(1)).getStatusCode(); } }
package gov.nasa.jpf.constraints.expressions;

import com.microsoft.z3.*;
import gov.nasa.jpf.constraints.api.ConstraintSolver;
import gov.nasa.jpf.constraints.api.SolverContext;
import gov.nasa.jpf.constraints.api.Valuation;
import gov.nasa.jpf.constraints.api.Variable;
import gov.nasa.jpf.constraints.expressions.functions.Function;
import gov.nasa.jpf.constraints.smtlib.LoadingUtil;
import gov.nasa.jpf.constraints.smtlibUtility.SMTProblem;
import gov.nasa.jpf.constraints.smtlibUtility.parser.SMTLIBParserException;
import gov.nasa.jpf.constraints.solvers.ConstraintSolverFactory;
import gov.nasa.jpf.constraints.solvers.nativez3.NativeZ3Solver;
import gov.nasa.jpf.constraints.types.ArrayType;
import gov.nasa.jpf.constraints.types.BuiltinTypes;
import org.testng.Assert;
import org.testng.annotations.Test;

import java.io.IOException;
import java.net.URISyntaxException;
import java.util.*;

import static gov.nasa.jpf.constraints.api.ConstraintSolver.Result.SAT;
import static gov.nasa.jpf.constraints.api.ConstraintSolver.Result.UNSAT;
import static org.testng.Assert.assertEquals;

@Test
public class ArrayTest {

    @Test
    public void arrayTest() {
        // Unfinished scaffold: sets up a solver and the integer variables for an array
        // constraint, but does not build or assert any constraint yet.
        final NativeZ3Solver solver = new NativeZ3Solver();
        List<Variable> variables = new ArrayList<>();
        variables.add(new Variable(BuiltinTypes.INTEGER, "x1"));
        variables.add(new Variable(BuiltinTypes.INTEGER, "x2"));
        variables.add(new Variable(BuiltinTypes.INTEGER, "y1"));
        variables.add(new Variable(BuiltinTypes.INTEGER, "y2"));
        variables.add(new Variable(BuiltinTypes.INTEGER, "z1"));
        variables.add(new Variable(BuiltinTypes.INTEGER, "z2"));
        List<Function> functions = new ArrayList<>();
        //functions.add(new Function());
    }

    @Test
    public void runArrayTestFromFile() throws SMTLIBParserException, IOException, URISyntaxException {
        final SMTProblem problem = LoadingUtil.loadProblemFromResources("test_inputs/array_constraints.smt2");
        final NativeZ3Solver solver = new NativeZ3Solver();
        final Valuation val = new Valuation();
        final ConstraintSolver.Result res = solver.solve(problem.getAllAssertionsAsConjunction(), val);
        /*
        assertEquals(res, SAT);
        assertEquals(val.getVariables().size(), 2);
        */
    }

    @Test
    public void nativeZ3ArrayTest() {
        final Map<String, String> cfg = Collections.singletonMap("model", "true");
        Context z3Context = new Context(cfg);
        IntSort intSort = z3Context.mkIntSort();

        ArrayExpr arr1 = z3Context.mkArrayConst("array1", intSort, intSort);
        IntExpr x1 = (IntExpr) z3Context.mkConst("x1", z3Context.getIntSort());
        IntExpr y1 = (IntExpr) z3Context.mkConst("y1", z3Context.getIntSort());
        arr1 = z3Context.mkStore(arr1, x1, y1);

        ArrayExpr arr2 = z3Context.mkArrayConst("array2", intSort, intSort);
        IntExpr x2 = (IntExpr) z3Context.mkConst("x2", z3Context.getIntSort());
        IntExpr y2 = (IntExpr) z3Context.mkConst("y2", z3Context.getIntSort());
        arr2 = z3Context.mkStore(arr2, x2, y2);

        IntExpr z1 = (IntExpr) z3Context.mkSelect(arr1, z3Context.mkInt(3));
        IntExpr z2 = (IntExpr) z3Context.mkSelect(arr2, z3Context.mkInt(3));

        ArraySort intArray = z3Context.mkArraySort(intSort, intSort);
        ArrayExpr arr3 = (ArrayExpr) z3Context.mkConst("array3", intArray); // equivalent to z3Context.mkArrayConst("array3", intSort, intSort)

        BoolExpr boolAssumption = z3Context.mkEq(z1, z2);
        Solver solver = z3Context.mkSolver();
        Status solverStatus = solver.check(boolAssumption);
        System.out.println(solver.getModel().toString());
        assertEquals(solverStatus,
Status.SATISFIABLE); } @Test public void arrayTestJConstraints() { Properties conf = new Properties(); conf.setProperty("model", "true"); conf.setProperty("symbolic.dp", "NativeZ3"); ConstraintSolverFactory factory = new ConstraintSolverFactory(); ConstraintSolver solver = factory.createSolver(conf); SolverContext ctx = solver.createContext(); ArrayType arrayType = new ArrayType(BuiltinTypes.INTEGER, BuiltinTypes.INTEGER); Variable array1 = Variable.create(arrayType, "array1"); Variable x1 = Variable.create(BuiltinTypes.INTEGER, "x1"); Variable y1 = Variable.create(BuiltinTypes.INTEGER, "y1"); ArrayStoreExpression arrayStore1 = new ArrayStoreExpression(array1, x1, y1); Variable array2 = Variable.create(arrayType, "array2"); Variable x2 = Variable.create(BuiltinTypes.INTEGER, "x2"); Variable y2 = Variable.create(BuiltinTypes.INTEGER, "y2"); ArrayStoreExpression arrayStore2 = new ArrayStoreExpression(array2, x2, y2); /*ArraySelectExpression arraySelect1 = new ArraySelectExpression(array1, new Constant(BuiltinTypes.INTEGER, BigInteger.valueOf(3L))); ArraySelectExpression arraySelect2 = new ArraySelectExpression(array2, new Constant(BuiltinTypes.INTEGER, BigInteger.valueOf(3L)));*/ ArraySelectExpression arraySelect1 = new ArraySelectExpression(array1, y1); ArraySelectExpression arraySelect2 = new ArraySelectExpression(array2, y2); Valuation valuation = new Valuation(); //valuation.addEntry(new ValuationEntry<>(array1, arrayStore1)); //valuation.addEntry(new ValuationEntry<>(array2, arrayStore2)); NumericBooleanExpression numBo = new NumericBooleanExpression(arraySelect1, NumericComparator.EQ, arraySelect2); ctx.add(numBo); ConstraintSolver.Result result = solver.solve(numBo, valuation); System.out.println(arrayStore1); System.out.println(arraySelect1); System.out.println(numBo); System.out.println(result); System.out.println(valuation); Assert.assertEquals(solver.isSatisfiable(numBo), ConstraintSolver.Result.SAT); } @Test public void testParser() throws SMTLIBParserException, IOException, URISyntaxException { final SMTProblem problem = LoadingUtil.loadProblemFromResources("test_inputs/array_constraints.smt2"); final NativeZ3Solver solver = new NativeZ3Solver(); final Valuation val = new Valuation(); final ConstraintSolver.Result res = solver.solve(problem.getAllAssertionsAsConjunction(), val); assertEquals(res, SAT); } @Test public void testPointerSafe() throws SMTLIBParserException, IOException, URISyntaxException { final SMTProblem problem = LoadingUtil.loadProblemFromResources("test_inputs/pointer-safe-5.smt2"); final NativeZ3Solver solver = new NativeZ3Solver(); final Valuation val = new Valuation(); final ConstraintSolver.Result res = solver.solve(problem.getAllAssertionsAsConjunction(), val); assertEquals(res, UNSAT); } }
package com.cp.service.impl;

import com.cp.service.WelComeService;
import org.springframework.stereotype.Service;

@Service
public class WelComeServiceImpl implements WelComeService {
    @Override
    public String sayHello(String name) {
        System.out.println("Welcome, " + name);
        return "success";
    }
}
// ----------------------------------------------------------------------------
// Copyright 2007-2014, GeoTelematic Solutions, Inc.
// All rights reserved
// ----------------------------------------------------------------------------
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//  http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// ----------------------------------------------------------------------------
// Change History:
//  2011/08/21  Martin D. Flynn
//     -Initial release
// ----------------------------------------------------------------------------
package org.opengts.war.tools;

import java.lang.*;
import java.util.*;

import org.opengts.util.*;

public interface FileUploadHandler
{

    /**
    *** Handle a File Upload request
    *** @param context             The "context" of the File Upload
    *** @param reqState            The current request state
    *** @param name                The MIME name
    *** @param contentType         The MIME "content-type" value
    *** @param contentDisposition  The MIME "content-disposition" value
    *** @param fileName            The MIME upload file name
    *** @param fileBytes           The MIME upload file bytes
    *** @return The response String
    **/
    public String handleFileUpload(
        String context,
        RequestProperties reqState,
        String name,
        String contentType, String contentDisposition,
        String fileName, byte[] fileBytes);

}
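// Illustrative implementation sketch (hypothetical class, not part of OpenGTS): the smallest
// useful handler simply acknowledges the upload. A real handler would persist fileBytes and
// consult reqState for authorization.
class EchoFileUploadHandler implements FileUploadHandler {
    public String handleFileUpload(
        String context,
        RequestProperties reqState,
        String name,
        String contentType, String contentDisposition,
        String fileName, byte[] fileBytes) {
        int size = (fileBytes != null)? fileBytes.length : 0;
        return "Received '" + fileName + "' (" + size + " bytes) in context '" + context + "'";
    }
}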
/*
 * The MIT License (MIT)
 *
 * Copyright (c) 2017 heimuheimu
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package com.heimuheimu.naivemonitor.monitor;

import com.heimuheimu.naivemonitor.util.MonitorUtil;

import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Monitor for operation executions; provides statistics such as the total number of executions,
 * the number of failed executions, and execution times. For example, to monitor a user-registration
 * operation in your product, you could write:
 * <pre> {@code
 * public void register(User user) {
 *     long startTime = System.nanoTime();
 *     try {
 *         ... // business logic
 *     } catch (Exception e) {
 *         executionMonitor.onError(-1); // record a registration error
 *     } finally {
 *         executionMonitor.onExecuted(startTime); // record the registration operation
 *     }
 * }}</pre>
 *
 * <p><strong>Note:</strong> {@code ExecutionMonitor} is thread safe; a single instance may be
 * shared by multiple threads.</p>
 *
 * @see com.heimuheimu.naivemonitor.monitor.factory.NaiveExecutionMonitorFactory
 * @see com.heimuheimu.naivemonitor.falcon.support.AbstractExecutionDataCollector
 * @author heimuheimu
 */
public class ExecutionMonitor {

    /**
     * Total number of executions.
     */
    private final AtomicLong totalCount = new AtomicLong();

    /**
     * Map of failure counts: the key is the error code of a failed execution, the value is the
     * number of failures recorded for that code.
     */
    private final ConcurrentHashMap<Integer, AtomicLong> errorCountMap = new ConcurrentHashMap<>();

    /**
     * Peak number of executions per second.
     */
    private volatile long peakTps = 0;

    /**
     * Number of executions in the current second.
     */
    private final AtomicLong currentTps = new AtomicLong();

    /**
     * Timestamp at which the current second started.
     */
    private volatile long currentTpsTimestamp = 0;

    /**
     * Maximum execution time, in nanoseconds.
     */
    private volatile long maxExecutionTime = 0;

    /**
     * Total execution time, in nanoseconds.
     */
    private final AtomicLong totalExecutionTime = new AtomicLong();

    /**
     * Records a completed operation. The start time should be obtained by calling
     * {@link System#nanoTime()} before the operation begins.
     *
     * @param startNanoTime start time of the operation, in nanoseconds
     */
    public void onExecuted(long startNanoTime) {
        long estimatedTime = System.nanoTime() - startNanoTime;
        // maxExecutionTime relies on volatile for visibility only, not atomicity; in extreme
        // cases the true maximum may be overwritten, which has negligible impact on statistics.
        if (estimatedTime > maxExecutionTime) {
            maxExecutionTime = estimatedTime;
        }
        MonitorUtil.safeAdd(totalCount, 1); // increment the total execution count
        MonitorUtil.safeAdd(totalExecutionTime, estimatedTime); // add to the total execution time

        // Track executions per second; this is an approximation, not an exact computation.
        long currentTimestamp = System.currentTimeMillis();
        if (currentTimestamp - currentTpsTimestamp <= 1000) {
            currentTps.incrementAndGet();
        } else {
            long currentTpsValue = currentTps.get();
            currentTps.set(0);
            currentTpsTimestamp = currentTimestamp;
            if (currentTpsValue > peakTps) {
                peakTps = currentTpsValue;
            }
        }
    }

    /**
     * Records an error that occurred during execution: the failure count for the given error code
     * is incremented by one. Counts can be read back via {@link #getErrorCount(int)}.
     *
     * @param errorCode error code of the failure, defined by the caller
     */
    public void onError(int errorCode) {
        // Increment the failure count for this error code. computeIfAbsent keeps the
        // get-or-create step atomic, so no increments are lost under concurrency.
        AtomicLong existedErrorCount = errorCountMap.computeIfAbsent(errorCode, k -> new AtomicLong());
        MonitorUtil.safeAdd(existedErrorCount, 1);
    }

    /**
     * Returns the total number of executions.
     *
     * @return total number of executions
     */
    public long getTotalCount() {
        return totalCount.get();
    }

    /**
     * Returns the total number of failed executions recorded for the given error code.
     *
     * @param errorCode error code
     * @return total number of failures for the given error code
     */
    public long getErrorCount(int errorCode) {
        AtomicLong errorCount = errorCountMap.get(errorCode);
        if (errorCount != null) {
            return errorCount.get();
        } else {
            return 0;
        }
    }

    /**
     * Returns the peak number of executions per second.
     *
     * @return peak number of executions per second
     */
    public long getPeakTps() {
        return peakTps;
    }

    /**
     * Returns the maximum execution time of successful operations, in nanoseconds.
     *
     * @return maximum execution time, in nanoseconds
     */
    public long getMaxExecutionTime() {
        return maxExecutionTime;
    }

    /**
     * Returns the total execution time, in nanoseconds.
     *
     * @return total execution time, in nanoseconds
     */
    public long getTotalExecutionTime() {
        return totalExecutionTime.get();
    }

    /**
     * Resets the maximum execution time.
     */
    public void resetMaxExecutionTime() {
        maxExecutionTime = 0;
    }

    /**
     * Resets the peak number of executions per second.
     */
    public void resetPeakTps() {
        peakTps = 0;
    }

    @Override
    public String toString() {
        return "ExecutionMonitor{" +
                "totalCount=" + totalCount +
                ", errorCountMap=" + errorCountMap +
                ", peakTps=" + peakTps +
                ", currentTps=" + currentTps +
                ", currentTpsTimestamp=" + currentTpsTimestamp +
                ", maxExecutionTime=" + maxExecutionTime +
                ", totalExecutionTime=" + totalExecutionTime +
                '}';
    }
}
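// Illustrative usage sketch (hypothetical class, assumed to live where ExecutionMonitor is
// importable): records a batch of operations and reads the statistics back, deriving an average
// latency from the total time and total count.
class ExecutionMonitorExample {
    public static void main(String[] args) {
        ExecutionMonitor monitor = new ExecutionMonitor();
        for (int i = 0; i < 1000; i++) {
            long start = System.nanoTime();
            try {
                // ... business logic ...
            } finally {
                monitor.onExecuted(start);
            }
        }
        long total = monitor.getTotalCount();
        long failed = monitor.getErrorCount(-1); // 0 here; would be incremented by onError(-1)
        // Average latency in nanoseconds; guard against division by zero.
        long avgNanos = total == 0 ? 0 : monitor.getTotalExecutionTime() / total;
        System.out.printf("total=%d failed=%d avg=%dns peakTps=%d%n",
                total, failed, avgNanos, monitor.getPeakTps());
    }
}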
package edu.harvard.iq.dataverse.search.query; import com.google.common.base.Preconditions; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; /** * Class indicating what dvObjects will be returned from search * * @author madryk */ public class SearchForTypes { private Set<SearchObjectType> types = new HashSet<>(); // -------------------- CONSTRUCTORS -------------------- private SearchForTypes(Set<SearchObjectType> types) { Preconditions.checkArgument(types.size() > 0, "At least one dvObject type is required"); this.types = types; } // -------------------- GETTERS -------------------- public Set<SearchObjectType> getTypes() { return types; } // -------------------- LOGIC -------------------- public boolean contains(SearchObjectType type) { return types.contains(type); } public boolean containsOnly(SearchObjectType type) { return types.size() == 1 && types.contains(type); } /** * Returns new {@link SearchForTypes} object with * either: * <p> * additional type if original {@link SearchForTypes} * does not contain it. * <p> * removed type if original {@link SearchForTypes} does * contain it. * <p> * Method do not modify original {@link SearchForTypes} */ public SearchForTypes toggleType(SearchObjectType type) { Set<SearchObjectType> newTypes = new HashSet<>(types); if (newTypes.contains(type)) { newTypes.remove(type); } else { newTypes.add(type); } return new SearchForTypes(newTypes); } /** * Returns {@link SearchForTypes} with assigned dvObject types according * to the given types */ public static SearchForTypes byTypes(List<SearchObjectType> types) { return new SearchForTypes(new HashSet<>(types)); } /** * Returns {@link SearchForTypes} with assigned dvObject types according * to the given types */ public static SearchForTypes byTypes(SearchObjectType ... types) { return byTypes(Arrays.asList(types)); } /** * Returns {@link SearchForTypes} with assigned all possible dvObject types */ public static SearchForTypes all() { return byTypes(SearchObjectType.values()); } }
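// Illustrative usage sketch (assumption: SearchObjectType defines exactly the constants
// DATAVERSES, DATASETS and FILES; only the enum's name is visible above). toggleType returns a
// fresh instance, so a UI facet click can flip one type on or off without mutating the previous
// selection.
class SearchForTypesExample {
    public static void main(String[] args) {
        SearchForTypes selected = SearchForTypes.all();
        selected = selected.toggleType(SearchObjectType.DATAVERSES); // drop dataverses
        selected = selected.toggleType(SearchObjectType.FILES);      // drop files
        System.out.println(selected.containsOnly(SearchObjectType.DATASETS)); // true under the assumption above
    }
}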
/* * * * Copyright 2014 Orient Technologies LTD (info(at)orientechnologies.com) * * * * Licensed under the Apache License, Version 2.0 (the "License"); * * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, software * * distributed under the License is distributed on an "AS IS" BASIS, * * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * * limitations under the License. * * * * For more information: http://www.orientechnologies.com * */ package com.orientechnologies.orient.core.command; import com.orientechnologies.orient.core.record.ORecord; import com.orientechnologies.orient.core.record.impl.ODocument; /** * Command predicate to be evaluated against a record and a context. * * @author Luca Garulli (l.garulli--at--orientechnologies.com) * */ public interface OCommandPredicate { /** * Evaluates the predicate. * * @param iRecord * Target record * @param iCurrentResult TODO * @param iContext * Context of execution * @return The result of predicate */ public Object evaluate(final ORecord iRecord, ODocument iCurrentResult, final OCommandContext iContext); }
package com.wcc.dataextract; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @Data @Builder @NoArgsConstructor @AllArgsConstructor public class Contactjobinfo { private String guid; private String companyname; private String companypronunce; private String department; private String jobtitle; private String recognizesource; private Integer fieldorder; }
/**************************************************************** * Licensed to the Apache Software Foundation (ASF) under one * * or more contributor license agreements. See the NOTICE file * * distributed with this work for additional information * * regarding copyright ownership. The ASF licenses this file * * to you under the Apache License, Version 2.0 (the * * "License"); you may not use this file except in compliance * * with the License. You may obtain a copy of the License at * * * * http://www.apache.org/licenses/LICENSE-2.0 * * * * Unless required by applicable law or agreed to in writing, * * software distributed under the License is distributed on an * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * * KIND, either express or implied. See the License for the * * specific language governing permissions and limitations * * under the License. * ****************************************************************/ package org.apache.james.jspf.terms; import org.apache.james.jspf.core.Inet6Util; /** * This class represent the ip6 mechanism * */ public class IP6Mechanism extends IP4Mechanism { /** * ABNF: IP6 = "ip6" ":" ip6-network [ ip6-cidr-length ] */ public static final String REGEX = "[iI][pP][6]" + "\\:([0-9A-Fa-f\\:\\.]+)" + "(?:" + IP6_CIDR_LENGTH_REGEX + ")?"; /** * @see org.apache.james.jspf.terms.IP4Mechanism#isValidAddress(String) */ protected boolean isValidAddress(String ipString) { return Inet6Util.isValidIP6Address(ipString); } /** * @see org.apache.james.jspf.terms.IP4Mechanism#getMaxCidr() */ protected int getMaxCidr() { return 128; } /** * @see java.lang.Object#toString() */ public String toString() { if (getIp().getMaskLength() == getMaxCidr()) { return "ip6:"+getIp().getIPAddress(); } else { return "ip6:"+getIp().getIPAddress()+"/"+getIp().getMaskLength(); } } }
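// Illustrative sketch (hypothetical class, separate from jSPF above): isValidAddress() delegates
// to Inet6Util.isValidIP6Address, imported at the top of this file, so the ip6 mechanism accepts
// textual IPv6 forms, including compressed and IPv4-mapped notation, while rejecting plain IPv4
// strings. The sample inputs and expected outcomes are assumptions for demonstration.
class IP6MechanismExample {
    public static void main(String[] args) {
        System.out.println(Inet6Util.isValidIP6Address("2001:db8::1"));      // expected: valid
        System.out.println(Inet6Util.isValidIP6Address("::ffff:192.0.2.1")); // expected: valid (IPv4-mapped)
        System.out.println(Inet6Util.isValidIP6Address("192.0.2.1"));        // expected: rejected (plain IPv4)
    }
}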
package com.bayraktar.graduationproject.springboot.service.entityservice; import com.bayraktar.graduationproject.springboot.dao.UserDao; import com.bayraktar.graduationproject.springboot.entity.User; import com.bayraktar.graduationproject.springboot.exception.NotFoundException; import com.bayraktar.graduationproject.springboot.service.entityservice.baseentityservice.BaseEntityService; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Service; import java.util.List; @Service @Slf4j public class UserEntityService extends BaseEntityService<User, UserDao> { public UserEntityService(UserDao dao) { super(dao); } public List<User> findAllUsers(){ return getDao().findAll(); } public User findUserById(Long id){ return getDao().findById(id) .orElseThrow(() -> new NotFoundException("UserEntityService.findUserById method returned null. User with id:" + id + " not found.")); } public User findUserByIdentificationNumber(String id) { return getDao().findByIdentificationNumber(id) .orElseThrow(() -> new NotFoundException("UserEntityService.findUserByIdentificationNumber method returned null. User with id:" + id + " not found.")); } public Integer findCreditScoreById(Long id) { return findUserById(id).getCreditScore(); } public User saveUser(User user){ return getDao().save(user); } public User updateUser(User user){ findUserById(user.getId()); return getDao().save(user); } public int deleteUserById(Long id){ return getDao().deleteUserById(id); } }
/* * Symphony - A modern community (forum/BBS/SNS/blog) platform written in Java. * Copyright (C) 2012-present, b3log.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <https://www.gnu.org/licenses/>. */ package org.b3log.symphony.model; import org.apache.commons.lang.StringUtils; import org.json.JSONObject; /** * This class defines all article model relevant keys. * * @author <a href="http://88250.b3log.org">Liang Ding</a> * @version 1.34.0.1, Apr 9, 2019 * @since 0.2.0 */ public final class Article { /** * Article. */ public static final String ARTICLE = "article"; /** * Articles. */ public static final String ARTICLES = "articles"; /** * Key of article title. */ public static final String ARTICLE_TITLE = "articleTitle"; /** * Key of article content. */ public static final String ARTICLE_CONTENT = "articleContent"; /** * Key of article reward content. */ public static final String ARTICLE_REWARD_CONTENT = "articleRewardContent"; /** * Key of article reward point. */ public static final String ARTICLE_REWARD_POINT = "articleRewardPoint"; /** * Key of article tags. */ public static final String ARTICLE_TAGS = "articleTags"; /** * Key of article author id. */ public static final String ARTICLE_AUTHOR_ID = "articleAuthorId"; /** * Key of article comment count. */ public static final String ARTICLE_COMMENT_CNT = "articleCommentCount"; /** * Key of article view count. */ public static final String ARTICLE_VIEW_CNT = "articleViewCount"; /** * Key of article permalink. */ public static final String ARTICLE_PERMALINK = "articlePermalink"; /** * Key of article create time. */ public static final String ARTICLE_CREATE_TIME = "articleCreateTime"; /** * Key of article create time str. */ public static final String ARTICLE_CREATE_TIME_STR = "articleCreateTimeStr"; /** * Key of article update time. */ public static final String ARTICLE_UPDATE_TIME = "articleUpdateTime"; /** * Key of article update time str. */ public static final String ARTICLE_UPDATE_TIME_STR = "articleUpdateTimeStr"; /** * Key of article latest comment time. */ public static final String ARTICLE_LATEST_CMT_TIME = "articleLatestCmtTime"; /** * Key of article latest comment time str. */ public static final String ARTICLE_LATEST_CMT_TIME_STR = "articleLatestCmtTimeStr"; /** * Key of article latest commenter name. */ public static final String ARTICLE_LATEST_CMTER_NAME = "articleLatestCmterName"; /** * Key of article random double value. */ public static final String ARTICLE_RANDOM_DOUBLE = "articleRandomDouble"; /** * Key of article commentable. */ public static final String ARTICLE_COMMENTABLE = "articleCommentable"; /** * Key of article editor type. */ public static final String ARTICLE_EDITOR_TYPE = "articleEditorType"; /** * Key of article status. */ public static final String ARTICLE_STATUS = "articleStatus"; /** * Key of article type. */ public static final String ARTICLE_TYPE = "articleType"; /** * Key of article thank count. 
*/ public static final String ARTICLE_THANK_CNT = "articleThankCnt"; /** * Key of article good count. */ public static final String ARTICLE_GOOD_CNT = "articleGoodCnt"; /** * Key of article bad count. */ public static final String ARTICLE_BAD_CNT = "articleBadCnt"; /** * Key of article collection count. */ public static final String ARTICLE_COLLECT_CNT = "articleCollectCnt"; /** * Key of article watch count. */ public static final String ARTICLE_WATCH_CNT = "articleWatchCnt"; /** * Key of reddit score. */ public static final String REDDIT_SCORE = "redditScore"; /** * Key of article city. */ public static final String ARTICLE_CITY = "articleCity"; /** * Key of article IP. */ public static final String ARTICLE_IP = "articleIP"; /** * Key of article UA. */ public static final String ARTICLE_UA = "articleUA"; /** * Key of article stick. */ public static final String ARTICLE_STICK = "articleStick"; /** * Key of article anonymous. */ public static final String ARTICLE_ANONYMOUS = "articleAnonymous"; /** * Key of article perfect. */ public static final String ARTICLE_PERFECT = "articlePerfect"; /** * Key of article anonymous view. */ public static final String ARTICLE_ANONYMOUS_VIEW = "articleAnonymousView"; /** * Key of article audio URL. */ public static final String ARTICLE_AUDIO_URL = "articleAudioURL"; /** * Key of article qna offer point. https://github.com/b3log/symphony/issues/486 */ public static final String ARTICLE_QNA_OFFER_POINT = "articleQnAOfferPoint"; /** * Key of article push order. https://github.com/b3log/symphony/issues/537 */ public static final String ARTICLE_PUSH_ORDER = "articlePushOrder"; /** * Key of article image1 URL. https://github.com/b3log/symphony/issues/705 */ public static final String ARTICLE_IMG1_URL = "articleImg1URL"; //// Transient //// /** * Key of article revision count. */ public static final String ARTICLE_REVISION_COUNT = "articleRevisionCount"; /** * Key of article latest comment. */ public static final String ARTICLE_T_LATEST_CMT = "articleLatestCmt"; /** * Key of previous article. */ public static final String ARTICLE_T_PREVIOUS = "articlePrevious"; /** * Key of next article. */ public static final String ARTICLE_T_NEXT = "articleNext"; /** * Key of article tag objects. */ public static final String ARTICLE_T_TAG_OBJS = "articleTagObjs"; /** * Key of article vote. */ public static final String ARTICLE_T_VOTE = "articleVote"; /** * Key of article stick flag. */ public static final String ARTICLE_T_IS_STICK = "articleIsStick"; /** * Key of article stick remains. */ public static final String ARTICLE_T_STICK_REMAINS = "articleStickRemains"; /** * Key of article preview content. */ public static final String ARTICLE_T_PREVIEW_CONTENT = "articlePreviewContent"; /** * Key of article thumbnail URL. */ public static final String ARTICLE_T_THUMBNAIL_URL = "articleThumbnailURL"; /** * Key of article view count display format. */ public static final String ARTICLE_T_VIEW_CNT_DISPLAY_FORMAT = "articleViewCntDisplayFormat"; /** * Key of article id. */ public static final String ARTICLE_T_ID = "articleId"; /** * Key of article ids. */ public static final String ARTICLE_T_IDS = "articleIds"; /** * Key of article author. */ public static final String ARTICLE_T_AUTHOR = "articleAuthor"; /** * Key of article author thumbnail URL. */ public static final String ARTICLE_T_AUTHOR_THUMBNAIL_URL = "articleAuthorThumbnailURL"; /** * Key of article author name. */ public static final String ARTICLE_T_AUTHOR_NAME = "articleAuthorName"; /** * Key of article author URL. 
*/ public static final String ARTICLE_T_AUTHOR_URL = "articleAuthorURL"; /** * Key of article author intro. */ public static final String ARTICLE_T_AUTHOR_INTRO = "articleAuthorIntro"; /** * Key of article comments. */ public static final String ARTICLE_T_COMMENTS = "articleComments"; /** * Key of article nice comments. */ public static final String ARTICLE_T_NICE_COMMENTS = "articleNiceComments"; /** * Key of article offered (accepted) comment(answer). */ public static final String ARTICLE_T_OFFERED_COMMENT = "articleOfferedComment"; /** * Key of article participants. */ public static final String ARTICLE_T_PARTICIPANTS = "articleParticipants"; /** * Key of article participant name. */ public static final String ARTICLE_T_PARTICIPANT_NAME = "articleParticipantName"; /** * Key of article participant thumbnail URL. */ public static final String ARTICLE_T_PARTICIPANT_THUMBNAIL_URL = "articleParticipantThumbnailURL"; /** * Key of article participant URL. */ public static final String ARTICLE_T_PARTICIPANT_URL = "articleParticipantURL"; /** * Key of article title with Emoj. */ public static final String ARTICLE_T_TITLE_EMOJI = "articleTitleEmoj"; /** * Key of article title with Emoji unicode. */ public static final String ARTICLE_T_TITLE_EMOJI_UNICODE = "articleTitleEmojUnicode"; /** * Key of article heat. */ public static final String ARTICLE_T_HEAT = "articleHeat"; /** * Key of article ToC. */ public static final String ARTICLE_T_TOC = "articleToC"; /** * Key of article original content. */ public static final String ARTICLE_T_ORIGINAL_CONTENT = "articleOriginalContent"; /** * Key of flag of notifying followers. */ public static final String ARTICLE_T_NOTIFY_FOLLOWERS = "articleNotifyFollowers"; // Anonymous constants /** * Article anonymous - public. */ public static final int ARTICLE_ANONYMOUS_C_PUBLIC = 0; /** * Article anonymous - anonymous. */ public static final int ARTICLE_ANONYMOUS_C_ANONYMOUS = 1; // Perfect constants /** * Article perfect - not perfect. */ public static final int ARTICLE_PERFECT_C_NOT_PERFECT = 0; /** * Article perfect - perfect. */ public static final int ARTICLE_PERFECT_C_PERFECT = 1; // Anonymous view constants /** * Article anonymous view - use global. */ public static final int ARTICLE_ANONYMOUS_VIEW_C_USE_GLOBAL = 0; /** * Article anonymous view - not allow. */ public static final int ARTICLE_ANONYMOUS_VIEW_C_NOT_ALLOW = 1; /** * Article anonymous view - allow. */ public static final int ARTICLE_ANONYMOUS_VIEW_C_ALLOW = 2; // Status constants /** * Article status - valid. */ public static final int ARTICLE_STATUS_C_VALID = 0; /** * Article status - invalid. */ public static final int ARTICLE_STATUS_C_INVALID = 1; /** * Article status - locked. */ public static final int ARTICLE_STATUS_C_LOCKED = 2; // Type constants /** * Article type - normal. */ public static final int ARTICLE_TYPE_C_NORMAL = 0; /** * Article type - discussion. */ public static final int ARTICLE_TYPE_C_DISCUSSION = 1; /** * Article type - city broadcast. */ public static final int ARTICLE_TYPE_C_CITY_BROADCAST = 2; /** * Article type - <a href="https://hacpai.com/article/1441942422856">thought</a>. */ public static final int ARTICLE_TYPE_C_THOUGHT = 3; /** * Article type - <a href="https://github.com/b3log/symphony/issues/486">QnA</a>. */ public static final int ARTICLE_TYPE_C_QNA = 5; /** * Checks the specified article1 is different from the specified article2. 
* * @param a1 the specified article1 * @param a2 the specified article2 * @return {@code true} if they are different, otherwise returns {@code false} */ public static boolean isDifferent(final JSONObject a1, final JSONObject a2) { final String title1 = a1.optString(Article.ARTICLE_TITLE); final String title2 = a2.optString(Article.ARTICLE_TITLE); if (!StringUtils.equalsIgnoreCase(title1, title2)) { return true; } final String tags1 = a1.optString(Article.ARTICLE_TAGS); final String tags2 = a2.optString(Article.ARTICLE_TAGS); if (!StringUtils.equalsIgnoreCase(tags1, tags2)) { return true; } final String content1 = a1.optString(Article.ARTICLE_CONTENT); final String content2 = a2.optString(Article.ARTICLE_CONTENT); if (!StringUtils.equalsIgnoreCase(content1, content2)) { return true; } return false; } /** * Checks the specified article type is whether invalid. * * @param articleType the specified article type * @return {@code true} if it is invalid, otherwise returns {@code false} */ public static boolean isInvalidArticleType(final int articleType) { return articleType < 0 || articleType > Article.ARTICLE_TYPE_C_QNA; } /** * Private constructor. */ private Article() { } }
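// Illustrative sketch (hypothetical class, not part of Symphony above): isDifferent compares only
// title, tags and content, case-insensitively, so two articles differing in any other field are
// considered the same revision.
class ArticleDiffExample {
    public static void main(String[] args) {
        JSONObject a1 = new JSONObject()
                .put(Article.ARTICLE_TITLE, "Hello")
                .put(Article.ARTICLE_TAGS, "java,community")
                .put(Article.ARTICLE_CONTENT, "First post");
        JSONObject a2 = new JSONObject()
                .put(Article.ARTICLE_TITLE, "hello") // case difference only
                .put(Article.ARTICLE_TAGS, "java,community")
                .put(Article.ARTICLE_CONTENT, "First post");
        System.out.println(Article.isDifferent(a1, a2));      // false: title matches ignoring case
        System.out.println(Article.isInvalidArticleType(5));  // false: QnA is the highest valid type
        System.out.println(Article.isInvalidArticleType(6));  // true: above the QnA bound
    }
}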
package com.atguigu.gmall.sms.service; import com.baomidou.mybatisplus.extension.service.IService; import com.atguigu.gmall.common.bean.PageResultVo; import com.atguigu.gmall.common.bean.PageParamVo; import com.atguigu.gmall.sms.entity.SeckillPromotionEntity; import java.util.Map; /** * 秒杀活动 * * @author kunkun * @email kunkun@atguigu.com * @date 2020-12-15 00:11:57 */ public interface SeckillPromotionService extends IService<SeckillPromotionEntity> { PageResultVo queryPage(PageParamVo paramVo); }
/*
 * IdTbl.java
 *
 * Created on December 5, 2006, 4:37 PM
 */

package idmapmaker;

import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Class to read and store data from id.tbl.
 * <p>
 * File description (from inferis.org):
 * <blockquote>
 * This file is a sort of "hit-test" matrix. With the data contained in this
 * file, you can easily find out which pixel on the map belongs to which
 * province.
 * <p>
 * This file is made up of two parts: offsets and data.
 * <p>
 * The <b>offsets</b> are a list of 7296 4-byte integers, which represent an
 * offset into the file pointing to the datablocks. Since there are 7296
 * offsets, this means that each datablock contains information about each
 * <i>scanline</i> on the map (since the map is 18944x7296 pixels).
 * <br>
 * The offsets are relative into the file: offset 0 does not mean the beginning
 * of the file, but the first byte right after the offsets are done plus 4 bytes.
 * <pre>
 * # index is the index of the line you want to read (0-7295)
 * relative_offset = read_file( index )
 * real_offset = 4 * ( map_height + 1 + relative_offset )
 * </pre>
 * <p>
 * Now we know the offset of the datablock for each line.
 * Each of these lines contains a number of smaller blocks of 6 bytes each.
 * <br>
 * A small block represents a <i>span</i> in the line. It contains a start X
 * coordinate, an end X coordinate and a province Id.
 * <p>
 * The blocks are formatted like this:
 * <table border="1" cellpadding="2" cellspacing="0">
 * <tr>
 * <td>bytes 1 &amp; 2</td>
 * <td>bytes 3 &amp; 4</td>
 * <td>bytes 5 &amp; 6</td>
 * </tr>
 * <tr>
 * <td>start X value</td>
 * <td>province ID</td>
 * <td>end X value</td>
 * </tr>
 * </table>
 * <p>
 * So, each line contains a number of spans. All spans combined form a whole
 * line: the first block starts with start-x value 0, the last block has an
 * end-x value of 18944 (the map size). So you know when to stop reading when
 * the end-x value is equal to or greater than the map size. The block after
 * the last block (in a line) is the first block of the next line.
 * </blockquote>
 * @author Michael Myers
 */
public class IdTbl {

    private static final int MAP_WIDTH = 18944;
    private static final int MAP_HEIGHT = 7296;
//    private static final int OFFSET_END = MAP_HEIGHT*4; // 4 bytes per int

    private final ProvinceData province;
    private BufferedImage image;
    private final ByteBuffer file;

    private static final Color oceanColor = new Color(111, 168, 223);
//    private static final Color landColor = new Color(255, 243, 200);
    private static final Color ptiColor = Color.BLACK;

    public IdTbl(String filename, ProvinceData data) {
        this(readRawFile(filename), data);
    }

    public IdTbl(final byte[] file, ProvinceData data) {
        this.file = ByteBuffer.wrap(file);
        this.file.order(ByteOrder.LITTLE_ENDIAN);
        province = data;
    }

    private static byte[] readRawFile(String filename) {
        FileInputStream stream = null;
        try {
            File file = new File(filename);
            byte[] buf = new byte[(int) file.length()];
            stream = new FileInputStream(file);
            if (stream.read(buf) != buf.length) {
                System.err.println("???");
            }
            return buf;
        } catch (FileNotFoundException ex) {
            Logger.getLogger(IdTbl.class.getName()).log(Level.SEVERE, null, ex);
        } catch (IOException ex) {
            Logger.getLogger(IdTbl.class.getName()).log(Level.SEVERE, null, ex);
        } finally {
            // Guard against a null stream (e.g. when the file was not found), which would
            // otherwise throw a NullPointerException from the finally block.
            if (stream != null) {
                try {
                    stream.close();
                } catch (IOException ex) {
                    Logger.getLogger(IdTbl.class.getName()).log(Level.SEVERE, null, ex);
                }
            }
        }
        return null;
    }

    public int getId(int x, int y) {
        int relativeOffset = file.getInt(y*4);
        int realOffset = 4 * (MAP_HEIGHT + 1 + relativeOffset);

        // Now read the line until we come to the right x
        int endOffset = 4 * (MAP_HEIGHT + 1 + file.getInt((y+1)*4));

        if (realOffset+1 > file.capacity())
            return -1;

        short lastX = file.getShort(realOffset);
        realOffset += 2;

        for (; realOffset < endOffset; realOffset += 4) {
            short startX = lastX;
            if (startX > x)
                continue;
            lastX = file.getShort(realOffset + 2);
            if (lastX > x) {
                // found what we want.
                return (int) file.getShort(realOffset);
            }
        }
        return -1;
    }

    private final BufferedImage draw() {
        final BufferedImage img = new BufferedImage(MAP_WIDTH, MAP_HEIGHT, BufferedImage.TYPE_BYTE_INDEXED);
        final Graphics2D g = img.createGraphics();
        g.setColor(Color.WHITE);
        g.fillRect(0, 0, MAP_WIDTH, MAP_HEIGHT);
        g.setColor(Color.BLACK);

//        final int end = file.getInt(0);
        for (int y = 1; y < MAP_HEIGHT; y++) {
            int relativeOffset = file.getInt(y*4);
            int realOffset = 4 * (MAP_HEIGHT + 1 + relativeOffset);
//            System.out.println("relativeOffset = " + relativeOffset);
//            System.out.println("realOffset = " + realOffset);

            short x = file.getShort(realOffset);
            realOffset += 2;
            while (true) {
                short startX = x;
                short id = file.getShort(realOffset);
                x = file.getShort(realOffset + 2);

                if (province.isPTI(id)) {
                    g.setColor(ptiColor);
                    g.drawLine(startX, y, x, y);
                } else if (!province.isLand(id)) {
                    g.setColor(oceanColor);
                    g.drawLine(startX, y, x, y);
                } /*else {
                    g.setColor(landColor);
                    g.drawLine(startX, y, endX, y);
                }*/

                g.setColor(Color.BLACK); // borders
                g.drawLine(startX, y, startX+1, y);
                g.drawLine(x-1, y, x, y);

                if (x >= MAP_WIDTH) {
                    break;
                }
                realOffset += 4;
            }
        }
        g.dispose();
        return img;
    }

    public BufferedImage getImage() {
        if (image == null)
            image = draw();
        return image;
    }

    public ProvinceData getProvinceData() {
        return province;
    }
}
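// Illustrative decoding sketch (hypothetical class, separate from IdTbl above): walks the spans
// of a single scanline exactly as the Javadoc describes. The span table stores x0, id0, x1, id1,
// x2, ..., i.e. consecutive spans share their boundary x values, which is why getId() and draw()
// advance 4 bytes per span after reading the first x. The ByteBuffer is assumed little-endian,
// as in the IdTbl constructor.
class IdTblScanlineExample {
    static final int MAP_WIDTH = 18944;
    static final int MAP_HEIGHT = 7296;

    static void dumpScanline(java.nio.ByteBuffer file, int y) {
        int relativeOffset = file.getInt(y * 4);                // entry in the offset table
        int realOffset = 4 * (MAP_HEIGHT + 1 + relativeOffset); // relative to the end of the table
        short startX = file.getShort(realOffset);
        realOffset += 2;
        while (true) {
            short id = file.getShort(realOffset);       // province id of this span
            short endX = file.getShort(realOffset + 2); // shared with the next span's start
            System.out.printf("y=%d: x in [%d, %d) -> province %d%n", y, startX, endX, id);
            if (endX >= MAP_WIDTH) {
                break; // last span of this scanline
            }
            startX = endX;
            realOffset += 4;
        }
    }
}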
package xyz.sidetrip.banutil.commands.wizard;

import sx.blah.discord.api.events.EventSubscriber;
import sx.blah.discord.handle.impl.events.guild.channel.message.MessageReceivedEvent;
import sx.blah.discord.handle.obj.IChannel;
import sx.blah.discord.handle.obj.IMessage;
import sx.blah.discord.handle.obj.IUser;
import xyz.sidetrip.banutil.BanUtil;
import xyz.sidetrip.banutil.UtilDue;
import xyz.sidetrip.banutil.events.WizardEndEvent;

/*
 * A simple wizard to make long commands easy for new users.
 * Needs a link to the command handler it is for.
 * Needs an expire time.
 * Needs to block commands for the user while in the wizard.
 * Needs to have a target for its answers.
 */
public class InputWizard {

    private final WizardQuestion[] questions;
    private final IUser target;
    private final IChannel channel;
    private String[] answers;
    private int questionNumber = 0;

    public InputWizard(IChannel channel, IUser target, WizardQuestion[] questions) {
        this.questions = questions;
        this.target = target;
        this.channel = channel;
        this.answers = new String[questions.length];
        askQuestion();
    }

    public String[] getAnswers() {
        return answers;
    }

    private void askQuestion() {
        if (questionNumber >= questions.length) {
            endWizard();
            return;
        }
        UtilDue.sendMessage(channel, questions[questionNumber].getQuestion());
    }

    private void endWizard() {
        BanUtil.getClient().getDispatcher().dispatch(new WizardEndEvent(this));
        BanUtil.getClient().getDispatcher().unregisterListener(this);
    }

    @EventSubscriber
    public void readInput(MessageReceivedEvent event) {
        IMessage message = event.getMessage();
        String content = message.getContent();
        // Compare with equals(): Discord4J may hand out different IUser/IChannel
        // instances for the same entity, so identity comparison can miss.
        if (message.getAuthor().equals(target) && message.getChannel().equals(channel)) {
            String answer = content;
            if (questions[questionNumber].validAnswer(answer))
                addAnswer(answer);
            else
                invalidAnswer(answer);
        }
    }

    private void invalidAnswer(String answer) {
        UtilDue.sendMessage(channel, ":bangbang: **" + questions[questionNumber].getError(answer) + "**");
        askQuestion();
    }

    private void addAnswer(String answer) {
        answers[questionNumber++] = answer;
        askQuestion();
    }

    public IChannel getChannel() {
        return channel;
    }

    public IUser getTarget() {
        return target;
    }
}
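/*
 * Illustrative sketch, not from the original project: one way a WizardQuestion
 * could look, assuming it is an interface exposing exactly the three methods
 * InputWizard calls (getQuestion, validAnswer, getError). The numeric
 * validation shown here is purely hypothetical.
 */
package xyz.sidetrip.banutil.commands.wizard;

public class NumberQuestion implements WizardQuestion {

    private final String prompt;

    public NumberQuestion(String prompt) {
        this.prompt = prompt;
    }

    @Override
    public String getQuestion() {
        return prompt;
    }

    @Override
    public boolean validAnswer(String answer) {
        return answer.matches("\\d+"); // accept plain non-negative integers only
    }

    @Override
    public String getError(String answer) {
        return "\"" + answer + "\" is not a number - please answer with digits only.";
    }
}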
package com.camunda.demo.springboot.readytoreceive;

import java.util.HashMap;
import java.util.Map;

import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;
import org.springframework.stereotype.Component;

@Component
public class ForwardResponseAdapter implements JavaDelegate {

    @Override
    public void execute(DelegateExecution ctx) throws Exception {
        String uuid = (String) ctx.getVariable("targetCorrelationId");
        String messageName = (String) ctx.getVariable("targetMessage");

        // Copy all variables except the two routing entries, so the waiting
        // instance only receives the actual payload.
        Map<String, Object> variables = new HashMap<>(ctx.getVariables());
        variables.remove("targetCorrelationId");
        variables.remove("targetMessage");

        ctx.getProcessEngineServices().getRuntimeService().createMessageCorrelation(messageName) //
                .processInstanceVariableEquals("correlationId", uuid) //
                .setVariables(variables) //
                .correlateWithResult();
    }
}
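/*
 * Illustrative counterpart, not from the original demo: a delegate that could
 * run in the requesting process to prepare the two routing variables
 * ForwardResponseAdapter consumes. The variable values and the message name
 * are hypothetical.
 */
package com.camunda.demo.springboot.readytoreceive;

import org.camunda.bpm.engine.delegate.DelegateExecution;
import org.camunda.bpm.engine.delegate.JavaDelegate;
import org.springframework.stereotype.Component;

@Component
public class PrepareResponseRoutingAdapter implements JavaDelegate {

    @Override
    public void execute(DelegateExecution ctx) throws Exception {
        // Record where the eventual response must be delivered: the waiting
        // instance is found via its correlationId variable, and the message
        // name selects the receive event to trigger there.
        ctx.setVariable("targetCorrelationId", ctx.getVariable("correlationId"));
        ctx.setVariable("targetMessage", "ResponseReceivedMessage"); // hypothetical message name
    }
}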
package com.github.scaronthesky.eternalwinterwars.model.units.actionpointbehaviours; public interface ActionPointBehaviour { public int spendActionPoints(int points); public int gainActionPoints(int points); }
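/*
 * Illustrative sketch, not from the original game code: a minimal
 * ActionPointBehaviour backed by a capped pool. The return-value semantics are
 * an assumption; here both methods return the points remaining after the
 * change, clamped to the range 0..maximum.
 */
package com.github.scaronthesky.eternalwinterwars.model.units.actionpointbehaviours;

public class CappedActionPointBehaviour implements ActionPointBehaviour {

    private final int maximum;
    private int current;

    public CappedActionPointBehaviour(int maximum) {
        this.maximum = maximum;
        this.current = maximum;
    }

    @Override
    public int spendActionPoints(int points) {
        current = Math.max(0, current - points); // never drop below zero
        return current;
    }

    @Override
    public int gainActionPoints(int points) {
        current = Math.min(maximum, current + points); // never exceed the cap
        return current;
    }
}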
/* * Copyright 2014 Grow Bit * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gwt.http.client; public interface RequestProgress { boolean isLengthComputable(); Number loaded(); Number total(); }
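/*
 * Illustrative sketch, not part of the original sources: turning a
 * RequestProgress into a percentage, guarding on isLengthComputable() the same
 * way one would with a browser ProgressEvent. Class and method names are
 * hypothetical.
 */
package com.google.gwt.http.client;

public final class ProgressFormatter {

    private ProgressFormatter() {
    }

    /** Returns 0-100, or -1 when the total length is unknown. */
    public static int percentDone(RequestProgress progress) {
        if (!progress.isLengthComputable()) {
            return -1; // total() is meaningless without a known content length
        }
        double loaded = progress.loaded().doubleValue();
        double total = progress.total().doubleValue();
        return total == 0 ? 100 : (int) Math.round(100.0 * loaded / total);
    }
}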
import java.util.HashMap;
import java.util.Scanner;

public class Solution {

    public static void main(String[] args) {
        Scanner in = new Scanner(System.in);
        in.next(); // discard the first token; it is not needed to reconstruct the sequence
        String genomeSequence = in.next();

        if (genomeSequence.length() % 4 != 0) {
            System.out.println("==="); // Invalid sequence: length must split evenly among A, G, C, T
            return;
        }

        // Each nucleotide must appear exactly length/4 times; track how many
        // of each are still missing after counting the known characters.
        HashMap<Character, Integer> nucleotideCnt = new HashMap<>();
        int countPerNucleotide = genomeSequence.length() / 4;
        for (char nucleotide : "AGCT".toCharArray()) {
            nucleotideCnt.put(nucleotide, countPerNucleotide); // Init count
        }

        for (char ch : genomeSequence.toCharArray()) {
            if (ch != '?') {
                nucleotideCnt.compute(ch, (_k, _v) -> _v - 1);
            }
        }

        for (char nucleotide : "AGCT".toCharArray()) {
            int countLeft = nucleotideCnt.get(nucleotide);
            if (countLeft < 0) {
                System.out.println("==="); // Invalid sequence: a nucleotide occurs too often
                return;
            }
            if (countLeft == 0) {
                nucleotideCnt.remove(nucleotide); // fully accounted for; never assign it to a '?'
            }
        }

        // Re-emit the sequence, replacing each '?' with any nucleotide that is
        // still under-represented.
        for (char ch : genomeSequence.toCharArray()) {
            if (ch == '?') {
                char chosenNucleotide = (Character) nucleotideCnt.keySet().toArray()[0];
                System.out.print(chosenNucleotide);
                nucleotideCnt.compute(chosenNucleotide, (_k, _v) -> (_v == 1) ? null : _v - 1);
            } else {
                System.out.print(ch);
            }
        }
    }
}
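/*
 * Worked example for the solution above (illustrative input, not from the
 * original problem statement; it assumes the discarded first token is the
 * sequence length): with
 *
 *   input:  8
 *           AG?C?TGC
 *
 * the sequence has length 8, so each nucleotide must occur exactly twice.
 * The known characters leave A and T one short each, so the two '?' become
 * A and T. Which '?' gets which letter depends on HashMap iteration order,
 * so either completion is a valid output:
 *
 *   AGACTTGC   or   AGTCATGC
 */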
/******************************************************************************* * Copyright (c) 2000, 2013 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.jdt.internal.compiler.batch; import java.io.File; import java.util.ArrayList; @SuppressWarnings({"rawtypes", "unchecked"}) public class FileFinder { public static String[] find(File f, String pattern) { ArrayList files = new ArrayList(); find0(f, pattern, files); String[] result = new String[files.size()]; files.toArray(result); return result; } private static void find0(File f, String pattern, ArrayList collector) { if (f.isDirectory()) { String[] files = f.list(); if (files == null) return; for (int i = 0, max = files.length; i < max; i++) { File current = new File(f, files[i]); if (current.isDirectory()) { find0(current, pattern, collector); } else { if (current.getName().toUpperCase().endsWith(pattern)) { collector.add(current.getAbsolutePath()); } } } } } }
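/*
 * Illustrative usage, not from the Eclipse sources: FileFinder upper-cases
 * each file name before the suffix comparison, so the pattern must be passed
 * in upper case. The directory path here is hypothetical.
 */
package org.eclipse.jdt.internal.compiler.batch;

import java.io.File;

public class FileFinderDemo {

    public static void main(String[] args) {
        // Collect the absolute paths of all *.java files under ./src.
        String[] javaFiles = FileFinder.find(new File("src"), ".JAVA");
        for (String path : javaFiles) {
            System.out.println(path);
        }
    }
}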
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 * <p/>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p/>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/

package org.apache.kafka.copycat.storage;

import org.apache.kafka.copycat.errors.CopycatException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.*;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

/**
 * Implementation of OffsetBackingStore that saves data locally to a file. To ensure this behaves
 * similarly to a real backing store, operations are executed asynchronously on a background thread.
 */
public class FileOffsetBackingStore extends MemoryOffsetBackingStore {
    private static final Logger log = LoggerFactory.getLogger(FileOffsetBackingStore.class);

    public final static String OFFSET_STORAGE_FILE_FILENAME_CONFIG = "offset.storage.file.filename";
    private File file;

    public FileOffsetBackingStore() {

    }

    @Override
    public void configure(Map<String, ?> props) {
        super.configure(props);
        String filename = (String) props.get(OFFSET_STORAGE_FILE_FILENAME_CONFIG);
        file = new File(filename);
    }

    @Override
    public synchronized void start() {
        super.start();
        log.info("Starting FileOffsetBackingStore with file {}", file);
        load();
    }

    @Override
    public synchronized void stop() {
        super.stop();
        // Nothing to do since this doesn't maintain any outstanding connections/data
        log.info("Stopped FileOffsetBackingStore");
    }

    @SuppressWarnings("unchecked")
    private void load() {
        // try-with-resources guarantees the stream is closed even when
        // deserialization fails partway through.
        try (ObjectInputStream is = new ObjectInputStream(new FileInputStream(file))) {
            Object obj = is.readObject();
            if (!(obj instanceof HashMap))
                throw new CopycatException("Expected HashMap but found " + obj.getClass());
            Map<byte[], byte[]> raw = (Map<byte[], byte[]>) obj;
            data = new HashMap<>();
            for (Map.Entry<byte[], byte[]> mapEntry : raw.entrySet()) {
                ByteBuffer key = (mapEntry.getKey() != null) ? ByteBuffer.wrap(mapEntry.getKey()) : null;
                ByteBuffer value = (mapEntry.getValue() != null) ? ByteBuffer.wrap(mapEntry.getValue()) : null;
                data.put(key, value);
            }
        } catch (FileNotFoundException | EOFException e) {
            // FileNotFoundException: no offsets have been persisted yet.
            // EOFException: the file exists but is empty or truncated.
            // Either way, start with an empty store.
        } catch (IOException | ClassNotFoundException e) {
            throw new CopycatException(e);
        }
    }

    protected void save() {
        try (ObjectOutputStream os = new ObjectOutputStream(new FileOutputStream(file))) {
            Map<byte[], byte[]> raw = new HashMap<>();
            for (Map.Entry<ByteBuffer, ByteBuffer> mapEntry : data.entrySet()) {
                byte[] key = (mapEntry.getKey() != null) ? mapEntry.getKey().array() : null;
                byte[] value = (mapEntry.getValue() != null) ? mapEntry.getValue().array() : null;
                raw.put(key, value);
            }
            os.writeObject(raw);
        } catch (IOException e) {
            throw new CopycatException(e);
        }
    }
}
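/*
 * Illustrative sketch, not part of the original sources: why load() above
 * converts byte[] keys to ByteBuffer before populating the in-memory map.
 * Arrays inherit identity-based hashCode/equals from Object, so two equal
 * byte[] values never match as HashMap keys; ByteBuffer.wrap provides
 * content-based equality without copying.
 */
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

class ByteKeyDemo {

    public static void main(String[] args) {
        Map<byte[], byte[]> rawKeyed = new HashMap<>();
        rawKeyed.put("offset-key".getBytes(), "42".getBytes());
        // Miss: a fresh byte[] with the same content is a different identity.
        System.out.println(rawKeyed.get("offset-key".getBytes())); // prints null

        Map<ByteBuffer, byte[]> bufferKeyed = new HashMap<>();
        bufferKeyed.put(ByteBuffer.wrap("offset-key".getBytes()), "42".getBytes());
        // Hit: ByteBuffer implements equals/hashCode over its contents.
        byte[] value = bufferKeyed.get(ByteBuffer.wrap("offset-key".getBytes()));
        System.out.println(new String(value)); // prints 42
    }
}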
/* * Copyright 2012-2016 Brian Campbell * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jose4j.jwt.consumer; import org.jose4j.base64url.Base64Url; import org.jose4j.jwa.AlgorithmConstraints; import org.jose4j.jwa.JceProviderTestSupport; import org.jose4j.jwe.ContentEncryptionAlgorithmIdentifiers; import org.jose4j.jwe.JsonWebEncryption; import org.jose4j.jwe.KeyManagementAlgorithmIdentifiers; import org.jose4j.jwk.JsonWebKey; import org.jose4j.jwk.JsonWebKeySet; import org.jose4j.jwk.OctJwkGenerator; import org.jose4j.jwk.OctetSequenceJsonWebKey; import org.jose4j.jwk.PublicJsonWebKey; import org.jose4j.jwk.SimpleJwkFilter; import org.jose4j.jws.AlgorithmIdentifiers; import org.jose4j.jws.JsonWebSignature; import org.jose4j.jwt.JwtClaims; import org.jose4j.jwt.MalformedClaimException; import org.jose4j.jwt.NumericDate; import org.jose4j.jwx.HeaderParameterNames; import org.jose4j.jwx.JsonWebStructure; import org.jose4j.keys.AesKey; import org.jose4j.keys.ExampleEcKeysFromJws; import org.jose4j.keys.ExampleRsaJwksFromJwe; import org.jose4j.keys.ExampleRsaKeyFromJws; import org.jose4j.keys.FakeHsmNonExtractableSecretKeySpec; import org.jose4j.keys.PbkdfKey; import org.jose4j.keys.resolvers.DecryptionKeyResolver; import org.jose4j.keys.resolvers.JwksDecryptionKeyResolver; import org.jose4j.keys.resolvers.JwksVerificationKeyResolver; import org.jose4j.keys.resolvers.VerificationKeyResolver; import org.jose4j.lang.JoseException; import org.jose4j.lang.UnresolvableKeyException; import org.junit.Assert; import org.junit.Test; import java.security.Key; import java.security.PrivateKey; import java.security.interfaces.RSAPublicKey; import java.util.Collections; import java.util.Iterator; import java.util.List; import static org.hamcrest.CoreMatchers.equalTo; import static org.jose4j.jwe.ContentEncryptionAlgorithmIdentifiers.AES_128_GCM; import static org.jose4j.jwe.ContentEncryptionAlgorithmIdentifiers.AES_192_GCM; import static org.jose4j.jwe.ContentEncryptionAlgorithmIdentifiers.AES_256_GCM; import static org.jose4j.jwe.KeyManagementAlgorithmIdentifiers.A128GCMKW; import static org.jose4j.jwe.KeyManagementAlgorithmIdentifiers.A192GCMKW; import static org.jose4j.jwe.KeyManagementAlgorithmIdentifiers.A256GCMKW; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; /** * */ public class JwtConsumerTest { @Test public void jwt61ExampleUnsecuredJwt() throws InvalidJwtException, MalformedClaimException { // an Example Unsecured JWT from https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-6.1 String jwt = "eyJhbGciOiJub25lIn0" + "." 
+ "eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFt" + "cGxlLmNvbS9pc19yb290Ijp0cnVlfQ" + "."; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); Assert.assertThat("joe", equalTo(jwtContext.getJwtClaims().getIssuer())); Assert.assertThat(NumericDate.fromSeconds(1300819380), equalTo(jwtContext.getJwtClaims().getExpirationTime())); Assert.assertTrue(jwtContext.getJwtClaims().getClaimValue("http://example.com/is_root", Boolean.class)); // works w/ 'NO_CONSTRAINTS' and setDisableRequireSignature() and null key JwtConsumer consumer = new JwtConsumerBuilder() .setVerificationKey(null) .setExpectedIssuer("joe") .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819343)) .setJwsAlgorithmConstraints(AlgorithmConstraints.NO_CONSTRAINTS) .setDisableRequireSignature() .build(); JwtClaims jcs = consumer.processToClaims(jwt); Assert.assertThat("joe", equalTo(jcs.getIssuer())); Assert.assertThat(NumericDate.fromSeconds(1300819380), equalTo(jcs.getExpirationTime())); Assert.assertTrue(jcs.getClaimValue("http://example.com/is_root", Boolean.class)); consumer.processContext(jwtContext); // just ensure that getting claims that aren't there returns null (or empty for string list) and doesn't throw an exception Assert.assertNull(jcs.getStringClaimValue("no-such-claim")); Assert.assertNull(jcs.getClaimValue("no way jose", Boolean.class)); Assert.assertFalse(jcs.hasClaim("nope")); Assert.assertTrue(jcs.getStringListClaimValue("nope").isEmpty()); Assert.assertTrue(jcs.hasClaim("http://example.com/is_root")); Object objectClaimValue = jcs.getClaimValue("http://example.com/is_root"); Assert.assertNotNull(objectClaimValue); Assert.assertFalse(jcs.hasClaim("nope")); objectClaimValue = jcs.getClaimValue("nope"); Assert.assertNull(objectClaimValue); // fails w/ default constraints consumer = new JwtConsumerBuilder() .setVerificationKey(null) .setExpectedIssuer("joe") .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819343)) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); // fails w/ explicit constraints consumer = new JwtConsumerBuilder() .setVerificationKey(null) .setExpectedIssuer("joe") .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819343)) .setJwsAlgorithmConstraints(new AlgorithmConstraints(AlgorithmConstraints.ConstraintType.BLACKLIST, AlgorithmIdentifiers.NONE, AlgorithmIdentifiers.RSA_PSS_USING_SHA256)) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); // fail w/ 'NO_CONSTRAINTS' but a key provided consumer = new JwtConsumerBuilder() .setVerificationKey(ExampleRsaJwksFromJwe.APPENDIX_A_1.getKey()) .setExpectedIssuer("joe") .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819343)) .setJwsAlgorithmConstraints(AlgorithmConstraints.NO_CONSTRAINTS) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); // fail w/ 'NO_CONSTRAINTS' and no key but sig required (by default) consumer = new JwtConsumerBuilder() .setExpectedIssuer("joe") .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819343)) .setJwsAlgorithmConstraints(AlgorithmConstraints.NO_CONSTRAINTS) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); } @Test public void jwtA1ExampleEncryptedJWT() throws 
InvalidJwtException, MalformedClaimException { // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#appendix-A.1 String jwt = "eyJhbGciOiJSU0ExXzUiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2In0." + "QR1Owv2ug2WyPBnbQrRARTeEk9kDO2w8qDcjiHnSJflSdv1iNqhWXaKH4MqAkQtM" + "oNfABIPJaZm0HaA415sv3aeuBWnD8J-Ui7Ah6cWafs3ZwwFKDFUUsWHSK-IPKxLG" + "TkND09XyjORj_CHAgOPJ-Sd8ONQRnJvWn_hXV1BNMHzUjPyYwEsRhDhzjAD26ima" + "sOTsgruobpYGoQcXUwFDn7moXPRfDE8-NoQX7N7ZYMmpUDkR-Cx9obNGwJQ3nM52" + "YCitxoQVPzjbl7WBuB7AohdBoZOdZ24WlN1lVIeh8v1K4krB8xgKvRU8kgFrEn_a" + "1rZgN5TiysnmzTROF869lQ." + "AxY8DCtDaGlsbGljb3RoZQ." + "MKOle7UQrG6nSxTLX6Mqwt0orbHvAKeWnDYvpIAeZ72deHxz3roJDXQyhxx0wKaM" + "HDjUEOKIwrtkHthpqEanSBNYHZgmNOV7sln1Eu9g3J8." + "fiK51VwhsxJ-siBMR-YFiA"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setDecryptionKey(ExampleRsaJwksFromJwe.APPENDIX_A_2.getPrivateKey()) .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JwtConsumer c = new JwtConsumerBuilder() .setExpectedIssuer("joe") .setEvaluationTime(NumericDate.fromSeconds(1300819300)) .setDecryptionKey(ExampleRsaJwksFromJwe.APPENDIX_A_2.getPrivateKey()) .setDisableRequireSignature() .build(); c.processContext(jwtContext); JwtContext context = c.process(jwt); JwtClaims jcs = context.getJwtClaims(); Assert.assertTrue(jcs.getClaimValue("http://example.com/is_root", Boolean.class)); String expectedPayload = "{\"iss\":\"joe\",\r\n \"exp\":1300819380,\r\n \"http://example.com/is_root\":true}"; assertThat(jcs.getRawJson(), equalTo(expectedPayload)); assertThat(1, equalTo(context.getJoseObjects().size())); assertThat(context.getJwt(), equalTo(jwt)); } @Test public void jwtA2ExampleNestedJWT() throws InvalidJwtException, MalformedClaimException { // an Example Nested JWT from https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#appendix-A.2 String jwt = "eyJhbGciOiJSU0ExXzUiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiY3R5IjoiSldU" + "In0." + "g_hEwksO1Ax8Qn7HoN-BVeBoa8FXe0kpyk_XdcSmxvcM5_P296JXXtoHISr_DD_M" + "qewaQSH4dZOQHoUgKLeFly-9RI11TG-_Ge1bZFazBPwKC5lJ6OLANLMd0QSL4fYE" + "b9ERe-epKYE3xb2jfY1AltHqBO-PM6j23Guj2yDKnFv6WO72tteVzm_2n17SBFvh" + "DuR9a2nHTE67pe0XGBUS_TK7ecA-iVq5COeVdJR4U4VZGGlxRGPLRHvolVLEHx6D" + "YyLpw30Ay9R6d68YCLi9FYTq3hIXPK_-dmPlOUlKvPr1GgJzRoeC9G5qCvdcHWsq" + "JGTO_z3Wfo5zsqwkxruxwA." + "UmVkbW9uZCBXQSA5ODA1Mg." + "VwHERHPvCNcHHpTjkoigx3_ExK0Qc71RMEParpatm0X_qpg-w8kozSjfNIPPXiTB" + "BLXR65CIPkFqz4l1Ae9w_uowKiwyi9acgVztAi-pSL8GQSXnaamh9kX1mdh3M_TT" + "-FZGQFQsFhu0Z72gJKGdfGE-OE7hS1zuBD5oEUfk0Dmb0VzWEzpxxiSSBbBAzP10" + "l56pPfAtrjEYw-7ygeMkwBl6Z_mLS6w6xUgKlvW6ULmkV-uLC4FUiyKECK4e3WZY" + "Kw1bpgIqGYsw2v_grHjszJZ-_I5uM-9RA8ycX9KqPRp9gc6pXmoU_-27ATs9XCvr" + "ZXUtK2902AUzqpeEUJYjWWxSNsS-r1TJ1I-FMJ4XyAiGrfmo9hQPcNBYxPz3GQb2" + "8Y5CLSQfNgKSGt0A4isp1hBUXBHAndgtcslt7ZoQJaKe_nNJgNliWtWpJ_ebuOpE" + "l8jdhehdccnRMIwAmU1n7SPkmhIl1HlSOpvcvDfhUN5wuqU955vOBvfkBOh5A11U" + "zBuo2WlgZ6hYi9-e3w29bR0C2-pp3jbqxEDw3iWaf2dc5b-LnR0FEYXvI_tYk5rd" + "_J9N0mg0tQ6RbpxNEMNoA9QWk5lgdPvbh9BaO195abQ." 
+ "AVO9iT5AV4CzvDJCdhSFlQ"; PrivateKey decryptionKey = ExampleRsaJwksFromJwe.APPENDIX_A_2.getPrivateKey(); JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setDecryptionKey(decryptionKey) .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); RSAPublicKey verificationKey = ExampleRsaKeyFromJws.PUBLIC_KEY; JwtConsumerBuilder builder = new JwtConsumerBuilder() .setDecryptionKey(decryptionKey) .setEnableRequireEncryption() .setVerificationKey(verificationKey) .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819380)) .setAllowedClockSkewInSeconds(30) .setExpectedIssuer("joe"); JwtConsumer jwtConsumer = builder.build(); jwtConsumer.processContext(jwtContext); JwtContext jwtInfo = jwtConsumer.process(jwt); for (JwtContext ctx : new JwtContext[] {jwtContext, jwtInfo}) { Assert.assertThat(2, equalTo(ctx.getJoseObjects().size())); Assert.assertTrue(ctx.getJoseObjects().get(0) instanceof JsonWebSignature); Assert.assertTrue(ctx.getJoseObjects().get(1) instanceof JsonWebEncryption); assertThat(ctx.getJwt(), equalTo(jwt)); JwtClaims jcs = ctx.getJwtClaims(); Assert.assertThat("joe", equalTo(jcs.getIssuer())); Assert.assertThat(NumericDate.fromSeconds(1300819380), equalTo(jcs.getExpirationTime())); Assert.assertTrue(jcs.getClaimValue("http://example.com/is_root", Boolean.class)); } // then some negative tests w/ null or wrong keys builder = new JwtConsumerBuilder() .setDecryptionKey(null) .setEnableRequireEncryption() .setVerificationKey(verificationKey) .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819380)) .setAllowedClockSkewInSeconds(30) .setExpectedIssuer("joe"); jwtConsumer = builder.build(); // no decryption key so we expect this jwtConsumer to fail on the raw JWT SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, null, jwtConsumer); // but it will work on the jwtContext because the JWE was already decrypted jwtConsumer.processContext(jwtContext); builder = new JwtConsumerBuilder() .setDecryptionKey(decryptionKey) .setEnableRequireEncryption() .setVerificationKey(null) .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819380)) .setAllowedClockSkewInSeconds(30) .setExpectedIssuer("joe"); jwtConsumer = builder.build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, jwtConsumer); builder = new JwtConsumerBuilder() .setDecryptionKey(decryptionKey) .setEnableRequireEncryption() .setVerificationKey(ExampleRsaJwksFromJwe.APPENDIX_A_1.getPublicKey()) .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819380)) .setAllowedClockSkewInSeconds(30) .setExpectedIssuer("joe"); jwtConsumer = builder.build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, jwtConsumer); builder = new JwtConsumerBuilder() .setDecryptionKey(ExampleRsaKeyFromJws.PRIVATE_KEY) .setEnableRequireEncryption() .setVerificationKey(verificationKey) .setRequireExpirationTime() .setEvaluationTime(NumericDate.fromSeconds(1300819380)) .setAllowedClockSkewInSeconds(30) .setExpectedIssuer("joe"); jwtConsumer = builder.build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, jwtConsumer); // already decrypted but different key so seems good to fail } @Test public void jwtSec31ExampleJWT() throws Exception { // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-3.1 String jwt = "eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9." 
+ "eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ." + "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); Assert.assertTrue(jwtContext.getJwtClaims().getClaimValue("http://example.com/is_root", Boolean.class)); assertThat(1, equalTo(jwtContext.getJoseObjects().size())); String jwk = "{\"kty\":\"oct\",\"k\":\"AyM1SysPpbyDfgZld3umj1qzKObwVMkoqQ-EstJQLr_T-1qS0gZH75aKtMN3Yj0iPS4hcgUuTwjAzZr1Z9CAow\"}"; JsonWebKey jsonWebKey = JsonWebKey.Factory.newJwk(jwk); JwksVerificationKeyResolver resolver = new JwksVerificationKeyResolver(Collections.singletonList(jsonWebKey)); JwtConsumer consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(resolver) .setEvaluationTime(NumericDate.fromSeconds(1300819372)) .setExpectedIssuer("joe") .setRequireExpirationTime() .build(); JwtContext context = consumer.process(jwt); Assert.assertTrue(context.getJwtClaims().getClaimValue("http://example.com/is_root", Boolean.class)); assertThat(1, equalTo(context.getJoseObjects().size())); consumer.processContext(jwtContext); // require encryption and it will fail consumer = new JwtConsumerBuilder() .setEnableRequireEncryption() .setVerificationKey(jsonWebKey.getKey()) .setEvaluationTime(NumericDate.fromSeconds(1300819372)) .setExpectedIssuer("joe") .setRequireExpirationTime() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); } @Test public void skipSignatureVerification() throws Exception { String jwt = "eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9." + "eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ." + "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; JwtConsumer consumer = new JwtConsumerBuilder() .setSkipSignatureVerification() .setEvaluationTime(NumericDate.fromSeconds(1300819372)) .setExpectedIssuer("joe") .setRequireExpirationTime() .build(); JwtContext context = consumer.process(jwt); Assert.assertTrue(context.getJwtClaims().getClaimValue("http://example.com/is_root", Boolean.class)); assertThat(1, equalTo(context.getJoseObjects().size())); } @Test (expected = InvalidJwtSignatureException.class) public void jwtBadSig() throws Exception { String jwt = "eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9." + "eyJpc3MiOiJqb2UiLAogImV4cCI6MTkwMDgxOTM4MCwKICJodHRwOi8vZXhhbXBsZS5jb20vaXNfcm9vdCI6dHJ1ZX0." 
+ "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; String jwk = "{\"kty\":\"oct\",\"k\":\"AyM1SysPpbyDfgZld3umj1qzKObwVMkoqQ-EstJQLr_T-1qS0gZH75aKtMN3Yj0iPS4hcgUuTwjAzZr1Z9CAow\"}"; JwtConsumer consumer = new JwtConsumerBuilder() .setVerificationKey(JsonWebKey.Factory.newJwk(jwk).getKey()) .setEvaluationTime(NumericDate.fromSeconds(1900000380)) .setExpectedIssuer("joe") .setRequireExpirationTime() .build(); consumer.process(jwt); } @Test public void algConstraints() throws Exception { String jwt = "eyJ6aXAiOiJERUYiLCJhbGciOiJBMTI4S1ciLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiY3R5IjoiSldUIn0" + ".DDyrirrztC88OaDtTkkNgNIyZqQd4gjWrab9KkiBnyOULjWZWt-IAg" + ".Obun_t7l3FYqNUqyW46syg" + ".ChlzoLTN1ovJP9PLHlirc-_yvP4ya_5gdhDSKiZnifS9MjCbeMYebkOCxSHexs09PBbPv30JwtIyM7caqkSNggA8HT_ub1moMpx0uOFhTE9dpdY4Wb4Ym6mqtIQhdwLymDVCI6vRn-NH88vdLluGSYYLhelgcL05qeWJQKzV3mxopgM-Q7N7LycXrodqTdvM" + ".ay9pwehz96tJgRKvSwASDg"; JsonWebKey wrapKey = JsonWebKey.Factory.newJwk("{\"kty\":\"oct\",\"k\":\"sUMs42PKNsKn9jeGJ2szKA\"}"); JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); Assert.assertThat("eh", equalTo(jwtContext.getJwtClaims().getStringClaimValue("message"))); JsonWebKey macKey = JsonWebKey.Factory.newJwk("{\"kty\":\"oct\",\"k\":\"j-QRollN4PYjebWYcTl32YOGWfdpXi_YYHu03Ifp8K4\"}"); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setVerificationKey(macKey.getKey()) .setEvaluationTime(NumericDate.fromSeconds(1419982016)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); JwtClaims jwtClaims = consumer.processToClaims(jwt); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); consumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setVerificationKey(macKey.getKey()) .setEvaluationTime(NumericDate.fromSeconds(1419982016)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .setJwsAlgorithmConstraints(new AlgorithmConstraints(AlgorithmConstraints.ConstraintType.WHITELIST, AlgorithmIdentifiers.HMAC_SHA256)) .setJweAlgorithmConstraints(new AlgorithmConstraints(AlgorithmConstraints.ConstraintType.WHITELIST, KeyManagementAlgorithmIdentifiers.A128KW)) .setJweContentEncryptionAlgorithmConstraints(new AlgorithmConstraints(AlgorithmConstraints.ConstraintType.WHITELIST, ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256)) .build(); jwtClaims = consumer.processToClaims(jwt); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); consumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setVerificationKey(macKey.getKey()) .setEvaluationTime(NumericDate.fromSeconds(1419982016)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .setJwsAlgorithmConstraints(new AlgorithmConstraints(AlgorithmConstraints.ConstraintType.BLACKLIST, AlgorithmIdentifiers.HMAC_SHA256)) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); consumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setVerificationKey(macKey.getKey()) .setEvaluationTime(NumericDate.fromSeconds(1419982016)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .setJweAlgorithmConstraints(new 
AlgorithmConstraints(AlgorithmConstraints.ConstraintType.BLACKLIST, KeyManagementAlgorithmIdentifiers.A128KW)) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); consumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setVerificationKey(macKey.getKey()) .setEvaluationTime(NumericDate.fromSeconds(1419982016)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .setJweContentEncryptionAlgorithmConstraints(new AlgorithmConstraints(AlgorithmConstraints.ConstraintType.BLACKLIST, ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256)) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); // wrong mac key consumer = new JwtConsumerBuilder() .setDecryptionKey(wrapKey.getKey()) .setVerificationKey(JsonWebKey.Factory.newJwk("{\"kty\":\"oct\",\"k\":\"___RollN4PYjebWYcTl32YOGWfdpXi_YYHu03Ifp8K4\"}").getKey()) .setSkipAllValidators() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); } @Test public void customValidatorTest() throws Exception { // {"iss":"same","aud":"same","exp":1420046060} String jwt = "eyJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzYW1lIiwiYXVkIjoic2FtZSIsImV4cCI6MTQyMDA0NjA2MH0.O1w_nkfQMZvEEvJ0Pach0gPmJUMW8o4aFlA1f2c8m-I"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JsonWebKey jsonWebKey = JsonWebKey.Factory.newJwk("{\"kty\":\"oct\",\"k\":\"IWlxz1h43wKzyigIXNn-dTRBu89M9L8wmJK4zZmUXrQ\"}"); JwtConsumer consumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1420046040)) .setExpectedAudience("same", "different") .setExpectedIssuer("same") .setRequireExpirationTime() .setVerificationKey(jsonWebKey.getKey()) .build(); JwtContext process = consumer.process(jwt); Assert.assertThat(1, equalTo(process.getJoseObjects().size())); consumer.processContext(jwtContext); consumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1420046040)) .setExpectedAudience("same", "different") .setExpectedIssuer("same") .setRequireExpirationTime() .setVerificationKey(jsonWebKey.getKey()) .registerValidator(new Validator() { @Override public String validate(JwtContext jwtContext) throws MalformedClaimException { JwtClaims jcs = jwtContext.getJwtClaims(); String audience = jcs.getAudience().iterator().next(); String issuer = jcs.getIssuer(); if (issuer.equals(audience)) { return "You can go blind issuing tokens to yourself..."; } return null; } }) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); } @Test public void wrappedNpeFromCustomValidatorTest() throws Exception { String jwt = "eyJhbGciOiJIUzI1NiJ9.eyJpc3MiOiJzYW1lIiwiZXhwIjoxNDIwMDQ2ODE0fQ.LUViXhiMJRZa5veg6ayZCDQaIc0GfVDJDx-878WbFzg"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JsonWebKey jsonWebKey = JsonWebKey.Factory.newJwk("{\"kty\":\"oct\",\"k\":\"Ek1bHgP9uYyEtB5-V6oAzT_wB4mUnvCpirPqO4MyFwE\"}"); JwtConsumer consumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1420046767)) .setExpectedAudience(false, "other", "different") .setExpectedIssuer("same") .setRequireExpirationTime() .setVerificationKey(jsonWebKey.getKey()) .build(); JwtContext process = consumer.process(jwt); 
Assert.assertThat(1, equalTo(process.getJoseObjects().size())); consumer.processContext(jwtContext); consumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1420046768)) .setExpectedAudience(false, "other", "different") .setExpectedIssuer("same") .setRequireExpirationTime() .setVerificationKey(jsonWebKey.getKey()) .registerValidator(new Validator() { @Override public String validate(JwtContext jwtContext) throws MalformedClaimException { try { JwtClaims jcs = jwtContext.getJwtClaims(); List<String> audience = jcs.getAudience(); Iterator<String> iterator = audience.iterator(); // this will NPE iterator.next(); return null; } catch (Exception e) { throw new RuntimeException("Something bad happened.", e); } } }) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext,consumer); } @Test public void someExpectedAndUnexpectedEx() throws Exception { // https://tools.ietf.org/html/draft-ietf-oauth-json-web-token-32#section-3.1 String jwt = "eyJ0eXAiOiJKV1QiLA0KICJhbGciOiJIUzI1NiJ9." + "eyJpc3MiOiJqb2UiLA0KICJleHAiOjEzMDA4MTkzODAsDQogImh0dHA6Ly9leGFtcGxlLmNvbS9pc19yb290Ijp0cnVlfQ." + "dBjftJeZ4CVP-mB92K27uhbUJU1p1r_wW1gFWFOEjXk"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JwtConsumer consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new VerificationKeyResolver() { @Override public Key resolveKey(JsonWebSignature jws, List<JsonWebStructure> nestingContext) throws UnresolvableKeyException { throw new UnresolvableKeyException("Can't do it!"); } }) .setEvaluationTime(NumericDate.fromSeconds(1300819372)) .setExpectedIssuer("joe") .setRequireExpirationTime() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new VerificationKeyResolver() { @Override public Key resolveKey(JsonWebSignature jws, List<JsonWebStructure> nestingContext) throws UnresolvableKeyException { throw new IllegalArgumentException("Stuff happens..."); } }) .setEvaluationTime(NumericDate.fromSeconds(1300819372)) .setExpectedIssuer("joe") .setRequireExpirationTime() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); } @Test public void missingCtyInNested() throws Exception { // Nested jwt without "cty":"JWT" -> expect failure here as the cty is a MUST for nesting // setEnableLiberalContentTypeHandling() on the builder will enable a best effort to deal with the content even when cty isn't specified String jwt = "eyJ6aXAiOiJERUYiLCJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImVwayI6eyJrdHkiOiJFQyIsIngiOiIwRGk0VTBZQ0R2NHAtS2hETUZwUThvY0FsZzA2SEwzSHR6UldRbzlDLWV3IiwieSI6IjBfVFJjR1Y3Qy05d0xseFJZSExJOFlKTXlET2hWNW5YeHVPMGdRVmVxd0EiLCJjcnYiOiJQLTI1NiJ9fQ..xw5H8Kztd_sqzbXjt4GKUg.YNa163HLj7MwlvjzGihbOHnJ2PC3NOTnnvVOanuk1O9XFJ97pbbHHQzEeEwG6jfvDgdmlrLjcIJkSu1U8qRby7Xr4gzP6CkaDPbKwvLveETZSNdmZh37XKfnQ4LvKgiko6OQzyLYG1gc97kUOeikXTYVaYaeV1838Bi4q3DsIG-j4ZESg0-ePQesw56A80AEE3j6wXwZ4vqugPP9_ogZzkPFcHf1lt3-A4amNMjDbV8.u-JJCoakXI55BG2rz_kBlg"; PublicJsonWebKey sigKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"loF6m9WAW_GKrhoh48ctg_d78fbIsmUb02XDOwJj59c\",\"y\":\"kDCHDkCbWjeX8DjD9feQKcndJyerdsLJ4VZ5YSTWCoU\",\"crv\":\"P-256\",\"d\":\"6D1C9gJsT9KXNtTNyqgpdyQuIrK-qzo0_QJOVe9DqJg\"}"); PublicJsonWebKey encKey = 
PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"PNbMydlpYRBFTYn_XDFvvRAFqE4e0EJmK6-zULTVERs\",\"y\":\"dyO9wGVgKS3gtP5bx0PE8__MOV_HLSpiwK-mP1RGZgk\",\"crv\":\"P-256\",\"d\":\"FIs8wVojHBdl7vkiZVnLBPw5S9lbn4JF2WWY1OTupic\"}"); JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setEnableLiberalContentTypeHandling() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219088)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, consumer); consumer = new JwtConsumerBuilder() .setEnableLiberalContentTypeHandling() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219088)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); JwtContext ctx = consumer.process(jwt); consumer.processContext(jwtContext); for (JwtContext context : new JwtContext[] {ctx, jwtContext}) { JwtClaims jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); List<JsonWebStructure> joseObjects = context.getJoseObjects(); assertThat(2, equalTo(joseObjects.size())); assertTrue(joseObjects.get(0) instanceof JsonWebSignature); assertTrue(joseObjects.get(1) instanceof JsonWebEncryption); } } @Test public void missingCtyInNestedViaNimbusExample() throws Exception { // "Signed and encrypted JSON Web Token (JWT)" example JWT made from http://connect2id.com/products/nimbus-jose-jwt/examples/signed-and-encrypted-jwt // didn't have "cty":"JWT" at the time of writing (1/5/15 - https://twitter.com/__b_c/status/552105927512301568) but it made me think // allowing more liberal processing might be a good idea // keys and enc alg were changed from the example to produce this jwt final String jwt = "eyJhbGciOiJBMTI4S1ciLCJlbmMiOiJBMTI4Q0JDLUhTMjU2In0." + "IAseIHBLnv7hFKz_V3-o-Of3Mf2DIGzFnSh_8sLZgujPaNIG8NlZmA." + "fwbuvibqYUlDzTXTtsB6yw." + "5T70ZVMqOTl4q_tYegL0bgJpT2wTUlSvnJ2QAB8KfpNO_J3StiK8oHvSmVOPOrCQJai_XffZGUpmAO2fnGnUajKmQpxm_iaJUZtzexwqeNlVzAr-swLUZDmW0lh3NgDB" + "EAgY4khN7v1L_etToKuuEI6P-UGsg34BqaNuZEkj7ylsY1McZg73t5x9C4Q9dsBbsPLFPPUxxvA2abJhAq1Hew." 
+ "D1hDq8pD6nQ42yvez-yjlQ\n"; AesKey decryptionKey = new AesKey(new byte[16]); JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(decryptionKey) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setEnableLiberalContentTypeHandling() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); final JwtConsumer consumer = new JwtConsumerBuilder() .setEnableLiberalContentTypeHandling() // this will try nested content as JOSE if JSON paring fails .setDecryptionKey(decryptionKey) .setVerificationKey(new AesKey(new byte[32])) .setEvaluationTime(NumericDate.fromSeconds(1420467806)) .setExpectedIssuer("https://c2id.com") .setRequireIssuedAt() .build(); JwtContext ctx = consumer.process(jwt); for (JwtContext context : new JwtContext[] {ctx, jwtContext}) { JwtClaims jwtClaims = context.getJwtClaims(); Assert.assertThat("alice", equalTo(jwtClaims.getSubject())); List<JsonWebStructure> joseObjects = context.getJoseObjects(); assertThat(2, equalTo(joseObjects.size())); assertTrue(joseObjects.get(0) instanceof JsonWebSignature); assertTrue(joseObjects.get(1) instanceof JsonWebEncryption); } } @Test public void ctyValueVariationsInNested() throws Exception { // Nested jwt with variations on "cty":"JWT" like jwt, application/jwt, application/JWT ... PublicJsonWebKey sigKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"HVDkXtG_j_JQUm_mNaRPSbsEhr6gdK0a6H4EURypTU0\",\"y\":\"NxdYFS2hl1w8VKf5UTpGXh2YR7KQ8gSBIHu64W0mK8M\",\"crv\":\"P-256\",\"d\":\"ToqTlgJLhI7AQYNLesI2i-08JuaYm2wxTCDiF-VxY4A\"}"); PublicJsonWebKey encKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"7kaETHB4U9pCdsErbjw11HGv8xcQUmFy3NMuBa_J7Os\",\"y\":\"FZK-vSMpKk9gLWC5wdFjG1W_C7vgJtdm1YfNPZevmCw\",\"crv\":\"P-256\",\"d\":\"spOxtF0qiKrrCTaUs_G04RISjCx7HEgje_I7aihXVMY\"}"); String jwt; jwt = "eyJ6aXAiOiJERUYiLCJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImN0eSI6ImFwcGxpY2F0aW9uL2p3dCIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJCOUhPbG82UV9LV0NiQjZLbk1RMDFfaHcyRXdaQWNEMmNucEdYYVl5WFBBIiwieSI6InJYS2s3VzM4UXhVOHl4YWZZc3NsUjFWU2JLbDI5T0FNSWxROFBCWXVZcUEiLCJjcnYiOiJQLTI1NiJ9fQ..LcIG9_bnPb43aaps32H6yQ.rsV7ItJWWfNafDJmeLHluKhiwmsU0Mlwut2jwD6y96KpjD-hz_5zBxpXtj6mk8yGZwg2L26XLo8npt_82bhKnMYqlKSRM-3ge2Deg5WPmBCx6Fj0NyCMnoR8oJTn-oxh0OHZICK_85Xz3GptopeA3Hj8ESdsJEI6D4WbXQ7HfGeg8ID9uvTaL8NGOHT4BGY0bB-6nl3qNIY5ULpg-a4a1ou5k9HnM6SRSpVRwpBBUsk.1vqvwv9XAzsQfvragyMXZQ"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setEnableLiberalContentTypeHandling() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); Assert.assertThat("eh", equalTo(jwtContext.getJwtClaims().getStringClaimValue("message"))); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219088)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); JwtContext context = consumer.process(jwt); JwtClaims jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); jwt = 
"eyJ6aXAiOiJERUYiLCJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImN0eSI6ImFwcGxpY2F0aW9uL0pXVCIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJxelBlRUl0ZXJmQ0dhTFBpbDU3UmRudERHQVdwdVlBRGtVLUJubkkyTXowIiwieSI6ImNmWUxlc1dneGlfVndCdzdvSzNPT3dabGNrbVRCVmMzcEdnMTNRZ3V5WjQiLCJjcnYiOiJQLTI1NiJ9fQ..ftNMf4CqUSCq8p3L1Y7K1A.Z9K1YIJmSY9du5LUuSs0szCj1PUzq0ZnsEppT8yVPdGVDkDi0elEcsM8dCq8CvYrXG8OFuyp0s8dd2u_fIw4RjMc-aVMBT4ikWDmqb4CA17nC2Hxm6dZFPy3Xx3GnqjiGUIB2JiMOxj6mBZtTSvkKAUvs3Rh4G-87v2hJFpqdLSySqd-rQXL7Dhqxl0Cbu9nZFcYEIk58lpC0H2TN9aP5GtuQYa3BlNuEoEDzIcLhc4.N6VFQ0_UgNqyBsPLyE6MQQ"; firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setEnableLiberalContentTypeHandling() .build(); jwtContext = firstPassConsumer.process(jwt); Assert.assertThat("eh", equalTo(jwtContext.getJwtClaims().getStringClaimValue("message"))); consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219095)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); context = consumer.process(jwt); jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); jwt = "eyJ6aXAiOiJERUYiLCJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImN0eSI6Imp3dCIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJoTm5zTlRXZWN3TEVRUGVRMlFjZ05WSDJLX0dzTkFUZXNVaENhY2x2OVAwIiwieSI6ImI2V1lSR1V5Z1NBUGo5a0lFYktYTm5ZaDhEbmNrRXB2NDFYbUVnanA4VE0iLCJjcnYiOiJQLTI1NiJ9fQ..VGTURmPYERdJ7q9_5wlENA.91m_JN65XNlp9WsFHaHihhGB7soKNUdeBNpmODVcIiinhPClH00-GTMwfT08VmXEU2djW3Aw_eBAoU7rI_M0ovYbbmAy7UnVRUyCTbkGsQpv7OxYIznemMVMraFuHNmTAF_MU7oM4gPkqKzwuBa0uwd4JhN00bq-jEcLifMPgMvyGvfJ19SXAyrIVA4Otjuii347V5u1GwlB5VBqMiqtBnbMMzR1Fe3X-4-sEgT9BrM.4T3uLGa4Bm5_r-ZNKPzEWg"; firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setEnableLiberalContentTypeHandling() .build(); jwtContext = firstPassConsumer.process(jwt); Assert.assertThat("eh", equalTo(jwtContext.getJwtClaims().getStringClaimValue("message"))); consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219099)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); context = consumer.process(jwt); jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); jwt = "eyJ6aXAiOiJERUYiLCJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImN0eSI6ImpXdCIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJmYTlJVEh6cEROSG1uV2NDSDVvWGtFYjJ1SncwTXNOU2stQjdFb091WUEwIiwieSI6IkZ1U0RaVXdmb1EtQXB6dEFQRUc1dk40QmZRR2sxWnRMT0FzM1o0a19obmciLCJjcnYiOiJQLTI1NiJ9fQ..FmuORwLWIoNBbRh0XcBzJQ.pSr58DMuRstF3A6xj24yM4KvNgWxtb_QDKuldesTCD-R00BNFwIVx4F51VL5DwR54ITgBZBKdAT4pN6eM-td5VrWBCnSWxFjNrBoDnnRkDfFgq8OjOBaR7k_4zUk41bBikDZ0JOQDWuiaODYBk7PWq0mgotvLPbJ9oc7zfp6lbHqaYXjbzfuD56W_kDYO8zSjiZUGLcYgJDYnO3F8K-QhP02v-0OEpAGrm5SKKV3Txk.Ecojfru8KbkqIw4QvYS3qA"; firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .setEnableLiberalContentTypeHandling() .build(); jwtContext = 
firstPassConsumer.process(jwt); consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420220122)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); context = consumer.process(jwt); jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); } @Test public void ctyRoundTrip() throws JoseException, InvalidJwtException, MalformedClaimException { JsonWebKeySet jwks = new JsonWebKeySet("{\"keys\":[" + "{\"kty\":\"oct\",\"kid\":\"hk1\",\"alg\":\"HS256\",\"k\":\"RYCCH0Qai_7Clk_GnfBElTFIa5VJP3pJUDd8g5H0PKs\"}," + "{\"kty\":\"oct\",\"kid\":\"ek1\",\"alg\":\"A128KW\",\"k\":\"Qi38jqNMENlgKaVRbhKWnQ\"}]}"); SimpleJwkFilter filter = new SimpleJwkFilter(); filter.setKid("hk1", false); JsonWebKey hmacKey = filter.filter(jwks.getJsonWebKeys()).iterator().next(); filter = new SimpleJwkFilter(); filter.setKid("ek1", false); JsonWebKey encKey = filter.filter(jwks.getJsonWebKeys()).iterator().next(); JwtClaims claims = new JwtClaims(); claims.setSubject("subject"); claims.setAudience("audience"); claims.setIssuer("issuer"); claims.setExpirationTimeMinutesInTheFuture(10); claims.setNotBeforeMinutesInThePast(5); claims.setGeneratedJwtId(); JsonWebSignature jws = new JsonWebSignature(); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); jws.setPayload(claims.toJson()); jws.setKey(hmacKey.getKey()); jws.setKeyIdHeaderValue(hmacKey.getKeyId()); String innerJwt = jws.getCompactSerialization(); JsonWebEncryption jwe = new JsonWebEncryption(); jwe.setAlgorithmHeaderValue(KeyManagementAlgorithmIdentifiers.A128KW); jwe.setEncryptionMethodHeaderParameter(ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256); jwe.setKey(encKey.getKey()); jwe.setKeyIdHeaderValue(encKey.getKeyId()); jwe.setContentTypeHeaderValue("JWT"); jwe.setPayload(innerJwt); String jwt = jwe.getCompactSerialization(); JwtConsumer jwtConsumer = new JwtConsumerBuilder() .setExpectedIssuer("issuer") .setExpectedAudience("audience") .setRequireSubject() .setRequireExpirationTime() .setDecryptionKey(encKey.getKey()) .setVerificationKey(hmacKey.getKey()) .build(); JwtContext jwtContext = jwtConsumer.process(jwt); Assert.assertThat("subject", equalTo(jwtContext.getJwtClaims().getSubject())); List<JsonWebStructure> joseObjects = jwtContext.getJoseObjects(); JsonWebStructure outerJsonWebObject = joseObjects.get(joseObjects.size() - 1); Assert.assertTrue(outerJsonWebObject instanceof JsonWebEncryption); Assert.assertThat("JWT", equalTo(outerJsonWebObject.getContentTypeHeaderValue())); Assert.assertThat("JWT", equalTo(outerJsonWebObject.getHeader(HeaderParameterNames.CONTENT_TYPE))); Assert.assertThat("JWT", equalTo(outerJsonWebObject.getHeaders().getStringHeaderValue(HeaderParameterNames.CONTENT_TYPE))); JsonWebStructure innerJsonWebObject = joseObjects.get(0); Assert.assertTrue(innerJsonWebObject instanceof JsonWebSignature); } @Test public void nestedBackwards() throws Exception { // a JWT that's a JWE inside a JWS, which is unusual but legal String jwt = 
"eyJjdHkiOiJKV1QiLCJhbGciOiJFUzI1NiJ9.ZXlKNmFYQWlPaUpFUlVZaUxDSmhiR2NpT2lKRlEwUklMVVZUSWl3aVpXNWpJam9pUVRFeU9FTkNReTFJVXpJMU5pSXNJbVZ3YXlJNmV5SnJkSGtpT2lKRlF5SXNJbmdpT2lKYVIwczNWbkZOUzNKV1VGcEphRXc1UkRsT05tTnpNV0ZhYlU5MVpqbHlUWGhtUm1kRFVURjFaREJuSWl3aWVTSTZJbTAyZW01VlQybEtjMnMwTlRaRVVWb3RjVTEzZEVKblpqQkRNVXh4VDB0dk5HYzNjakpGUTBkQllUZ2lMQ0pqY25ZaU9pSlFMVEkxTmlKOWZRLi4xSndRWThoVFJVczdUMFNpOWM1VE9RLkFOdUpNcFowTU1KLTBrbVdvVHhvRDlxLTA1YUxrMkpvRzMxLXdVZ01ZakdaaWZiWG96SDEzZGRuaXZpWXNtenhMcFdVNU1lQnptN3J3TExTeUlCdjB3LmVEb1lFTEhFWXBnMHFpRzBaeHUtWEE.NctFu0mNSArPnMXakIMQKagWyU4v7733dNhDNK3KwiFP2MahpfaH0LA7x0knRk0sjASRxDuEIW6UZGfPTFOjkw"; PublicJsonWebKey sigKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"HVDkXtG_j_JQUm_mNaRPSbsEhr6gdK0a6H4EURypTU0\",\"y\":\"NxdYFS2hl1w8VKf5UTpGXh2YR7KQ8gSBIHu64W0mK8M\",\"crv\":\"P-256\",\"d\":\"ToqTlgJLhI7AQYNLesI2i-08JuaYm2wxTCDiF-VxY4A\"}"); PublicJsonWebKey encKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"7kaETHB4U9pCdsErbjw11HGv8xcQUmFy3NMuBa_J7Os\",\"y\":\"FZK-vSMpKk9gLWC5wdFjG1W_C7vgJtdm1YfNPZevmCw\",\"crv\":\"P-256\",\"d\":\"spOxtF0qiKrrCTaUs_G04RISjCx7HEgje_I7aihXVMY\"}"); JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420226222)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); JwtContext ctx = consumer.process(jwt); consumer.processContext(jwtContext); for (JwtContext context : new JwtContext[] {ctx, jwtContext}) { JwtClaims jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); List<JsonWebStructure> joseObjects = context.getJoseObjects(); assertThat(2, equalTo(joseObjects.size())); assertTrue(joseObjects.get(0) instanceof JsonWebEncryption); assertTrue(joseObjects.get(1) instanceof JsonWebSignature); } } @Test public void tripleNesting() throws Exception { // a JWT that's a JWE inside a JWS, which is unusual but legal String jwt = "eyJhbGciOiJQQkVTMi1IUzI1NitBMTI4S1ciLCJlbmMiOiJBMTI4Q0JDLUhTMjU2IiwiY3R5Ijoiand0IiwicDJjIjo4MTkyLCJwMnMiOiJiWE13N0F3YUtITWZ4cWRNIn0.5Qo4mtR0E6AnTsiq-hcH9_RJoZwmWiMl0se_riEr1sdz2IXA-vCkrw.iA7lBH3Tzs4uIJVtekZEfg.jkdleffS8GIen_xt_g3QHAc0cat6UBAODpv6WLJ_ytMw-h0dtV0F77d7k1oWxBQ68Ff83v3Pxsyiqf6K9BQUVyzmI6rZafDStQm1IdTS-rvsiB4qDrx9juMqzu1udPy5N7JGs_CDV31Ky3fWEveAy4kBX46-axdyhP5XFg6xMfJ614mcf_bfo5hIJByZFwqNolNwsHLUTuiUBa4Mdg-tfob692-ox8B2c6w4RqRrLOVA_M3gENoxbLIJGL0WL1OkdQb7fyEsaMzR3urJL1t8LI5Q1pD8wjbiv4VKvc1BqoJSM0h9mLm_GNhTdQGPmevBwWVZ1k1tWJjQw0nU2eFZJi1STDGzK1GRDBD91rZSYD763WHADbxcqxrcri92jtyZrxB22pJXEgkpMlUkxqjCFATV20WSM8aSW4Od9Of9MCnrNTIby_3np4zEq5EpFEkVmH-9PzalKWo5gOHR8Zqnldyz6xcOamP34o_lEh5ddEwAFjGTlJWrDkssMeBjOog3_CXHZhutD9IfCKmIHu6Wk10XkELamiKPmNCe_CMDEdx6o6LrCtfyheOfgpDaZeZZc3Y-TF1o9J3RmCZqB-oHgLEc9mZQrGU6r5UZ4lYyfrAJl2y7Rya87LBGsUjSs7SuIyQKYkH5ek8j_9rhm_3nZhivDchkiWx5J3Pzso5Q3p6hjUfvhpgO2ywtnii45iINi5UAL6O8xqUhxZUJSoMxt1XKwx92bmC9kOoF1ljLm-w.VP_VFGef9SGdxoHCZ01FxQ"; PublicJsonWebKey sigKey = 
PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"HVDkXtG_j_JQUm_mNaRPSbsEhr6gdK0a6H4EURypTU0\",\"y\":\"NxdYFS2hl1w8VKf5UTpGXh2YR7KQ8gSBIHu64W0mK8M\",\"crv\":\"P-256\",\"d\":\"ToqTlgJLhI7AQYNLesI2i-08JuaYm2wxTCDiF-VxY4A\"}"); final PublicJsonWebKey encKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"7kaETHB4U9pCdsErbjw11HGv8xcQUmFy3NMuBa_J7Os\",\"y\":\"FZK-vSMpKk9gLWC5wdFjG1W_C7vgJtdm1YfNPZevmCw\",\"crv\":\"P-256\",\"d\":\"spOxtF0qiKrrCTaUs_G04RISjCx7HEgje_I7aihXVMY\"}"); final Key passwordIsTaco = new PbkdfKey("taco"); DecryptionKeyResolver decryptionKeyResolver = new DecryptionKeyResolver() { @Override public Key resolveKey(JsonWebEncryption jwe, List<JsonWebStructure> nestingContext) throws UnresolvableKeyException { return nestingContext.isEmpty() ? passwordIsTaco : encKey.getPrivateKey(); } }; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKeyResolver(decryptionKeyResolver) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKeyResolver(decryptionKeyResolver) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420229816)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); JwtContext ctx = consumer.process(jwt); consumer.processContext(jwtContext); for (JwtContext context : new JwtContext[] {ctx, jwtContext}) { JwtClaims jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); List<JsonWebStructure> joseObjects = context.getJoseObjects(); assertThat(3, equalTo(joseObjects.size())); assertTrue(joseObjects.get(2) instanceof JsonWebEncryption); assertTrue(joseObjects.get(1) instanceof JsonWebEncryption); assertTrue(joseObjects.get(0) instanceof JsonWebSignature); } } @Test public void testOnlyEncrypted() throws Exception { // there are legitimate cases where a JWT need only be encrypted but the majority of time a mac'd or signed JWS is needed // by default the JwtConsumer should not accept a JWE only JWT to protect against cases where integrity protection might // be accidentally inferred PublicJsonWebKey sigKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"HVDkXtG_j_JQUm_mNaRPSbsEhr6gdK0a6H4EURypTU0\",\"y\":\"NxdYFS2hl1w8VKf5UTpGXh2YR7KQ8gSBIHu64W0mK8M\",\"crv\":\"P-256\",\"d\":\"ToqTlgJLhI7AQYNLesI2i-08JuaYm2wxTCDiF-VxY4A\"}"); PublicJsonWebKey encKey = PublicJsonWebKey.Factory.newPublicJwk("{\"kty\":\"EC\",\"x\":\"7kaETHB4U9pCdsErbjw11HGv8xcQUmFy3NMuBa_J7Os\",\"y\":\"FZK-vSMpKk9gLWC5wdFjG1W_C7vgJtdm1YfNPZevmCw\",\"crv\":\"P-256\",\"d\":\"spOxtF0qiKrrCTaUs_G04RISjCx7HEgje_I7aihXVMY\"}"); String jwt = "eyJ6aXAiOiJERUYiLCJhbGciOiJFQ0RILUVTIiwiZW5jIjoiQTEyOENCQy1IUzI1NiIsImVwayI6eyJrdHkiOiJFQyIsIngiOiJ3UXdIa1RUci1tUFpaZURDYU8wRjEwNi1NTkg0aFBfX0xrTW5MaElkTVhVIiwieSI6IkF4Ul9VNW1EN1FhMnFia3R5WS0tU1dsMng0N1gxTWJ5S2Rxb1JteUFVS1UiLCJjcnYiOiJQLTI1NiJ9fQ..oeYI_sIoU1LWIUw3z16V_g.J_BlS-qDJnAqw9wzngIQQioTbTGbyFnorVRq1WTO3leFXKKuBmqoWPHqoVSZdzsVeiFkI-F1DesY489MltwGYg.egjQH2w4oHpMgfjg8saXxQ"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); Assert.assertThat("eh", equalTo(jwtContext.getJwtClaims().getStringClaimValue("message"))); 
JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219088)) .setExpectedAudience("canada") .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtContext, consumer); consumer = new JwtConsumerBuilder() .setDecryptionKey(encKey.getPrivateKey()) .setVerificationKey(sigKey.getPublicKey()) .setEvaluationTime(NumericDate.fromSeconds(1420219088)) .setExpectedAudience("canada") .setDisableRequireSignature() .setExpectedIssuer("usa") .setRequireExpirationTime() .build(); JwtContext context = consumer.process(jwt); JwtClaims jwtClaims = context.getJwtClaims(); Assert.assertThat("eh", equalTo(jwtClaims.getStringClaimValue("message"))); consumer.processContext(jwtContext); } @Test public void encOnlyWithIntegrityIssues() throws Exception { String jwt = "eyJhbGciOiJkaXIiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2In0..zWNzKpA-QA0BboVl02nz-A.oSy4V6cQ6EnuIMyazDCqc9jEZMC7k8LwLKkrC12Pf-wpFRyDtQjGdIZ_Ndq9JMAnrCbx0bgFSxjKISbXbcnHiA.QsGX3JhHP1Pwy4zQ8Ha9FQ"; JsonWebKey jsonWebKey = JsonWebKey.Factory.newJwk("{\"kty\":\"oct\",\"k\":\"30WEMkbhwHPBkg_fIfm_4GuzIz5pPZB7_BSfI3dHbbQ\"}"); DecryptionKeyResolver decryptionKeyResolver = new JwksDecryptionKeyResolver(Collections.singletonList(jsonWebKey)); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKeyResolver(decryptionKeyResolver) .setEvaluationTime(NumericDate.fromSeconds(1420230888)) .setExpectedAudience("me") .setExpectedIssuer("me") .setRequireExpirationTime() .setDisableRequireSignature() .build(); JwtClaims jwtClaims = consumer.processToClaims(jwt); Assert.assertThat("value", equalTo(jwtClaims.getStringClaimValue("name"))); // change some things and make sure it fails jwt = "eyJhbGciOiJkaXIiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2In0..zWNzKpA-QA0BboVl02nz-A.eyJpc3MiOiJtZSIsImF1ZCI6Im1lIiwiZXhwIjoxNDIwMjMxNjA2LCJuYW1lIjoidmFsdWUifQ.QsGX3JhHP1Pwy4zQ8Ha9FQ"; SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, consumer); jwt = "eyJhbGciOiJkaXIiLCJlbmMiOiJBMTI4Q0JDLUhTMjU2In0..zWNzKpA-QA0BboVl02nz-A.u1D7JCpDFeRl69G1L-h3IRrmcOXiWLnhr23ugO2kkDqKVNcO1YQ4Xvl9Sag4aYOnkqUbqe6Wdz8KK3d9q178tA.QsGX3JhHP1Pwy4zQ8Ha9FQ"; SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, consumer); } @Test public void hmacWithResolver() throws Exception { String jwt = "eyJraWQiOiJfMyIsImFsZyI6IkhTMjU2In0" + ".eyJpc3MiOiJmcm9tIiwiYXVkIjpbInRvIiwib3J5b3UiXSwiZXhwIjoxNDI0MDQxNTc0LCJzdWIiOiJhYm91dCJ9" + ".jgC4hWHd1C4kkYiVIbung4vg44bQOEv3JkGupnRrYDk"; JwtConsumer firstPassConsumer = new JwtConsumerBuilder() .setSkipAllValidators() .setDisableRequireSignature() .setSkipSignatureVerification() .build(); JwtContext jwtContext = firstPassConsumer.process(jwt); String json = "{\"keys\":[" + "{\"kty\":\"oct\",\"kid\":\"_1\", \"k\":\"9g99cnHIc3kMeR_JbwmAojgUlHIH0GoKz7COz9719x1\"}," + "{\"kty\":\"oct\",\"kid\":\"_2\", \"k\":\"vvlp7BacRr-a9pOKK7BKxZo88u6cY2o9Lz6-P--_01p\"}," + "{\"kty\":\"oct\",\"kid\":\"_3\",\"k\":\"a991cccx6-7rP5p91nnHi3K-jcDjsFh1o34bIeWA081\"}]}"; JsonWebKeySet jsonWebKeySet = new JsonWebKeySet(json); JwtConsumer consumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1424041569)) .setExpectedAudience("to") .setExpectedIssuer("from") .setRequireSubject() .setVerificationKeyResolver(new JwksVerificationKeyResolver(jsonWebKeySet.getJsonWebKeys())) .setRequireExpirationTime() .build(); JwtContext ctx = consumer.process(jwt); consumer.processContext(jwtContext); for (JwtContext 
context : new JwtContext[] {ctx, jwtContext}) { assertThat(1, equalTo(context.getJoseObjects().size())); assertThat("about", equalTo(context.getJwtClaims().getSubject())); } } @Test public void ifItWereAnIdTokenHint() throws InvalidJwtException, JoseException, MalformedClaimException { // an ID Token and JWKS from NRI-phpOIDC-Implicit-10-Apr-2015 http://openid.net/certification/ just 'cause it's nice to have JWT content produced elsewhere // this test was intended to explore some concepts around https://bitbucket.org/b_c/jose4j/issue/19 (skipping date aud checks and also an expected subject value) String keys = "{\n" + "\"keys\": [\n" + " {\n" + " \"e\": \"AQAB\",\n" + " \"kid\": \"PHPOP-00\",\n" + " \"kty\": \"RSA\",\n" + " \"n\": \"lqjtB9h9j1yl5Y3pmyt0qRUuGnCSn6HWFXHdlUPwt2xanA8aP5MN5dlRJCVR_sR08pb4taIerowTZ7ShdSaWqkGAqwgJYhM0Nyvj_GO1XIYfWl2u49U8j1s" + "EFGDvNMNYQcX4RwaLU3lbavlYVHx_0W5gvw6XfEvkdWkPEbO3Ik1_cCySBxbaCxKszFP_yKCfRBbSQzrz_ZV6PMU6B0_OSknD7BRaogABdxPu79mUU-_Fk1XSA4gdRd5ccnX" + "6lXiF0ePiI2x7s-RdyrMMT4HrXMYlO7VxraUvK61bNOKuRqoV6K-OdJUbcgziRe0nEidgyOgRTXRgnRkyCp2eMkKXFw\"\n" + "}]}"; String jwt = "eyJhbGciOiJSUzI1NiIsImprdSI6Imh0dHBzOlwvXC9jb25uZWN0Lm9wZW5pZDQudXM6NTQ0M1wvcGhwT3BcL29wLmp3ayIsImtpZCI6IlBIUE9QLTAwIn0" + ".eyJpc3MiOiJodHRwczpcL1wvY29ubmVjdC5vcGVuaWQ0LnVzOjU0NDNcL3BocE9wIiwic3ViIjoiZDRjMTEzOTE3NTA1MmRkNTE1ZmE5MzU4YTVjMmQ0YjRhNGF" + "kYTM2ZDgxNWJiODc4OWEwNDFhNDFmZmZmZGNlYSIsImF1ZCI6WyJSLVJ1ZmpTRFZHQ0dmZFRtSW9iZjJRIl0sImV4cCI6MTQyODQ0NjkwNSwiaWF0IjoxNDI4NDQ" + "2NjA1LCJub25jZSI6IlB1enhKSWtxdjZ6ciIsImF1dGhfdGltZSI6MTQyODQ0NTAxMH0" + ".WYh2Zn3oNys7VIa6bCCw9LcIPD95W5YP4XKiIBcY5gz0Ti3fiwslsbm1wGJB-nJA9AXi1cIywsZs94l7BKJdNdUiJQUuSFRuyHCCDY--7iELwWFIGXSzFkwjUsR" + "AAq9sMWqBO3qm01ganUH4Q9wFuSa-d6GA8ybMy3ymfV1OyNzVpTUqi9HWrRlAw0jUoTVGZA4p7qMzXgZfNF3pyankL2mmeb34ZhFk8S2IAZKFhRKuo0ORJRJ6_Fu" + "9Eq0DvfrvX1RJpA3MKkJ8aiD5N4fcUy7vzgQRCNqsgEaqC-i4-vlNN5uyKP5IUZW-hqh-c6rXVrM-8hpZtCM_Z76eRfv1VQ"; // sub=d4c1139175052dd515fa9358a5c2d4b4a4ada36d815bb8789a041a41ffffdcea // aud=[R-RufjSDVGCGfdTmIobf2Q] // exp=1428446905 -> Apr 7, 2015 4:48:25 PM MDT JwtConsumer consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new JwksVerificationKeyResolver(new JsonWebKeySet(keys).getJsonWebKeys())) .setAllowedClockSkewInSeconds(Integer.MAX_VALUE) .setExpectedSubject("d4c1139175052dd515fa9358a5c2d4b4a4ada36d815bb8789a041a41ffffdcea") .setExpectedAudience("R-RufjSDVGCGfdTmIobf2Q") .build(); JwtContext jwtCtx = consumer.process(jwt); assertThat(jwtCtx.getJwtClaims().getSubject(), equalTo("d4c1139175052dd515fa9358a5c2d4b4a4ada36d815bb8789a041a41ffffdcea")); consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new JwksVerificationKeyResolver(new JsonWebKeySet(keys).getJsonWebKeys())) .setAllowedClockSkewInSeconds(Integer.MAX_VALUE) .setExpectedSubject("NOOOOOOOOOOOOOOOOPE") .setExpectedAudience("R-RufjSDVGCGfdTmIobf2Q") .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, consumer); consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new JwksVerificationKeyResolver(new JsonWebKeySet(keys).getJsonWebKeys())) .setAllowedClockSkewInSeconds(Integer.MAX_VALUE) .setSkipDefaultAudienceValidation() .build(); jwtCtx = consumer.process(jwt); assertThat(jwtCtx.getJwtClaims().getAudience().iterator().next(), equalTo("R-RufjSDVGCGfdTmIobf2Q")); consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new JwksVerificationKeyResolver(new JsonWebKeySet(keys).getJsonWebKeys())) .setAllowedClockSkewInSeconds(Integer.MAX_VALUE) .build(); 
SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, consumer); consumer = new JwtConsumerBuilder() .setVerificationKeyResolver(new JwksVerificationKeyResolver(new JsonWebKeySet(keys).getJsonWebKeys())) .setAllowedClockSkewInSeconds(Integer.MAX_VALUE) .setExpectedAudience("no", "nope", "no way jose") .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, consumer); } @Test public void relaxDecryptionKeyValidation() throws Exception { // PublicJsonWebKey rsaJsonWebKey = RsaJwkGenerator.generateJwk(1024); // rsaJsonWebKey.setKeyId("acc"); // OctetSequenceJsonWebKey octetSequenceJsonWebKey = OctJwkGenerator.generateJwk(256); // octetSequenceJsonWebKey.setKeyId("ltc"); // // JsonWebKeySet jwks = new JsonWebKeySet(rsaJsonWebKey, octetSequenceJsonWebKey); // System.out.println(jwks.toJson(JsonWebKey.OutputControlLevel.INCLUDE_PRIVATE)); // // JwtClaims jwtClaims = new JwtClaims(); // jwtClaims.setAudience("a"); // jwtClaims.setIssuer("i"); // jwtClaims.setExpirationTimeMinutesInTheFuture(10); // jwtClaims.setSubject("s"); // jwtClaims.setNotBeforeMinutesInThePast(1); // // System.out.println(jwtClaims); // // JsonWebSignature jws = new JsonWebSignature(); // jws.setPayload(jwtClaims.toJson()); // jws.setKey(octetSequenceJsonWebKey.getKey()); // jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); // jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); // String jwsCompactSerialization = jws.getCompactSerialization(); // // System.out.println(jwsCompactSerialization); // // JsonWebEncryption jwe = new JsonWebEncryption(); // jwe.setAlgorithmHeaderValue(KeyManagementAlgorithmIdentifiers.RSA_OAEP); // jwe.setEncryptionMethodHeaderParameter(ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256); // jwe.setKey(rsaJsonWebKey.getPublicKey()); // jwe.setDoKeyValidation(false); // jwe.setKeyIdHeaderValue(rsaJsonWebKey.getKeyId()); // jwe.setPayload(jwsCompactSerialization); // jwe.setContentTypeHeaderValue("JWT"); // String jweCompactSerialization = jwe.getCompactSerialization(); // // System.out.println(jweCompactSerialization); String jwt = "eyJhbGciOiJSU0EtT0FFUCIsImVuYyI6IkExMjhDQkMtSFMyNTYiLCJraWQiOiJhY2MiLCJjdHkiOiJKV1QifQ" + ".KrukndaF2sHb3Y0r311rrYmCrXco-99ZIQ3iLjvCVbbow5MppRTK4DPJUShcndfcIVIFXMYSLGvIJwf39yZRJJ_EvBFnqhOUeCAsUHLGO1yxoQ619jmSh4bCaIicLYeivKaVSQN4Ezc5fvg-Nnv6TBIIgHuWMDU2Ztd96DJRokc" + ".wMg2Eb8izCOUnACqdrcPQA" + ".quFKSN7xQoMJzaYFBVwykQZ8zB3hpW8HtK7pm-4Ggzorno_K-eBQ7fXjRmJ1Jw-kCcmUa8flpnQqpL9jurtlz7DC1ABe0vm2ZkHoJluB6QeSr60Y9rP7kyy_rd3blXT_7t6Wgowo8MumXrrUUxxEQJgXvCmKbd-Rw9sK5jAHEug3zztLXHOX0O0QoxDzTJOsSRtodsu7bTJa-ADvPmK9e0Xp06NRqvx7WuJGKlq3cwQ" + ".DL6yaCdiOUcViN-eZVIwOA"; JsonWebKeySet jwks = new JsonWebKeySet("{\"keys\":[" + "{\"kty\":\"RSA\",\"kid\":\"acc\",\"n\":\"pkRsP8W09WkolK85OQlq6XTQEoRsulNY6vQsJMluOPErKIOJp6K4cgg5n6Y9NXnswUt0n5suxqlKDHmRRQgU9BGBcqptmCog-0KQKvTqUQJmtDviRTu1aO12Zz_ATEszf8rvPt795xaFvDycCA2YS87lkdIET2ap2qrHCfeWlkk\"," + "\"e\":\"AQAB\",\"d\":\"MnNknV0ycZz9EVCx_lqbNEebs2K3UzpjKrf4hRkR9vlG7T4skM9RRFi2k3jv7cAXVPe-ZYfDA8jujSZ-LAItyPwIO-pbtIeXrKQtvLgP4igsfDMCmvRvNmUuV93Gy9fMBVhEGK_xxVQtJWbdgZsk_v2kMUkX4W2WS_Mbo3YHCwE\"," + "\"p\":\"2BBdLVoi6DP-5JJyTCxdBbaKUjQvVPHXlcqNdaKf2949Nze7IpLoPtkCTVVlTtEvAhYGxuI1i101fK4hGW_IcQ\",\"q\":\"xP_Mg7_SNlzg0eyCzK09mKdagOFfoHKIMoJb9qzOAENnIjt67hpxd7x2h45pX4HM7ObU_1OAl9IYvTqUPhPXWQ\"," + "\"dp\":\"ljx6rchZMWDGQiVaeID4hbpx38sNhmFLaIqZZkyYH4gexMBpzRadiuXWZfOVKALoTukF-VDdrnQ3duSVe1xw4Q\",\"dq\":\"Q23K8s2VhkYELdZmbuhdTQL7V2HM-X46YA9-qtA7MpvfkTgKu7URYYqAh6WXK7miCvR3s21BdrXTAfIrC5R_AQ\"," + 
"\"qi\":\"iaHGlWvmsQvWyZ5GdAar0WOJi_CNTGCzv9SaVnA83I1ewXvKejYMnzLjetPbopxE2enVicnvjlrDaihJbZ5TYA\"}" + ",{\"kty\":\"oct\",\"kid\":\"ltc\",\"k\":\"vJRXGLSNo-jggR8o5yxjzrm_82w-35rpnve0JzEr2sw\"}" + "]}"); VerificationKeyResolver verificationKeyResolver = new JwksVerificationKeyResolver(jwks.getJsonWebKeys()); DecryptionKeyResolver decryptionKeyResolver = new JwksDecryptionKeyResolver(jwks.getJsonWebKeys()); JwtConsumer jwtConsumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1432324168)) .setExpectedAudience("a") .setExpectedIssuer("i") .setExpectedSubject("s") .setRequireExpirationTime() .setVerificationKeyResolver(verificationKeyResolver) .setDecryptionKeyResolver(decryptionKeyResolver) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); // fail b/c the RSA key is too small jwtConsumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1432324168)) .setExpectedAudience("a") .setExpectedIssuer("i") .setExpectedSubject("s") .setRequireExpirationTime() .setVerificationKeyResolver(verificationKeyResolver) .setDecryptionKeyResolver(decryptionKeyResolver) .setRelaxDecryptionKeyValidation() // be more relaxed here to allow the 1024 bit RSA key .build(); JwtClaims claims = jwtConsumer.processToClaims(jwt); assertThat(claims.getClaimsMap().size(), equalTo(5)); } @Test public void relaxVerificationKeyValidation() throws Exception { // OctetSequenceJsonWebKey octetSequenceJsonWebKey = OctJwkGenerator.generateJwk(128); // octetSequenceJsonWebKey.setKeyId("esc"); // // JsonWebKeySet jwks = new JsonWebKeySet(octetSequenceJsonWebKey); // System.out.println(jwks.toJson(JsonWebKey.OutputControlLevel.INCLUDE_PRIVATE)); // // JwtClaims jwtClaims = new JwtClaims(); // jwtClaims.setAudience("a"); // jwtClaims.setIssuer("i"); // jwtClaims.setExpirationTimeMinutesInTheFuture(10); // jwtClaims.setSubject("s"); // jwtClaims.setNotBeforeMinutesInThePast(1); // // System.out.println(jwtClaims); // // JsonWebSignature jws = new JsonWebSignature(); // jws.setPayload(jwtClaims.toJson()); // jws.setKey(octetSequenceJsonWebKey.getKey()); // jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); // jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); // jws.setDoKeyValidation(false); // String jwsCompactSerialization = jws.getCompactSerialization(); // // System.out.println(jwsCompactSerialization); String jwt = "eyJraWQiOiJlc2MiLCJhbGciOiJIUzI1NiJ9.eyJhdWQiOiJhIiwiaXNzIjoiaSIsImV4cCI6MTQzMjMyNTQ5Niwic3ViIjoicyIsIm5iZiI6MTQzMjMyNDgzNn0.16LpzAZyBcokZ4aUaXHn5yN0xQ1zpmLyJVFHu6nH1zY"; JsonWebKeySet jwks = new JsonWebKeySet("{\"keys\":[{\"kty\":\"oct\",\"kid\":\"esc\",\"k\":\"dbwsHvQsXoZiWpulhZA8dg\"}]}"); VerificationKeyResolver verificationKeyResolver = new JwksVerificationKeyResolver(jwks.getJsonWebKeys()); JwtConsumer jwtConsumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1432324836)) .setExpectedAudience("a") .setExpectedIssuer("i") .setExpectedSubject("s") .setRequireExpirationTime() .setVerificationKeyResolver(verificationKeyResolver) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); // fail b/c the HMAC key is too small jwtConsumer = new JwtConsumerBuilder() .setEvaluationTime(NumericDate.fromSeconds(1432324836)) .setExpectedAudience("a") .setExpectedIssuer("i") .setExpectedSubject("s") .setRequireExpirationTime() .setVerificationKeyResolver(verificationKeyResolver) .setRelaxVerificationKeyValidation() // be more relaxed here to allow the smaller key .build(); JwtClaims claims = 
jwtConsumer.processToClaims(jwt); assertThat(claims.getClaimsMap().size(), equalTo(5)); } @Test public void skipAllDefaultValidators() throws Exception { // OctetSequenceJsonWebKey octetSequenceJsonWebKey = OctJwkGenerator.generateJwk(256); // octetSequenceJsonWebKey.setKeyId("xxc"); // // JsonWebKeySet jwks = new JsonWebKeySet(octetSequenceJsonWebKey); // System.out.println(jwks.toJson(JsonWebKey.OutputControlLevel.INCLUDE_PRIVATE)); // // JwtClaims jwtClaims = new JwtClaims(); // jwtClaims.setAudience("a"); // jwtClaims.setIssuer("i"); // jwtClaims.setExpirationTimeMinutesInTheFuture(10); // jwtClaims.setSubject("s"); // jwtClaims.setNotBeforeMinutesInThePast(1); // // System.out.println(jwtClaims); // // JsonWebSignature jws = new JsonWebSignature(); // jws.setPayload(jwtClaims.toJson()); // jws.setKey(octetSequenceJsonWebKey.getKey()); // jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); // jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); // jws.setDoKeyValidation(false); // String jwsCompactSerialization = jws.getCompactSerialization(); // // System.out.println(jwsCompactSerialization); String jwt = "eyJraWQiOiJ4eGMiLCJhbGciOiJIUzI1NiJ9.eyJhdWQiOiJhIiwiaXNzIjoiaSIsImV4cCI6MTQzMjMyNzE5NSwic3ViIjoicyIsIm5iZiI6MTQzMjMyNjUzNX0.zfBXCLSysVxY-zT4DNCLXS7IyfKkYv7kCIUKxdIGxdI"; JsonWebKeySet jwks = new JsonWebKeySet("{\"keys\":[{\"kty\":\"oct\",\"kid\":\"xxc\",\"k\":\"7bLZdrROsprHkX75gCjKLeGj4brDf7TFtcr2h1F_nfc\"}]}"); VerificationKeyResolver verificationKeyResolver = new JwksVerificationKeyResolver(jwks.getJsonWebKeys()); JwtConsumer jwtConsumer = new JwtConsumerBuilder() .setVerificationKeyResolver(verificationKeyResolver) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); // fail b/c exp and aud jwtConsumer = new JwtConsumerBuilder() .setVerificationKeyResolver(verificationKeyResolver) .setSkipAllDefaultValidators() .build(); JwtClaims claims = jwtConsumer.processToClaims(jwt); // this will work 'cause no claims validation is happening assertThat(claims.getClaimsMap().size(), equalTo(5)); Validator customValidator = new Validator() { @Override public String validate(JwtContext jwtContext) throws MalformedClaimException { return (jwtContext.getJwtClaims().getIssuer().equals("i")) ? 
"i isn't okay as an issuer" : null; } }; jwtConsumer = new JwtConsumerBuilder() .setVerificationKeyResolver(verificationKeyResolver) .setSkipAllDefaultValidators() .registerValidator(customValidator) .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); // make sure fail w/ custom validator b/c setSkipAllDefaultValidators runs any that were registered jwtConsumer = new JwtConsumerBuilder() .setVerificationKeyResolver(verificationKeyResolver) .setSkipAllValidators() .registerValidator(customValidator) .build(); claims = jwtConsumer.processToClaims(jwt); // this will work 'cause no claims validation is happening due to setSkipAllValidators assertThat(claims.getClaimsMap().size(), equalTo(5)); // setSkipAllDefaultValidators makes more sense than setSkipAllValidators but I started with setSkipAllValidators and don't want to change that behaviour and accidentally break someone } @Test public void roundTripWithMoreLiveDateChecks() throws Exception { OctetSequenceJsonWebKey octetSequenceJsonWebKey = OctJwkGenerator.generateJwk(256); octetSequenceJsonWebKey.setKeyId("ltc"); JsonWebKeySet jwks = new JsonWebKeySet(octetSequenceJsonWebKey); JwtClaims jwtClaims = new JwtClaims(); jwtClaims.setAudience("a"); jwtClaims.setIssuer("i"); jwtClaims.setExpirationTimeMinutesInTheFuture(2); jwtClaims.setSubject("s"); jwtClaims.setNotBeforeMinutesInThePast(2); JsonWebSignature jws = new JsonWebSignature(); jws.setPayload(jwtClaims.toJson()); jws.setKey(octetSequenceJsonWebKey.getKey()); jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); String jwt = jws.getCompactSerialization(); VerificationKeyResolver verificationKeyResolver = new JwksVerificationKeyResolver(jwks.getJsonWebKeys()); JwtConsumer jwtConsumer = new JwtConsumerBuilder() .setExpectedAudience("a") .setExpectedIssuer("i") .setExpectedSubject("s") .setRequireExpirationTime() .setVerificationKeyResolver(verificationKeyResolver) .build(); JwtClaims claims = jwtConsumer.processToClaims(jwt); assertThat(claims.getClaimsMap().size(), equalTo(5)); jwtClaims = new JwtClaims(); jwtClaims.setAudience("a"); jwtClaims.setIssuer("i"); jwtClaims.setExpirationTimeMinutesInTheFuture(-1); jwtClaims.setSubject("s"); jwtClaims.setNotBeforeMinutesInThePast(3); jws = new JsonWebSignature(); jws.setPayload(jwtClaims.toJson()); jws.setKey(octetSequenceJsonWebKey.getKey()); jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); jwt = jws.getCompactSerialization(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); jwtClaims = new JwtClaims(); jwtClaims.setAudience("a"); jwtClaims.setIssuer("i"); jwtClaims.setExpirationTimeMinutesInTheFuture(-1); jwtClaims.setSubject("s"); jws = new JsonWebSignature(); jws.setPayload(jwtClaims.toJson()); jws.setKey(octetSequenceJsonWebKey.getKey()); jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); jwt = jws.getCompactSerialization(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); jwtClaims = new JwtClaims(); jwtClaims.setAudience("a"); jwtClaims.setIssuer("i"); jwtClaims.setExpirationTimeMinutesInTheFuture(20); jwtClaims.setSubject("s"); jwtClaims.setNotBeforeMinutesInThePast(-4); jws = new JsonWebSignature(); jws.setPayload(jwtClaims.toJson()); jws.setKey(octetSequenceJsonWebKey.getKey()); jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); 
jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); jwt = jws.getCompactSerialization(); SimpleJwtConsumerTestHelp.expectProcessingFailure(jwt, jwtConsumer); jwtClaims = new JwtClaims(); jwtClaims.setAudience("a"); jwtClaims.setIssuer("i"); jwtClaims.setExpirationTimeMinutesInTheFuture(1); jwtClaims.setSubject("s"); jws = new JsonWebSignature(); jws.setPayload(jwtClaims.toJson()); jws.setKey(octetSequenceJsonWebKey.getKey()); jws.setKeyIdHeaderValue(octetSequenceJsonWebKey.getKeyId()); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); jwt = jws.getCompactSerialization(); claims = jwtConsumer.processToClaims(jwt); assertThat(claims.getClaimsMap().size(), equalTo(4)); } @Test public void someBasicAudChecks() throws InvalidJwtException { JwtClaims jwtClaims = JwtClaims.parse("{\"aud\":\"example.com\"}"); JwtConsumer jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.com").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org", "example.com", "k8HiI26Y7").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org", "nope", "nada").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"sub\":\"subject\"}"); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience(false, "example.org", "www.example.org").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience(true, "example.org", "www.example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"aud\":[\"example.com\", \"usa.org\", \"ca.ca\"]}"); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org", "some.other.junk").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("usa.org").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("ca.ca").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("ca.ca", "some.other.thing").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("noway", "ca.ca", "some.other.thing").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("usa.org", "ca.ca", "random").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("usa.org", "ca.ca").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer 
= new JwtConsumerBuilder().setExpectedAudience("usa.org", "ca.ca", "example.com").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"aud\":[\"example.com\", 47, false]}"); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"aud\":20475}"); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"aud\":{\"aud\":\"example.org\"}}"); jwtConsumer = new JwtConsumerBuilder().setExpectedAudience("example.org").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); } @Test public void someBasicIssChecks() throws InvalidJwtException { JwtClaims jwtClaims = JwtClaims.parse("{\"iss\":\"issuer.example.com\"}"); JwtConsumer jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer(null).build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer(false, null).build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer("issuer.example.com").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer(false, "issuer.example.com").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer("nope.example.com").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"sub\":\"subject\"}"); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer("issuer.example.com").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer(false, "issuer.example.com").build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer(false, null).build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"iss\":[\"issuer1\", \"other.one\", \"meh\"]}"); jwtConsumer = new JwtConsumerBuilder().setExpectedIssuer("issuer.example.com").build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"iss\":[\"issuer1\", \"nope.not\"]}"); jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); } @Test public void someBasicSubChecks() throws InvalidJwtException { JwtClaims jwtClaims = JwtClaims.parse("{\"sub\":\"brian.d.campbell\"}"); JwtConsumer jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setRequireSubject().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"name\":\"brian.d.campbell\"}"); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"sub\":724729}"); jwtConsumer = new 
JwtConsumerBuilder().setRequireSubject().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"sub\":{\"values\":[\"one\", \"2\"]}}"); jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); } @Test public void someBasicJtiChecks() throws InvalidJwtException { JwtClaims jwtClaims = JwtClaims.parse("{\"jti\":\"1Y5iLSQfNgcSGt0A4is29\"}"); JwtConsumer jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().setRequireJwtId().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"notjti\":\"lbZ_mLS6w3xBSlvW6ULmkV-uLCk\"}"); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.goodValidate(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"jti\":55581529751992}"); jwtConsumer = new JwtConsumerBuilder().setRequireJwtId().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); jwtClaims = JwtClaims.parse("{\"jti\":[\"S0w3XbslvW6ULmk0\", \"5iLSQfNgcSGt7A4is\"]}"); jwtConsumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jwtClaims, jwtConsumer); } @Test public void someBasicTimeChecks() throws InvalidJwtException, MalformedClaimException { JwtClaims jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\"}"); JwtConsumer consumer = new JwtConsumerBuilder().build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireIssuedAt().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireNotBefore().build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\", \"exp\":1430602000}"); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602000)).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602000)).setAllowedClockSkewInSeconds(10).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430601000)).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430601000)).setAllowedClockSkewInSeconds(6000).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430602002)).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602002)).setAllowedClockSkewInSeconds(1).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602002)).setAllowedClockSkewInSeconds(2).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer 
= new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602002)).setAllowedClockSkewInSeconds(3).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430602065)).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602065)).setAllowedClockSkewInSeconds(60).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602065)).setAllowedClockSkewInSeconds(120).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\", \"nbf\":1430602000}"); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430602000)).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430601999)).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430601983)).setAllowedClockSkewInSeconds(30).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setEvaluationTime(NumericDate.fromSeconds(1430601983)).setAllowedClockSkewInSeconds(3000).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\", \"nbf\":1430602000, \"iat\":1430602060, \"exp\":1430602600 }"); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setRequireNotBefore().setRequireIssuedAt().setEvaluationTime(NumericDate.fromSeconds(1430602002)).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\", \"nbf\":1430603000, \"iat\":1430602060, \"exp\":1430602600 }"); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602002)).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\", \"nbf\":1430602000, \"iat\":1430602660, \"exp\":1430602600 }"); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430602002)).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); jcs = JwtClaims.parse("{\"sub\":\"brian.d.campbell\", \"exp\":1430607201}"); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430600000)).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430600000)).setMaxFutureValidityInMinutes(90).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430600000)).setMaxFutureValidityInMinutes(120).build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); consumer = new JwtConsumerBuilder().setRequireExpirationTime().setEvaluationTime(NumericDate.fromSeconds(1430600000)).setMaxFutureValidityInMinutes(120).setAllowedClockSkewInSeconds(20).build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); } @Test public void 
someBasicChecks() throws InvalidJwtException { JwtClaims jcs = JwtClaims.parse("{\"sub\":\"subject\", \"iss\":\"issuer\", \"aud\":\"audience\"}"); JwtConsumer consumer = new JwtConsumerBuilder().setExpectedAudience("audience").setExpectedIssuer("issuer").build(); SimpleJwtConsumerTestHelp.goodValidate(jcs, consumer); consumer = new JwtConsumerBuilder() .setExpectedAudience("nope") .setExpectedIssuer("no way") .setRequireSubject() .setRequireJwtId() .build(); SimpleJwtConsumerTestHelp.expectValidationFailure(jcs, consumer); } @Test public void testNpeWithNonExtractableKeyDataHS256() throws Exception { byte[] raw = Base64Url.decode("hup76LcA9B7pqrEtqyb4EBg6XCcr9r0iOCFF1FeZiJM"); FakeHsmNonExtractableSecretKeySpec key = new FakeHsmNonExtractableSecretKeySpec(raw, "HmacSHA256"); JwtClaims claims = new JwtClaims(); claims.setExpirationTimeMinutesInTheFuture(5); claims.setSubject("subject"); claims.setIssuer("issuer"); JsonWebSignature jws = new JsonWebSignature(); jws.setPayload(claims.toJson()); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256); jws.setKey(key); String jwt = jws.getCompactSerialization(); JwtConsumerBuilder jwtConsumerBuilder = new JwtConsumerBuilder(); jwtConsumerBuilder.setAllowedClockSkewInSeconds(60); jwtConsumerBuilder.setRequireSubject(); jwtConsumerBuilder.setExpectedIssuer("issuer"); jwtConsumerBuilder.setVerificationKey(key); JwtConsumer jwtConsumer = jwtConsumerBuilder.build(); JwtClaims processedClaims = jwtConsumer.processToClaims(jwt); System.out.println(processedClaims); } @Test public void testNpeWithNonExtractableKeyDataAxxxKW() throws Exception { littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.A128KW, ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256, "mmp7iLc1cB7cQrEtqyb9c1"); littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.A192KW, ContentEncryptionAlgorithmIdentifiers.AES_192_CBC_HMAC_SHA_384, "X--mSrs-JGaf0ulQQFSoJGH0vjrfe_c1"); littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.A256KW, ContentEncryptionAlgorithmIdentifiers.AES_256_CBC_HMAC_SHA_512, "j-DJVQ9ftUV-muUT_-yjP6dB9kuypGeT6lEGpCKOi-c"); } // @Test direct doesn't currently work w/ non extractable keys and will require some deeper changes to treat the CEK as a key rather than bytes public void testNpeWithNonExtractableKeyDataDirect() throws Exception { littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.DIRECT, ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256, "j-DJVQ9ftUV-muUT_-yjP6dB9kuypGeT6lEGpCKOi-c"); littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.DIRECT, ContentEncryptionAlgorithmIdentifiers.AES_192_CBC_HMAC_SHA_384, "X--mSrs-JGaf0ulQQFSoJGH0vjrfe_c1X--mSrs-JGaf0ulQQFSoJGH0vjrfe_c1"); littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.DIRECT, ContentEncryptionAlgorithmIdentifiers.AES_256_CBC_HMAC_SHA_512, "j-DJVQ9ftUV-muUT_-yjP6dB9kuypGeT6lEGpCKOi-cj-DJVQ9ftUV-muUT_-yjP6dB9kuypGeT6lEGpCKOi-c"); JceProviderTestSupport jceProviderTestSupport = new JceProviderTestSupport(); jceProviderTestSupport.setEncryptionAlgsNeeded(AES_128_GCM, AES_192_GCM, AES_256_GCM); jceProviderTestSupport.runWithBouncyCastleProviderIfNeeded( new JceProviderTestSupport.RunnableTest() { @Override public void runTest() throws Exception { littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.DIRECT, AES_128_GCM, "mmp7iLc1cB7cQrEtqyb9c1"); littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.DIRECT, AES_192_GCM, "X--mSrs-JGaf0ulQQFSoJGH0vjrfe_c1"); littleJweRoundTrip(KeyManagementAlgorithmIdentifiers.DIRECT, AES_256_GCM, 
"j-DJVQ9ftUV-muUT_-yjP6dB9kuypGeT6lEGpCKOi-c"); } } ); } private void littleJweRoundTrip(String alg, String enc, String b64uKey) throws Exception { byte[] raw = Base64Url.decode(b64uKey); Key key = new FakeHsmNonExtractableSecretKeySpec(raw, "AES"); JwtClaims claims = new JwtClaims(); claims.setExpirationTimeMinutesInTheFuture(5); claims.setSubject("subject"); claims.setIssuer("issuer"); JsonWebEncryption jwe = new JsonWebEncryption(); jwe.setPayload(claims.toJson()); jwe.setAlgorithmHeaderValue(alg); jwe.setEncryptionMethodHeaderParameter(enc); jwe.setKey(key); String jwt = jwe.getCompactSerialization(); JwtConsumerBuilder jwtConsumerBuilder = new JwtConsumerBuilder(); jwtConsumerBuilder.setAllowedClockSkewInSeconds(60); jwtConsumerBuilder.setRequireSubject(); jwtConsumerBuilder.setExpectedIssuer("issuer"); jwtConsumerBuilder.setDecryptionKey(key); jwtConsumerBuilder.setDisableRequireSignature(); JwtConsumer jwtConsumer = jwtConsumerBuilder.build(); JwtClaims processedClaims = jwtConsumer.processToClaims(jwt); Assert.assertThat(processedClaims.getSubject(), equalTo("subject")); } @Test public void testNpeWithNonExtractableKeyDataAxxxGCMKW() throws Exception { JceProviderTestSupport jceProviderTestSupport = new JceProviderTestSupport(); jceProviderTestSupport.setKeyManagementAlgsNeeded(A128GCMKW, A192GCMKW, A256GCMKW); jceProviderTestSupport.setEncryptionAlgsNeeded(AES_128_GCM, AES_192_GCM, AES_256_GCM); jceProviderTestSupport.runWithBouncyCastleProviderIfNeeded( new JceProviderTestSupport.RunnableTest() { @Override public void runTest() throws Exception { littleJweRoundTrip(A128GCMKW, AES_128_GCM, "mmp7iLc1cB7cQrEtqyb9c1"); littleJweRoundTrip(A192GCMKW, AES_192_GCM, "X--mSrs-JGaf0ulQQFSoJGH0vjrfe_c1"); littleJweRoundTrip(A256GCMKW, AES_256_GCM, "j-DJVQ9ftUV-muUT2-yjP6dB9kuypGeT6lEGpCKOi-c"); } } ); } @Test public void customizationCallbacksWithCritHeaders() throws Exception { JwtClaims claims = new JwtClaims(); claims.setSubject("me"); claims.setAudience("a"); claims.setIssuer("i"); claims.setExpirationTimeMinutesInTheFuture(10); JsonWebSignature jws = new JsonWebSignature(); jws.setKey(ExampleEcKeysFromJws.PRIVATE_256); jws.setPayload(claims.toJson()); jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.ECDSA_USING_P256_CURVE_AND_SHA256); jws.setCriticalHeaderNames("fake.meh"); JsonWebEncryption jwe = new JsonWebEncryption(); jwe.setPayload(jws.getCompactSerialization()); jwe.setAlgorithmHeaderValue(KeyManagementAlgorithmIdentifiers.RSA_OAEP); jwe.setEncryptionMethodHeaderParameter(ContentEncryptionAlgorithmIdentifiers.AES_128_CBC_HMAC_SHA_256); jwe.setKey(ExampleRsaKeyFromJws.PUBLIC_KEY); jwe.setContentTypeHeaderValue("jwt"); jwe.setCriticalHeaderNames("fake.blah"); System.out.println(claims); String nestedJwt = jwe.getCompactSerialization(); System.out.println(nestedJwt); JwtConsumer consumer = new JwtConsumerBuilder() .setDecryptionKey(ExampleRsaKeyFromJws.PRIVATE_KEY) .setVerificationKey(ExampleEcKeysFromJws.PUBLIC_256) .setExpectedAudience("a") .setRequireExpirationTime() .build(); SimpleJwtConsumerTestHelp.expectProcessingFailure(nestedJwt, consumer); consumer = new JwtConsumerBuilder() .setDecryptionKey(ExampleRsaKeyFromJws.PRIVATE_KEY) .setVerificationKey(ExampleEcKeysFromJws.PUBLIC_256) .setExpectedAudience("a") .setRequireExpirationTime() .setJwsCustomizer(new JwsCustomizer() { @Override public void customize(JsonWebSignature jws, List<JsonWebStructure> nestingContext) { jws.setKnownCriticalHeaders("fake.meh"); } }) .setJweCustomizer(new JweCustomizer() { @Override public void 
customize(JsonWebEncryption jwe, List<JsonWebStructure> nestingContext) { jwe.setKnownCriticalHeaders("fake.blah"); } }) .build(); JwtContext ctx = consumer.process(nestedJwt); assertThat(ctx.getJoseObjects().size(), equalTo(2)); assertThat(ctx.getJwtClaims().getSubject(), equalTo("me")); assertThat(ctx.getJwt(), equalTo(nestedJwt)); } }
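The tests above repeatedly use a two-pass pattern: a deliberately lenient first-pass JwtConsumer parses the JWT with all validation and signature checks skipped, and a second, strict consumer then re-validates the already-parsed JwtContext via processContext. Below is a minimal, self-contained sketch of that pattern, distilled from the API calls the tests make; the claim values and the HMAC round trip are illustrative, not taken from the test class.

import org.jose4j.jwk.OctJwkGenerator;
import org.jose4j.jwk.OctetSequenceJsonWebKey;
import org.jose4j.jws.AlgorithmIdentifiers;
import org.jose4j.jws.JsonWebSignature;
import org.jose4j.jwt.JwtClaims;
import org.jose4j.jwt.consumer.JwtConsumer;
import org.jose4j.jwt.consumer.JwtConsumerBuilder;
import org.jose4j.jwt.consumer.JwtContext;

public class TwoPassJwtSketch {
    public static void main(String[] args) throws Exception {
        // Produce a signed JWT, much as roundTripWithMoreLiveDateChecks does.
        OctetSequenceJsonWebKey key = OctJwkGenerator.generateJwk(256);
        JwtClaims claims = new JwtClaims();
        claims.setIssuer("usa");
        claims.setAudience("canada");
        claims.setExpirationTimeMinutesInTheFuture(5);
        claims.setClaim("message", "eh");
        JsonWebSignature jws = new JsonWebSignature();
        jws.setPayload(claims.toJson());
        jws.setKey(key.getKey());
        jws.setAlgorithmHeaderValue(AlgorithmIdentifiers.HMAC_SHA256);
        String jwt = jws.getCompactSerialization();

        // First pass: parse only; no signature verification, no claims validation.
        JwtConsumer firstPass = new JwtConsumerBuilder()
                .setSkipAllValidators()
                .setDisableRequireSignature()
                .setSkipSignatureVerification()
                .build();
        JwtContext jwtContext = firstPass.process(jwt);

        // Second pass: full verification, reusing the parsed context.
        JwtConsumer consumer = new JwtConsumerBuilder()
                .setExpectedIssuer("usa")
                .setExpectedAudience("canada")
                .setRequireExpirationTime()
                .setVerificationKey(key.getKey())
                .build();
        consumer.processContext(jwtContext);
        System.out.println(jwtContext.getJwtClaims().getStringClaimValue("message")); // eh
    }
}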
/* * Copyright 2002-2011 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.test.util; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.springframework.test.util.ReflectionTestUtils.getField; import static org.springframework.test.util.ReflectionTestUtils.invokeGetterMethod; import static org.springframework.test.util.ReflectionTestUtils.invokeMethod; import static org.springframework.test.util.ReflectionTestUtils.invokeSetterMethod; import static org.springframework.test.util.ReflectionTestUtils.setField; import org.junit.Ignore; import org.junit.Test; import org.springframework.test.AssertThrows; import org.springframework.test.util.subpackage.Component; import org.springframework.test.util.subpackage.Person; /** * Unit tests for {@link ReflectionTestUtils}. * * @author Sam Brannen * @author Juergen Hoeller */ @SuppressWarnings("deprecation") public class ReflectionTestUtilsTests { private static final Float PI = new Float((float) 22 / 7); private final Person person = new Person(); private final Component component = new Component(); @Test public void setAndGetFields() throws Exception { // --------------------------------------------------------------------- // Standard setField(person, "id", new Long(99), long.class); setField(person, "name", "Tom"); setField(person, "age", new Integer(42)); setField(person, "eyeColor", "blue", String.class); setField(person, "likesPets", Boolean.TRUE); setField(person, "favoriteNumber", PI, Number.class); assertEquals("ID (private field in a superclass)", 99, person.getId()); assertEquals("name (protected field)", "Tom", person.getName()); assertEquals("age (private field)", 42, person.getAge()); assertEquals("eye color (package private field)", "blue", person.getEyeColor()); assertEquals("'likes pets' flag (package private boolean field)", true, person.likesPets()); assertEquals("'favorite number' (package field)", PI, person.getFavoriteNumber()); assertEquals(new Long(99), getField(person, "id")); assertEquals("Tom", getField(person, "name")); assertEquals(new Integer(42), getField(person, "age")); assertEquals("blue", getField(person, "eyeColor")); assertEquals(Boolean.TRUE, getField(person, "likesPets")); assertEquals(PI, getField(person, "favoriteNumber")); // --------------------------------------------------------------------- // Null - non-primitives setField(person, "name", null, String.class); setField(person, "eyeColor", null, String.class); setField(person, "favoriteNumber", null, Number.class); assertNull("name (protected field)", person.getName()); assertNull("eye color (package private field)", person.getEyeColor()); assertNull("'favorite number' (package field)", person.getFavoriteNumber()); // --------------------------------------------------------------------- // Null - primitives new AssertThrows(IllegalArgumentException.class, "Calling setField() with NULL for a primitive type should throw an IllegalArgumentException.") { public 
void test() throws Exception { setField(person, "id", null, long.class); } }.runTest(); new AssertThrows(IllegalArgumentException.class, "Calling setField() with NULL for a primitive type should throw an IllegalArgumentException.") { public void test() throws Exception { setField(person, "age", null, int.class); } }.runTest(); new AssertThrows(IllegalArgumentException.class, "Calling setField() with NULL for a primitive type should throw an IllegalArgumentException.") { public void test() throws Exception { setField(person, "likesPets", null, boolean.class); } }.runTest(); } @Test public void invokeSetterAndMethods() throws Exception { // --------------------------------------------------------------------- // Standard - properties invokeSetterMethod(person, "id", new Long(99), long.class); invokeSetterMethod(person, "name", "Tom"); invokeSetterMethod(person, "age", new Integer(42)); invokeSetterMethod(person, "eyeColor", "blue", String.class); invokeSetterMethod(person, "likesPets", Boolean.TRUE); invokeSetterMethod(person, "favoriteNumber", PI, Number.class); assertEquals("ID (protected method in a superclass)", 99, person.getId()); assertEquals("name (private method)", "Tom", person.getName()); assertEquals("age (protected method)", 42, person.getAge()); assertEquals("eye color (package private method)", "blue", person.getEyeColor()); assertEquals("'likes pets' flag (protected method for a boolean)", true, person.likesPets()); assertEquals("'favorite number' (protected method for a Number)", PI, person.getFavoriteNumber()); assertEquals(new Long(99), invokeGetterMethod(person, "id")); assertEquals("Tom", invokeGetterMethod(person, "name")); assertEquals(new Integer(42), invokeGetterMethod(person, "age")); assertEquals("blue", invokeGetterMethod(person, "eyeColor")); assertEquals(Boolean.TRUE, invokeGetterMethod(person, "likesPets")); assertEquals(PI, invokeGetterMethod(person, "favoriteNumber")); // --------------------------------------------------------------------- // Standard - setter methods invokeSetterMethod(person, "setId", new Long(1), long.class); invokeSetterMethod(person, "setName", "Jerry", String.class); invokeSetterMethod(person, "setAge", new Integer(33), int.class); invokeSetterMethod(person, "setEyeColor", "green", String.class); invokeSetterMethod(person, "setLikesPets", Boolean.FALSE, boolean.class); invokeSetterMethod(person, "setFavoriteNumber", new Integer(42), Number.class); assertEquals("ID (protected method in a superclass)", 1, person.getId()); assertEquals("name (private method)", "Jerry", person.getName()); assertEquals("age (protected method)", 33, person.getAge()); assertEquals("eye color (package private method)", "green", person.getEyeColor()); assertEquals("'likes pets' flag (protected method for a boolean)", false, person.likesPets()); assertEquals("'favorite number' (protected method for a Number)", new Integer(42), person.getFavoriteNumber()); assertEquals(new Long(1), invokeGetterMethod(person, "getId")); assertEquals("Jerry", invokeGetterMethod(person, "getName")); assertEquals(new Integer(33), invokeGetterMethod(person, "getAge")); assertEquals("green", invokeGetterMethod(person, "getEyeColor")); assertEquals(Boolean.FALSE, invokeGetterMethod(person, "likesPets")); assertEquals(new Integer(42), invokeGetterMethod(person, "getFavoriteNumber")); // --------------------------------------------------------------------- // Null - non-primitives invokeSetterMethod(person, "name", null, String.class); invokeSetterMethod(person, "eyeColor", null, 
String.class); invokeSetterMethod(person, "favoriteNumber", null, Number.class); assertNull("name (private method)", person.getName()); assertNull("eye color (package private method)", person.getEyeColor()); assertNull("'favorite number' (protected method for a Number)", person.getFavoriteNumber()); // --------------------------------------------------------------------- // Null - primitives new AssertThrows(IllegalArgumentException.class, "Calling invokeSetterMethod() with NULL for a primitive type should throw an IllegalArgumentException.") { public void test() throws Exception { invokeSetterMethod(person, "id", null, long.class); } }.runTest(); new AssertThrows(IllegalArgumentException.class, "Calling invokeSetterMethod() with NULL for a primitive type should throw an IllegalArgumentException.") { public void test() throws Exception { invokeSetterMethod(person, "age", null, int.class); } }.runTest(); new AssertThrows(IllegalArgumentException.class, "Calling invokeSetterMethod() with NULL for a primitive type should throw an IllegalArgumentException.") { public void test() throws Exception { invokeSetterMethod(person, "likesPets", null, boolean.class); } }.runTest(); } @Test public void invokeMethodWithAutoboxingAndUnboxing() { int difference = invokeMethod(component, "subtract", 5, 2); assertEquals("subtract(5, 2)", 3, difference); } @Ignore("[SPR-8644] findMethod() does not currently support var-args") @Test public void invokeMethodWithPrimitiveVarArgs() { int sum = invokeMethod(component, "add", 1, 2, 3, 4); assertEquals("add(1,2,3,4)", 10, sum); } @Test public void invokeMethodWithPrimitiveVarArgsAsSingleArgument() { int sum = invokeMethod(component, "add", new int[] { 1, 2, 3, 4 }); assertEquals("add(1,2,3,4)", 10, sum); } @Test public void invokeMethodsSimulatingLifecycleEvents() { assertNull("number", component.getNumber()); assertNull("text", component.getText()); // Simulate autowiring a configuration method invokeMethod(component, "configure", new Integer(42), "enigma"); assertEquals("number should have been configured", new Integer(42), component.getNumber()); assertEquals("text should have been configured", "enigma", component.getText()); // Simulate @PostConstruct life-cycle event invokeMethod(component, "init"); // assertions in init() should succeed // Simulate @PreDestroy life-cycle event invokeMethod(component, "destroy"); assertNull("number", component.getNumber()); assertNull("text", component.getText()); } @Test(expected = IllegalStateException.class) public void invokeMethodWithIncompatibleArgumentTypes() { invokeMethod(component, "subtract", "foo", 2.0); } @Test(expected = IllegalStateException.class) public void invokeInitMethodBeforeAutowiring() { invokeMethod(component, "init"); } @Test(expected = IllegalStateException.class) public void invokeMethodWithTooFewArguments() { invokeMethod(component, "configure", new Integer(42)); } @Test(expected = IllegalStateException.class) public void invokeMethodWithTooManyArguments() { invokeMethod(component, "configure", new Integer(42), "enigma", "baz", "quux"); } }
/* * Copyright (C) 2017 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * The MovieDB App was created by M a r c F a r s s a c */ package com.mfarssac.moviedb.mvvm.detail; import android.arch.lifecycle.ViewModel; import android.arch.lifecycle.ViewModelProvider; import com.mfarssac.moviedb.repository.MoviesRepository; import com.mfarssac.moviedb.repository.room.MovieEntry; /** * Factory method that allows us to create a ViewModel with a constructor that takes a * {@link MoviesRepository} and an ID for the current {@link MovieEntry} */ public class DetailViewModelFactory extends ViewModelProvider.NewInstanceFactory { private final MoviesRepository mRepository; private final int mId; public DetailViewModelFactory(MoviesRepository repository, int id) { this.mRepository = repository; this.mId = id; } @Override public <T extends ViewModel> T create(Class<T> modelClass) { //noinspection unchecked return (T) new DetailActivityViewModel(mRepository, mId); } }
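A factory like this exists so constructor arguments can reach the ViewModel. The following is a hedged usage sketch, not part of the project: it shows how such a factory is typically handed to ViewModelProviders in an Activity of this support-library era. How the repository and movie id are obtained here is assumed; obtainRepository() is a hypothetical helper.

package com.mfarssac.moviedb.mvvm.detail;

import android.arch.lifecycle.ViewModelProviders;
import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

import com.mfarssac.moviedb.repository.MoviesRepository;

public class DetailActivitySketch extends AppCompatActivity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Assumptions for illustration: the repository comes from the app's
        // own wiring, and the movie id from the launching Intent.
        MoviesRepository repository = obtainRepository(); // hypothetical helper
        int movieId = getIntent().getIntExtra("movie_id", -1);

        DetailViewModelFactory factory = new DetailViewModelFactory(repository, movieId);
        DetailActivityViewModel viewModel =
                ViewModelProviders.of(this, factory).get(DetailActivityViewModel.class);
    }

    private MoviesRepository obtainRepository() {
        return null; // placeholder; a real app would return its repository singleton
    }
}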
/* * ============================================================================= * * Copyright (c) 2011-2014, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package org.thymeleaf.standard.expression; import java.math.BigDecimal; import java.math.BigInteger; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.thymeleaf.TemplateEngine; import org.thymeleaf.context.IProcessingContext; import org.thymeleaf.util.StringUtils; /** * * @author Daniel Fern&aacute;ndez * * @since 1.1 * */ public final class NumberTokenExpression extends Token { private static final Logger logger = LoggerFactory.getLogger(NumberTokenExpression.class); private static final long serialVersionUID = -3729844055243242571L; public static final char DECIMAL_POINT = '.'; static Number computeValue(final String value) { final BigDecimal bigDecimalValue = new BigDecimal(value); if (bigDecimalValue.scale() > 0) { return bigDecimalValue; } return bigDecimalValue.toBigInteger(); } public NumberTokenExpression(final String value) { super(computeValue(value)); } @Override public String getStringRepresentation() { final Object value = getValue(); if (value instanceof BigDecimal) { return ((BigDecimal)getValue()).toPlainString(); } return value.toString(); } static NumberTokenExpression parseNumberToken(final String input) { if (StringUtils.isEmptyOrWhitespace(input)) { return null; } boolean decimalFound = false; final int inputLen = input.length(); for (int i = 0; i < inputLen; i++) { final char c = input.charAt(i); if (Character.isDigit(c)) { continue; } else if (c == DECIMAL_POINT) { if (decimalFound) { return null; } decimalFound = true; continue; } else { return null; } } try { return new NumberTokenExpression(input); } catch (final NumberFormatException e) { // It seems after all it wasn't valid as a number return null; } } static Object executeNumberToken( @SuppressWarnings("unused") final IProcessingContext processingContext, final NumberTokenExpression expression, @SuppressWarnings("unused") final StandardExpressionExecutionContext expContext) { if (logger.isTraceEnabled()) { logger.trace("[THYMELEAF][{}] Evaluating number token: \"{}\"", TemplateEngine.threadIndex(), expression.getStringRepresentation()); } return expression.getValue(); } }
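computeValue keys off BigDecimal.scale(): tokens with digits after the decimal point stay BigDecimal, while whole-number tokens collapse to BigInteger. A small sketch of that rule in isolation (plain java.math, independent of Thymeleaf); note that "3.0" has scale 1 and therefore remains a BigDecimal even though it is numerically whole.

import java.math.BigDecimal;

public class ScaleRuleSketch {
    public static void main(String[] args) {
        // Mirrors the branch condition in NumberTokenExpression.computeValue.
        System.out.println(new BigDecimal("3.14").scale()); // 2 -> kept as BigDecimal
        System.out.println(new BigDecimal("3.0").scale());  // 1 -> kept as BigDecimal
        System.out.println(new BigDecimal("42").scale());   // 0 -> becomes BigInteger
        System.out.println(new BigDecimal("42").toBigInteger()); // 42
    }
}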
package com.commerce.mall.model;

import io.swagger.annotations.ApiModelProperty;

import java.io.Serializable;

public class CmsSubjectCategory implements Serializable {
    private Long id;

    private String name;

    @ApiModelProperty(value = "Category icon")
    private String icon;

    @ApiModelProperty(value = "Number of subjects")
    private Integer subjectCount;

    private Integer showStatus;

    private Integer sort;

    private static final long serialVersionUID = 1L;

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public String getIcon() {
        return icon;
    }

    public void setIcon(String icon) {
        this.icon = icon;
    }

    public Integer getSubjectCount() {
        return subjectCount;
    }

    public void setSubjectCount(Integer subjectCount) {
        this.subjectCount = subjectCount;
    }

    public Integer getShowStatus() {
        return showStatus;
    }

    public void setShowStatus(Integer showStatus) {
        this.showStatus = showStatus;
    }

    public Integer getSort() {
        return sort;
    }

    public void setSort(Integer sort) {
        this.sort = sort;
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append(getClass().getSimpleName());
        sb.append(" [");
        sb.append("Hash = ").append(hashCode());
        sb.append(", id=").append(id);
        sb.append(", name=").append(name);
        sb.append(", icon=").append(icon);
        sb.append(", subjectCount=").append(subjectCount);
        sb.append(", showStatus=").append(showStatus);
        sb.append(", sort=").append(sort);
        sb.append(", serialVersionUID=").append(serialVersionUID);
        sb.append("]");
        return sb.toString();
    }
}
package evidence;

import java.util.Scanner;

public class Email {
    public static void main(String[] args) {
        Scanner sc = new Scanner(System.in);
        System.out.println("Enter Email: ");
        String email = sc.nextLine();
        int at = email.indexOf("@");
        // Use the last dot so addresses like "first.last@example.com" are not
        // rejected because of a dot in the local part.
        int dot = email.lastIndexOf(".");
        // Valid when "@" is not the first character, a dot follows "@" with at
        // least one character between them, and the dot is not the last character.
        if (at > 0 && dot > at + 1 && dot < email.length() - 1) {
            System.out.println("Valid");
        } else {
            System.out.println("Not valid");
        }
    }
}
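The check above is deliberately minimal. As an alternative, here is a hedged sketch using java.util.regex; the pattern is illustrative only and nowhere near a full RFC 5322 validator.

import java.util.regex.Pattern;

public class EmailRegexSketch {
    // Illustrative pattern: one "@", a non-empty local part, and a domain
    // containing at least one dot.
    private static final Pattern SIMPLE_EMAIL =
            Pattern.compile("^[^@\\s]+@[^@\\s.]+(\\.[^@\\s.]+)+$");

    public static void main(String[] args) {
        System.out.println(SIMPLE_EMAIL.matcher("first.last@example.com").matches()); // true
        System.out.println(SIMPLE_EMAIL.matcher("no-at-sign.example.com").matches()); // false
    }
}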
package org.simpleflatmapper.jdbc.test; import org.junit.Test; import org.simpleflatmapper.jdbc.JdbcMapperBuilder; import org.simpleflatmapper.test.beans.DbBoxedPrimitiveObject; import org.simpleflatmapper.test.beans.DbFinalPrimitiveObject; import org.simpleflatmapper.test.beans.DbPrimitiveObjectWithSetter; import org.simpleflatmapper.test.beans.PrimitiveObject; import org.simpleflatmapper.map.SourceMapper; import java.sql.ResultSet; import java.sql.SQLException; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNull; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class JdbcMapperPrimitiveTest { @Test public void testIndexedPrimitivesWithSetterAccess() throws Exception { JdbcMapperBuilder<DbPrimitiveObjectWithSetter> builder = JdbcMapperFactoryHelper.asm().newBuilder(DbPrimitiveObjectWithSetter.class); testIndexedPrimitives(builder); } @Test public void testIndexedPrimitivesWithConstructorAccess() throws Exception { JdbcMapperBuilder<DbFinalPrimitiveObject> builder = JdbcMapperFactoryHelper.asm().newBuilder(DbFinalPrimitiveObject.class); testIndexedPrimitives(builder); } @Test public void testIndexedPrimitivesWithSetterAccessNoAsm() throws Exception { JdbcMapperBuilder<DbPrimitiveObjectWithSetter> builder = JdbcMapperFactoryHelper.noAsm().newBuilder(DbPrimitiveObjectWithSetter.class); testIndexedPrimitives(builder); } @Test public void testIndexedBoxedPrimitivesWithFieldAccess() throws Exception { JdbcMapperBuilder<DbBoxedPrimitiveObject> builder = JdbcMapperFactoryHelper.asm().newBuilder(DbBoxedPrimitiveObject.class); testIndexedPrimitives(builder); } @Test public void testIndexedPrimitivesWithFieldAccessNullValues() throws Exception { JdbcMapperBuilder<DbBoxedPrimitiveObject> builder = JdbcMapperFactoryHelper.asm().newBuilder(DbBoxedPrimitiveObject.class); testIndexedPrimitivesWithNull(builder); } private <T extends PrimitiveObject> void testIndexedPrimitives(JdbcMapperBuilder<T> builder) throws SQLException, Exception { addIndexedColumn(builder); testPrimitives(builder.mapper()); } private void testIndexedPrimitivesWithNull(JdbcMapperBuilder<DbBoxedPrimitiveObject> builder) throws SQLException, Exception { addIndexedColumn(builder); testPrimitivesWithNullValues(builder.mapper()); } private <T extends PrimitiveObject> void addIndexedColumn( JdbcMapperBuilder<T> builder) { builder .addMapping("pBoolean", 1) .addMapping("pByte", 2) .addMapping("pCharacter", 3) .addMapping("pShort", 4) .addMapping("pInt", 5) .addMapping("pLong", 6) .addMapping("pFloat", 7) .addMapping("pDouble", 8); } public <T extends PrimitiveObject> void testPrimitives(SourceMapper<ResultSet, T> mapper) throws SQLException, Exception { ResultSet rs = mock(ResultSet.class); when(rs.getBoolean(1)).thenReturn(true); when(rs.getByte(2)).thenReturn((byte)0xa3); when(rs.getInt(3)).thenReturn(0xa4); when(rs.getShort(4)).thenReturn((short)0xa5); when(rs.getInt(5)).thenReturn(0xa6); when(rs.getLong(6)).thenReturn(0xffa4l); when(rs.getFloat(7)).thenReturn(3.14f); when(rs.getDouble(8)).thenReturn(3.14159); T object = mapper.map(rs, null); assertEquals(true, object.ispBoolean()); assertEquals((byte)0xa3, object.getpByte()); assertEquals((char)0xa4, object.getpCharacter()); assertEquals((short)0xa5, object.getpShort()); assertEquals((int)0xa6, object.getpInt()); assertEquals((long)0xffa4l, object.getpLong()); assertEquals((float)3.14f, object.getpFloat(), 0); assertEquals((double)3.14159, object.getpDouble(), 0); } public void testPrimitivesWithNullValues( 
SourceMapper<ResultSet, DbBoxedPrimitiveObject> mapper) throws SQLException, Exception { ResultSet rs = mock(ResultSet.class); when(rs.wasNull()).thenReturn(true); when(rs.getBoolean(1)).thenReturn(false); when(rs.getByte(2)).thenReturn((byte)0); when(rs.getInt(3)).thenReturn(0); when(rs.getShort(4)).thenReturn((short)0); when(rs.getInt(5)).thenReturn(0); when(rs.getLong(6)).thenReturn(0l); when(rs.getFloat(7)).thenReturn(0f); when(rs.getDouble(8)).thenReturn(0d); DbBoxedPrimitiveObject object = mapper.map(rs, null); assertNull(object.getoBoolean()); assertNull(object.getoByte()); assertNull(object.getoCharacter()); assertNull(object.getoShort()); assertNull(object.getoInt()); assertNull(object.getoLong()); assertNull(object.getoFloat()); assertNull(object.getoDouble()); } }
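// Context note on the null-value test above (an assumption drawn from the
// assertions, not from simpleflatmapper documentation): when ResultSet.wasNull()
// reports true, the built SourceMapper maps boxed properties to null rather than
// to the primitive default. A minimal sketch of the builder usage these tests
// rely on:
//
//   JdbcMapperBuilder<DbBoxedPrimitiveObject> builder =
//       JdbcMapperFactoryHelper.asm().newBuilder(DbBoxedPrimitiveObject.class);
//   builder.addMapping("pBoolean", 1);                        // property -> column index
//   SourceMapper<ResultSet, DbBoxedPrimitiveObject> mapper = builder.mapper();
//   DbBoxedPrimitiveObject mapped = mapper.map(resultSet, null);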
package MVC.View.Events;

import java.io.File;

public class LoadFileEvent {

    private File filePath;

    // The source parameter mirrors the usual (source, payload) event-constructor
    // shape but is not currently stored or used by this event.
    public LoadFileEvent(Object source, File file) {
        this.filePath = file;
    }

    public File getFilePath() {
        return filePath;
    }

    public void setFilePath(File filePath) {
        this.filePath = filePath;
    }
}
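// Hypothetical usage sketch (the dispatcher that would deliver this event is
// assumed and not shown in this file):
//
//   LoadFileEvent event = new LoadFileEvent(this, new File("data/input.csv"));
//   File toLoad = event.getFilePath();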
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.table.module.hive; import org.apache.flink.configuration.Configuration; import org.apache.flink.table.factories.FactoryUtil; import org.apache.flink.table.module.Module; import org.junit.Test; import java.util.Collections; import static org.assertj.core.api.Assertions.assertThat; /** Test for {@link HiveModuleFactory}. */ public class HiveModuleFactoryTest { @Test public void test() { final HiveModule expected = new HiveModule(); final Module actualModule = FactoryUtil.createModule( HiveModuleFactory.IDENTIFIER, Collections.emptyMap(), new Configuration(), Thread.currentThread().getContextClassLoader()); checkEquals(expected, (HiveModule) actualModule); } private static void checkEquals(HiveModule m1, HiveModule m2) { assertThat(m2.getHiveVersion()).isEqualTo(m1.getHiveVersion()); } }
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2006, Red Hat Middleware LLC, and individual contributors
 * as indicated by the @author tags. See the copyright.txt file in the
 * distribution for a full listing of individual contributors.
 *
 * This is free software; you can redistribute it and/or modify it
 * under the terms of the GNU Lesser General Public License as
 * published by the Free Software Foundation; either version 2.1 of
 * the License, or (at your option) any later version.
 *
 * This software is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this software; if not, write to the Free
 * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
 * 02110-1301 USA, or see the FSF site: http://www.fsf.org.
 */
package org.jboss.test.foedeployer.ejb.o2mb;

/**
 * Remote interface for O2MBManager.
 */
public interface O2MBManager extends javax.ejb.EJBObject
{
   /**
    * Creates a company
    */
   public void createCompany( java.lang.String companyName )
      throws java.rmi.RemoteException;

   /**
    * Creates an employee
    */
   public void createEmployee( java.lang.String employeeName )
      throws java.rmi.RemoteException;

   /**
    * Returns all the employees for a company
    */
   public java.util.Collection getEmployeesForCompany( java.lang.String companyName )
      throws java.rmi.RemoteException;

   /**
    * Returns the employee's company
    */
   public java.lang.String getCompanyForEmployee( java.lang.String employeeName )
      throws java.rmi.RemoteException;

   /**
    * Creates a new employee and adds it to a company
    */
   public void createEmployeeForCompany( java.lang.String employeeName, java.lang.String companyName )
      throws java.rmi.RemoteException;

   /**
    * Sets a company for an employee
    */
   public void employ( java.lang.String employeeName, java.lang.String companyName )
      throws java.rmi.RemoteException;

   /**
    * Removes a company
    */
   public void removeCompany( java.lang.String companyName )
      throws java.rmi.RemoteException;

   /**
    * Removes a company if it exists
    */
   public void removeCompanyIfExists( java.lang.String companyName )
      throws java.rmi.RemoteException;
}
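// Hypothetical client-side sketch for the remote interface above. The JNDI name
// "O2MBManager" and the home interface O2MBManagerHome are assumptions implied
// by the EJB 2.x pattern; neither appears in this file.
//
//   Object ref = new javax.naming.InitialContext().lookup("O2MBManager");
//   O2MBManagerHome home = (O2MBManagerHome)
//       javax.rmi.PortableRemoteObject.narrow(ref, O2MBManagerHome.class);
//   O2MBManager manager = home.create();
//   manager.createCompany("Acme");
//   manager.createEmployeeForCompany("Alice", "Acme");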
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.launcher3.compat; import android.content.Context; import android.graphics.drawable.Drawable; import android.os.Build; import com.android.launcher3.Utilities; import java.util.List; public abstract class UserManagerCompat { protected UserManagerCompat() { } private static final Object sInstanceLock = new Object(); private static UserManagerCompat sInstance; public static UserManagerCompat getInstance(Context context) { synchronized (sInstanceLock) { if (sInstance == null) { if (Utilities.ATLEAST_LOLLIPOP) { sInstance = new UserManagerCompatVL(context.getApplicationContext()); } else if (Utilities.ATLEAST_JB_MR1) { sInstance = new UserManagerCompatV17(context.getApplicationContext()); } else { sInstance = new UserManagerCompatV16(); } } return sInstance; } } /** * Creates a cache for users. */ public abstract void enableAndResetCache(); public abstract List<UserHandleCompat> getUserProfiles(); public abstract long getSerialNumberForUser(UserHandleCompat user); public abstract UserHandleCompat getUserForSerialNumber(long serialNumber); public abstract Drawable getBadgedDrawableForUser(Drawable unbadged, UserHandleCompat user); public abstract CharSequence getBadgedLabelForUser(CharSequence label, UserHandleCompat user); public abstract long getUserCreationTime(UserHandleCompat user); }
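// Usage sketch for the abstraction above (the calls are the abstract methods this
// file declares; the surrounding Launcher code that normally drives them is not
// shown here, and the label text is illustrative):
//
//   UserManagerCompat um = UserManagerCompat.getInstance(context);
//   for (UserHandleCompat user : um.getUserProfiles()) {
//       long serial = um.getSerialNumberForUser(user);
//       CharSequence label = um.getBadgedLabelForUser("Work app", user);
//   }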
/* * Generated by the Jasper component of Apache Tomcat * Version: Apache Tomcat/8.0.26 * Generated at: 2020-08-17 08:56:05 UTC * Note: The last modified time of this file was set to * the last modified time of the source file after * generation to assist with modification tracking. */ package org.apache.jsp.ch08; import javax.servlet.*; import javax.servlet.http.*; import javax.servlet.jsp.*; public final class ex08_002d04From_jsp extends org.apache.jasper.runtime.HttpJspBase implements org.apache.jasper.runtime.JspSourceDependent, org.apache.jasper.runtime.JspSourceImports { private static final javax.servlet.jsp.JspFactory _jspxFactory = javax.servlet.jsp.JspFactory.getDefaultFactory(); private static java.util.Map<java.lang.String,java.lang.Long> _jspx_dependants; private static final java.util.Set<java.lang.String> _jspx_imports_packages; private static final java.util.Set<java.lang.String> _jspx_imports_classes; static { _jspx_imports_packages = new java.util.HashSet<>(); _jspx_imports_packages.add("javax.servlet"); _jspx_imports_packages.add("javax.servlet.http"); _jspx_imports_packages.add("javax.servlet.jsp"); _jspx_imports_classes = null; } private javax.el.ExpressionFactory _el_expressionfactory; private org.apache.tomcat.InstanceManager _jsp_instancemanager; public java.util.Map<java.lang.String,java.lang.Long> getDependants() { return _jspx_dependants; } public java.util.Set<java.lang.String> getPackageImports() { return _jspx_imports_packages; } public java.util.Set<java.lang.String> getClassImports() { return _jspx_imports_classes; } public void _jspInit() { _el_expressionfactory = _jspxFactory.getJspApplicationContext(getServletConfig().getServletContext()).getExpressionFactory(); _jsp_instancemanager = org.apache.jasper.runtime.InstanceManagerFactory.getInstanceManager(getServletConfig()); } public void _jspDestroy() { } public void _jspService(final javax.servlet.http.HttpServletRequest request, final javax.servlet.http.HttpServletResponse response) throws java.io.IOException, javax.servlet.ServletException { final java.lang.String _jspx_method = request.getMethod(); if (!"GET".equals(_jspx_method) && !"POST".equals(_jspx_method) && !"HEAD".equals(_jspx_method) && !javax.servlet.DispatcherType.ERROR.equals(request.getDispatcherType())) { response.sendError(HttpServletResponse.SC_METHOD_NOT_ALLOWED, "JSPs only permit GET POST or HEAD"); return; } final javax.servlet.jsp.PageContext pageContext; javax.servlet.http.HttpSession session = null; final javax.servlet.ServletContext application; final javax.servlet.ServletConfig config; javax.servlet.jsp.JspWriter out = null; final java.lang.Object page = this; javax.servlet.jsp.JspWriter _jspx_out = null; javax.servlet.jsp.PageContext _jspx_page_context = null; try { response.setContentType("text/html; charset=UTF-8"); pageContext = _jspxFactory.getPageContext(this, request, response, null, true, 8192, true); _jspx_page_context = pageContext; application = pageContext.getServletContext(); config = pageContext.getServletConfig(); session = pageContext.getSession(); out = pageContext.getOut(); _jspx_out = out; out.write('\r'); out.write('\n'); out.write(' '); out.write("\r\n"); out.write("\r\n"); out.write("\r\n"); out.write(" "); request.setCharacterEncoding("utf-8"); out.write("\r\n"); out.write(" "); String id=""; String passwd=""; id=request.getParameter("id"); passwd=request.getParameter("passwd"); if(id==null||id.equals("")){ id="test"; } if(passwd==null||passwd.equals("")){ passwd="testPass"; } out.write("\r\n"); out.write(" 
\r\n"); out.write(" ex08-04To.jsp 페이지로 포워딩합니다.<br>\r\n"); out.write(" \r\n"); out.write(" "); if (true) { _jspx_page_context.forward("ex08-04To.jsp" + "?" + org.apache.jasper.runtime.JspRuntimeLibrary.URLEncode("id", request.getCharacterEncoding())+ "=" + org.apache.jasper.runtime.JspRuntimeLibrary.URLEncode(String.valueOf(id ), request.getCharacterEncoding()) + "&" + org.apache.jasper.runtime.JspRuntimeLibrary.URLEncode("passwd", request.getCharacterEncoding())+ "=" + org.apache.jasper.runtime.JspRuntimeLibrary.URLEncode(String.valueOf(passwd ), request.getCharacterEncoding())); return; } } catch (java.lang.Throwable t) { if (!(t instanceof javax.servlet.jsp.SkipPageException)){ out = _jspx_out; if (out != null && out.getBufferSize() != 0) try { if (response.isCommitted()) { out.flush(); } else { out.clearBuffer(); } } catch (java.io.IOException e) {} if (_jspx_page_context != null) _jspx_page_context.handlePageException(t); else throw new ServletException(t); } } finally { _jspxFactory.releasePageContext(_jspx_page_context); } } }
/* * Copyright 2020 Immutables Authors and Contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.immutables.criteria.reflect; import javax.annotation.concurrent.NotThreadSafe; import java.lang.reflect.Member; import java.util.ArrayDeque; import java.util.Arrays; import java.util.Deque; import java.util.HashSet; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Queue; import java.util.Set; /** * Lazily iterate through class hierarchy */ @NotThreadSafe class LazyClassScanner implements ClassScanner { private final ImmutableBuilder builder; LazyClassScanner(ImmutableBuilder builder) { this.builder = builder; } @Override public Iterator<Member> iterator() { return new LazyMemberIterator(builder); } private static class LazyMemberIterator implements Iterator<Member> { private final Set<Class<?>> visited = new HashSet<>(); private final Deque<Class<?>> toVisit = new ArrayDeque<>(); private final Queue<Member> queue = new ArrayDeque<>(); private final Builder builder; private LazyMemberIterator(Builder builder) { this.builder = builder; visited.add(Object.class); toVisit.add(builder.type()); } @Override public boolean hasNext() { return tryNext(); } private boolean tryNext() { while (queue.isEmpty() && !toVisit.isEmpty()) { final Class<?> current = toVisit.pop(); if (!visited.add(current)) { continue; } if (builder.fields()) { queue.addAll(Arrays.asList(current.getDeclaredFields())); } if (builder.methods()) { queue.addAll(Arrays.asList(current.getDeclaredMethods())); } if (builder.constructors()) { queue.addAll(Arrays.asList(current.getDeclaredConstructors())); } if (!current.isInterface() && !visited.contains(current.getSuperclass()) && builder.superclass()) { toVisit.push(current.getSuperclass()); } if (builder.interfaces()) { toVisit.addAll(Arrays.asList(current.getInterfaces())); } } return !queue.isEmpty(); } @Override public Member next() { if (!hasNext()) { throw new NoSuchElementException("No more elements to scan for " + builder.type()); } return queue.remove(); } } }
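// Hypothetical usage sketch: the Builder/ImmutableBuilder API is inferred from
// the calls above (type(), fields(), methods(), constructors(), superclass(),
// interfaces()); how a ClassScanner instance is actually obtained may differ in
// the real library.
//
//   ClassScanner scanner = ...; // e.g. built for MyEntity.class with fields() enabled
//   for (Member member : scanner) {
//       System.out.println(member.getDeclaringClass().getSimpleName() + "#" + member.getName());
//   }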
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.aws.cw; import com.amazonaws.ClientConfiguration; import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSStaticCredentialsProvider; import com.amazonaws.auth.BasicAWSCredentials; import com.amazonaws.regions.Regions; import com.amazonaws.services.cloudwatch.AmazonCloudWatch; import com.amazonaws.services.cloudwatch.AmazonCloudWatchClientBuilder; import org.apache.camel.Component; import org.apache.camel.Consumer; import org.apache.camel.Processor; import org.apache.camel.Producer; import org.apache.camel.spi.UriEndpoint; import org.apache.camel.spi.UriParam; import org.apache.camel.support.DefaultEndpoint; import org.apache.camel.util.ObjectHelper; /** * The aws-cw component is used for sending metrics to an Amazon CloudWatch. */ @UriEndpoint(firstVersion = "2.11.0", scheme = "aws-cw", title = "AWS CloudWatch", syntax = "aws-cw:namespace", producerOnly = true, label = "cloud,monitoring") public class CwEndpoint extends DefaultEndpoint { @UriParam private CwConfiguration configuration; private AmazonCloudWatch cloudWatchClient; public CwEndpoint(String uri, Component component, CwConfiguration configuration) { super(uri, component); this.configuration = configuration; } @Override public Consumer createConsumer(Processor processor) throws Exception { throw new UnsupportedOperationException("You cannot receive messages from this endpoint"); } @Override public Producer createProducer() throws Exception { return new CwProducer(this); } @Override public void doInit() throws Exception { super.doInit(); cloudWatchClient = configuration.getAmazonCwClient() != null ? 
configuration.getAmazonCwClient() : createCloudWatchClient();
    }

    @Override
    public void doStop() throws Exception {
        // Shut down only clients created by this endpoint; a client supplied
        // through the configuration is managed by its owner.
        if (ObjectHelper.isEmpty(configuration.getAmazonCwClient())) {
            if (cloudWatchClient != null) {
                cloudWatchClient.shutdown();
            }
        }
        super.doStop();
    }

    public CwConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(CwConfiguration configuration) {
        this.configuration = configuration;
    }

    public void setCloudWatchClient(AmazonCloudWatch cloudWatchClient) {
        this.cloudWatchClient = cloudWatchClient;
    }

    public AmazonCloudWatch getCloudWatchClient() {
        return cloudWatchClient;
    }

    AmazonCloudWatch createCloudWatchClient() {
        AmazonCloudWatchClientBuilder clientBuilder;
        ClientConfiguration clientConfiguration = null;
        boolean isClientConfigFound = false;
        if (ObjectHelper.isNotEmpty(configuration.getProxyHost()) && ObjectHelper.isNotEmpty(configuration.getProxyPort())) {
            clientConfiguration = new ClientConfiguration();
            clientConfiguration.setProxyProtocol(configuration.getProxyProtocol());
            clientConfiguration.setProxyHost(configuration.getProxyHost());
            clientConfiguration.setProxyPort(configuration.getProxyPort());
            isClientConfigFound = true;
        }
        if (configuration.getAccessKey() != null && configuration.getSecretKey() != null) {
            AWSCredentials credentials = new BasicAWSCredentials(configuration.getAccessKey(), configuration.getSecretKey());
            AWSCredentialsProvider credentialsProvider = new AWSStaticCredentialsProvider(credentials);
            if (isClientConfigFound) {
                clientBuilder = AmazonCloudWatchClientBuilder.standard().withClientConfiguration(clientConfiguration).withCredentials(credentialsProvider);
            } else {
                clientBuilder = AmazonCloudWatchClientBuilder.standard().withCredentials(credentialsProvider);
            }
        } else {
            // No explicit credentials: fall back to the default provider chain,
            // attaching the proxy configuration only when one was built above.
            if (isClientConfigFound) {
                clientBuilder = AmazonCloudWatchClientBuilder.standard().withClientConfiguration(clientConfiguration);
            } else {
                clientBuilder = AmazonCloudWatchClientBuilder.standard();
            }
        }
        if (ObjectHelper.isNotEmpty(configuration.getRegion())) {
            clientBuilder = clientBuilder.withRegion(Regions.valueOf(configuration.getRegion()));
        }
        return clientBuilder.build();
    }
}
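// Minimal producer-side sketch for the endpoint above (assumption: the
// accessKey/secretKey/region URI options mirror the CwConfiguration fields used
// in createCloudWatchClient(); the credentials and namespace are placeholders):
//
//   import org.apache.camel.builder.RouteBuilder;
//
//   public class CwMetricsRoute extends RouteBuilder {
//       @Override
//       public void configure() {
//           from("direct:metrics")
//               .to("aws-cw://myNamespace?accessKey=xxx&secretKey=yyy&region=EU_WEST_1");
//       }
//   }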
package com.isaev.ee.tablemodule.application;

import org.junit.jupiter.api.MethodOrderer;
import org.junit.jupiter.api.Order;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestMethodOrder;

import static org.junit.jupiter.api.Assertions.*;

// @Order has no effect unless the class opts into ordered execution, so
// @TestMethodOrder is required for these tests to run in the declared sequence
// (the database must be initiated before data is generated into it).
@TestMethodOrder(MethodOrderer.OrderAnnotation.class)
class ApplicationTest {

    @Test
    @Order(1)
    public void shouldInitiateDatabase() {
        assertDoesNotThrow(() -> {
            Application.initiateDatabase();
        });
    }

    @Test
    @Order(2)
    public void shouldGenerateData() {
        assertDoesNotThrow(() -> {
            Application.generateData();
        });
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.parse; import com.google.common.base.Function; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Multimap; import org.antlr.runtime.ClassicToken; import org.antlr.runtime.CommonToken; import org.antlr.runtime.tree.Tree; import org.antlr.runtime.tree.TreeVisitor; import org.antlr.runtime.tree.TreeVisitorAction; import org.apache.calcite.adapter.druid.DruidQuery; import org.apache.calcite.adapter.druid.DruidSchema; import org.apache.calcite.adapter.druid.DruidTable; import org.apache.calcite.adapter.java.JavaTypeFactory; import org.apache.calcite.adapter.jdbc.JdbcConvention; import org.apache.calcite.adapter.jdbc.JdbcImplementor; import org.apache.calcite.adapter.jdbc.JdbcSchema; import org.apache.calcite.adapter.jdbc.JdbcTable; import org.apache.calcite.config.CalciteConnectionConfig; import org.apache.calcite.config.CalciteConnectionConfigImpl; import org.apache.calcite.config.CalciteConnectionProperty; import org.apache.calcite.config.NullCollation; import org.apache.calcite.interpreter.BindableConvention; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptCost; import org.apache.calcite.plan.RelOptMaterialization; import org.apache.calcite.plan.RelOptPlanner; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.plan.RelOptSchema; import org.apache.calcite.plan.RelOptUtil; import org.apache.calcite.plan.RelTraitSet; import org.apache.calcite.plan.hep.HepMatchOrder; import org.apache.calcite.plan.hep.HepPlanner; import org.apache.calcite.plan.hep.HepProgram; import org.apache.calcite.plan.hep.HepProgramBuilder; import org.apache.calcite.rel.RelCollation; import org.apache.calcite.rel.RelCollationImpl; import org.apache.calcite.rel.RelCollations; import org.apache.calcite.rel.RelFieldCollation; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.RelVisitor; import org.apache.calcite.rel.core.Aggregate; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.core.Filter; import org.apache.calcite.rel.core.JoinRelType; import org.apache.calcite.rel.core.Project; import org.apache.calcite.rel.core.SetOp; import org.apache.calcite.rel.core.TableScan; import org.apache.calcite.rel.metadata.CachingRelMetadataProvider; import org.apache.calcite.rel.metadata.ChainedRelMetadataProvider; import org.apache.calcite.rel.metadata.DefaultRelMetadataProvider; import 
org.apache.calcite.rel.metadata.JaninoRelMetadataProvider; import org.apache.calcite.rel.metadata.RelMetadataProvider; import org.apache.calcite.rel.metadata.RelMetadataQuery; import org.apache.calcite.rel.rules.FilterMergeRule; import org.apache.calcite.rel.rules.JoinToMultiJoinRule; import org.apache.calcite.rel.rules.LoptOptimizeJoinRule; import org.apache.calcite.rel.rules.ProjectMergeRule; import org.apache.calcite.rel.rules.ProjectRemoveRule; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rel.type.RelDataTypeImpl; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexExecutor; import org.apache.calcite.rex.RexFieldCollation; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexNode; import org.apache.calcite.rex.RexUtil; import org.apache.calcite.rex.RexWindowBound; import org.apache.calcite.schema.SchemaPlus; import org.apache.calcite.sql.SqlAggFunction; import org.apache.calcite.sql.SqlCall; import org.apache.calcite.sql.SqlDialect; import org.apache.calcite.sql.SqlDialectFactoryImpl; import org.apache.calcite.sql.SqlExplainLevel; import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlLiteral; import org.apache.calcite.sql.SqlNode; import org.apache.calcite.sql.SqlOperator; import org.apache.calcite.sql.SqlWindow; import org.apache.calcite.sql.dialect.HiveSqlDialect; import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.sql.type.ArraySqlType; import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.sql.validate.SqlValidatorUtil; import org.apache.calcite.tools.Frameworks; import org.apache.calcite.util.CompositeList; import org.apache.calcite.util.ImmutableBitSet; import org.apache.calcite.util.ImmutableIntList; import org.apache.calcite.util.Pair; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.ObjectPair; import org.apache.hadoop.hive.conf.Constants; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.conf.HiveConf.StrictChecks; import org.apache.hadoop.hive.metastore.Warehouse; import org.apache.hadoop.hive.metastore.api.FieldSchema; import org.apache.hadoop.hive.ql.Context; import org.apache.hadoop.hive.ql.ErrorMsg; import org.apache.hadoop.hive.ql.QueryProperties; import org.apache.hadoop.hive.ql.QueryState; import org.apache.hadoop.hive.ql.exec.ColumnInfo; import org.apache.hadoop.hive.ql.exec.Description; import org.apache.hadoop.hive.ql.exec.FunctionInfo; import org.apache.hadoop.hive.ql.exec.FunctionRegistry; import org.apache.hadoop.hive.ql.exec.Operator; import org.apache.hadoop.hive.ql.exec.OperatorFactory; import org.apache.hadoop.hive.ql.exec.RowSchema; import org.apache.hadoop.hive.ql.exec.Utilities; import org.apache.hadoop.hive.ql.lib.Node; import org.apache.hadoop.hive.ql.log.PerfLogger; import org.apache.hadoop.hive.ql.metadata.Hive; import org.apache.hadoop.hive.ql.metadata.HiveException; import org.apache.hadoop.hive.ql.metadata.NotNullConstraint; import org.apache.hadoop.hive.ql.metadata.PrimaryKeyInfo; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hadoop.hive.ql.metadata.VirtualColumn; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException.UnsupportedFeature; import 
org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSubquerySemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteViewSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveConfPlannerContext; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveDefaultRelMetadataProvider; import org.apache.hadoop.hive.ql.optimizer.calcite.HivePlannerContext; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelFactories; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRelOpMaterializationValidator; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveRexExecutorImpl; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveTypeSystemImpl; import org.apache.hadoop.hive.ql.optimizer.calcite.RelOptHiveTable; import org.apache.hadoop.hive.ql.optimizer.calcite.TraitsUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveAlgorithmsConf; import org.apache.hadoop.hive.ql.optimizer.calcite.cost.HiveVolcanoPlanner; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExcept; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveGroupingID; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveIntersect; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveJoin; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveSemiJoin; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveSortLimit; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableFunctionScan; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableScan; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveUnion; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.HiveJdbcConverter; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.jdbc.JdbcHiveTableScan; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveAggregateJoinTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveAggregateProjectMergeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveAggregatePullUpConstantsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveAggregateReduceFunctionsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveAggregateReduceRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveDruidRules; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveExceptRewriteRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveExpandDistinctAggregatesRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterAggregateTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterJoinRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterProjectTSTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterProjectTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSetOpTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveFilterSortTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveInsertExchange4JoinRule; import 
org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveIntersectMergeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveIntersectRewriteRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveJoinAddNotNullRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveJoinCommuteRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveJoinConstraintsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveJoinProjectTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveJoinPushTransitivePredicatesRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveJoinToMultiJoinRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HivePartitionPruneRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HivePointLookupOptimizerRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HivePreFilteringRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveProjectFilterPullUpConstantsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveProjectJoinTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveProjectMergeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveProjectOverIntersectRemoveRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveProjectSortTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveReduceExpressionsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveReduceExpressionsWithStatsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveRelDecorrelator; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveRelFieldTrimmer; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveRemoveGBYSemiJoinRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveRemoveSqCountCheck; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveRulesRegistry; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSemiJoinRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSortJoinReduceRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSortLimitPullUpConstantsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSortMergeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSortProjectTransposeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSortRemoveRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSortUnionReduceRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveSubQueryRemoveRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveUnionMergeRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveUnionPullUpConstantsRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.HiveWindowingFixRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCAbstractSplitFilterRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCAggregationPushDownRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCExtractJoinFilterRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCFilterJoinRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCFilterPushDownRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCJoinPushDownRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCProjectPushDownRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCSortPushDownRule; import 
org.apache.hadoop.hive.ql.optimizer.calcite.rules.jdbc.JDBCUnionPushDownRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveAggregateIncrementalRewritingRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveMaterializedViewRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.HiveNoAggregateIncrementalRewritingRule; import org.apache.hadoop.hive.ql.optimizer.calcite.rules.views.MaterializedViewRewritingRelVisitor; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTBuilder; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.ASTConverter; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.HiveOpConverter; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.JoinCondTypeCheckProcFactory; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.JoinTypeCheckCtx; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.PlanModifierForReturnPath; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.RexNodeConverter; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.SqlFunctionConverter; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderExpression; import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.OrderSpec; import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionExpression; import org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.PartitionSpec; import org.apache.hadoop.hive.ql.parse.QBExpr.Opcode; import org.apache.hadoop.hive.ql.parse.WindowingSpec.BoundarySpec; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowExpressionSpec; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowFunctionSpec; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowSpec; import org.apache.hadoop.hive.ql.parse.WindowingSpec.WindowType; import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; import org.apache.hadoop.hive.ql.plan.ExprNodeDescUtils; import org.apache.hadoop.hive.ql.plan.GroupByDesc; import org.apache.hadoop.hive.ql.plan.HiveOperation; import org.apache.hadoop.hive.ql.plan.SelectDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.Mode; import org.apache.hadoop.hive.ql.udf.generic.GenericUDFArray; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF; import org.apache.hadoop.hive.ql.udf.generic.GenericUDTFInline; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; import org.apache.hadoop.hive.serde2.objectinspector.StandardStructObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.StructField; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils; import org.joda.time.Interval; import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import 
java.lang.reflect.UndeclaredThrowableException; import java.math.BigDecimal; import java.util.AbstractMap.SimpleEntry; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collection; import java.util.Collections; import java.util.Deque; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import javax.sql.DataSource; public class CalcitePlanner extends SemanticAnalyzer { private final AtomicInteger noColsMissingStats = new AtomicInteger(0); private SemanticException semanticException; private boolean runCBO = true; private boolean disableSemJoinReordering = true; private EnumSet<ExtendedCBOProfile> profilesCBO; private static final CommonToken FROM_TOKEN = new ImmutableCommonToken(HiveParser.TOK_FROM, "TOK_FROM"); private static final CommonToken DEST_TOKEN = new ImmutableCommonToken(HiveParser.TOK_DESTINATION, "TOK_DESTINATION"); private static final CommonToken DIR_TOKEN = new ImmutableCommonToken(HiveParser.TOK_DIR, "TOK_DIR"); private static final CommonToken TMPFILE_TOKEN = new ImmutableCommonToken(HiveParser.TOK_TMP_FILE, "TOK_TMP_FILE"); private static final CommonToken SELECT_TOKEN = new ImmutableCommonToken(HiveParser.TOK_SELECT, "TOK_SELECT"); private static final CommonToken SELEXPR_TOKEN = new ImmutableCommonToken(HiveParser.TOK_SELEXPR, "TOK_SELEXPR"); private static final CommonToken TABLEORCOL_TOKEN = new ImmutableCommonToken(HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"); private static final CommonToken INSERT_TOKEN = new ImmutableCommonToken(HiveParser.TOK_INSERT, "TOK_INSERT"); private static final CommonToken QUERY_TOKEN = new ImmutableCommonToken(HiveParser.TOK_QUERY, "TOK_QUERY"); private static final CommonToken SUBQUERY_TOKEN = new ImmutableCommonToken(HiveParser.TOK_SUBQUERY, "TOK_SUBQUERY"); public CalcitePlanner(QueryState queryState) throws SemanticException { super(queryState); if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)) { runCBO = false; disableSemJoinReordering = false; } } public void resetCalciteConfiguration() { if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_ENABLED)) { runCBO = true; disableSemJoinReordering = true; } } @Override @SuppressWarnings("nls") public void analyzeInternal(ASTNode ast) throws SemanticException { if (runCBO) { super.analyzeInternal(ast, new PlannerContextFactory() { @Override public PlannerContext create() { return new PreCboCtx(); } }); } else { super.analyzeInternal(ast); } } /** * This method is useful if we want to obtain the logical plan after being parsed and * optimized by Calcite. 
* * @return the Calcite plan for the query, null if it could not be generated */ public RelNode genLogicalPlan(ASTNode ast) throws SemanticException { LOG.info("Starting generating logical plan"); PreCboCtx cboCtx = new PreCboCtx(); //change the location of position alias process here processPositionAlias(ast); if (!genResolvedParseTree(ast, cboCtx)) { return null; } ASTNode queryForCbo = ast; if (cboCtx.type == PreCboCtx.Type.CTAS || cboCtx.type == PreCboCtx.Type.VIEW) { queryForCbo = cboCtx.nodeOfInterest; // nodeOfInterest is the query } runCBO = canCBOHandleAst(queryForCbo, getQB(), cboCtx); if (!runCBO) { return null; } profilesCBO = obtainCBOProfiles(queryProperties); disableJoinMerge = true; final RelNode resPlan = logicalPlan(); LOG.info("Finished generating logical plan"); return resPlan; } public static RelOptPlanner createPlanner(HiveConf conf) { return createPlanner(conf, new HashSet<RelNode>(), new HashSet<RelNode>()); } private static RelOptPlanner createPlanner( HiveConf conf, Set<RelNode> corrScalarRexSQWithAgg, Set<RelNode> scalarAggNoGbyNoWin) { final Double maxSplitSize = (double) HiveConf.getLongVar( conf, HiveConf.ConfVars.MAPREDMAXSPLITSIZE); final Double maxMemory = (double) HiveConf.getLongVar( conf, HiveConf.ConfVars.HIVECONVERTJOINNOCONDITIONALTASKTHRESHOLD); HiveAlgorithmsConf algorithmsConf = new HiveAlgorithmsConf(maxSplitSize, maxMemory); HiveRulesRegistry registry = new HiveRulesRegistry(); Properties calciteConfigProperties = new Properties(); calciteConfigProperties.setProperty( CalciteConnectionProperty.TIME_ZONE.camelName(), conf.getLocalTimeZone().getId()); calciteConfigProperties.setProperty( CalciteConnectionProperty.MATERIALIZATIONS_ENABLED.camelName(), Boolean.FALSE.toString()); CalciteConnectionConfig calciteConfig = new CalciteConnectionConfigImpl(calciteConfigProperties); boolean isCorrelatedColumns = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_STATS_CORRELATED_MULTI_KEY_JOINS); boolean heuristicMaterializationStrategy = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REWRITING_SELECTION_STRATEGY).equals("heuristic"); HivePlannerContext confContext = new HivePlannerContext(algorithmsConf, registry, calciteConfig, corrScalarRexSQWithAgg, scalarAggNoGbyNoWin, new HiveConfPlannerContext(isCorrelatedColumns, heuristicMaterializationStrategy)); return HiveVolcanoPlanner.createPlanner(confContext); } @Override @SuppressWarnings("rawtypes") Operator genOPTree(ASTNode ast, PlannerContext plannerCtx) throws SemanticException { Operator sinkOp = null; boolean skipCalcitePlan = false; if (!runCBO) { skipCalcitePlan = true; } else { PreCboCtx cboCtx = (PreCboCtx) plannerCtx; List<ASTNode> oldHints = new ArrayList<>(); // Cache the hints before CBO runs and removes them. // Use the hints later in top level QB. getHintsFromQB(getQB(), oldHints); // Note: for now, we don't actually pass the queryForCbo to CBO, because // it accepts qb, not AST, and can also access all the private stuff in // SA. We rely on the fact that CBO ignores the unknown tokens (create // table, destination), so if the query is otherwise ok, it is as if we // did remove those and gave CBO the proper AST. That is kinda hacky. 
ASTNode queryForCbo = ast; if (cboCtx.type == PreCboCtx.Type.CTAS || cboCtx.type == PreCboCtx.Type.VIEW) { queryForCbo = cboCtx.nodeOfInterest; // nodeOfInterest is the query } runCBO = canCBOHandleAst(queryForCbo, getQB(), cboCtx); if (queryProperties.hasMultiDestQuery()) { handleMultiDestQuery(ast, cboCtx); } if (runCBO) { profilesCBO = obtainCBOProfiles(queryProperties); disableJoinMerge = true; boolean reAnalyzeAST = false; final boolean materializedView = getQB().isMaterializedView(); try { if (this.conf.getBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP)) { if (cboCtx.type == PreCboCtx.Type.VIEW && !materializedView) { throw new SemanticException("Create view is not supported in cbo return path."); } sinkOp = getOptimizedHiveOPDag(); if (oldHints.size() > 0) { LOG.debug("Propagating hints to QB: " + oldHints); getQB().getParseInfo().setHintList(oldHints); } LOG.info("CBO Succeeded; optimized logical plan."); this.ctx.setCboInfo("Plan optimized by CBO."); this.ctx.setCboSucceeded(true); } else { // 0. Gen Optimized Plan final RelNode newPlan = logicalPlan(); // 1. Convert Plan to AST ASTNode newAST = getOptimizedAST(newPlan); // 1.1. Fix up the query for insert/ctas/materialized views newAST = fixUpAfterCbo(ast, newAST, cboCtx); // 1.2. Fix up the query for materialization rebuild if (mvRebuildMode == MaterializationRebuildMode.AGGREGATE_REBUILD) { fixUpASTAggregateIncrementalRebuild(newAST); } else if (mvRebuildMode == MaterializationRebuildMode.NO_AGGREGATE_REBUILD) { fixUpASTNoAggregateIncrementalRebuild(newAST); } // 2. Regen OP plan from optimized AST if (cboCtx.type == PreCboCtx.Type.VIEW) { try { viewSelect = handleCreateViewDDL(newAST); } catch (SemanticException e) { throw new CalciteViewSemanticException(e.getMessage()); } } else if (cboCtx.type == PreCboCtx.Type.CTAS) { // CTAS init(false); setAST(newAST); newAST = reAnalyzeCTASAfterCbo(newAST); } else { // All others init(false); } if (oldHints.size() > 0) { if (getQB().getParseInfo().getHints() != null) { LOG.warn("Hints are not null in the optimized tree; " + "after CBO " + getQB().getParseInfo().getHints().dump()); } else { LOG.debug("Propagating hints to QB: " + oldHints); getQB().getParseInfo().setHintList(oldHints); } } Phase1Ctx ctx_1 = initPhase1Ctx(); if (!doPhase1(newAST, getQB(), ctx_1, null)) { throw new RuntimeException("Couldn't do phase1 on CBO optimized query plan"); } // unfortunately making prunedPartitions immutable is not possible // here with SemiJoins not all tables are costed in CBO, so their // PartitionList is not evaluated until the run phase. 
getMetaData(getQB()); disableJoinMerge = defaultJoinMerge; sinkOp = genPlan(getQB()); LOG.info("CBO Succeeded; optimized logical plan."); this.ctx.setCboInfo("Plan optimized by CBO."); this.ctx.setCboSucceeded(true); if (this.ctx.isExplainPlan()) { ExplainConfiguration explainConfig = this.ctx.getExplainConfig(); if (explainConfig.isCbo()) { if (explainConfig.isCboExtended()) { // Include join cost this.ctx.setCalcitePlan(RelOptUtil.toString(newPlan, SqlExplainLevel.ALL_ATTRIBUTES)); } else { // Do not include join cost this.ctx.setCalcitePlan(RelOptUtil.toString(newPlan)); } } else if (explainConfig.isExtended() || explainConfig.isFormatted()) { this.ctx.setOptimizedSql(getOptimizedSql(newPlan)); } } if (LOG.isTraceEnabled()) { LOG.trace(getOptimizedSql(newPlan)); LOG.trace(newAST.dump()); } } } catch (Exception e) { boolean isMissingStats = noColsMissingStats.get() > 0; if (isMissingStats) { LOG.error("CBO failed due to missing column stats (see previous errors), skipping CBO"); this.ctx.setCboInfo("Plan not optimized by CBO due to missing statistics. Please check log for more details."); } else { LOG.error("CBO failed, skipping CBO. ", e); if (e instanceof CalciteSemanticException) { CalciteSemanticException calciteSemanticException = (CalciteSemanticException) e; UnsupportedFeature unsupportedFeature = calciteSemanticException.getUnsupportedFeature(); if (unsupportedFeature != null) { this.ctx.setCboInfo("Plan not optimized by CBO due to missing feature [" + unsupportedFeature + "]."); } else { this.ctx.setCboInfo("Plan not optimized by CBO."); } } else { this.ctx.setCboInfo("Plan not optimized by CBO."); } } if (e instanceof CalciteSubquerySemanticException) { // the non-CBO path retries subqueries and throws a completely different exception/error // that eclipses the original error message, // so avoid executing subqueries on the non-CBO path throw new SemanticException(e); } else if (e instanceof CalciteViewSemanticException) { // the non-CBO path retries the create view and // we believe it will throw the same error message throw new SemanticException(e); } else if (!conf.getBoolVar(ConfVars.HIVE_IN_TEST) || isMissingStats || e instanceof CalciteSemanticException) { reAnalyzeAST = true; } else if (e instanceof SemanticException && !conf.getBoolVar(ConfVars.HIVE_IN_TEST)) { // although it's likely to be a valid exception, we will retry // with CBO off anyway; // for tests we would like to avoid retrying, to catch CBO failures reAnalyzeAST = true; } else if (e instanceof RuntimeException) { throw (RuntimeException) e; } else if (e instanceof SemanticException) { throw e; } else { throw new SemanticException(e); } } finally { runCBO = false; disableJoinMerge = defaultJoinMerge; disableSemJoinReordering = false; if (reAnalyzeAST) { init(true); prunedPartitions.clear(); // Assumption: At this point Parse Tree gen & resolution will always // be true (since we started out that way). 
super.genResolvedParseTree(ast, new PlannerContext()); skipCalcitePlan = true; } } else { this.ctx.setCboInfo("Plan not optimized by CBO."); skipCalcitePlan = true; } } if (skipCalcitePlan) { sinkOp = super.genOPTree(ast, plannerCtx); } return sinkOp; } private ASTNode handleCreateViewDDL(ASTNode ast) throws SemanticException { saveViewDefinition(); String originalText = createVwDesc.getViewOriginalText(); String expandedText = createVwDesc.getViewExpandedText(); List<FieldSchema> schema = createVwDesc.getSchema(); List<FieldSchema> partitionColumns = createVwDesc.getPartCols(); init(false); setAST(ast); ASTNode newAST = reAnalyzeViewAfterCbo(ast); createVwDesc.setViewOriginalText(originalText); createVwDesc.setViewExpandedText(expandedText); createVwDesc.setSchema(schema); createVwDesc.setPartCols(partitionColumns); return newAST; } /* * Tries to optimize the FROM clause of a multi-insert query. No attempt is made to * optimize the insert clauses of the query. Disables CBO (runCBO = false) when the * rewriting is not possible. */ private void handleMultiDestQuery(ASTNode ast, PreCboCtx cboCtx) throws SemanticException { // Not supported by CBO if (!runCBO) { return; } // Currently, we only optimize the content of the FROM clause // for multi-insert queries. Thus, nodeOfInterest is the FROM clause if (isJoinToken(cboCtx.nodeOfInterest)) { // Join clause: rewriting is needed ASTNode subq = rewriteASTForMultiInsert(ast, cboCtx.nodeOfInterest); if (subq != null) { // We could rewrite into a subquery cboCtx.nodeOfInterest = (ASTNode) subq.getChild(0); QB newQB = new QB(null, "", false); Phase1Ctx ctx_1 = initPhase1Ctx(); doPhase1(cboCtx.nodeOfInterest, newQB, ctx_1, null); setQB(newQB); getMetaData(getQB()); } else { runCBO = false; } } else if (cboCtx.nodeOfInterest.getToken().getType() == HiveParser.TOK_SUBQUERY) { // Subquery: no rewriting needed ASTNode subq = cboCtx.nodeOfInterest; // First child is the subquery, second child is the alias // We set the node of interest and QB to the subquery // We do not need to generate the QB again, but rather we use it directly cboCtx.nodeOfInterest = (ASTNode) subq.getChild(0); String subQAlias = unescapeIdentifier(subq.getChild(1).getText()); final QB newQB = getQB().getSubqForAlias(subQAlias).getQB(); newQB.getParseInfo().setAlias(""); newQB.getParseInfo().setIsSubQ(false); setQB(newQB); } else { // No need to run CBO (table ref or virtual table) or not supported runCBO = false; } } private ASTNode rewriteASTForMultiInsert(ASTNode query, ASTNode nodeOfInterest) { // 1. gather references from the original query // This is a map from aliases to references. // We keep all references as we will need to modify them after creating // the subquery final Multimap<String, Object> aliasNodes = ArrayListMultimap.create(); // To know if we need to bail out final AtomicBoolean notSupported = new AtomicBoolean(false); TreeVisitorAction action = new TreeVisitorAction() { @Override public Object pre(Object t) { if (!notSupported.get()) { if (ParseDriver.adaptor.getType(t) == HiveParser.TOK_ALLCOLREF) { // TODO: this is a limitation of the AST rewriting approach that we will // not be able to overcome till proper integration of full multi-insert // queries with Calcite is implemented. // The current rewriting gathers references from insert clauses and then // updates them with the new subquery references. However, if insert // clauses use * or tab.*, we cannot resolve the columns that we are // referring to. Thus, we just bail out and those queries will not be // currently optimized by Calcite. 
// An example of such a query is: // FROM T_A a LEFT JOIN T_B b ON a.id = b.id // INSERT OVERWRITE TABLE join_result_1 // SELECT a.*, b.* // INSERT OVERWRITE TABLE join_result_3 // SELECT a.*, b.*; notSupported.set(true); } else if (ParseDriver.adaptor.getType(t) == HiveParser.DOT) { Object c = ParseDriver.adaptor.getChild(t, 0); if (c != null && ParseDriver.adaptor.getType(c) == HiveParser.TOK_TABLE_OR_COL) { aliasNodes.put(((ASTNode) t).toStringTree(), t); } } else if (ParseDriver.adaptor.getType(t) == HiveParser.TOK_TABLE_OR_COL) { Object p = ParseDriver.adaptor.getParent(t); if (p == null || ParseDriver.adaptor.getType(p) != HiveParser.DOT) { aliasNodes.put(((ASTNode) t).toStringTree(), t); } } } return t; } @Override public Object post(Object t) { return t; } }; TreeVisitor tv = new TreeVisitor(ParseDriver.adaptor); // We will iterate through the children: if it is an INSERT, we will traverse // the subtree to gather the references for (int i = 0; i < query.getChildCount(); i++) { ASTNode child = (ASTNode) query.getChild(i); if (ParseDriver.adaptor.getType(child) != HiveParser.TOK_INSERT) { // If it is not an INSERT, we do not need to do anything continue; } tv.visit(child, action); } if (notSupported.get()) { // Bail out return null; } // 2. rewrite into query // TOK_QUERY // TOK_FROM // join // TOK_INSERT // TOK_DESTINATION // TOK_DIR // TOK_TMP_FILE // TOK_SELECT // refs ASTNode from = new ASTNode(FROM_TOKEN); from.addChild((ASTNode) ParseDriver.adaptor.dupTree(nodeOfInterest)); ASTNode destination = new ASTNode(DEST_TOKEN); ASTNode dir = new ASTNode(DIR_TOKEN); ASTNode tmpFile = new ASTNode(TMPFILE_TOKEN); dir.addChild(tmpFile); destination.addChild(dir); ASTNode select = new ASTNode(SELECT_TOKEN); int num = 0; for (Collection<Object> selectIdentifier : aliasNodes.asMap().values()) { Iterator<Object> it = selectIdentifier.iterator(); ASTNode node = (ASTNode) it.next(); // Add select expression ASTNode selectExpr = new ASTNode(SELEXPR_TOKEN); selectExpr.addChild((ASTNode) ParseDriver.adaptor.dupTree(node)); // Identifier String colAlias = "col" + num; selectExpr.addChild(new ASTNode(new CommonToken(HiveParser.Identifier, colAlias))); // Alias select.addChild(selectExpr); // Rewrite all INSERT references (all the node values for this key) ASTNode colExpr = new ASTNode(TABLEORCOL_TOKEN); colExpr.addChild(new ASTNode(new CommonToken(HiveParser.Identifier, colAlias))); replaceASTChild(node, colExpr); while (it.hasNext()) { // Loop to rewrite rest of INSERT references node = (ASTNode) it.next(); colExpr = new ASTNode(TABLEORCOL_TOKEN); colExpr.addChild(new ASTNode(new CommonToken(HiveParser.Identifier, colAlias))); replaceASTChild(node, colExpr); } num++; } ASTNode insert = new ASTNode(INSERT_TOKEN); insert.addChild(destination); insert.addChild(select); ASTNode newQuery = new ASTNode(QUERY_TOKEN); newQuery.addChild(from); newQuery.addChild(insert); // 3. create subquery ASTNode subq = new ASTNode(SUBQUERY_TOKEN); subq.addChild(newQuery); subq.addChild(new ASTNode(new CommonToken(HiveParser.Identifier, "subq"))); replaceASTChild(nodeOfInterest, subq); // 4. return subquery return subq; } /** * Can CBO handle the given AST? * * @param ast * Top level AST * @param qb * top level QB corresponding to the AST * @param cboCtx * @return boolean * * Assumption:<br> * If top level QB is query then everything below it must also be * Query. 
*/ boolean canCBOHandleAst(ASTNode ast, QB qb, PreCboCtx cboCtx) { int root = ast.getToken().getType(); boolean needToLogMessage = STATIC_LOG.isInfoEnabled(); boolean isSupportedRoot = root == HiveParser.TOK_QUERY || root == HiveParser.TOK_EXPLAIN || qb.isCTAS() || qb.isMaterializedView(); // Queries without a source table currently are not supported by CBO boolean isSupportedType = (qb.getIsQuery()) || qb.isCTAS() || qb.isMaterializedView() || cboCtx.type == PreCboCtx.Type.INSERT || cboCtx.type == PreCboCtx.Type.MULTI_INSERT; boolean noBadTokens = HiveCalciteUtil.validateASTForUnsupportedTokens(ast); boolean result = isSupportedRoot && isSupportedType && noBadTokens; if (!result) { if (needToLogMessage) { String msg = ""; if (!isSupportedRoot) { msg += "doesn't have QUERY or EXPLAIN as root and not a CTAS; "; } if (!isSupportedType) { msg += "is not a query with at least one source table " + " or there is a subquery without a source table, or CTAS, or insert; "; } if (!noBadTokens) { msg += "has unsupported tokens; "; } if (msg.isEmpty()) { msg += "has some unspecified limitations; "; } STATIC_LOG.info("Not invoking CBO because the statement " + msg.substring(0, msg.length() - 2)); } return false; } // Now check QB in more detail. canHandleQbForCbo returns null if query can // be handled. String msg = CalcitePlanner.canHandleQbForCbo(queryProperties, conf, true, needToLogMessage, qb); if (msg == null) { return true; } if (needToLogMessage) { STATIC_LOG.info("Not invoking CBO because the statement " + msg.substring(0, msg.length() - 2)); } return false; } /** * Checks whether Calcite can handle the query. * * @param queryProperties * @param conf * @param topLevelQB * Does QB corresponds to top most query block? * @param verbose * Whether return value should be verbose in case of failure. * @return null if the query can be handled; non-null reason string if it * cannot be. * * Assumption:<br> * 1. If top level QB is query then everything below it must also be * Query<br> * 2. Nested Subquery will return false for qbToChk.getIsQuery() */ static String canHandleQbForCbo(QueryProperties queryProperties, HiveConf conf, boolean topLevelQB, boolean verbose, QB qb) { if (!queryProperties.hasClusterBy() && !queryProperties.hasDistributeBy() && !queryProperties.hasSortBy() && !queryProperties.hasPTF() && !queryProperties.usesScript() && queryProperties.isCBOSupportedLateralViews()) { // Ok to run CBO. return null; } // Not ok to run CBO, build error message. String msg = ""; if (verbose) { if (queryProperties.hasClusterBy()) msg += "has cluster by; "; if (queryProperties.hasDistributeBy()) msg += "has distribute by; "; if (queryProperties.hasSortBy()) msg += "has sort by; "; if (queryProperties.hasPTF()) msg += "has PTF; "; if (queryProperties.usesScript()) msg += "uses scripts; "; if (queryProperties.hasLateralViews()) msg += "has lateral views; "; if (msg.isEmpty()) msg += "has some unspecified limitations; "; } return msg; } /* This method inserts the right profiles into profiles CBO depending * on the query characteristics. 
*/ private static EnumSet<ExtendedCBOProfile> obtainCBOProfiles(QueryProperties queryProperties) { EnumSet<ExtendedCBOProfile> profilesCBO = EnumSet.noneOf(ExtendedCBOProfile.class); // If the query contains more than one join if (queryProperties.getJoinCount() > 1) { profilesCBO.add(ExtendedCBOProfile.JOIN_REORDERING); } // If the query contains windowing processing if (queryProperties.hasWindowing()) { profilesCBO.add(ExtendedCBOProfile.WINDOWING_POSTPROCESSING); } return profilesCBO; } @Override boolean isCBOExecuted() { return runCBO; } @Override boolean isCBOSupportedLateralView(ASTNode lateralView) { // Lateral view AST has the following shape: // ^(TOK_LATERAL_VIEW // ^(TOK_SELECT ^(TOK_SELEXPR ^(TOK_FUNCTION Identifier params) identifier* tableAlias))) if (lateralView.getToken().getType() == HiveParser.TOK_LATERAL_VIEW_OUTER) { // LATERAL VIEW OUTER not supported in CBO return false; } // Only INLINE followed by ARRAY supported in CBO ASTNode lvFunc = (ASTNode) lateralView.getChild(0).getChild(0).getChild(0); String lvFuncName = lvFunc.getChild(0).getText(); if (lvFuncName.compareToIgnoreCase( GenericUDTFInline.class.getAnnotation(Description.class).name()) != 0) { return false; } if (lvFunc.getChildCount() != 2) { return false; } ASTNode innerFunc = (ASTNode) lvFunc.getChild(1); if (innerFunc.getToken().getType() != HiveParser.TOK_FUNCTION || innerFunc.getChild(0).getText().compareToIgnoreCase( GenericUDFArray.class.getAnnotation(Description.class).name()) != 0) { return false; } return true; } @Override boolean continueJoinMerge() { return !(runCBO && disableSemJoinReordering); } @Override Table materializeCTE(String cteName, CTEClause cte) throws HiveException { ASTNode createTable = new ASTNode(new ClassicToken(HiveParser.TOK_CREATETABLE)); ASTNode tableName = new ASTNode(new ClassicToken(HiveParser.TOK_TABNAME)); tableName.addChild(new ASTNode(new ClassicToken(HiveParser.Identifier, cteName))); ASTNode temporary = new ASTNode(new ClassicToken(HiveParser.KW_TEMPORARY, MATERIALIZATION_MARKER)); createTable.addChild(tableName); createTable.addChild(temporary); createTable.addChild(cte.cteNode); CalcitePlanner analyzer = new CalcitePlanner(queryState); analyzer.initCtx(ctx); analyzer.init(false); // should share cte contexts analyzer.aliasToCTEs.putAll(aliasToCTEs); HiveOperation operation = queryState.getHiveOperation(); try { analyzer.analyzeInternal(createTable); } finally { queryState.setCommandType(operation); } Table table = analyzer.tableDesc.toTable(conf); Path location = table.getDataLocation(); try { location.getFileSystem(conf).mkdirs(location); } catch (IOException e) { throw new HiveException(e); } table.setMaterializedTable(true); LOG.info(cteName + " will be materialized into " + location); cte.source = analyzer; ctx.addMaterializedTable(cteName, table); // For CalcitePlanner, store qualified name too ctx.addMaterializedTable(table.getFullyQualifiedName(), table); return table; } private void fixUpASTAggregateIncrementalRebuild(ASTNode newAST) throws SemanticException { // Replace INSERT OVERWRITE by MERGE equivalent rewriting. // Here we need to do this complex AST rewriting that generates the same plan // that a MERGE clause would generate because CBO does not support MERGE yet. // TODO: Support MERGE as first class member in CBO to simplify this logic. 
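// A sketch for orientation (hypothetical MV and base table names, not taken from this code): the incremental rebuild plan arrives here as // INSERT OVERWRITE TABLE mv SELECT ... FROM mv RIGHT OUTER JOIN (SELECT ... FROM base_tables ...) ... // and the steps below reshape it into a MERGE-equivalent multi-insert: one UPDATE branch for matching rows (located via ROW__ID) and one INSERT branch for non-matching rows (located via IS NULL checks on the join keys).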
// 1) Replace INSERT OVERWRITE by INSERT ASTNode updateNode = new ASTSearcher().simpleBreadthFirstSearch( newAST, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT); ASTNode destinationNode = (ASTNode) updateNode.getChild(0); ASTNode newInsertInto = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_INSERT_INTO, "TOK_INSERT_INTO"); newInsertInto.addChildren(destinationNode.getChildren()); ASTNode destinationParentNode = (ASTNode) destinationNode.getParent(); int childIndex = destinationNode.childIndex; destinationParentNode.deleteChild(childIndex); destinationParentNode.insertChild(childIndex, newInsertInto); // 1.1) Extract name as we will need it afterwards: // TOK_INSERT_INTO TOK_TAB TOK_TABNAME <materialization_name> ASTNode materializationNode = new ASTSearcher().simpleBreadthFirstSearch( newInsertInto, HiveParser.TOK_INSERT_INTO, HiveParser.TOK_TAB, HiveParser.TOK_TABNAME); // 2) Duplicate the INSERT branch: the first branch will be the UPDATE // for the MERGE statement while the new branch will be the INSERT for the // MERGE statement ASTNode updateParent = (ASTNode) updateNode.getParent(); ASTNode insertNode = (ASTNode) ParseDriver.adaptor.dupTree(updateNode); insertNode.setParent(updateParent); updateParent.addChild(insertNode); // 3) Create ROW__ID column in select clause from left input for the RIGHT OUTER JOIN. // This is needed for the UPDATE clause. Hence, we find the following node: // TOK_QUERY // TOK_FROM // TOK_RIGHTOUTERJOIN // TOK_SUBQUERY // TOK_QUERY // ... // TOK_INSERT // ... // TOK_SELECT // And then we create the following child node: // TOK_SELEXPR // . // TOK_TABLE_OR_COL // cmv_mat_view // ROW__ID ASTNode subqueryNodeInputROJ = new ASTSearcher().simpleBreadthFirstSearch( newAST, HiveParser.TOK_QUERY, HiveParser.TOK_FROM, HiveParser.TOK_RIGHTOUTERJOIN, HiveParser.TOK_SUBQUERY); ASTNode selectNodeInputROJ = new ASTSearcher().simpleBreadthFirstSearch( subqueryNodeInputROJ, HiveParser.TOK_SUBQUERY, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT, HiveParser.TOK_SELECT); ASTNode selectExprNodeInputROJ = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_SELEXPR, "TOK_SELEXPR"); ASTNode dotNodeInputROJ = (ASTNode) ParseDriver.adaptor.create( HiveParser.DOT, "."); ASTNode columnTokNodeInputROJ = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_TABLE_OR_COL, "TOK_TABLE_OR_COL"); ASTNode tableNameNodeInputROJ = (ASTNode) ParseDriver.adaptor.create( HiveParser.Identifier, Warehouse.getQualifiedName( materializationNode.getChild(0).getText(), materializationNode.getChild(1).getText())); ASTNode rowIdNodeInputROJ = (ASTNode) ParseDriver.adaptor.create( HiveParser.Identifier, VirtualColumn.ROWID.getName()); ParseDriver.adaptor.addChild(selectNodeInputROJ, selectExprNodeInputROJ); ParseDriver.adaptor.addChild(selectExprNodeInputROJ, dotNodeInputROJ); ParseDriver.adaptor.addChild(dotNodeInputROJ, columnTokNodeInputROJ); ParseDriver.adaptor.addChild(dotNodeInputROJ, rowIdNodeInputROJ); ParseDriver.adaptor.addChild(columnTokNodeInputROJ, tableNameNodeInputROJ); // 4) Transform first INSERT branch into an UPDATE // 4.1) Adding ROW__ID field ASTNode selectNodeInUpdate = (ASTNode) updateNode.getChild(1); if (selectNodeInUpdate.getType() != HiveParser.TOK_SELECT) { throw new SemanticException("TOK_SELECT expected in incremental rewriting"); } ASTNode selectExprNodeInUpdate = (ASTNode) ParseDriver.adaptor.dupNode(selectExprNodeInputROJ); ASTNode dotNodeInUpdate = (ASTNode) ParseDriver.adaptor.dupNode(dotNodeInputROJ); ASTNode columnTokNodeInUpdate = (ASTNode)
ParseDriver.adaptor.dupNode(columnTokNodeInputROJ); ASTNode tableNameNodeInUpdate = (ASTNode) ParseDriver.adaptor.dupNode(subqueryNodeInputROJ.getChild(1)); ASTNode rowIdNodeInUpdate = (ASTNode) ParseDriver.adaptor.dupNode(rowIdNodeInputROJ); ParseDriver.adaptor.addChild(selectExprNodeInUpdate, dotNodeInUpdate); ParseDriver.adaptor.addChild(dotNodeInUpdate, columnTokNodeInUpdate); ParseDriver.adaptor.addChild(dotNodeInUpdate, rowIdNodeInUpdate); ParseDriver.adaptor.addChild(columnTokNodeInUpdate, tableNameNodeInUpdate); selectNodeInUpdate.insertChild(0, ParseDriver.adaptor.dupTree(selectExprNodeInUpdate)); // 4.2) Modifying filter condition. The incremental rewriting rule generated an OR // clause where the first disjunct contains the condition for the UPDATE branch. // TOK_WHERE // or // and <- DISJUNCT FOR <UPDATE> // = // . // TOK_TABLE_OR_COL // $hdt$_0 // a // . // TOK_TABLE_OR_COL // $hdt$_1 // a // = // . // TOK_TABLE_OR_COL // $hdt$_0 // c // . // TOK_TABLE_OR_COL // $hdt$_1 // c // and <- DISJUNCT FOR <INSERT> // TOK_FUNCTION // isnull // . // TOK_TABLE_OR_COL // $hdt$_0 // a // TOK_FUNCTION // isnull // . // TOK_TABLE_OR_COL // $hdt$_0 // c ASTNode whereClauseInUpdate = null; for (int i = 0; i < updateNode.getChildren().size(); i++) { if (updateNode.getChild(i).getType() == HiveParser.TOK_WHERE) { whereClauseInUpdate = (ASTNode) updateNode.getChild(i); break; } } if (whereClauseInUpdate == null) { throw new SemanticException("TOK_WHERE expected in incremental rewriting"); } if (whereClauseInUpdate.getChild(0).getType() != HiveParser.KW_OR) { throw new SemanticException("OR clause expected below TOK_WHERE in incremental rewriting"); } // We bypass the OR clause and select the first disjunct ASTNode newCondInUpdate = (ASTNode) whereClauseInUpdate.getChild(0).getChild(0); ParseDriver.adaptor.setChild(whereClauseInUpdate, 0, newCondInUpdate); // 4.3) Finally, we add the SORT clause, which is needed for the UPDATE. // TOK_SORTBY // TOK_TABSORTCOLNAMEASC // TOK_NULLS_FIRST // . // TOK_TABLE_OR_COL // cmv_basetable_2 // ROW__ID ASTNode sortExprNode = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_SORTBY, "TOK_SORTBY"); ASTNode orderExprNode = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_TABSORTCOLNAMEASC, "TOK_TABSORTCOLNAMEASC"); ASTNode nullsOrderExprNode = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_NULLS_FIRST, "TOK_NULLS_FIRST"); ASTNode dotNodeInSort = (ASTNode) ParseDriver.adaptor.dupTree(dotNodeInUpdate); ParseDriver.adaptor.addChild(updateNode, sortExprNode); ParseDriver.adaptor.addChild(sortExprNode, orderExprNode); ParseDriver.adaptor.addChild(orderExprNode, nullsOrderExprNode); ParseDriver.adaptor.addChild(nullsOrderExprNode, dotNodeInSort); // 5) Modify INSERT branch condition. In particular, we need to modify the // WHERE clause and pick up the second disjunct from the OR operation.
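// Illustration of step 5 (hypothetical condition, mirroring the AST drawn in 4.2 above): given // WHERE (a0 = a1 AND c0 = c1) OR (isnull(a0) AND isnull(c0)) // the UPDATE branch above kept the first disjunct (the equality conjunction), and the INSERT branch below keeps only the second disjunct (the isnull conjunction), so each branch sees exactly the rows it must handle.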
ASTNode whereClauseInInsert = null; for (int i = 0; i < insertNode.getChildren().size(); i++) { if (insertNode.getChild(i).getType() == HiveParser.TOK_WHERE) { whereClauseInInsert = (ASTNode) insertNode.getChild(i); break; } } if (whereClauseInInsert == null) { throw new SemanticException("TOK_WHERE expected in incremental rewriting"); } if (whereClauseInInsert.getChild(0).getType() != HiveParser.KW_OR) { throw new SemanticException("OR clause expected below TOK_WHERE in incremental rewriting"); } // We bypass the OR clause and select the second disjunct ASTNode newCondInInsert = (ASTNode) whereClauseInInsert.getChild(0).getChild(1); ParseDriver.adaptor.setChild(whereClauseInInsert, 0, newCondInInsert); // 6) Now we set some tree properties related to multi-insert // operation with INSERT/UPDATE ctx.setOperation(Context.Operation.MERGE); ctx.addDestNamePrefix(1, Context.DestClausePrefix.UPDATE); ctx.addDestNamePrefix(2, Context.DestClausePrefix.INSERT); } private void fixUpASTNoAggregateIncrementalRebuild(ASTNode newAST) throws SemanticException { // Replace INSERT OVERWRITE by INSERT INTO // AST tree will have this shape: // TOK_QUERY // TOK_FROM // ... // TOK_INSERT // TOK_DESTINATION <- THIS TOKEN IS REPLACED BY 'TOK_INSERT_INTO' // TOK_TAB // TOK_TABNAME // default.cmv_mat_view // TOK_SELECT // ... ASTNode dest = new ASTSearcher().simpleBreadthFirstSearch(newAST, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT, HiveParser.TOK_DESTINATION); ASTNode newChild = (ASTNode) ParseDriver.adaptor.create( HiveParser.TOK_INSERT_INTO, "TOK_INSERT_INTO"); newChild.addChildren(dest.getChildren()); ASTNode destParent = (ASTNode) dest.getParent(); int childIndex = dest.childIndex; destParent.deleteChild(childIndex); destParent.insertChild(childIndex, newChild); } @Override String fixCtasColumnName(String colName) { if (runCBO) { int lastDot = colName.lastIndexOf('.'); if (lastDot < 0) return colName; // alias is not fully qualified String nqColumnName = colName.substring(lastDot + 1); STATIC_LOG.debug("Replacing " + colName + " (produced by CBO) by " + nqColumnName); return nqColumnName; } return super.fixCtasColumnName(colName); } /** * The context that doPhase1 uses to populate information pertaining to CBO * (currently, this is used for CTAS and insert-as-select). */ static class PreCboCtx extends PlannerContext { enum Type { NONE, INSERT, MULTI_INSERT, CTAS, VIEW, UNEXPECTED } private ASTNode nodeOfInterest; private Type type = Type.NONE; private void set(Type type, ASTNode ast) { if (this.type != Type.NONE) { STATIC_LOG.warn("Setting " + type + " when already " + this.type + "; node " + ast.dump() + " vs old node " + nodeOfInterest.dump()); this.type = Type.UNEXPECTED; return; } this.type = type; this.nodeOfInterest = ast; } @Override void setCTASToken(ASTNode child) { set(PreCboCtx.Type.CTAS, child); } @Override void setViewToken(ASTNode child) { set(PreCboCtx.Type.VIEW, child); } @Override void setInsertToken(ASTNode ast, boolean isTmpFileDest) { if (!isTmpFileDest) { set(PreCboCtx.Type.INSERT, ast); } } @Override void setMultiInsertToken(ASTNode child) { set(PreCboCtx.Type.MULTI_INSERT, child); } @Override void resetToken() { this.type = Type.NONE; this.nodeOfInterest = null; } } ASTNode fixUpAfterCbo(ASTNode originalAst, ASTNode newAst, PreCboCtx cboCtx) throws SemanticException { switch (cboCtx.type) { case NONE: // nothing to do return newAst; case CTAS: case VIEW: { // Patch the optimized query back into original CTAS AST, replacing the // original query. 
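// E.g. (hypothetical): for CREATE TABLE t AS SELECT ..., nodeOfInterest is the query subtree recorded by setCTASToken() during phase 1, so swapping in the optimized query keeps the surrounding CREATE TABLE skeleton untouched.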
replaceASTChild(cboCtx.nodeOfInterest, newAst); return originalAst; } case INSERT: { // We need to patch the dest back to original into new query. // This makes assumptions about the structure of the AST. ASTNode newDest = new ASTSearcher().simpleBreadthFirstSearch(newAst, HiveParser.TOK_QUERY, HiveParser.TOK_INSERT, HiveParser.TOK_DESTINATION); if (newDest == null) { LOG.error("Cannot find destination after CBO; new ast is " + newAst.dump()); throw new SemanticException("Cannot find destination after CBO"); } replaceASTChild(newDest, cboCtx.nodeOfInterest); return newAst; } case MULTI_INSERT: { // Patch the optimized query back into original FROM clause. replaceASTChild(cboCtx.nodeOfInterest, newAst); return originalAst; } default: throw new AssertionError("Unexpected type " + cboCtx.type); } } ASTNode reAnalyzeCTASAfterCbo(ASTNode newAst) throws SemanticException { // analyzeCreateTable uses this.ast, but doPhase1 doesn't, so only reset it // here. newAst = analyzeCreateTable(newAst, getQB(), null); if (newAst == null) { LOG.error("analyzeCreateTable failed to initialize CTAS after CBO;" + " new ast is " + getAST().dump()); throw new SemanticException("analyzeCreateTable failed to initialize CTAS after CBO"); } return newAst; } ASTNode reAnalyzeViewAfterCbo(ASTNode newAst) throws SemanticException { // analyzeCreateView uses this.ast, but doPhase1 doesn't, so only reset it // here. newAst = analyzeCreateView(newAst, getQB(), null); if (newAst == null) { LOG.error("analyzeCreateView failed to initialize materialized view after CBO;" + " new ast is " + getAST().dump()); throw new SemanticException("analyzeCreateView failed to initialize materialized view after CBO"); } return newAst; } public static class ASTSearcher { private final LinkedList<ASTNode> searchQueue = new LinkedList<ASTNode>(); /** * Performs breadth-first search of the AST for a nested set of tokens. Tokens * don't have to be each others' direct children; they can be separated by * layers of other tokens. For each token in the list, the first one found is * matched and there's no backtracking; thus, if the AST has multiple instances of * some token, of which only one matches, it is not guaranteed to be found. We * use this for simple things. Not thread-safe - reuses searchQueue. */ public ASTNode simpleBreadthFirstSearch(ASTNode ast, int... tokens) { searchQueue.clear(); searchQueue.add(ast); for (int i = 0; i < tokens.length; ++i) { boolean found = false; int token = tokens[i]; while (!searchQueue.isEmpty() && !found) { ASTNode next = searchQueue.poll(); found = next.getType() == token; if (found) { if (i == tokens.length - 1) return next; searchQueue.clear(); } for (int j = 0; j < next.getChildCount(); ++j) { searchQueue.add((ASTNode) next.getChild(j)); } } if (!found) return null; } return null; } // NOTE: despite its name, this method visits nodes in breadth-first order // (children are appended to and polled from a FIFO queue). public ASTNode depthFirstSearch(ASTNode ast, int token) { searchQueue.clear(); searchQueue.add(ast); while (!searchQueue.isEmpty()) { ASTNode next = searchQueue.poll(); if (next.getType() == token) return next; for (int j = 0; j < next.getChildCount(); ++j) { searchQueue.add((ASTNode) next.getChild(j)); } } return null; } public ASTNode simpleBreadthFirstSearchAny(ASTNode ast, int...
tokens) { searchQueue.clear(); searchQueue.add(ast); while (!searchQueue.isEmpty()) { ASTNode next = searchQueue.poll(); for (int i = 0; i < tokens.length; ++i) { if (next.getType() == tokens[i]) return next; } for (int i = 0; i < next.getChildCount(); ++i) { searchQueue.add((ASTNode) next.getChild(i)); } } return null; } public void reset() { searchQueue.clear(); } } private static void replaceASTChild(ASTNode child, ASTNode newChild) { ASTNode parent = (ASTNode) child.parent; int childIndex = child.childIndex; parent.deleteChild(childIndex); parent.insertChild(childIndex, newChild); } /** * Get optimized logical plan for the given QB tree in the semAnalyzer. * * @return the optimized Calcite plan * @throws SemanticException */ RelNode logicalPlan() throws SemanticException { RelNode optimizedOptiqPlan = null; CalcitePlannerAction calcitePlannerAction = null; if (this.columnAccessInfo == null) { this.columnAccessInfo = new ColumnAccessInfo(); } calcitePlannerAction = new CalcitePlannerAction( prunedPartitions, ctx.getOpContext().getColStatsCache(), this.columnAccessInfo); try { optimizedOptiqPlan = Frameworks.withPlanner(calcitePlannerAction, Frameworks .newConfigBuilder().typeSystem(new HiveTypeSystemImpl()).build()); } catch (Exception e) { rethrowCalciteException(e); throw new AssertionError("rethrowCalciteException didn't throw for " + e.getMessage()); } return optimizedOptiqPlan; } /** * Get SQL rewrite for a Calcite logical plan * * @return Optimized SQL text (or null, if failed) */ public String getOptimizedSql(RelNode optimizedOptiqPlan) { SqlDialect dialect = new HiveSqlDialect(SqlDialect.EMPTY_CONTEXT .withDatabaseProduct(SqlDialect.DatabaseProduct.HIVE) .withDatabaseMajorVersion(4) // TODO: should not be hardcoded .withDatabaseMinorVersion(0) .withIdentifierQuoteString("`") .withNullCollation(NullCollation.LOW)) { @Override protected boolean allowsAs() { return true; } @Override public boolean supportsCharSet() { return false; } }; try { final JdbcImplementor jdbcImplementor = new JdbcImplementor(dialect, (JavaTypeFactory) optimizedOptiqPlan.getCluster() .getTypeFactory()); final JdbcImplementor.Result result = jdbcImplementor.visitChild(0, optimizedOptiqPlan); String sql = result.asStatement().toSqlString(dialect).getSql(); return sql.replaceAll("VARCHAR\\(2147483647\\)", "STRING"); } catch (Exception ex) { LOG.warn("Rel2SQL Rewrite threw error", ex); } return null; } /** * Get Optimized AST for the given QB tree in the semAnalyzer. * * @return Optimized operator tree translated into Hive AST * @throws SemanticException */ ASTNode getOptimizedAST() throws SemanticException { return getOptimizedAST(logicalPlan()); } /** * Get Optimized AST for the given QB tree in the semAnalyzer. * * @return Optimized operator tree translated into Hive AST * @throws SemanticException */ ASTNode getOptimizedAST(RelNode optimizedOptiqPlan) throws SemanticException { ASTNode optiqOptimizedAST = ASTConverter.convert(optimizedOptiqPlan, resultSchema, HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_COLUMN_ALIGNMENT)); return optiqOptimizedAST; } /** * Get Optimized Hive Operator DAG for the given QB tree in the semAnalyzer.
* * @return Optimized Hive operator tree * @throws SemanticException */ Operator getOptimizedHiveOPDag() throws SemanticException { RelNode optimizedOptiqPlan = null; CalcitePlannerAction calcitePlannerAction = null; if (this.columnAccessInfo == null) { this.columnAccessInfo = new ColumnAccessInfo(); } calcitePlannerAction = new CalcitePlannerAction( prunedPartitions, ctx.getOpContext().getColStatsCache(), this.columnAccessInfo); try { optimizedOptiqPlan = Frameworks.withPlanner(calcitePlannerAction, Frameworks .newConfigBuilder().typeSystem(new HiveTypeSystemImpl()).build()); } catch (Exception e) { rethrowCalciteException(e); throw new AssertionError("rethrowCalciteException didn't throw for " + e.getMessage()); } RelNode modifiedOptimizedOptiqPlan = PlanModifierForReturnPath.convertOpTree( optimizedOptiqPlan, resultSchema, this.getQB().getTableDesc() != null); LOG.debug("Translating the following plan:\n" + RelOptUtil.toString(modifiedOptimizedOptiqPlan)); Operator<?> hiveRoot = new HiveOpConverter(this, conf, unparseTranslator, topOps) .convert(modifiedOptimizedOptiqPlan); RowResolver hiveRootRR = genRowResolver(hiveRoot, getQB()); opParseCtx.put(hiveRoot, new OpParseContext(hiveRootRR)); String dest = getQB().getParseInfo().getClauseNames().iterator().next(); if (getQB().getParseInfo().getDestSchemaForClause(dest) != null && this.getQB().getTableDesc() == null) { Operator<?> selOp = handleInsertStatement(dest, hiveRoot, hiveRootRR, getQB()); return genFileSinkPlan(dest, getQB(), selOp); } else { return genFileSinkPlan(dest, getQB(), hiveRoot); } } // This function serves as the wrapper of handleInsertStatementSpec in // SemanticAnalyzer Operator<?> handleInsertStatement(String dest, Operator<?> input, RowResolver inputRR, QB qb) throws SemanticException { ArrayList<ExprNodeDesc> colList = new ArrayList<ExprNodeDesc>(); ArrayList<ColumnInfo> columns = inputRR.getColumnInfos(); for (int i = 0; i < columns.size(); i++) { ColumnInfo col = columns.get(i); colList.add(new ExprNodeColumnDesc(col)); } ASTNode selExprList = qb.getParseInfo().getSelForClause(dest); RowResolver out_rwsch = handleInsertStatementSpec(colList, dest, inputRR, qb, selExprList); ArrayList<String> columnNames = new ArrayList<String>(); Map<String, ExprNodeDesc> colExprMap = new HashMap<String, ExprNodeDesc>(); for (int i = 0; i < colList.size(); i++) { String outputCol = getColumnInternalName(i); colExprMap.put(outputCol, colList.get(i)); columnNames.add(outputCol); } Operator<?> output = putOpInsertMap(OperatorFactory.getAndMakeChild(new SelectDesc(colList, columnNames), new RowSchema(out_rwsch.getColumnInfos()), input), out_rwsch); output.setColumnExprMap(colExprMap); return output; } /*** * Unwraps Calcite Invocation exceptions coming from the metadata provider chain and * obtains the real cause. * * @param e */ private void rethrowCalciteException(Exception e) throws SemanticException { Throwable first = (semanticException != null) ? semanticException : e, current = first, cause = current .getCause(); while (cause != null) { Throwable causeOfCause = cause.getCause(); if (current == first && causeOfCause == null && isUselessCause(first)) { // "cause" is a root cause, and "e"/"first" is a useless // exception it's wrapped in. first = cause; break; } else if (causeOfCause != null && isUselessCause(cause) && ExceptionHelper.resetCause(current, causeOfCause)) { // "cause" was a useless intermediate cause and was replaced // with its own cause.
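// Walked example (hypothetical chain, for illustration only): for RuntimeException -> InvocationTargetException -> SemanticException, both wrappers satisfy isUselessCause(), so the loop keeps splicing them out until the SemanticException surfaces and is rethrown as-is at the end of this method.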
cause = causeOfCause; continue; // do loop once again with the new cause of "current" } current = cause; cause = current.getCause(); } if (first instanceof RuntimeException) { throw (RuntimeException) first; } else if (first instanceof SemanticException) { throw (SemanticException) first; } throw new RuntimeException(first); } private static class ExceptionHelper { private static final Field CAUSE_FIELD = getField(Throwable.class, "cause"), TARGET_FIELD = getField(InvocationTargetException.class, "target"), MESSAGE_FIELD = getField(Throwable.class, "detailMessage"); private static Field getField(Class<?> clazz, String name) { try { Field f = clazz.getDeclaredField(name); f.setAccessible(true); return f; } catch (Throwable t) { return null; } } public static boolean resetCause(Throwable target, Throwable newCause) { try { if (MESSAGE_FIELD == null) return false; Field field = (target instanceof InvocationTargetException) ? TARGET_FIELD : CAUSE_FIELD; if (field == null) return false; Throwable oldCause = target.getCause(); String oldMsg = target.getMessage(); field.set(target, newCause); if (oldMsg != null && oldMsg.equals(oldCause.toString())) { MESSAGE_FIELD.set(target, newCause == null ? null : newCause.toString()); } } catch (Throwable se) { return false; } return true; } } private boolean isUselessCause(Throwable t) { return t instanceof RuntimeException || t instanceof InvocationTargetException || t instanceof UndeclaredThrowableException; } private RowResolver genRowResolver(Operator op, QB qb) { RowResolver rr = new RowResolver(); String subqAlias = (qb.getAliases().size() == 1 && qb.getSubqAliases().size() == 1) ? qb .getAliases().get(0) : null; for (ColumnInfo ci : op.getSchema().getSignature()) { try { rr.putWithCheck((subqAlias != null) ? subqAlias : ci.getTabAlias(), ci.getAlias() != null ? ci.getAlias() : ci.getInternalName(), ci.getInternalName(), new ColumnInfo(ci)); } catch (SemanticException e) { throw new RuntimeException(e); } } return rr; } private enum ExtendedCBOProfile { JOIN_REORDERING, WINDOWING_POSTPROCESSING, REFERENTIAL_CONSTRAINTS; } /** * Code responsible for Calcite plan generation and optimization. */ private class CalcitePlannerAction implements Frameworks.PlannerAction<RelNode> { private RelOptCluster cluster; private RelOptSchema relOptSchema; private final Map<String, PrunedPartitionList> partitionCache; private final Map<String, ColumnStatsList> colStatsCache; private final ColumnAccessInfo columnAccessInfo; private Map<HiveProject, Table> viewProjectToTableSchema; // correlated vars across subqueries within the same query need to have different IDs; // this will be used in RexNodeConverter to create cor var private int subqueryId; // this is to keep track of whether a subquery is correlated and contains an aggregate, // since this is special-cased when it is rewritten in SubqueryRemoveRule Set<RelNode> corrScalarRexSQWithAgg = new HashSet<RelNode>(); Set<RelNode> scalarAggNoGbyNoWin = new HashSet<RelNode>(); // TODO: Do we need to keep track of RR, ColNameToPosMap for every op or // just the last one?
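// Reading note (inferred from the surrounding code, not an authoritative comment): the two maps below are the bridge back from Calcite to Hive; for every RelNode produced, relToHiveRR remembers the Hive RowResolver and relToHiveColNameCalcitePosMap the column-name-to-position mapping, which methods such as genSetOpLogicalPlan() and genJoinRelNode() below consult when wiring new operators.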
LinkedHashMap<RelNode, RowResolver> relToHiveRR = new LinkedHashMap<RelNode, RowResolver>(); LinkedHashMap<RelNode, ImmutableMap<String, Integer>> relToHiveColNameCalcitePosMap = new LinkedHashMap<RelNode, ImmutableMap<String, Integer>>(); CalcitePlannerAction( Map<String, PrunedPartitionList> partitionCache, Map<String, ColumnStatsList> colStatsCache, ColumnAccessInfo columnAccessInfo) { this.partitionCache = partitionCache; this.colStatsCache = colStatsCache; this.columnAccessInfo = columnAccessInfo; } @Override public RelNode apply(RelOptCluster cluster, RelOptSchema relOptSchema, SchemaPlus rootSchema) { RelNode calciteGenPlan = null; RelNode calcitePreCboPlan = null; RelNode calciteOptimizedPlan = null; subqueryId = -1; /* * recreate cluster, so that it picks up the additional traitDef */ RelOptPlanner planner = createPlanner(conf, corrScalarRexSQWithAgg, scalarAggNoGbyNoWin); final RexBuilder rexBuilder = cluster.getRexBuilder(); final RelOptCluster optCluster = RelOptCluster.create(planner, rexBuilder); this.cluster = optCluster; this.relOptSchema = relOptSchema; PerfLogger perfLogger = SessionState.getPerfLogger(); // 1. Gen Calcite Plan perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); try { calciteGenPlan = genLogicalPlan(getQB(), true, null, null); // if we are creating a view, we do not use the table alias resultSchema = SemanticAnalyzer.convertRowSchemaToResultSetSchema( relToHiveRR.get(calciteGenPlan), getQB().isView() || getQB().isMaterializedView() ? false : HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_RESULTSET_USE_UNIQUE_COLUMN_NAMES)); } catch (SemanticException e) { semanticException = e; throw new RuntimeException(e); } perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Plan generation"); // Validate query materialization (materialized views, query results caching). // This check needs to occur before constant folding, which may remove some // function calls from the query plan. HiveRelOpMaterializationValidator matValidator = new HiveRelOpMaterializationValidator(); matValidator.validateQueryMaterialization(calciteGenPlan); if (!matValidator.isValidMaterialization()) { String reason = matValidator.getInvalidMaterializationReason(); setInvalidQueryMaterializationReason(reason); } // Create executor RexExecutor executorProvider = new HiveRexExecutorImpl(optCluster); calciteGenPlan.getCluster().getPlanner().setExecutor(executorProvider); // We need to get the ColumnAccessInfo and viewToTableSchema for views. HiveRelFieldTrimmer fieldTrimmer = new HiveRelFieldTrimmer(null, HiveRelFactories.HIVE_BUILDER.create(optCluster, null), this.columnAccessInfo, this.viewProjectToTableSchema); fieldTrimmer.trim(calciteGenPlan); // Create and set MD provider HiveDefaultRelMetadataProvider mdProvider = new HiveDefaultRelMetadataProvider(conf); RelMetadataQuery.THREAD_PROVIDERS.set( JaninoRelMetadataProvider.of(mdProvider.getMetadataProvider())); // Remove subquery LOG.debug("Plan before removing subquery:\n" + RelOptUtil.toString(calciteGenPlan)); calciteGenPlan = hepPlan(calciteGenPlan, false, mdProvider.getMetadataProvider(), null, new HiveSubQueryRemoveRule(conf)); LOG.debug("Plan just after removing subquery:\n" + RelOptUtil.toString(calciteGenPlan)); calciteGenPlan = HiveRelDecorrelator.decorrelateQuery(calciteGenPlan); LOG.debug("Plan after decorrelation:\n" + RelOptUtil.toString(calciteGenPlan)); // 2.
Apply pre-join order optimizations calcitePreCboPlan = applyPreJoinOrderingTransforms(calciteGenPlan, mdProvider.getMetadataProvider(), executorProvider); // 3. Materialized view based rewriting // We disable it for CTAS and MV creation queries (trying to avoid any problem // due to data freshness) if (conf.getBoolVar(ConfVars.HIVE_MATERIALIZED_VIEW_ENABLE_AUTO_REWRITING) && !getQB().isMaterializedView() && !ctx.isLoadingMaterializedView() && !getQB().isCTAS()) { calcitePreCboPlan = applyMaterializedViewRewriting(planner, calcitePreCboPlan, mdProvider.getMetadataProvider(), executorProvider); } // Get rid of sq_count_check if group by key is constant if (conf.getBoolVar(ConfVars.HIVE_REMOVE_SQ_COUNT_CHECK)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calcitePreCboPlan = hepPlan(calcitePreCboPlan, false, mdProvider.getMetadataProvider(), null, HiveRemoveSqCountCheck.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Removing sq_count_check UDF "); } // Remove Projects between Joins so that JoinToMultiJoinRule can merge them to MultiJoin calcitePreCboPlan = hepPlan(calcitePreCboPlan, true, mdProvider.getMetadataProvider(), executorProvider, HepMatchOrder.BOTTOM_UP, HiveJoinProjectTransposeRule.LEFT_PROJECT_BTW_JOIN, HiveJoinProjectTransposeRule.RIGHT_PROJECT_BTW_JOIN, HiveProjectMergeRule.INSTANCE); // 4. Apply join order optimizations: reordering MST algorithm // If join optimizations failed because of missing stats, we continue with // the rest of optimizations if (profilesCBO.contains(ExtendedCBOProfile.JOIN_REORDERING)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); try { List<RelMetadataProvider> list = Lists.newArrayList(); list.add(mdProvider.getMetadataProvider()); RelTraitSet desiredTraits = optCluster .traitSetOf(HiveRelNode.CONVENTION, RelCollations.EMPTY); HepProgramBuilder hepPgmBldr = new HepProgramBuilder().addMatchOrder(HepMatchOrder.BOTTOM_UP); hepPgmBldr.addRuleInstance(new JoinToMultiJoinRule(HiveJoin.class)); hepPgmBldr.addRuleInstance(new LoptOptimizeJoinRule(HiveRelFactories.HIVE_BUILDER)); HepProgram hepPgm = hepPgmBldr.build(); HepPlanner hepPlanner = new HepPlanner(hepPgm); hepPlanner.registerMetadataProviders(list); RelMetadataProvider chainedProvider = ChainedRelMetadataProvider.of(list); optCluster.setMetadataProvider(new CachingRelMetadataProvider(chainedProvider, hepPlanner)); RelNode rootRel = calcitePreCboPlan; hepPlanner.setRoot(rootRel); if (!calcitePreCboPlan.getTraitSet().equals(desiredTraits)) { rootRel = hepPlanner.changeTraits(calcitePreCboPlan, desiredTraits); } hepPlanner.setRoot(rootRel); calciteOptimizedPlan = hepPlanner.findBestExp(); } catch (Exception e) { boolean isMissingStats = noColsMissingStats.get() > 0; if (isMissingStats) { LOG.warn("Missing column stats (see previous messages), skipping join reordering in CBO"); noColsMissingStats.set(0); calciteOptimizedPlan = calcitePreCboPlan; disableSemJoinReordering = false; } else { throw e; } } perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Join Reordering"); } else { calciteOptimizedPlan = calcitePreCboPlan; disableSemJoinReordering = false; } // 5. 
Run other optimizations that do not need stats perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, ProjectRemoveRule.INSTANCE, HiveUnionMergeRule.INSTANCE, HiveAggregateProjectMergeRule.INSTANCE, HiveProjectMergeRule.INSTANCE_NO_FORCE, HiveJoinCommuteRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Optimizations without stats 1"); // 6. Run aggregate-join transpose (cost based) // If it failed because of missing stats, we continue with // the rest of optimizations if (conf.getBoolVar(ConfVars.AGGR_JOIN_TRANSPOSE)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); try { calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, HiveAggregateJoinTransposeRule.INSTANCE); } catch (Exception e) { boolean isMissingStats = noColsMissingStats.get() > 0; if (isMissingStats) { LOG.warn("Missing column stats (see previous messages), skipping aggregate-join transpose in CBO"); noColsMissingStats.set(0); } else { throw e; } } perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Aggregate join transpose"); } // 7. Convert Join + GBy to semijoin // run this rule at later stages, since many Calcite rules can't deal with semijoin if (conf.getBoolVar(ConfVars.SEMIJOIN_CONVERSION)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HiveSemiJoinRule.INSTANCE_PROJECT, HiveSemiJoinRule.INSTANCE_AGGREGATE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Semijoin conversion"); } // 8. Convert SemiJoin + GBy to SemiJoin perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HiveRemoveGBYSemiJoinRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Removal of gby from semijoin"); // 9. Run rule to fix windowing issue when it is done over // aggregation columns (HIVE-10627) if (profilesCBO.contains(ExtendedCBOProfile.WINDOWING_POSTPROCESSING)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, HiveWindowingFixRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Window fixing rule"); } // 10.
Apply Druid transformation rules perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, HiveDruidRules.FILTER, HiveDruidRules.PROJECT_FILTER_TRANSPOSE, HiveDruidRules.AGGREGATE_FILTER_TRANSPOSE, HiveDruidRules.AGGREGATE_PROJECT, HiveDruidRules.PROJECT, HiveDruidRules.EXPAND_SINGLE_DISTINCT_AGGREGATES_DRUID_RULE, HiveDruidRules.AGGREGATE, HiveDruidRules.POST_AGGREGATION_PROJECT, HiveDruidRules.FILTER_AGGREGATE_TRANSPOSE, HiveDruidRules.FILTER_PROJECT_TRANSPOSE, HiveDruidRules.HAVING_FILTER_RULE, HiveDruidRules.SORT_PROJECT_TRANSPOSE, HiveDruidRules.SORT, HiveDruidRules.PROJECT_SORT_TRANSPOSE ); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Druid transformation rules"); if (conf.getBoolVar(ConfVars.HIVE_ENABLE_JDBC_PUSHDOWN)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, true, mdProvider.getMetadataProvider(), null, HepMatchOrder.TOP_DOWN, JDBCExtractJoinFilterRule.INSTANCE, JDBCAbstractSplitFilterRule.SPLIT_FILTER_ABOVE_JOIN, JDBCAbstractSplitFilterRule.SPLIT_FILTER_ABOVE_CONVERTER, JDBCFilterJoinRule.INSTANCE, JDBCJoinPushDownRule.INSTANCE, JDBCUnionPushDownRule.INSTANCE, JDBCFilterPushDownRule.INSTANCE, JDBCProjectPushDownRule.INSTANCE, JDBCAggregationPushDownRule.INSTANCE, JDBCSortPushDownRule.INSTANCE ); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: JDBC transformation rules"); } // 11. Run rules to aid in translation from Calcite tree to Hive tree if (HiveConf.getBoolVar(conf, ConfVars.HIVE_CBO_RETPATH_HIVEOP)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); // 11.1. Merge join into multijoin operators (if possible) calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, true, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, HiveJoinProjectTransposeRule.BOTH_PROJECT_INCLUDE_OUTER, HiveJoinProjectTransposeRule.LEFT_PROJECT_INCLUDE_OUTER, HiveJoinProjectTransposeRule.RIGHT_PROJECT_INCLUDE_OUTER, HiveJoinToMultiJoinRule.INSTANCE, HiveProjectMergeRule.INSTANCE); // The previous rules can pull up projections through join operators, // thus we run the field trimmer again to push them back down fieldTrimmer = new HiveRelFieldTrimmer(null, HiveRelFactories.HIVE_BUILDER.create(optCluster, null)); calciteOptimizedPlan = fieldTrimmer.trim(calciteOptimizedPlan); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, ProjectRemoveRule.INSTANCE, new ProjectMergeRule(false, HiveRelFactories.HIVE_BUILDER)); calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, true, mdProvider.getMetadataProvider(), null, HiveFilterProjectTSTransposeRule.INSTANCE, HiveFilterProjectTSTransposeRule.INSTANCE_DRUID, HiveProjectFilterPullUpConstantsRule.INSTANCE); // 11.2.
Introduce exchange operators below join/multijoin operators calciteOptimizedPlan = hepPlan(calciteOptimizedPlan, false, mdProvider.getMetadataProvider(), null, HepMatchOrder.BOTTOM_UP, HiveInsertExchange4JoinRule.EXCHANGE_BELOW_JOIN, HiveInsertExchange4JoinRule.EXCHANGE_BELOW_MULTIJOIN); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Translation from Calcite tree to Hive tree"); } if (LOG.isDebugEnabled() && !conf.getBoolVar(ConfVars.HIVE_IN_TEST)) { LOG.debug("CBO Planning details:\n"); LOG.debug("Original Plan:\n" + RelOptUtil.toString(calciteGenPlan)); LOG.debug("Plan After PPD, PartPruning, ColumnPruning:\n" + RelOptUtil.toString(calcitePreCboPlan)); LOG.debug("Plan After Join Reordering:\n" + RelOptUtil.toString(calciteOptimizedPlan, SqlExplainLevel.ALL_ATTRIBUTES)); } return calciteOptimizedPlan; } /** * Perform all optimizations before Join Ordering. * * @param basePlan * original plan * @param mdProvider * meta data provider * @param executorProvider * executor * @return plan after the pre-join-ordering transformations */ private RelNode applyPreJoinOrderingTransforms(RelNode basePlan, RelMetadataProvider mdProvider, RexExecutor executorProvider) { // TODO: Decorrelation of subqueries should be done before attempting // Partition Pruning; otherwise expression evaluation may try to execute a // correlated subquery. PerfLogger perfLogger = SessionState.getPerfLogger(); final int maxCNFNodeCount = conf.getIntVar(HiveConf.ConfVars.HIVE_CBO_CNF_NODES_LIMIT); final int minNumORClauses = conf.getIntVar(HiveConf.ConfVars.HIVEPOINTLOOKUPOPTIMIZERMIN); // 0. SetOp rewrite perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, true, mdProvider, null, HepMatchOrder.BOTTOM_UP, HiveProjectOverIntersectRemoveRule.INSTANCE, HiveIntersectMergeRule.INSTANCE, HiveUnionMergeRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: HiveProjectOverIntersectRemoveRule, HiveIntersectMerge and HiveUnionMergeRule rules"); perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, false, mdProvider, executorProvider, HepMatchOrder.BOTTOM_UP, HiveIntersectRewriteRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: HiveIntersectRewrite rule"); perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, false, mdProvider, executorProvider, HepMatchOrder.BOTTOM_UP, HiveExceptRewriteRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: HiveExceptRewrite rule"); // 1. Distinct aggregate rewrite // Run this optimization early, since it is expanding the operator pipeline. if (!conf.getVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("mr") && conf.getBoolVar(HiveConf.ConfVars.HIVEOPTIMIZEDISTINCTREWRITE)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); // It's not clear whether this rewrite is always performant on MR, since the extra map phase // introduced for the 2nd MR job may offset the gains of this multi-stage aggregation. // We need a cost model for MR to enable this on MR. basePlan = hepPlan(basePlan, true, mdProvider, executorProvider, HiveExpandDistinctAggregatesRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, Distinct aggregate rewrite"); } // 2. Try factoring out common filter elements & separating deterministic // vs non-deterministic UDF.
This needs to run before PPD so that PPD can // add on-clauses for old style Join Syntax // Ex: select * from R1 join R2 where ((R1.x=R2.x) and R1.y<10) or // ((R1.x=R2.x) and R1.z=10)) and rand(1) < 0.1 perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, false, mdProvider, executorProvider, HepMatchOrder.ARBITRARY, new HivePreFilteringRule(maxCNFNodeCount)); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, factor out common filter elements and separating deterministic vs non-deterministic UDF"); // 3. Run exhaustive PPD, add not null filters, transitive inference, // constant propagation, constant folding List<RelOptRule> rules = Lists.newArrayList(); if (conf.getBoolVar(HiveConf.ConfVars.HIVEOPTPPD_WINDOWING)) { rules.add(HiveFilterProjectTransposeRule.INSTANCE_DETERMINISTIC_WINDOWING); } else { rules.add(HiveFilterProjectTransposeRule.INSTANCE_DETERMINISTIC); } rules.add(HiveFilterSetOpTransposeRule.INSTANCE); rules.add(HiveFilterSortTransposeRule.INSTANCE); rules.add(HiveFilterJoinRule.JOIN); rules.add(HiveFilterJoinRule.FILTER_ON_JOIN); rules.add(new HiveFilterAggregateTransposeRule(Filter.class, HiveRelFactories.HIVE_BUILDER, Aggregate.class)); rules.add(new FilterMergeRule(HiveRelFactories.HIVE_BUILDER)); if (conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_REDUCE_WITH_STATS)) { rules.add(HiveReduceExpressionsWithStatsRule.INSTANCE); } rules.add(HiveProjectFilterPullUpConstantsRule.INSTANCE); rules.add(HiveReduceExpressionsRule.PROJECT_INSTANCE); rules.add(HiveReduceExpressionsRule.FILTER_INSTANCE); rules.add(HiveReduceExpressionsRule.JOIN_INSTANCE); rules.add(HiveAggregateReduceFunctionsRule.INSTANCE); rules.add(HiveAggregateReduceRule.INSTANCE); if (conf.getBoolVar(HiveConf.ConfVars.HIVEPOINTLOOKUPOPTIMIZER)) { rules.add(new HivePointLookupOptimizerRule.FilterCondition(minNumORClauses)); rules.add(new HivePointLookupOptimizerRule.JoinCondition(minNumORClauses)); rules.add(new HivePointLookupOptimizerRule.ProjectionExpressions(minNumORClauses)); } if (conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_CONSTRAINTS_JOIN) && profilesCBO.contains(ExtendedCBOProfile.REFERENTIAL_CONSTRAINTS)) { rules.add(HiveProjectJoinTransposeRule.INSTANCE); rules.add(HiveJoinConstraintsRule.INSTANCE); } rules.add(HiveJoinAddNotNullRule.INSTANCE_JOIN); rules.add(HiveJoinAddNotNullRule.INSTANCE_SEMIJOIN); rules.add(HiveJoinPushTransitivePredicatesRule.INSTANCE_JOIN); rules.add(HiveJoinPushTransitivePredicatesRule.INSTANCE_SEMIJOIN); rules.add(HiveSortMergeRule.INSTANCE); rules.add(HiveSortLimitPullUpConstantsRule.INSTANCE); rules.add(HiveUnionPullUpConstantsRule.INSTANCE); rules.add(HiveAggregatePullUpConstantsRule.INSTANCE); perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, true, mdProvider, executorProvider, HepMatchOrder.BOTTOM_UP, rules.toArray(new RelOptRule[rules.size()])); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, PPD, not null predicates, transitive inference, constant folding"); // 4. Push down limit through outer join // NOTE: We run this after PPD to support old style join syntax. 
// Ex: select * from R1 left outer join R2 where ((R1.x=R2.x) and R1.y<10) or // ((R1.x=R2.x) and R1.z=10)) and rand(1) < 0.1 order by R1.x limit 10 if (conf.getBoolVar(HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE)) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); // This should be a cost-based decision, but until we enable the extended cost // model, we will use the given value for the variable final float reductionProportion = HiveConf.getFloatVar(conf, HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_PERCENTAGE); final long reductionTuples = HiveConf.getLongVar(conf, HiveConf.ConfVars.HIVE_OPTIMIZE_LIMIT_TRANSPOSE_REDUCTION_TUPLES); basePlan = hepPlan(basePlan, true, mdProvider, executorProvider, HiveSortMergeRule.INSTANCE, HiveSortProjectTransposeRule.INSTANCE, HiveSortJoinReduceRule.INSTANCE, HiveSortUnionReduceRule.INSTANCE); basePlan = hepPlan(basePlan, true, mdProvider, executorProvider, HepMatchOrder.BOTTOM_UP, new HiveSortRemoveRule(reductionProportion, reductionTuples), HiveProjectSortTransposeRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, Push down limit through outer join"); } // 5. Push Down Semi Joins //TODO: Enable this later /*perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, true, mdProvider, executorProvider, SemiJoinJoinTransposeRule.INSTANCE, SemiJoinFilterTransposeRule.INSTANCE, SemiJoinProjectTransposeRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, Push Down Semi Joins"); */ // 6. Apply Partition Pruning perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, false, mdProvider, executorProvider, new HivePartitionPruneRule(conf)); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, Partition Pruning"); // 7. Projection Pruning (this introduces select above TS & hence needs to be run last due to PP) perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); HiveRelFieldTrimmer fieldTrimmer = new HiveRelFieldTrimmer(null, HiveRelFactories.HIVE_BUILDER.create(cluster, null), profilesCBO.contains(ExtendedCBOProfile.JOIN_REORDERING)); basePlan = fieldTrimmer.trim(basePlan); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, Projection Pruning"); // 8. Rerun PPD through Project as column pruning would have introduced // DT above scans; By pushing filter just above TS, Hive can push it into // storage (in case there are filters on non-partition cols).
This only // matches FIL-PROJ-TS // Also merge, remove and reduce Project if possible perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); basePlan = hepPlan(basePlan, true, mdProvider, executorProvider, HiveFilterProjectTSTransposeRule.INSTANCE, HiveFilterProjectTSTransposeRule.INSTANCE_DRUID, HiveProjectFilterPullUpConstantsRule.INSTANCE, HiveProjectMergeRule.INSTANCE, ProjectRemoveRule.INSTANCE, HiveSortMergeRule.INSTANCE); perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: Prejoin ordering transformation, Rerun PPD"); return basePlan; } private RelNode applyMaterializedViewRewriting(RelOptPlanner planner, RelNode basePlan, RelMetadataProvider mdProvider, RexExecutor executorProvider) { final RelOptCluster optCluster = basePlan.getCluster(); final PerfLogger perfLogger = SessionState.getPerfLogger(); final RelNode calcitePreMVRewritingPlan = basePlan; final boolean mvRebuild = mvRebuildMode != MaterializationRebuildMode.NONE; // Add views to planner List<RelOptMaterialization> materializations = new ArrayList<>(); try { if (mvRebuild) { // We only retrieve the materialization corresponding to the rebuild. In turn, // we pass 'true' for the forceMVContentsUpToDate parameter, as we cannot allow the // materialization contents to be stale for a rebuild if we want to use it. materializations = db.getValidMaterializedView(mvRebuildDbName, mvRebuildName, getTablesUsed(basePlan), true, getTxnMgr()); } else { // This is not a rebuild, we retrieve all the materializations. In turn, we do not need // to force the materialization contents to be up-to-date, as this is not a rebuild, and // we apply the user parameters (HIVE_MATERIALIZED_VIEW_REWRITING_TIME_WINDOW) instead. materializations = db.getAllValidMaterializedViews(getTablesUsed(basePlan), false, getTxnMgr()); } // We need to use the current cluster for the scan operator on views, // otherwise the planner will throw an Exception (different planners) materializations = Lists.transform(materializations, new Function<RelOptMaterialization, RelOptMaterialization>() { @Override public RelOptMaterialization apply(RelOptMaterialization materialization) { final RelNode viewScan = materialization.tableRel; final RelNode newViewScan; if (viewScan instanceof Project) { // There is a Project on top (due to nullability) final Project pq = (Project) viewScan; newViewScan = HiveProject.create(optCluster, copyNodeScan(pq.getInput()), pq.getChildExps(), pq.getRowType(), Collections.<RelCollation> emptyList()); } else { newViewScan = copyNodeScan(viewScan); } return new RelOptMaterialization(newViewScan, materialization.queryRel, null, materialization.qualifiedTableName); } private RelNode copyNodeScan(RelNode scan) { final RelNode newScan; if (scan instanceof DruidQuery) { final DruidQuery dq = (DruidQuery) scan; // Ideally we should use HiveRelNode convention. However, since Volcano planner // throws in that case because DruidQuery does not implement the interface, // we set it as Bindable. Currently, we do not use convention in Hive, hence that // should be fine. // TODO: If we want to make use of convention (e.g., while directly generating operator // tree instead of AST), this should be changed. 
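// Reading note (inferred from the comment above, not verified against Calcite internals): the cached materialization RelNodes belong to a different RelOptCluster/planner, so each scan is rebuilt in optCluster rather than reused, avoiding the "different planners" exception mentioned earlier.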
newScan = DruidQuery.create(optCluster, optCluster.traitSetOf(BindableConvention.INSTANCE), scan.getTable(), dq.getDruidTable(), ImmutableList.<RelNode>of(dq.getTableScan())); } else { newScan = new HiveTableScan(optCluster, optCluster.traitSetOf(HiveRelNode.CONVENTION), (RelOptHiveTable) scan.getTable(), ((RelOptHiveTable) scan.getTable()).getName(), null, false, false); } return newScan; } } ); } catch (HiveException e) { LOG.warn("Exception loading materialized views", e); } if (!materializations.isEmpty()) { perfLogger.PerfLogBegin(this.getClass().getName(), PerfLogger.OPTIMIZER); if (mvRebuild) { // If it is a materialized view rebuild, we use the HepPlanner, since we only have // one MV and we would like to use it to create incremental maintenance plans HepPlanner hepPlanner = createHepPlanner(basePlan.getCluster(), true, mdProvider, null, HepMatchOrder.TOP_DOWN, HiveMaterializedViewRule.MATERIALIZED_VIEW_REWRITING_RULES); // Add materialization for rebuild to planner assert materializations.size() == 1; hepPlanner.addMaterialization(materializations.get(0)); // Optimize plan hepPlanner.setRoot(basePlan); basePlan = hepPlanner.findBestExp(); } else { // If this is not a rebuild, we use the Volcano planner, since the decision // on whether to use MVs or not, and which MVs to use, should be cost-based optCluster.invalidateMetadataQuery(); RelMetadataQuery.THREAD_PROVIDERS.set(JaninoRelMetadataProvider.of(DefaultRelMetadataProvider.INSTANCE)); // Add materializations to planner for (RelOptMaterialization materialization : materializations) { planner.addMaterialization(materialization); } // Add view-based rewriting rules to planner for (RelOptRule rule : HiveMaterializedViewRule.MATERIALIZED_VIEW_REWRITING_RULES) { planner.addRule(rule); } // Partition pruner rule planner.addRule(HiveFilterProjectTSTransposeRule.INSTANCE); planner.addRule(new HivePartitionPruneRule(conf)); // Optimize plan planner.setRoot(basePlan); basePlan = planner.findBestExp(); // Remove view-based rewriting rules from planner planner.clear(); // Restore default cost model optCluster.invalidateMetadataQuery(); RelMetadataQuery.THREAD_PROVIDERS.set(JaninoRelMetadataProvider.of(mdProvider)); } perfLogger.PerfLogEnd(this.getClass().getName(), PerfLogger.OPTIMIZER, "Calcite: View-based rewriting"); if (!RelOptUtil.toString(calcitePreMVRewritingPlan).equals(RelOptUtil.toString(basePlan))) { // A rewriting was produced; we will check whether it was part of an incremental rebuild // to try to replace INSERT OVERWRITE by INSERT or MERGE if (mvRebuildMode == MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD && HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REBUILD_INCREMENTAL)) { // First we need to check if it is valid to convert to MERGE/INSERT INTO. // If we succeed, we modify the plan and afterwards the AST. // MV should be an acid table.
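// Decision sketch (summary of the code below): the visitor inspects the rewritten plan; if the rewriting is allowed, plans containing an aggregate take the HiveAggregateIncrementalRewritingRule path (later fixed up into a MERGE-like multi-insert by fixUpASTAggregateIncrementalRebuild), while plans without one take HiveNoAggregateIncrementalRewritingRule (fixed up into a plain INSERT INTO by fixUpASTNoAggregateIncrementalRebuild).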
MaterializedViewRewritingRelVisitor visitor = new MaterializedViewRewritingRelVisitor(); visitor.go(basePlan); if (visitor.isRewritingAllowed()) { // Trigger rewriting to remove UNION branch with MV if (visitor.isContainsAggregate()) { basePlan = hepPlan(basePlan, false, mdProvider, null, HepMatchOrder.TOP_DOWN, HiveAggregateIncrementalRewritingRule.INSTANCE); mvRebuildMode = MaterializationRebuildMode.AGGREGATE_REBUILD; } else { basePlan = hepPlan(basePlan, false, mdProvider, null, HepMatchOrder.TOP_DOWN, HiveNoAggregateIncrementalRewritingRule.INSTANCE); mvRebuildMode = MaterializationRebuildMode.NO_AGGREGATE_REBUILD; } } } // Now we trigger some needed optimization rules again basePlan = applyPreJoinOrderingTransforms(basePlan, mdProvider, executorProvider); } } if (mvRebuildMode == MaterializationRebuildMode.AGGREGATE_REBUILD) { // Make a cost-based decision factoring the configuration property optCluster.invalidateMetadataQuery(); RelMetadataQuery.THREAD_PROVIDERS.set(JaninoRelMetadataProvider.of(DefaultRelMetadataProvider.INSTANCE)); RelMetadataQuery mq = RelMetadataQuery.instance(); RelOptCost costOriginalPlan = mq.getCumulativeCost(calcitePreMVRewritingPlan); final double factorSelectivity = (double) HiveConf.getFloatVar( conf, HiveConf.ConfVars.HIVE_MATERIALIZED_VIEW_REBUILD_INCREMENTAL_FACTOR); RelOptCost costRebuildPlan = mq.getCumulativeCost(basePlan).multiplyBy(factorSelectivity); if (costOriginalPlan.isLe(costRebuildPlan)) { basePlan = calcitePreMVRewritingPlan; mvRebuildMode = MaterializationRebuildMode.INSERT_OVERWRITE_REBUILD; } optCluster.invalidateMetadataQuery(); RelMetadataQuery.THREAD_PROVIDERS.set(JaninoRelMetadataProvider.of(mdProvider)); } return basePlan; } private List<String> getTablesUsed(RelNode plan) { List<String> tablesUsed = new ArrayList<>(); new RelVisitor() { @Override public void visit(RelNode node, int ordinal, RelNode parent) { if (node instanceof TableScan) { TableScan ts = (TableScan) node; tablesUsed.add(((RelOptHiveTable) ts.getTable()).getHiveTableMD().getFullyQualifiedName()); } super.visit(node, ordinal, parent); } }.go(plan); return tablesUsed; } /** * Run the HEP Planner with the given rule set. * * @param basePlan * @param followPlanChanges * @param mdProvider * @param executorProvider * @param rules * @return optimized RelNode */ private RelNode hepPlan(RelNode basePlan, boolean followPlanChanges, RelMetadataProvider mdProvider, RexExecutor executorProvider, RelOptRule... rules) { return hepPlan(basePlan, followPlanChanges, mdProvider, executorProvider, HepMatchOrder.TOP_DOWN, rules); } /** * Run the HEP Planner with the given rule set. * * @param basePlan * @param followPlanChanges * @param mdProvider * @param executorProvider * @param order * @param rules * @return optimized RelNode */ private RelNode hepPlan(RelNode basePlan, boolean followPlanChanges, RelMetadataProvider mdProvider, RexExecutor executorProvider, HepMatchOrder order, RelOptRule... rules) { HepPlanner planner = createHepPlanner(basePlan.getCluster(), followPlanChanges, mdProvider, executorProvider, order, rules); planner.setRoot(basePlan); return planner.findBestExp(); } private HepPlanner createHepPlanner(RelOptCluster cluster, boolean followPlanChanges, RelMetadataProvider mdProvider, RexExecutor executorProvider, HepMatchOrder order, RelOptRule... 
rules) { HepProgramBuilder programBuilder = new HepProgramBuilder(); if (followPlanChanges) { programBuilder.addMatchOrder(order); programBuilder = programBuilder.addRuleCollection(ImmutableList.copyOf(rules)); } else { // TODO: Should this be also TOP_DOWN? for (RelOptRule r : rules) programBuilder.addRuleInstance(r); } // Create planner and copy context HepPlanner planner = new HepPlanner(programBuilder.build(), cluster.getPlanner().getContext()); List<RelMetadataProvider> list = Lists.newArrayList(); list.add(mdProvider); planner.registerMetadataProviders(list); RelMetadataProvider chainedProvider = ChainedRelMetadataProvider.of(list); cluster.setMetadataProvider( new CachingRelMetadataProvider(chainedProvider, planner)); if (executorProvider != null) { // basePlan.getCluster.getPlanner is the VolcanoPlanner from apply() // both planners need to use the correct executor cluster.getPlanner().setExecutor(executorProvider); planner.setExecutor(executorProvider); } return planner; } @SuppressWarnings("nls") private RelNode genSetOpLogicalPlan(Opcode opcode, String alias, String leftalias, RelNode leftRel, String rightalias, RelNode rightRel) throws SemanticException { // 1. Get Row Resolvers, Column map for original left and right input of // SetOp Rel RowResolver leftRR = this.relToHiveRR.get(leftRel); RowResolver rightRR = this.relToHiveRR.get(rightRel); HashMap<String, ColumnInfo> leftmap = leftRR.getFieldMap(leftalias); HashMap<String, ColumnInfo> rightmap = rightRR.getFieldMap(rightalias); // 2. Validate that SetOp is feasible according to Hive (by using type // info from RR) if (leftmap.size() != rightmap.size()) { throw new SemanticException("Schema of both sides of union should match."); } ASTNode tabref = getQB().getAliases().isEmpty() ? null : getQB().getParseInfo() .getSrcForAlias(getQB().getAliases().get(0)); // 3. construct SetOp Output RR using original left & right Input RowResolver setOpOutRR = new RowResolver(); Iterator<Map.Entry<String, ColumnInfo>> lIter = leftmap.entrySet().iterator(); Iterator<Map.Entry<String, ColumnInfo>> rIter = rightmap.entrySet().iterator(); while (lIter.hasNext()) { Map.Entry<String, ColumnInfo> lEntry = lIter.next(); Map.Entry<String, ColumnInfo> rEntry = rIter.next(); ColumnInfo lInfo = lEntry.getValue(); ColumnInfo rInfo = rEntry.getValue(); String field = lEntry.getKey(); // try widening conversion, otherwise fail union TypeInfo commonTypeInfo = FunctionRegistry.getCommonClassForUnionAll(lInfo.getType(), rInfo.getType()); if (commonTypeInfo == null) { throw new SemanticException(generateErrorMessage(tabref, "Schema of both sides of setop should match: Column " + field + " is of type " + lInfo.getType().getTypeName() + " on first table and type " + rInfo.getType().getTypeName() + " on second table")); } ColumnInfo setOpColInfo = new ColumnInfo(lInfo); setOpColInfo.setType(commonTypeInfo); setOpOutRR.put(alias, field, setOpColInfo); } // 4. 
Determine which columns require a cast on the left/right input (Calcite
// requires exact types on both sides of a SetOp)
boolean leftNeedsTypeCast = false;
boolean rightNeedsTypeCast = false;
List<RexNode> leftProjs = new ArrayList<RexNode>();
List<RexNode> rightProjs = new ArrayList<RexNode>();
List<RelDataTypeField> leftRowDT = leftRel.getRowType().getFieldList();
List<RelDataTypeField> rightRowDT = rightRel.getRowType().getFieldList();

RelDataType leftFieldDT;
RelDataType rightFieldDT;
RelDataType unionFieldDT;
for (int i = 0; i < leftRowDT.size(); i++) {
  leftFieldDT = leftRowDT.get(i).getType();
  rightFieldDT = rightRowDT.get(i).getType();
  if (!leftFieldDT.equals(rightFieldDT)) {
    unionFieldDT = TypeConverter.convert(setOpOutRR.getColumnInfos().get(i).getType(),
        cluster.getTypeFactory());
    if (!unionFieldDT.equals(leftFieldDT)) {
      leftNeedsTypeCast = true;
    }
    leftProjs.add(cluster.getRexBuilder().ensureType(unionFieldDT,
        cluster.getRexBuilder().makeInputRef(leftFieldDT, i), true));

    if (!unionFieldDT.equals(rightFieldDT)) {
      rightNeedsTypeCast = true;
    }
    rightProjs.add(cluster.getRexBuilder().ensureType(unionFieldDT,
        cluster.getRexBuilder().makeInputRef(rightFieldDT, i), true));
  } else {
    leftProjs.add(cluster.getRexBuilder().ensureType(leftFieldDT,
        cluster.getRexBuilder().makeInputRef(leftFieldDT, i), true));
    rightProjs.add(cluster.getRexBuilder().ensureType(rightFieldDT,
        cluster.getRexBuilder().makeInputRef(rightFieldDT, i), true));
  }
}

// 5. Introduce Project Rel above original left/right inputs if a cast is
// needed for type parity
RelNode setOpLeftInput = leftRel;
RelNode setOpRightInput = rightRel;
if (leftNeedsTypeCast) {
  setOpLeftInput = HiveProject.create(leftRel, leftProjs, leftRel.getRowType()
      .getFieldNames());
}
if (rightNeedsTypeCast) {
  setOpRightInput = HiveProject.create(rightRel, rightProjs, rightRel.getRowType()
      .getFieldNames());
}

// 6. Construct SetOp Rel
Builder<RelNode> bldr = new ImmutableList.Builder<RelNode>();
bldr.add(setOpLeftInput);
bldr.add(setOpRightInput);
SetOp setOpRel = null;
switch (opcode) {
case UNION:
  setOpRel = new HiveUnion(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build());
  break;
case INTERSECT:
  setOpRel = new HiveIntersect(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build(),
      false);
  break;
case INTERSECTALL:
  setOpRel = new HiveIntersect(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build(),
      true);
  break;
case EXCEPT:
  setOpRel = new HiveExcept(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build(),
      false);
  break;
case EXCEPTALL:
  setOpRel = new HiveExcept(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build(),
      true);
  break;
default:
  throw new SemanticException(ErrorMsg.UNSUPPORTED_SET_OPERATOR.getMsg(opcode.toString()));
}
relToHiveRR.put(setOpRel, setOpOutRR);
relToHiveColNameCalcitePosMap.put(setOpRel,
    this.buildHiveToCalciteColumnMap(setOpOutRR, setOpRel));
return setOpRel;
}

private RelNode genJoinRelNode(RelNode leftRel, String leftTableAlias, RelNode rightRel,
    String rightTableAlias, JoinType hiveJoinType, ASTNode joinCond) throws SemanticException {
RowResolver leftRR = this.relToHiveRR.get(leftRel);
RowResolver rightRR = this.relToHiveRR.get(rightRel);
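// USING-clause sketch (illustrative only, mirroring the rewrite performed in
// step 1 below): a named-columns join such as
//   t1 JOIN t2 USING (c1, c2)
// is turned into the equivalent ON-clause AST
//   t1 JOIN t2 ON t1.c1 = t2.c1 AND t1.c2 = t2.c2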
// 1. Construct ExprNodeDesc representing the join condition
RexNode calciteJoinCond = null;
List<String> namedColumns = null;
if (joinCond != null) {
  JoinTypeCheckCtx jCtx = new JoinTypeCheckCtx(leftRR, rightRR, hiveJoinType);
  RowResolver input = RowResolver.getCombinedRR(leftRR, rightRR);
  // named columns join
  // TODO: we can also do the same for semi joins, but it seems that other
  // DBMSs do not support it yet.
  if (joinCond.getType() == HiveParser.TOK_TABCOLNAME
      && !hiveJoinType.equals(JoinType.LEFTSEMI)) {
    namedColumns = new ArrayList<>();
    // We will transform the USING clause to make it look like an ON clause.
    // So, let's generate a valid ON-clause AST from the USING clause.
    ASTNode and = (ASTNode) ParseDriver.adaptor.create(HiveParser.KW_AND, "and");
    ASTNode equal = null;
    int count = 0;
    for (Node child : joinCond.getChildren()) {
      String columnName = ((ASTNode) child).getText();
      // dealing with views
      if (unparseTranslator != null && unparseTranslator.isEnabled()) {
        unparseTranslator.addIdentifierTranslation((ASTNode) child);
      }
      namedColumns.add(columnName);
      ASTNode left = ASTBuilder.qualifiedName(leftTableAlias, columnName);
      ASTNode right = ASTBuilder.qualifiedName(rightTableAlias, columnName);
      equal = (ASTNode) ParseDriver.adaptor.create(HiveParser.EQUAL, "=");
      ParseDriver.adaptor.addChild(equal, left);
      ParseDriver.adaptor.addChild(equal, right);
      ParseDriver.adaptor.addChild(and, equal);
      count++;
    }
    joinCond = count > 1 ? and : equal;
  } else if (unparseTranslator != null && unparseTranslator.isEnabled()) {
    genAllExprNodeDesc(joinCond, input, jCtx);
  }
  Map<ASTNode, ExprNodeDesc> exprNodes = JoinCondTypeCheckProcFactory.genExprNode(joinCond,
      jCtx);
  if (jCtx.getError() != null) {
    throw new SemanticException(SemanticAnalyzer.generateErrorMessage(jCtx.getErrorSrcNode(),
        jCtx.getError()));
  }
  ExprNodeDesc joinCondnExprNode = exprNodes.get(joinCond);
  List<RelNode> inputRels = new ArrayList<RelNode>();
  inputRels.add(leftRel);
  inputRels.add(rightRel);
  calciteJoinCond = RexNodeConverter.convert(cluster, joinCondnExprNode, inputRels,
      relToHiveRR, relToHiveColNameCalcitePosMap, false);
} else {
  calciteJoinCond = cluster.getRexBuilder().makeLiteral(true);
}

// 2. Validate that the join condition is legal (i.e. no function referring to
// both sides of the join; only equi-joins)
// TODO: Join filter handling (only supported for OJ by runtime or is it
// supported for IJ as well)

// 3.
Construct Join Rel Node and RowResolver for the new Join Node boolean leftSemiJoin = false; JoinRelType calciteJoinType; switch (hiveJoinType) { case LEFTOUTER: calciteJoinType = JoinRelType.LEFT; break; case RIGHTOUTER: calciteJoinType = JoinRelType.RIGHT; break; case FULLOUTER: calciteJoinType = JoinRelType.FULL; break; case LEFTSEMI: calciteJoinType = JoinRelType.INNER; leftSemiJoin = true; break; case INNER: default: calciteJoinType = JoinRelType.INNER; break; } RelNode topRel = null; RowResolver topRR = null; if (leftSemiJoin) { List<RelDataTypeField> sysFieldList = new ArrayList<RelDataTypeField>(); List<RexNode> leftJoinKeys = new ArrayList<RexNode>(); List<RexNode> rightJoinKeys = new ArrayList<RexNode>(); RexNode nonEquiConds = RelOptUtil.splitJoinCondition(sysFieldList, leftRel, rightRel, calciteJoinCond, leftJoinKeys, rightJoinKeys, null, null); RelNode[] inputRels = new RelNode[] { leftRel, rightRel }; final List<Integer> leftKeys = new ArrayList<Integer>(); final List<Integer> rightKeys = new ArrayList<Integer>(); RexNode remainingEquiCond = HiveCalciteUtil.projectNonColumnEquiConditions(HiveRelFactories.HIVE_PROJECT_FACTORY, inputRels, leftJoinKeys, rightJoinKeys, 0, leftKeys, rightKeys); // Adjust right input fields in nonEquiConds if previous call modified the input if (inputRels[0] != leftRel) { nonEquiConds = RexUtil.shift(nonEquiConds, leftRel.getRowType().getFieldCount(), inputRels[0].getRowType().getFieldCount() - leftRel.getRowType().getFieldCount()); } calciteJoinCond = remainingEquiCond != null ? RexUtil.composeConjunction(cluster.getRexBuilder(), ImmutableList.of(remainingEquiCond, nonEquiConds), false) : nonEquiConds; topRel = HiveSemiJoin.getSemiJoin(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), inputRels[0], inputRels[1], calciteJoinCond, ImmutableIntList.copyOf(leftKeys), ImmutableIntList.copyOf(rightKeys)); // Create join RR: we need to check whether we need to update left RR in case // previous call to projectNonColumnEquiConditions updated it if (inputRels[0] != leftRel) { RowResolver newLeftRR = new RowResolver(); if (!RowResolver.add(newLeftRR, leftRR)) { LOG.warn("Duplicates detected when adding columns to RR: see previous message"); } for (int i = leftRel.getRowType().getFieldCount(); i < inputRels[0].getRowType().getFieldCount(); i++) { ColumnInfo oColInfo = new ColumnInfo( SemanticAnalyzer.getColumnInternalName(i), TypeConverter.convert(inputRels[0].getRowType().getFieldList().get(i).getType()), null, false); newLeftRR.put(oColInfo.getTabAlias(), oColInfo.getInternalName(), oColInfo); } RowResolver joinRR = new RowResolver(); if (!RowResolver.add(joinRR, newLeftRR)) { LOG.warn("Duplicates detected when adding columns to RR: see previous message"); } relToHiveColNameCalcitePosMap.put(topRel, this.buildHiveToCalciteColumnMap(joinRR, topRel)); relToHiveRR.put(topRel, joinRR); // Introduce top project operator to remove additional column(s) that have // been introduced List<RexNode> topFields = new ArrayList<RexNode>(); List<String> topFieldNames = new ArrayList<String>(); for (int i = 0; i < leftRel.getRowType().getFieldCount(); i++) { final RelDataTypeField field = leftRel.getRowType().getFieldList().get(i); topFields.add(leftRel.getCluster().getRexBuilder().makeInputRef(field.getType(), i)); topFieldNames.add(field.getName()); } topRel = HiveRelFactories.HIVE_PROJECT_FACTORY.createProject(topRel, topFields, topFieldNames); } topRR = new RowResolver(); if (!RowResolver.add(topRR, leftRR)) { LOG.warn("Duplicates detected when adding columns to RR: 
see previous message"); } } else { topRel = HiveJoin.getJoin(cluster, leftRel, rightRel, calciteJoinCond, calciteJoinType); topRR = RowResolver.getCombinedRR(leftRR, rightRR); if (namedColumns != null) { List<String> tableAliases = new ArrayList<>(); tableAliases.add(leftTableAlias); tableAliases.add(rightTableAlias); topRR.setNamedJoinInfo(new NamedJoinInfo(tableAliases, namedColumns, hiveJoinType)); } } // 4. Add new rel & its RR to the maps relToHiveColNameCalcitePosMap.put(topRel, this.buildHiveToCalciteColumnMap(topRR, topRel)); relToHiveRR.put(topRel, topRR); return topRel; } /** * Generate Join Logical Plan Relnode by walking through the join AST. * * @param aliasToRel * Alias(Table/Relation alias) to RelNode; only read and not * written in to by this method * @return * @throws SemanticException */ private RelNode genJoinLogicalPlan(ASTNode joinParseTree, Map<String, RelNode> aliasToRel) throws SemanticException { RelNode leftRel = null; RelNode rightRel = null; JoinType hiveJoinType = null; if (joinParseTree.getToken().getType() == HiveParser.TOK_UNIQUEJOIN) { String msg = String.format("UNIQUE JOIN is currently not supported in CBO," + " turn off cbo to use UNIQUE JOIN."); LOG.debug(msg); throw new CalciteSemanticException(msg, UnsupportedFeature.Unique_join); } // 1. Determine Join Type // TODO: What about TOK_CROSSJOIN, TOK_MAPJOIN switch (joinParseTree.getToken().getType()) { case HiveParser.TOK_LEFTOUTERJOIN: hiveJoinType = JoinType.LEFTOUTER; break; case HiveParser.TOK_RIGHTOUTERJOIN: hiveJoinType = JoinType.RIGHTOUTER; break; case HiveParser.TOK_FULLOUTERJOIN: hiveJoinType = JoinType.FULLOUTER; break; case HiveParser.TOK_LEFTSEMIJOIN: hiveJoinType = JoinType.LEFTSEMI; break; default: hiveJoinType = JoinType.INNER; break; } // 2. Get Left Table Alias ASTNode left = (ASTNode) joinParseTree.getChild(0); String leftTableAlias = null; if ((left.getToken().getType() == HiveParser.TOK_TABREF) || (left.getToken().getType() == HiveParser.TOK_SUBQUERY) || (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) { String tableName = SemanticAnalyzer.getUnescapedUnqualifiedTableName( (ASTNode) left.getChild(0)).toLowerCase(); leftTableAlias = left.getChildCount() == 1 ? tableName : SemanticAnalyzer .unescapeIdentifier(left.getChild(left.getChildCount() - 1).getText().toLowerCase()); // ptf node form is: ^(TOK_PTBLFUNCTION $name $alias? // partitionTableFunctionSource partitioningSpec? expression*) // guranteed to have an lias here: check done in processJoin leftTableAlias = (left.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) ? SemanticAnalyzer .unescapeIdentifier(left.getChild(1).getText().toLowerCase()) : leftTableAlias; leftRel = aliasToRel.get(leftTableAlias); } else if (SemanticAnalyzer.isJoinToken(left)) { leftRel = genJoinLogicalPlan(left, aliasToRel); } else if (left.getToken().getType() == HiveParser.TOK_LATERAL_VIEW) { leftRel = genLateralViewPlans(left, aliasToRel); } else { assert (false); } // 3. Get Right Table Alias ASTNode right = (ASTNode) joinParseTree.getChild(1); String rightTableAlias = null; if ((right.getToken().getType() == HiveParser.TOK_TABREF) || (right.getToken().getType() == HiveParser.TOK_SUBQUERY) || (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION)) { String tableName = SemanticAnalyzer.getUnescapedUnqualifiedTableName( (ASTNode) right.getChild(0)).toLowerCase(); rightTableAlias = right.getChildCount() == 1 ? 
tableName : SemanticAnalyzer
      .unescapeIdentifier(right.getChild(right.getChildCount() - 1).getText().toLowerCase());
  // ptf node form is: ^(TOK_PTBLFUNCTION $name $alias?
  // partitionTableFunctionSource partitioningSpec? expression*)
  // guaranteed to have an alias here: check done in processJoin
  rightTableAlias = (right.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) ? SemanticAnalyzer
      .unescapeIdentifier(right.getChild(1).getText().toLowerCase()) : rightTableAlias;
  rightRel = aliasToRel.get(rightTableAlias);
} else if (right.getToken().getType() == HiveParser.TOK_LATERAL_VIEW) {
  rightRel = genLateralViewPlans(right, aliasToRel);
} else {
  assert (false);
}

// 4. Get Join Condn
ASTNode joinCond = (ASTNode) joinParseTree.getChild(2);

// 5. Create Join rel
return genJoinRelNode(leftRel, leftTableAlias, rightRel, rightTableAlias, hiveJoinType,
    joinCond);
}

private RelNode genTableLogicalPlan(String tableAlias, QB qb) throws SemanticException {
RowResolver rr = new RowResolver();
RelNode tableRel = null;

try {
  // 1. If the table has a sample specified, bail from the Calcite path.
  // 2. If return path is on and hive test mode is on, bail as well.
  if (qb.getParseInfo().getTabSample(tableAlias) != null
      || getNameToSplitSampleMap().containsKey(tableAlias)
      || (conf.getBoolVar(HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP))
          && (conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE))) {
    String msg = String.format("Table Sample specified for %s."
        + " Currently we don't support Table Sample clauses in CBO,"
        + " turn off cbo for queries on tableSamples.", tableAlias);
    LOG.debug(msg);
    throw new CalciteSemanticException(msg, UnsupportedFeature.Table_sample_clauses);
  }

  // 2. Get Table Metadata
  Table tabMetaData = qb.getMetaData().getSrcForAlias(tableAlias);

  // 3. Get Table Logical Schema (Row Type)
  // NOTE: Table logical schema = Non Partition Cols + Partition Cols +
  // Virtual Cols

  // 3.1 Add column info for non-partition cols (Object Inspector fields)
  @SuppressWarnings("deprecation")
  StructObjectInspector rowObjectInspector = (StructObjectInspector) tabMetaData.getDeserializer()
      .getObjectInspector();
  List<? extends StructField> fields = rowObjectInspector.getAllStructFieldRefs();
  ColumnInfo colInfo;
  String colName;
  ArrayList<ColumnInfo> cInfoLst = new ArrayList<ColumnInfo>();
  for (int i = 0; i < fields.size(); i++) {
    colName = fields.get(i).getFieldName();
    colInfo = new ColumnInfo(
        fields.get(i).getFieldName(),
        TypeInfoUtils.getTypeInfoFromObjectInspector(fields.get(i).getFieldObjectInspector()),
        tableAlias, false);
    colInfo.setSkewedCol(SemanticAnalyzer.isSkewedCol(tableAlias, qb, colName));
    rr.put(tableAlias, colName, colInfo);
    cInfoLst.add(colInfo);
  }
  // TODO: Fix this
  ArrayList<ColumnInfo> nonPartitionColumns = new ArrayList<ColumnInfo>(cInfoLst);
  ArrayList<ColumnInfo> partitionColumns = new ArrayList<ColumnInfo>();

  // 3.2 Add column info corresponding to partition columns
  for (FieldSchema part_col : tabMetaData.getPartCols()) {
    colName = part_col.getName();
    colInfo = new ColumnInfo(colName,
        TypeInfoFactory.getPrimitiveTypeInfo(part_col.getType()), tableAlias, true);
    rr.put(tableAlias, colName, colInfo);
    cInfoLst.add(colInfo);
    partitionColumns.add(colInfo);
  }

  final TableType tableType = obtainTableType(tabMetaData);

  // 3.3 Add column info corresponding to virtual columns
  List<VirtualColumn> virtualCols = new ArrayList<VirtualColumn>();
  if (tableType == TableType.NATIVE) {
    Iterator<VirtualColumn> vcs = VirtualColumn.getRegistry(conf).iterator();
    while (vcs.hasNext()) {
      VirtualColumn vc = vcs.next();
      colInfo = new ColumnInfo(vc.getName(), vc.getTypeInfo(), tableAlias, true,
          vc.getIsHidden());
      rr.put(tableAlias, vc.getName().toLowerCase(), colInfo);
      cInfoLst.add(colInfo);
      virtualCols.add(vc);
    }
  }

  // 4. Build operator
  RelOptHiveTable optTable;
  if (tableType == TableType.DRUID ||
      (tableType == TableType.JDBC && tabMetaData.getProperty(Constants.JDBC_TABLE) != null)) {
    // Create case-sensitive columns list
    List<String> originalColumnNames =
        ((StandardStructObjectInspector)rowObjectInspector).getOriginalColumnNames();
    List<ColumnInfo> cIList = new ArrayList<ColumnInfo>(originalColumnNames.size());
    for (int i = 0; i < rr.getColumnInfos().size(); i++) {
      cIList.add(new ColumnInfo(originalColumnNames.get(i),
          rr.getColumnInfos().get(i).getType(), tableAlias, false));
    }
    // Build row type from field <type, name>
    RelDataType rowType = TypeConverter.getType(cluster, cIList);
    // Build RelOptAbstractTable
    List<String> fullyQualifiedTabName = new ArrayList<>();
    if (tabMetaData.getDbName() != null && !tabMetaData.getDbName().isEmpty()) {
      fullyQualifiedTabName.add(tabMetaData.getDbName());
    }
    fullyQualifiedTabName.add(tabMetaData.getTableName());
    if (tableType == TableType.DRUID) {
      // Build Druid query
      String address = HiveConf.getVar(conf,
          HiveConf.ConfVars.HIVE_DRUID_BROKER_DEFAULT_ADDRESS);
      String dataSource = tabMetaData.getParameters().get(Constants.DRUID_DATA_SOURCE);
      Set<String> metrics = new HashSet<>();
      RexBuilder rexBuilder = cluster.getRexBuilder();
      RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
      List<RelDataType> druidColTypes = new ArrayList<>();
      List<String> druidColNames = new ArrayList<>();
      // @TODO FIX this: we actually do not need this anymore; in addition,
      // Druid allows numeric dimensions now, so this check is not accurate
      for (RelDataTypeField field : rowType.getFieldList()) {
        if (DruidTable.DEFAULT_TIMESTAMP_COLUMN.equals(field.getName())) {
          // Druid's time column is always not null.
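          // (Classification sketch, stated here for clarity rather than taken from
          // the original comments: besides the time column, VARCHAR fields are kept
          // as dimensions and every remaining field is registered as a metric; e.g.
          // a row type (__time TIMESTAMP, page VARCHAR, cnt BIGINT) yields
          // metrics = {cnt}.)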
druidColTypes.add(dtFactory.createTypeWithNullability(field.getType(), false)); } else { druidColTypes.add(field.getType()); } druidColNames.add(field.getName()); if (field.getName().equals(DruidTable.DEFAULT_TIMESTAMP_COLUMN)) { // timestamp continue; } if (field.getType().getSqlTypeName() == SqlTypeName.VARCHAR) { // dimension continue; } metrics.add(field.getName()); } List<Interval> intervals = Arrays.asList(DruidTable.DEFAULT_INTERVAL); rowType = dtFactory.createStructType(druidColTypes, druidColNames); DruidTable druidTable = new DruidTable(new DruidSchema(address, address, false), dataSource, RelDataTypeImpl.proto(rowType), metrics, DruidTable.DEFAULT_TIMESTAMP_COLUMN, intervals, null, null); optTable = new RelOptHiveTable(relOptSchema, relOptSchema.getTypeFactory(), fullyQualifiedTabName, rowType, tabMetaData, nonPartitionColumns, partitionColumns, virtualCols, conf, partitionCache, colStatsCache, noColsMissingStats); final TableScan scan = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), optTable, null == tableAlias ? tabMetaData.getTableName() : tableAlias, getAliasId(tableAlias, qb), HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP), qb.isInsideView() || qb.getAliasInsideView().contains(tableAlias.toLowerCase())); tableRel = DruidQuery.create(cluster, cluster.traitSetOf(BindableConvention.INSTANCE), optTable, druidTable, ImmutableList.of(scan), DruidSqlOperatorConverter.getDefaultMap()); } else { optTable = new RelOptHiveTable(relOptSchema, relOptSchema.getTypeFactory(), fullyQualifiedTabName, rowType, tabMetaData, nonPartitionColumns, partitionColumns, virtualCols, conf, partitionCache, colStatsCache, noColsMissingStats); final HiveTableScan hts = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), optTable, null == tableAlias ? 
tabMetaData.getTableName() : tableAlias, getAliasId(tableAlias, qb), HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP), qb.isInsideView() || qb.getAliasInsideView().contains(tableAlias.toLowerCase())); final String dataBaseType = tabMetaData.getProperty(Constants.JDBC_DATABASE_TYPE); final String url = tabMetaData.getProperty(Constants.JDBC_URL); final String driver = tabMetaData.getProperty(Constants.JDBC_DRIVER); final String user = tabMetaData.getProperty(Constants.JDBC_USERNAME); String pswd = tabMetaData.getProperty(Constants.JDBC_PASSWORD); if (pswd == null) { String keystore = tabMetaData.getProperty(Constants.JDBC_KEYSTORE); String key = tabMetaData.getProperty(Constants.JDBC_KEY); pswd = Utilities.getPasswdFromKeystore(keystore, key); } final String tableName = tabMetaData.getProperty(Constants.JDBC_TABLE); DataSource ds = JdbcSchema.dataSource(url, driver, user, pswd); SqlDialect jdbcDialect = JdbcSchema.createDialect(SqlDialectFactoryImpl.INSTANCE, ds); JdbcConvention jc = JdbcConvention.of(jdbcDialect, null, dataBaseType); JdbcSchema schema = new JdbcSchema(ds, jc.dialect, jc, null/*catalog */, null/*schema */); JdbcTable jt = (JdbcTable) schema.getTable(tableName); if (jt == null) { throw new SemanticException("Table " + tableName + " was not found in the database"); } JdbcHiveTableScan jdbcTableRel = new JdbcHiveTableScan(cluster, optTable, jt, jc, hts); tableRel = new HiveJdbcConverter(cluster, jdbcTableRel.getTraitSet().replace(HiveRelNode.CONVENTION), jdbcTableRel, jc, url, user); } } else { // Build row type from field <type, name> RelDataType rowType = inferNotNullableColumns(tabMetaData, TypeConverter.getType(cluster, rr, null)); // Build RelOptAbstractTable List<String> fullyQualifiedTabName = new ArrayList<>(); if (tabMetaData.getDbName() != null && !tabMetaData.getDbName().isEmpty()) { fullyQualifiedTabName.add(tabMetaData.getDbName()); } fullyQualifiedTabName.add(tabMetaData.getTableName()); optTable = new RelOptHiveTable(relOptSchema, relOptSchema.getTypeFactory(), fullyQualifiedTabName, rowType, tabMetaData, nonPartitionColumns, partitionColumns, virtualCols, conf, partitionCache, colStatsCache, noColsMissingStats); // Build Hive Table Scan Rel tableRel = new HiveTableScan(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), optTable, null == tableAlias ? tabMetaData.getTableName() : tableAlias, getAliasId(tableAlias, qb), HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CBO_RETPATH_HIVEOP), qb.isInsideView() || qb.getAliasInsideView().contains(tableAlias.toLowerCase())); } if (!optTable.getReferentialConstraints().isEmpty()) { profilesCBO.add(ExtendedCBOProfile.REFERENTIAL_CONSTRAINTS); } // 6. 
Add Schema(RR) to RelNode-Schema map
  ImmutableMap<String, Integer> hiveToCalciteColMap = buildHiveToCalciteColumnMap(rr, tableRel);
  relToHiveRR.put(tableRel, rr);
  relToHiveColNameCalcitePosMap.put(tableRel, hiveToCalciteColMap);
} catch (Exception e) {
  if (e instanceof SemanticException) {
    throw (SemanticException) e;
  } else {
    throw (new RuntimeException(e));
  }
}

return tableRel;
}

private RelDataType inferNotNullableColumns(Table tabMetaData, RelDataType rowType)
    throws HiveException {
// Retrieve not null constraints
final NotNullConstraint nnc = Hive.get().getReliableNotNullConstraints(
    tabMetaData.getDbName(), tabMetaData.getTableName());
// Retrieve primary key constraints (cannot be null)
final PrimaryKeyInfo pkc = Hive.get().getReliablePrimaryKeys(
    tabMetaData.getDbName(), tabMetaData.getTableName());
if (nnc.getNotNullConstraints().isEmpty() && pkc.getColNames().isEmpty()) {
  return rowType;
}

// Build the bitset with not null columns
ImmutableBitSet.Builder builder = ImmutableBitSet.builder();
for (String nnCol : nnc.getNotNullConstraints().values()) {
  int nnPos = -1;
  for (int i = 0; i < rowType.getFieldNames().size(); i++) {
    if (rowType.getFieldNames().get(i).equals(nnCol)) {
      nnPos = i;
      break;
    }
  }
  if (nnPos == -1) {
    LOG.error("Column for not null constraint definition " + nnCol + " not found");
    return rowType;
  }
  builder.set(nnPos);
}
for (String pkCol : pkc.getColNames().values()) {
  int pkPos = -1;
  for (int i = 0; i < rowType.getFieldNames().size(); i++) {
    if (rowType.getFieldNames().get(i).equals(pkCol)) {
      pkPos = i;
      break;
    }
  }
  if (pkPos == -1) {
    LOG.error("Column for primary key constraint definition " + pkCol + " not found");
    return rowType;
  }
  builder.set(pkPos);
}
ImmutableBitSet bitSet = builder.build();

RexBuilder rexBuilder = cluster.getRexBuilder();
RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory();
List<RelDataType> fieldTypes = new LinkedList<RelDataType>();
List<String> fieldNames = new LinkedList<String>();
for (RelDataTypeField rdtf : rowType.getFieldList()) {
  if (bitSet.indexOf(rdtf.getIndex()) != -1) {
    fieldTypes.add(dtFactory.createTypeWithNullability(rdtf.getType(), false));
  } else {
    fieldTypes.add(rdtf.getType());
  }
  fieldNames.add(rdtf.getName());
}
return dtFactory.createStructType(fieldTypes, fieldNames);
}

private TableType obtainTableType(Table tabMetaData) {
if (tabMetaData.getStorageHandler() != null) {
  final String storageHandlerStr = tabMetaData.getStorageHandler().toString();
  if (storageHandlerStr.equals(Constants.DRUID_HIVE_STORAGE_HANDLER_ID)) {
    return TableType.DRUID;
  }
  if (storageHandlerStr.equals(Constants.JDBC_HIVE_STORAGE_HANDLER_ID)) {
    return TableType.JDBC;
  }
}
return TableType.NATIVE;
}

private RelNode genFilterRelNode(ASTNode filterExpr, RelNode srcRel,
    ImmutableMap<String, Integer> outerNameToPosMap, RowResolver outerRR, boolean useCaching)
    throws SemanticException {
ExprNodeDesc filterCondn = genExprNodeDesc(filterExpr, relToHiveRR.get(srcRel),
    outerRR, null, useCaching);
if (filterCondn instanceof ExprNodeConstantDesc
    && !filterCondn.getTypeString().equals(serdeConstants.BOOLEAN_TYPE_NAME)) {
  // queries like select * from t1 where 'foo';
  // Calcite's rule PushFilterThroughProject chokes on it. Arguably, we
  // can insert a cast to boolean in such cases, but since Postgres, Oracle
  // and MS SQL Server fail at compile time for such queries, it's an arcane
  // corner case, not worth adding that complexity.
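  // Raising a CalciteSemanticException tagged with an UnsupportedFeature lets
  // planning fall back to the legacy (non-CBO) path for this query instead of
  // failing inside Calcite (a note added for clarity; the fallback mechanics
  // live in the caller).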
throw new CalciteSemanticException("Filter expression with non-boolean return type.", UnsupportedFeature.Filter_expression_with_non_boolean_return_type); } ImmutableMap<String, Integer> hiveColNameCalcitePosMap = this.relToHiveColNameCalcitePosMap .get(srcRel); RexNode convertedFilterExpr = new RexNodeConverter(cluster, srcRel.getRowType(), outerNameToPosMap, hiveColNameCalcitePosMap, relToHiveRR.get(srcRel), outerRR, 0, true, subqueryId).convert(filterCondn); RexNode factoredFilterExpr = RexUtil .pullFactors(cluster.getRexBuilder(), convertedFilterExpr); RelNode filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), srcRel, factoredFilterExpr); this.relToHiveColNameCalcitePosMap.put(filterRel, hiveColNameCalcitePosMap); relToHiveRR.put(filterRel, relToHiveRR.get(srcRel)); relToHiveColNameCalcitePosMap.put(filterRel, hiveColNameCalcitePosMap); return filterRel; } private void subqueryRestrictionCheck(QB qb, ASTNode searchCond, RelNode srcRel, boolean forHavingClause, Set<ASTNode> corrScalarQueries, Set<ASTNode> scalarQueriesWithAggNoWinNoGby) throws SemanticException { List<ASTNode> subQueriesInOriginalTree = SubQueryUtils.findSubQueries(searchCond); ASTNode clonedSearchCond = (ASTNode) SubQueryUtils.adaptor.dupTree(searchCond); List<ASTNode> subQueries = SubQueryUtils.findSubQueries(clonedSearchCond); for(int i=0; i<subQueriesInOriginalTree.size(); i++){ //we do not care about the transformation or rewriting of AST // which following statement does // we only care about the restriction checks they perform. // We plan to get rid of these restrictions later int sqIdx = qb.incrNumSubQueryPredicates(); ASTNode originalSubQueryAST = subQueriesInOriginalTree.get(i); ASTNode subQueryAST = subQueries.get(i); //SubQueryUtils.rewriteParentQueryWhere(clonedSearchCond, subQueryAST); Boolean orInSubquery = new Boolean(false); Integer subqueryCount = new Integer(0); ObjectPair<Boolean, Integer> subqInfo = new ObjectPair<Boolean, Integer>(false, 0); ASTNode outerQueryExpr = (ASTNode) subQueryAST.getChild(2); if (outerQueryExpr != null && outerQueryExpr.getType() == HiveParser.TOK_SUBQUERY_EXPR) { throw new CalciteSubquerySemanticException( ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg( outerQueryExpr, "IN/NOT IN subqueries are not allowed in LHS")); } QBSubQuery subQuery = SubQueryUtils.buildSubQuery(qb.getId(), sqIdx, subQueryAST, originalSubQueryAST, ctx); RowResolver inputRR = relToHiveRR.get(srcRel); String havingInputAlias = null; boolean [] subqueryConfig = {false, false}; subQuery.subqueryRestrictionsCheck(inputRR, forHavingClause, havingInputAlias, subqueryConfig); if(subqueryConfig[0]) { corrScalarQueries.add(originalSubQueryAST); } if(subqueryConfig[1]) { scalarQueriesWithAggNoWinNoGby.add(originalSubQueryAST); } } } private RelNode genLateralViewPlans(ASTNode lateralView, Map<String, RelNode> aliasToRel) throws SemanticException { final RexBuilder rexBuilder = this.cluster.getRexBuilder(); final RelDataTypeFactory dtFactory = this.cluster.getTypeFactory(); final String inlineFunctionName = GenericUDTFInline.class.getAnnotation(Description.class).name(); int numChildren = lateralView.getChildCount(); assert (numChildren == 2); // 1) Obtain input and all related data structures ASTNode next = (ASTNode) lateralView.getChild(1); RelNode inputRel = null; switch (next.getToken().getType()) { case HiveParser.TOK_TABREF: case HiveParser.TOK_SUBQUERY: case HiveParser.TOK_PTBLFUNCTION: String inputTableName = SemanticAnalyzer.getUnescapedUnqualifiedTableName( (ASTNode) 
next.getChild(0)).toLowerCase(); String inputTableAlias; if (next.getToken().getType() == HiveParser.TOK_PTBLFUNCTION) { // ptf node form is: ^(TOK_PTBLFUNCTION $name $alias? // partitionTableFunctionSource partitioningSpec? expression*) // ptf node guaranteed to have an alias here inputTableAlias = SemanticAnalyzer.unescapeIdentifier(next.getChild(1).getText().toLowerCase()); } else { inputTableAlias = next.getChildCount() == 1 ? inputTableName : SemanticAnalyzer.unescapeIdentifier(next.getChild(next.getChildCount() - 1).getText().toLowerCase()); } inputRel = aliasToRel.get(inputTableAlias); break; case HiveParser.TOK_LATERAL_VIEW: inputRel = genLateralViewPlans(next, aliasToRel); break; default: throw new SemanticException(ErrorMsg.LATERAL_VIEW_INVALID_CHILD.getMsg(lateralView)); } // Input row resolver RowResolver inputRR = this.relToHiveRR.get(inputRel); // Extract input refs. They will serve as input for the function invocation List<RexNode> inputRefs = Lists.transform(inputRel.getRowType().getFieldList(), input -> new RexInputRef(input.getIndex(), input.getType())); // Extract type for the arguments List<RelDataType> inputRefsTypes = new ArrayList<>(); for (int i = 0; i < inputRefs.size(); i++) { inputRefsTypes.add(inputRefs.get(i).getType()); } // Input name to position map ImmutableMap<String, Integer> inputPosMap = this.relToHiveColNameCalcitePosMap.get(inputRel); // 2) Generate HiveTableFunctionScan RelNode for lateral view // TODO: Support different functions (not only INLINE) with LATERAL VIEW JOIN // ^(TOK_LATERAL_VIEW ^(TOK_SELECT ^(TOK_SELEXPR ^(TOK_FUNCTION Identifier["inline"] valuesClause) identifier* tableAlias))) final ASTNode selExprClause = (ASTNode) lateralView.getChild(0).getChild(0); final ASTNode functionCall = (ASTNode) selExprClause.getChild(0); if (functionCall.getChild(0).getText().compareToIgnoreCase(inlineFunctionName) != 0) { throw new SemanticException("CBO only supports inline LVJ"); } final ASTNode valuesClause = (ASTNode) functionCall.getChild(1); // Output types. 
They will be the concatenation of the input refs types and // the types of the expressions for the lateral view generated rows List<RelDataType> outputFieldTypes = new ArrayList<>(inputRefsTypes); List<String> outputFieldNames = new ArrayList<>(inputRel.getRowType().getFieldNames()); // Generate all expressions from lateral view ExprNodeDesc valuesExpr = genExprNodeDesc(valuesClause, inputRR, false); RexCall convertedOriginalValuesExpr = (RexCall) new RexNodeConverter(this.cluster, inputRel.getRowType(), inputPosMap, 0, false).convert(valuesExpr); RelDataType valuesRowType = ((ArraySqlType) convertedOriginalValuesExpr.getType()).getComponentType(); List<RexNode> newStructExprs = new ArrayList<>(); for (RexNode structExpr : convertedOriginalValuesExpr.getOperands()) { RexCall structCall = (RexCall) structExpr; List<RexNode> exprs = new ArrayList<>(inputRefs); exprs.addAll(structCall.getOperands()); newStructExprs.add(rexBuilder.makeCall(structCall.op, exprs)); } RexNode convertedFinalValuesExpr = rexBuilder.makeCall(convertedOriginalValuesExpr.op, newStructExprs); // The return type will be the concatenation of input type and original values type RelDataType retType = SqlValidatorUtil.deriveJoinRowType(inputRel.getRowType(), valuesRowType, JoinRelType.INNER, dtFactory, null, ImmutableList.of()); // Create inline SQL operator FunctionInfo inlineFunctionInfo = FunctionRegistry.getFunctionInfo(inlineFunctionName); SqlOperator calciteOp = SqlFunctionConverter.getCalciteOperator( inlineFunctionName, inlineFunctionInfo.getGenericUDTF(), ImmutableList.copyOf(inputRefsTypes), retType); RelNode htfsRel = HiveTableFunctionScan.create(cluster, TraitsUtil.getDefaultTraitSet(cluster), ImmutableList.of(inputRel), rexBuilder.makeCall(calciteOp, convertedFinalValuesExpr), null, retType, null); // 3) Keep track of colname-to-posmap && RR for new op RowResolver outputRR = new RowResolver(); // Add all input columns if (!RowResolver.add(outputRR, inputRR)) { LOG.warn("Duplicates detected when adding columns to RR: see previous message"); } // Add all columns from lateral view // First we extract the information that the query provides String tableAlias = null; List<String> columnAliases = new ArrayList<>(); Set<String> uniqueNames = new HashSet<>(); for (int i = 1; i < selExprClause.getChildren().size(); i++) { ASTNode child = (ASTNode) selExprClause.getChild(i); switch (child.getToken().getType()) { case HiveParser.TOK_TABALIAS: tableAlias = unescapeIdentifier(child.getChild(0).getText()); break; default: String colAlias = unescapeIdentifier(child.getText()); if (uniqueNames.contains(colAlias)) { // Column aliases defined by query for lateral view output are duplicated throw new SemanticException(ErrorMsg.COLUMN_ALIAS_ALREADY_EXISTS.getMsg(colAlias)); } columnAliases.add(colAlias); uniqueNames.add(colAlias); } } if (tableAlias == null) { // Parser enforces that table alias is added, but check again throw new SemanticException("Alias should be specified LVJ"); } if (!columnAliases.isEmpty() && columnAliases.size() != valuesRowType.getFieldCount()) { // Number of columns in the aliases does not match with number of columns // generated by the lateral view throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH.getMsg()); } if (columnAliases.isEmpty()) { // Auto-generate column aliases for (int i = 0; i < valuesRowType.getFieldCount(); i++) { columnAliases.add(SemanticAnalyzer.getColumnInternalName(i)); } } int numInputExprs = inputRR.getColumnInfos().size(); ListTypeInfo listTypeInfo = (ListTypeInfo) 
valuesExpr.getTypeInfo(); // Array should have ListTypeInfo
StructTypeInfo typeInfos = (StructTypeInfo) listTypeInfo.getListElementTypeInfo();
// Within the list, we extract types
for (int i = 0, j = 0; i < columnAliases.size(); i++) {
  String internalColName;
  do {
    internalColName = SemanticAnalyzer.getColumnInternalName(j++);
  } while (inputRR.getPosition(internalColName) != -1);
  outputRR.put(tableAlias, columnAliases.get(i), new ColumnInfo(internalColName,
      typeInfos.getAllStructFieldTypeInfos().get(i), tableAlias, false));
}
this.relToHiveColNameCalcitePosMap
    .put(htfsRel, buildHiveToCalciteColumnMap(outputRR, htfsRel));
this.relToHiveRR.put(htfsRel, outputRR);

// 4) Return new operator
return htfsRel;
}

private boolean genSubQueryRelNode(QB qb, ASTNode node, RelNode srcRel, boolean forHavingClause,
    Map<ASTNode, RelNode> subQueryToRelNode) throws SemanticException {

Set<ASTNode> corrScalarQueriesWithAgg = new HashSet<ASTNode>();
Set<ASTNode> scalarQueriesWithAggNoWinNoGby = new HashSet<ASTNode>();
// disallow subqueries which Hive doesn't currently support
subqueryRestrictionCheck(qb, node, srcRel, forHavingClause, corrScalarQueriesWithAgg,
    scalarQueriesWithAggNoWinNoGby);
Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
stack.push(node);
boolean isSubQuery = false;
while (!stack.isEmpty()) {
  ASTNode next = stack.pop();
  switch (next.getType()) {
  case HiveParser.TOK_SUBQUERY_EXPR:
    /*
     * Restriction 2.h: a subquery is not allowed in the LHS
     */
    if (next.getChildren().size() == 3
        && next.getChild(2).getType() == HiveParser.TOK_SUBQUERY_EXPR) {
      throw new CalciteSemanticException(ErrorMsg.UNSUPPORTED_SUBQUERY_EXPRESSION.getMsg(
          next.getChild(2), "SubQuery in LHS expressions are not supported."));
    }
    String sbQueryAlias = "sq_" + qb.incrNumSubQueryPredicates();
    QB qbSQ = new QB(qb.getId(), sbQueryAlias, true);
    Phase1Ctx ctx1 = initPhase1Ctx();
    doPhase1((ASTNode) next.getChild(1), qbSQ, ctx1, null);
    getMetaData(qbSQ);
    this.subqueryId++;
    RelNode subQueryRelNode = genLogicalPlan(qbSQ, false,
        relToHiveColNameCalcitePosMap.get(srcRel), relToHiveRR.get(srcRel));
    subQueryToRelNode.put(next, subQueryRelNode);
    // keep track of subqueries that are scalar, correlated, and contain an aggregate
    // subquery expression. These will later be special-cased in the subquery
    // removal rule; for correlated scalar queries with an aggregate, we have
    // to take care of the case where the inner aggregate happens on an empty result
    if (corrScalarQueriesWithAgg.contains(next)) {
      corrScalarRexSQWithAgg.add(subQueryRelNode);
    }
    if (scalarQueriesWithAggNoWinNoGby.contains(next)) {
      scalarAggNoGbyNoWin.add(subQueryRelNode);
    }
    isSubQuery = true;
    break;
  default:
    int childCount = next.getChildCount();
    for (int i = childCount - 1; i >= 0; i--) {
      stack.push((ASTNode) next.getChild(i));
    }
  }
}
return isSubQuery;
}

private RelNode genFilterRelNode(QB qb, ASTNode searchCond, RelNode srcRel,
    Map<String, RelNode> aliasToRel, ImmutableMap<String, Integer> outerNameToPosMap,
    RowResolver outerRR, boolean forHavingClause) throws SemanticException {

Map<ASTNode, RelNode> subQueryToRelNode = new HashMap<>();
boolean isSubQuery = genSubQueryRelNode(qb, searchCond, srcRel, forHavingClause,
    subQueryToRelNode);
if (isSubQuery) {
  ExprNodeDesc subQueryExpr = genExprNodeDesc(searchCond, relToHiveRR.get(srcRel),
      outerRR, subQueryToRelNode, forHavingClause);

  ImmutableMap<String, Integer> hiveColNameCalcitePosMap = this.relToHiveColNameCalcitePosMap
      .get(srcRel);
  RexNode convertedFilterLHS = new RexNodeConverter(cluster, srcRel.getRowType(),
      outerNameToPosMap, hiveColNameCalcitePosMap, relToHiveRR.get(srcRel),
      outerRR, 0, true, subqueryId).convert(subQueryExpr);

  RelNode filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION),
      srcRel, convertedFilterLHS);

  this.relToHiveColNameCalcitePosMap.put(filterRel, this.relToHiveColNameCalcitePosMap
      .get(srcRel));
  relToHiveRR.put(filterRel, relToHiveRR.get(srcRel));
  return filterRel;
} else {
  return genFilterRelNode(searchCond, srcRel, outerNameToPosMap, outerRR, forHavingClause);
}
}

private RelNode projectLeftOuterSide(RelNode srcRel, int numColumns) throws SemanticException {
RowResolver iRR = relToHiveRR.get(srcRel);
RowResolver oRR = new RowResolver();
RowResolver.add(oRR, iRR, numColumns);

List<RexNode> calciteColLst = new ArrayList<RexNode>();
List<String> oFieldNames = new ArrayList<String>();
RelDataType iType = srcRel.getRowType();

for (int i = 0; i < iType.getFieldCount(); i++) {
  RelDataTypeField fType = iType.getFieldList().get(i);
  String fName = iType.getFieldNames().get(i);
  calciteColLst.add(cluster.getRexBuilder().makeInputRef(fType.getType(), i));
  oFieldNames.add(fName);
}

HiveRelNode selRel = HiveProject.create(srcRel, calciteColLst, oFieldNames);
this.relToHiveColNameCalcitePosMap.put(selRel, buildHiveToCalciteColumnMap(oRR, selRel));
this.relToHiveRR.put(selRel, oRR);
return selRel;
}

private RelNode genFilterLogicalPlan(QB qb, RelNode srcRel, Map<String, RelNode> aliasToRel,
    ImmutableMap<String, Integer> outerNameToPosMap, RowResolver outerRR,
    boolean forHavingClause) throws SemanticException {
RelNode filterRel = null;

Iterator<ASTNode> whereClauseIterator = getQBParseInfo(qb).getDestToWhereExpr().values()
    .iterator();
if (whereClauseIterator.hasNext()) {
  filterRel = genFilterRelNode(qb, (ASTNode) whereClauseIterator.next().getChild(0), srcRel,
      aliasToRel, outerNameToPosMap, outerRR, forHavingClause);
}

return filterRel;
}

/**
 * Class to store GenericUDAF related information.
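 * Holds the aggregation parameters, the UDAF return type, the function name,
 * and whether DISTINCT was specified.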
 */
private class AggInfo {
  private final List<ExprNodeDesc> m_aggParams;
  private final TypeInfo m_returnType;
  private final String m_udfName;
  private final boolean m_distinct;

  private AggInfo(List<ExprNodeDesc> aggParams, TypeInfo returnType, String udfName,
      boolean isDistinct) {
    m_aggParams = aggParams;
    m_returnType = returnType;
    m_udfName = udfName;
    m_distinct = isDistinct;
  }
}

private AggregateCall convertGBAgg(AggInfo agg, RelNode input, List<RexNode> gbChildProjLst,
    RexNodeConverter converter, HashMap<String, Integer> rexNodeToPosMap,
    Integer childProjLstIndx) throws SemanticException {
// 1. Get agg fn ret type in Calcite
RelDataType aggFnRetType = TypeConverter.convert(agg.m_returnType,
    this.cluster.getTypeFactory());

// 2. Convert Agg Fn args and type of args to Calcite
// TODO: Does HQL allow expressions as aggregate args or can it only be
// projections from the child?
Integer inputIndx;
List<Integer> argList = new ArrayList<Integer>();
RexNode rexNd = null;
RelDataTypeFactory dtFactory = this.cluster.getTypeFactory();
ImmutableList.Builder<RelDataType> aggArgRelDTBldr = new ImmutableList.Builder<RelDataType>();
for (ExprNodeDesc expr : agg.m_aggParams) {
  rexNd = converter.convert(expr);
  inputIndx = rexNodeToPosMap.get(rexNd.toString());
  if (inputIndx == null) {
    gbChildProjLst.add(rexNd);
    rexNodeToPosMap.put(rexNd.toString(), childProjLstIndx);
    inputIndx = childProjLstIndx;
    childProjLstIndx++;
  }
  argList.add(inputIndx);

  // TODO: does arg need type cast?
  aggArgRelDTBldr.add(TypeConverter.convert(expr.getTypeInfo(), dtFactory));
}

// 3. Get Aggregation FN from Calcite given name, ret type and input arg type
final SqlAggFunction aggregation = SqlFunctionConverter.getCalciteAggFn(agg.m_udfName,
    agg.m_distinct, aggArgRelDTBldr.build(), aggFnRetType);

return new AggregateCall(aggregation, agg.m_distinct, argList, aggFnRetType, null);
}

private RelNode genGBRelNode(List<ExprNodeDesc> gbExprs, List<AggInfo> aggInfoLst,
    List<Long> groupSets, RelNode srcRel) throws SemanticException {
ImmutableMap<String, Integer> posMap = this.relToHiveColNameCalcitePosMap.get(srcRel);
RexNodeConverter converter = new RexNodeConverter(this.cluster, srcRel.getRowType(),
    posMap, 0, false);

final boolean hasGroupSets = groupSets != null && !groupSets.isEmpty();
final List<RexNode> gbChildProjLst = Lists.newArrayList();
final HashMap<String, Integer> rexNodeToPosMap = new HashMap<String, Integer>();
final List<Integer> groupSetPositions = Lists.newArrayList();
Integer gbIndx = 0;
RexNode rnd;
for (ExprNodeDesc key : gbExprs) {
  rnd = converter.convert(key);
  gbChildProjLst.add(rnd);
  groupSetPositions.add(gbIndx);
  rexNodeToPosMap.put(rnd.toString(), gbIndx);
  gbIndx++;
}
final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions);

// Grouping sets: we need to transform them into ImmutableBitSet objects for Calcite
List<ImmutableBitSet> transformedGroupSets = null;
if (hasGroupSets) {
  Set<ImmutableBitSet> setTransformedGroupSets =
      new HashSet<ImmutableBitSet>(groupSets.size());
  for (long val : groupSets) {
    setTransformedGroupSets.add(convert(val, groupSet.cardinality()));
  }
  // Calcite expects the grouping sets sorted and without duplicates
  transformedGroupSets = new ArrayList<ImmutableBitSet>(setTransformedGroupSets);
  Collections.sort(transformedGroupSets, ImmutableBitSet.COMPARATOR);
}

List<AggregateCall> aggregateCalls = Lists.newArrayList();
for (AggInfo agg : aggInfoLst) {
  aggregateCalls.add(convertGBAgg(agg, srcRel, gbChildProjLst, converter, rexNodeToPosMap,
      gbChildProjLst.size()));
}
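// Grouping-set sketch (illustrative): for GROUP BY a, b GROUPING SETS ((a), (a, b)),
// groupSet is {0, 1} and the transformed sets become {0} and {0, 1}; the
// big-endian flip in convert() maps Hive's grouping_id encoding to Calcite's
// convention, where a set bit means the column participates in the GROUP BY.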
if (hasGroupSets) {
  // Create GroupingID column
  AggregateCall aggCall = AggregateCall.create(HiveGroupingID.INSTANCE, false,
      new ImmutableList.Builder<Integer>().build(), -1,
      this.cluster.getTypeFactory().createSqlType(SqlTypeName.BIGINT),
      HiveGroupingID.INSTANCE.getName());
  aggregateCalls.add(aggCall);
}

if (gbChildProjLst.isEmpty()) {
  // This will happen for count(*); in such cases we arbitrarily pick the
  // first element from srcRel
  gbChildProjLst.add(this.cluster.getRexBuilder().makeInputRef(srcRel, 0));
}
RelNode gbInputRel = HiveProject.create(srcRel, gbChildProjLst, null);

HiveRelNode aggregateRel = new HiveAggregate(cluster,
    cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel, groupSet,
    transformedGroupSets, aggregateCalls);

return aggregateRel;
}

/* This method returns the flipped big-endian representation of the value */
private ImmutableBitSet convert(long value, int length) {
BitSet bits = new BitSet();
for (int index = length - 1; index >= 0; index--) {
  if (value % 2 != 0) {
    bits.set(index);
  }
  value = value >>> 1;
}
// We flip the bits because Calcite considers that '1'
// means that the column participates in the GroupBy
// and '0' does not, as opposed to grouping_id.
bits.flip(0, length);
return ImmutableBitSet.FROM_BIT_SET.apply(bits);
}

private void addAlternateGByKeyMappings(ASTNode gByExpr, ColumnInfo colInfo,
    RowResolver gByInputRR, RowResolver gByRR) {
if (gByExpr.getType() == HiveParser.DOT
    && gByExpr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL) {
  String tab_alias = BaseSemanticAnalyzer.unescapeIdentifier(gByExpr.getChild(0).getChild(0)
      .getText().toLowerCase());
  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
      gByExpr.getChild(1).getText().toLowerCase());
  gByRR.put(tab_alias, col_alias, colInfo);
} else if (gByExpr.getType() == HiveParser.TOK_TABLE_OR_COL) {
  String col_alias = BaseSemanticAnalyzer.unescapeIdentifier(
      gByExpr.getChild(0).getText().toLowerCase());
  String tab_alias = null;
  /*
   * If the input to the GBy has a tab alias for the column, then add an
   * entry based on that tab_alias. For e.g. this query: select b.x,
   * count(*) from t1 b group by x needs (tab_alias=b, col_alias=x) in the
   * GBy RR. tab_alias=b comes from looking at the RowResolver that is the
   * ancestor before any GBy/ReduceSinks added for the GBY operation.
   */
  try {
    ColumnInfo pColInfo = gByInputRR.get(tab_alias, col_alias);
    tab_alias = pColInfo == null ? null : pColInfo.getTabAlias();
  } catch (SemanticException se) {
    // ignore: if the column cannot be resolved, tab_alias stays null
  }
  gByRR.put(tab_alias, col_alias, colInfo);
}
}

private void addToGBExpr(RowResolver groupByOutputRowResolver,
    RowResolver groupByInputRowResolver, ASTNode grpbyExpr, ExprNodeDesc grpbyExprNDesc,
    List<ExprNodeDesc> gbExprNDescLst, List<String> outputColumnNames) {
// TODO: Should we use grpbyExprNDesc.getTypeInfo()?
what if expr is // UDF int i = gbExprNDescLst.size(); String field = SemanticAnalyzer.getColumnInternalName(i); outputColumnNames.add(field); gbExprNDescLst.add(grpbyExprNDesc); ColumnInfo oColInfo = new ColumnInfo(field, grpbyExprNDesc.getTypeInfo(), null, false); groupByOutputRowResolver.putExpression(grpbyExpr, oColInfo); addAlternateGByKeyMappings(grpbyExpr, oColInfo, groupByInputRowResolver, groupByOutputRowResolver); } private AggInfo getHiveAggInfo(ASTNode aggAst, int aggFnLstArgIndx, RowResolver inputRR) throws SemanticException { AggInfo aInfo = null; // 1 Convert UDAF Params to ExprNodeDesc ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>(); for (int i = 1; i <= aggFnLstArgIndx; i++) { ASTNode paraExpr = (ASTNode) aggAst.getChild(i); ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, inputRR); aggParameters.add(paraExprNode); } // 2. Is this distinct UDAF boolean isDistinct = aggAst.getType() == HiveParser.TOK_FUNCTIONDI; // 3. Determine type of UDAF TypeInfo udafRetType = null; // 3.1 Obtain UDAF name String aggName = SemanticAnalyzer.unescapeIdentifier(aggAst.getChild(0).getText()); // 3.2 Rank functions type is 'int'/'double' if (FunctionRegistry.isRankingFunction(aggName)) { if (aggName.equalsIgnoreCase("percent_rank")) udafRetType = TypeInfoFactory.doubleTypeInfo; else udafRetType = TypeInfoFactory.intTypeInfo; } else { // 3.3 Try obtaining UDAF evaluators to determine the ret type try { boolean isAllColumns = aggAst.getType() == HiveParser.TOK_FUNCTIONSTAR; // 3.3.1 Get UDAF Evaluator Mode amode = SemanticAnalyzer.groupByDescModeToUDAFMode(GroupByDesc.Mode.COMPLETE, isDistinct); GenericUDAFEvaluator genericUDAFEvaluator = null; if (aggName.toLowerCase().equals(FunctionRegistry.LEAD_FUNC_NAME) || aggName.toLowerCase().equals(FunctionRegistry.LAG_FUNC_NAME)) { ArrayList<ObjectInspector> originalParameterTypeInfos = SemanticAnalyzer .getWritableObjectInspector(aggParameters); genericUDAFEvaluator = FunctionRegistry.getGenericWindowingEvaluator(aggName, originalParameterTypeInfos, isDistinct, isAllColumns); GenericUDAFInfo udaf = SemanticAnalyzer.getGenericUDAFInfo(genericUDAFEvaluator, amode, aggParameters); udafRetType = ((ListTypeInfo) udaf.returnType).getListElementTypeInfo(); } else { genericUDAFEvaluator = SemanticAnalyzer.getGenericUDAFEvaluator(aggName, aggParameters, aggAst, isDistinct, isAllColumns); assert (genericUDAFEvaluator != null); // 3.3.2 Get UDAF Info using UDAF Evaluator GenericUDAFInfo udaf = SemanticAnalyzer.getGenericUDAFInfo(genericUDAFEvaluator, amode, aggParameters); if (FunctionRegistry.pivotResult(aggName)) { udafRetType = ((ListTypeInfo)udaf.returnType).getListElementTypeInfo(); } else { udafRetType = udaf.returnType; } } } catch (Exception e) { LOG.debug("CBO: Couldn't Obtain UDAF evaluators for " + aggName + ", trying to translate to GenericUDF"); } // 3.4 Try GenericUDF translation if (udafRetType == null) { TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR); // We allow stateful functions in the SELECT list (but nowhere else) tcCtx.setAllowStatefulFunctions(true); tcCtx.setAllowDistinctFunctions(false); ExprNodeDesc exp = genExprNodeDesc((ASTNode) aggAst.getChild(0), inputRR, tcCtx); udafRetType = exp.getTypeInfo(); } } // 4. Construct AggInfo aInfo = new AggInfo(aggParameters, udafRetType, aggName, isDistinct); return aInfo; } /** * Generate GB plan. * * @param qb * @param srcRel * @return TODO: 1. Grouping Sets (roll up..) 
* @throws SemanticException */ private RelNode genGBLogicalPlan(QB qb, RelNode srcRel) throws SemanticException { RelNode gbRel = null; QBParseInfo qbp = getQBParseInfo(qb); // 1. Gather GB Expressions (AST) (GB + Aggregations) // NOTE: Multi Insert is not supported String detsClauseName = qbp.getClauseNames().iterator().next(); // Check and transform group by *. This will only happen for select distinct *. // Here the "genSelectPlan" is being leveraged. // The main benefits are (1) remove virtual columns that should // not be included in the group by; (2) add the fully qualified column names to unParseTranslator // so that view is supported. The drawback is that an additional SEL op is added. If it is // not necessary, it will be removed by NonBlockingOpDeDupProc Optimizer because it will match // SEL%SEL% rule. ASTNode selExprList = qb.getParseInfo().getSelForClause(detsClauseName); SubQueryUtils.checkForTopLevelSubqueries(selExprList); if (selExprList.getToken().getType() == HiveParser.TOK_SELECTDI && selExprList.getChildCount() == 1 && selExprList.getChild(0).getChildCount() == 1) { ASTNode node = (ASTNode) selExprList.getChild(0).getChild(0); if (node.getToken().getType() == HiveParser.TOK_ALLCOLREF) { // As we said before, here we use genSelectLogicalPlan to rewrite AllColRef srcRel = genSelectLogicalPlan(qb, srcRel, srcRel, null, null, true).getKey(); RowResolver rr = this.relToHiveRR.get(srcRel); qbp.setSelExprForClause(detsClauseName, SemanticAnalyzer.genSelectDIAST(rr)); } } // Select DISTINCT + windowing; GBy handled by genSelectForWindowing if (selExprList.getToken().getType() == HiveParser.TOK_SELECTDI && !qb.getAllWindowingSpecs().isEmpty()) { return null; } List<ASTNode> grpByAstExprs = getGroupByForClause(qbp, detsClauseName); HashMap<String, ASTNode> aggregationTrees = qbp.getAggregationExprsForClause(detsClauseName); boolean hasGrpByAstExprs = (grpByAstExprs != null && !grpByAstExprs.isEmpty()) ? true : false; boolean hasAggregationTrees = (aggregationTrees != null && !aggregationTrees.isEmpty()) ? true : false; final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty()); // 2. Sanity check if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW) && qbp.getDistinctFuncExprsForClause(detsClauseName).size() > 1) { throw new SemanticException(ErrorMsg.UNSUPPORTED_MULTIPLE_DISTINCTS.getMsg()); } if (cubeRollupGrpSetPresent) { if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVEMAPSIDEAGGREGATE)) { throw new SemanticException(ErrorMsg.HIVE_GROUPING_SETS_AGGR_NOMAPAGGR.getMsg()); } if (conf.getBoolVar(HiveConf.ConfVars.HIVEGROUPBYSKEW)) { checkExpressionsForGroupingSet(grpByAstExprs, qb.getParseInfo() .getDistinctFuncExprsForClause(detsClauseName), aggregationTrees, this.relToHiveRR.get(srcRel)); if (qbp.getDestGroupingSets().size() > conf .getIntVar(HiveConf.ConfVars.HIVE_NEW_JOB_GROUPING_SET_CARDINALITY)) { String errorMsg = "The number of rows per input row due to grouping sets is " + qbp.getDestGroupingSets().size(); throw new SemanticException( ErrorMsg.HIVE_GROUPING_SETS_THRESHOLD_NOT_ALLOWED_WITH_SKEW.getMsg(errorMsg)); } } } if (hasGrpByAstExprs || hasAggregationTrees) { ArrayList<ExprNodeDesc> gbExprNDescLst = new ArrayList<ExprNodeDesc>(); ArrayList<String> outputColumnNames = new ArrayList<String>(); // 3. 
Input, Output Row Resolvers RowResolver groupByInputRowResolver = this.relToHiveRR.get(srcRel); RowResolver groupByOutputRowResolver = new RowResolver(); groupByOutputRowResolver.setIsExprResolver(true); if (hasGrpByAstExprs) { // 4. Construct GB Keys (ExprNode) for (int i = 0; i < grpByAstExprs.size(); ++i) { ASTNode grpbyExpr = grpByAstExprs.get(i); Map<ASTNode, ExprNodeDesc> astToExprNDescMap = genAllExprNodeDesc(grpbyExpr, groupByInputRowResolver); ExprNodeDesc grpbyExprNDesc = astToExprNDescMap.get(grpbyExpr); if (grpbyExprNDesc == null) throw new CalciteSemanticException("Invalid Column Reference: " + grpbyExpr.dump(), UnsupportedFeature.Invalid_column_reference); addToGBExpr(groupByOutputRowResolver, groupByInputRowResolver, grpbyExpr, grpbyExprNDesc, gbExprNDescLst, outputColumnNames); } } // 5. GroupingSets, Cube, Rollup int groupingColsSize = gbExprNDescLst.size(); List<Long> groupingSets = null; if (cubeRollupGrpSetPresent) { groupingSets = getGroupByGroupingSetsForClause(qbp, detsClauseName).getSecond(); } // 6. Construct aggregation function Info ArrayList<AggInfo> aggregations = new ArrayList<AggInfo>(); if (hasAggregationTrees) { assert (aggregationTrees != null); for (ASTNode value : aggregationTrees.values()) { // 6.1 Determine type of UDAF // This is the GenericUDAF name String aggName = SemanticAnalyzer.unescapeIdentifier(value.getChild(0).getText()); boolean isDistinct = value.getType() == HiveParser.TOK_FUNCTIONDI; boolean isAllColumns = value.getType() == HiveParser.TOK_FUNCTIONSTAR; // 6.2 Convert UDAF Params to ExprNodeDesc ArrayList<ExprNodeDesc> aggParameters = new ArrayList<ExprNodeDesc>(); for (int i = 1; i < value.getChildCount(); i++) { ASTNode paraExpr = (ASTNode) value.getChild(i); ExprNodeDesc paraExprNode = genExprNodeDesc(paraExpr, groupByInputRowResolver); aggParameters.add(paraExprNode); } Mode amode = SemanticAnalyzer.groupByDescModeToUDAFMode(GroupByDesc.Mode.COMPLETE, isDistinct); GenericUDAFEvaluator genericUDAFEvaluator = SemanticAnalyzer.getGenericUDAFEvaluator( aggName, aggParameters, value, isDistinct, isAllColumns); assert (genericUDAFEvaluator != null); GenericUDAFInfo udaf = SemanticAnalyzer.getGenericUDAFInfo(genericUDAFEvaluator, amode, aggParameters); AggInfo aInfo = new AggInfo(aggParameters, udaf.returnType, aggName, isDistinct); aggregations.add(aInfo); String field = getColumnInternalName(groupingColsSize + aggregations.size() - 1); outputColumnNames.add(field); groupByOutputRowResolver.putExpression(value, new ColumnInfo(field, aInfo.m_returnType, "", false)); } } // 7. If GroupingSets, Cube, Rollup were used, we account grouping__id if(groupingSets != null && !groupingSets.isEmpty()) { String field = getColumnInternalName(groupingColsSize + aggregations.size()); outputColumnNames.add(field); groupByOutputRowResolver.put(null, VirtualColumn.GROUPINGID.getName(), new ColumnInfo( field, VirtualColumn.GROUPINGID.getTypeInfo(), null, true)); } // 8. We create the group_by operator gbRel = genGBRelNode(gbExprNDescLst, aggregations, groupingSets, srcRel); relToHiveColNameCalcitePosMap.put(gbRel, buildHiveToCalciteColumnMap(groupByOutputRowResolver, gbRel)); this.relToHiveRR.put(gbRel, groupByOutputRowResolver); } return gbRel; } /** * Generate OB RelNode and input Select RelNode that should be used to * introduce top constraining Project. If Input select RelNode is not * present then don't introduce top constraining select. 
* * @param qb * @param selPair * @param outermostOB * @return RelNode OB RelNode * @throws SemanticException */ private RelNode genOBLogicalPlan(QB qb, Pair<RelNode, RowResolver> selPair, boolean outermostOB) throws SemanticException { // selPair.getKey() is the operator right before OB // selPair.getValue() is RR which only contains columns needed in result // set. Extra columns needed by order by will be absent from it. RelNode srcRel = selPair.getKey(); RowResolver selectOutputRR = selPair.getValue(); RelNode sortRel = null; RelNode returnRel = null; QBParseInfo qbp = getQBParseInfo(qb); String dest = qbp.getClauseNames().iterator().next(); ASTNode obAST = qbp.getOrderByForClause(dest); if (obAST != null) { // 1. OB Expr sanity test // in strict mode, in the presence of order by, limit must be // specified Integer limit = qb.getParseInfo().getDestLimit(dest); if (limit == null) { String error = StrictChecks.checkNoLimit(conf); if (error != null) { throw new SemanticException(SemanticAnalyzer.generateErrorMessage(obAST, error)); } } // 2. Walk through OB exprs and extract field collations and additional // virtual columns needed final List<RexNode> newVCLst = new ArrayList<RexNode>(); final List<RelFieldCollation> fieldCollations = Lists.newArrayList(); int fieldIndex = 0; List<Node> obASTExprLst = obAST.getChildren(); ASTNode obASTExpr; ASTNode nullObASTExpr; List<Pair<ASTNode, TypeInfo>> vcASTTypePairs = new ArrayList<Pair<ASTNode, TypeInfo>>(); RowResolver inputRR = relToHiveRR.get(srcRel); RowResolver outputRR = new RowResolver(); RexNode rnd; RexNodeConverter converter = new RexNodeConverter(cluster, srcRel.getRowType(), relToHiveColNameCalcitePosMap.get(srcRel), 0, false); int srcRelRecordSz = srcRel.getRowType().getFieldCount(); for (int i = 0; i < obASTExprLst.size(); i++) { // 2.1 Convert AST Expr to ExprNode obASTExpr = (ASTNode) obASTExprLst.get(i); nullObASTExpr = (ASTNode) obASTExpr.getChild(0); ASTNode ref = (ASTNode) nullObASTExpr.getChild(0); Map<ASTNode, ExprNodeDesc> astToExprNDescMap = null; ExprNodeDesc obExprNDesc = null; boolean isBothByPos = HiveConf.getBoolVar(conf, ConfVars.HIVE_GROUPBY_ORDERBY_POSITION_ALIAS); boolean isObyByPos = isBothByPos || HiveConf.getBoolVar(conf, ConfVars.HIVE_ORDERBY_POSITION_ALIAS); // replace each of the position alias in ORDERBY with the actual column if (ref != null && ref.getToken().getType() == HiveParser.Number) { if (isObyByPos) { int pos = Integer.parseInt(ref.getText()); if (pos > 0 && pos <= selectOutputRR.getColumnInfos().size()) { // fieldIndex becomes so simple // Note that pos starts from 1 while fieldIndex starts from 0; fieldIndex = pos - 1; } else { throw new SemanticException( ErrorMsg.INVALID_POSITION_ALIAS_IN_ORDERBY.getMsg("Position alias: " + pos + " does not exist\n" + "The Select List is indexed from 1 to " + selectOutputRR.getColumnInfos().size())); } } else { // if not using position alias and it is a number. LOG.warn("Using constant number " + ref.getText() + " in order by. If you try to use position alias when hive.orderby.position.alias is false, the position alias will be ignored."); } } else { // first try to get it from select // in case of udtf, selectOutputRR may be null. if (selectOutputRR != null) { try { astToExprNDescMap = genAllExprNodeDesc(ref, selectOutputRR); obExprNDesc = astToExprNDescMap.get(ref); } catch (SemanticException ex) { // we can tolerate this as this is the previous behavior LOG.debug("Can not find column in " + ref.getText() + ". 
The error msg is " + ex.getMessage()); } } // then try to get it from all if (obExprNDesc == null) { astToExprNDescMap = genAllExprNodeDesc(ref, inputRR); obExprNDesc = astToExprNDescMap.get(ref); } if (obExprNDesc == null) { throw new SemanticException("Invalid order by expression: " + obASTExpr.toString()); } // 2.2 Convert ExprNode to RexNode rnd = converter.convert(obExprNDesc); // 2.3 Determine the index of ob expr in child schema // NOTE: Calcite can not take compound exprs in OB without it being // present in the child (& hence we add a child Project Rel) if (rnd instanceof RexInputRef) { fieldIndex = ((RexInputRef) rnd).getIndex(); } else { fieldIndex = srcRelRecordSz + newVCLst.size(); newVCLst.add(rnd); vcASTTypePairs.add(new Pair<ASTNode, TypeInfo>(ref, obExprNDesc.getTypeInfo())); } } // 2.4 Determine the Direction of order by RelFieldCollation.Direction order = RelFieldCollation.Direction.DESCENDING; if (obASTExpr.getType() == HiveParser.TOK_TABSORTCOLNAMEASC) { order = RelFieldCollation.Direction.ASCENDING; } RelFieldCollation.NullDirection nullOrder; if (nullObASTExpr.getType() == HiveParser.TOK_NULLS_FIRST) { nullOrder = RelFieldCollation.NullDirection.FIRST; } else if (nullObASTExpr.getType() == HiveParser.TOK_NULLS_LAST) { nullOrder = RelFieldCollation.NullDirection.LAST; } else { throw new SemanticException("Unexpected null ordering option: " + nullObASTExpr.getType()); } // 2.5 Add to field collations fieldCollations.add(new RelFieldCollation(fieldIndex, order, nullOrder)); } // 3. Add Child Project Rel if needed, Generate Output RR, input Sel Rel // for top constraining Sel RelNode obInputRel = srcRel; if (!newVCLst.isEmpty()) { List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() { @Override public RexNode apply(RelDataTypeField input) { return new RexInputRef(input.getIndex(), input.getType()); } }); RowResolver obSyntheticProjectRR = new RowResolver(); if (!RowResolver.add(obSyntheticProjectRR, inputRR)) { throw new CalciteSemanticException( "Duplicates detected when adding columns to RR: see previous message", UnsupportedFeature.Duplicates_in_RR); } int vcolPos = inputRR.getRowSchema().getSignature().size(); for (Pair<ASTNode, TypeInfo> astTypePair : vcASTTypePairs) { obSyntheticProjectRR.putExpression(astTypePair.getKey(), new ColumnInfo( SemanticAnalyzer.getColumnInternalName(vcolPos), astTypePair.getValue(), null, false)); vcolPos++; } obInputRel = genSelectRelNode(CompositeList.of(originalInputRefs, newVCLst), obSyntheticProjectRR, srcRel); if (outermostOB) { if (!RowResolver.add(outputRR, inputRR)) { throw new CalciteSemanticException( "Duplicates detected when adding columns to RR: see previous message", UnsupportedFeature.Duplicates_in_RR); } } else { if (!RowResolver.add(outputRR, obSyntheticProjectRR)) { throw new CalciteSemanticException( "Duplicates detected when adding columns to RR: see previous message", UnsupportedFeature.Duplicates_in_RR); } } } else { if (!RowResolver.add(outputRR, inputRR)) { throw new CalciteSemanticException( "Duplicates detected when adding columns to RR: see previous message", UnsupportedFeature.Duplicates_in_RR); } } // 4. Construct SortRel RelTraitSet traitSet = cluster.traitSetOf(HiveRelNode.CONVENTION); RelCollation canonizedCollation = traitSet.canonize(RelCollationImpl.of(fieldCollations)); sortRel = new HiveSortLimit(cluster, traitSet, obInputRel, canonizedCollation, null, null); // 5. 
Update the maps // NOTE: Output RR for SortRel is considered same as its input; we may // end up not using VC that is present in sort rel. Also note that // rowtype of sortrel is the type of it child; if child happens to be // synthetic project that we introduced then that projectrel would // contain the vc. ImmutableMap<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap( outputRR, sortRel); relToHiveRR.put(sortRel, outputRR); relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap); if (selectOutputRR != null) { List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() { @Override public RexNode apply(RelDataTypeField input) { return new RexInputRef(input.getIndex(), input.getType()); } }); List<RexNode> selectedRefs = Lists.newArrayList(); for (int index = 0; index < selectOutputRR.getColumnInfos().size(); index++) { selectedRefs.add(originalInputRefs.get(index)); } // We need to add select since order by schema may have more columns than result schema. returnRel = genSelectRelNode(selectedRefs, selectOutputRR, sortRel); } else { returnRel = sortRel; } } return returnRel; } private RelNode genLimitLogicalPlan(QB qb, RelNode srcRel) throws SemanticException { HiveRelNode sortRel = null; QBParseInfo qbp = getQBParseInfo(qb); SimpleEntry<Integer,Integer> entry = qbp.getDestToLimit().get(qbp.getClauseNames().iterator().next()); Integer offset = (entry == null) ? 0 : entry.getKey(); Integer fetch = (entry == null) ? null : entry.getValue(); if (fetch != null) { RexNode offsetRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(offset)); RexNode fetchRN = cluster.getRexBuilder().makeExactLiteral(BigDecimal.valueOf(fetch)); RelTraitSet traitSet = cluster.traitSetOf(HiveRelNode.CONVENTION); RelCollation canonizedCollation = traitSet.canonize(RelCollations.EMPTY); sortRel = new HiveSortLimit(cluster, traitSet, srcRel, canonizedCollation, offsetRN, fetchRN); RowResolver inputRR = relToHiveRR.get(srcRel); RowResolver outputRR = inputRR.duplicate(); ImmutableMap<String, Integer> hiveColNameCalcitePosMap = buildHiveToCalciteColumnMap( outputRR, sortRel); relToHiveRR.put(sortRel, outputRR); relToHiveColNameCalcitePosMap.put(sortRel, hiveColNameCalcitePosMap); } return sortRel; } private List<RexNode> getPartitionKeys(PartitionSpec ps, RexNodeConverter converter, RowResolver inputRR) throws SemanticException { List<RexNode> pKeys = new ArrayList<RexNode>(); if (ps != null) { List<PartitionExpression> pExprs = ps.getExpressions(); for (PartitionExpression pExpr : pExprs) { TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR); tcCtx.setAllowStatefulFunctions(true); ExprNodeDesc exp = genExprNodeDesc(pExpr.getExpression(), inputRR, tcCtx); pKeys.add(converter.convert(exp)); } } return pKeys; } private List<RexFieldCollation> getOrderKeys(OrderSpec os, RexNodeConverter converter, RowResolver inputRR) throws SemanticException { List<RexFieldCollation> oKeys = new ArrayList<RexFieldCollation>(); if (os != null) { List<OrderExpression> oExprs = os.getExpressions(); for (OrderExpression oExpr : oExprs) { TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR); tcCtx.setAllowStatefulFunctions(true); ExprNodeDesc exp = genExprNodeDesc(oExpr.getExpression(), inputRR, tcCtx); RexNode ordExp = converter.convert(exp); Set<SqlKind> flags = new HashSet<SqlKind>(); if (oExpr.getOrder() == org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.Order.DESC) { flags.add(SqlKind.DESCENDING); } if (oExpr.getNullOrder() == 
org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder.NULLS_FIRST) { flags.add(SqlKind.NULLS_FIRST); } else if (oExpr.getNullOrder() == org.apache.hadoop.hive.ql.parse.PTFInvocationSpec.NullOrder.NULLS_LAST) { flags.add(SqlKind.NULLS_LAST); } else { throw new SemanticException( "Unexpected null ordering option: " + oExpr.getNullOrder()); } oKeys.add(new RexFieldCollation(ordExp, flags)); } } return oKeys; } private RexWindowBound getBound(BoundarySpec bs, RexNodeConverter converter) { RexWindowBound rwb = null; if (bs != null) { SqlParserPos pos = new SqlParserPos(1, 1); SqlNode amt = bs.getAmt() == 0 ? null : SqlLiteral.createExactNumeric( String.valueOf(bs.getAmt()), new SqlParserPos(2, 2)); RexNode amtLiteral = null; SqlCall sc = null; if (amt != null) amtLiteral = cluster.getRexBuilder().makeLiteral(new Integer(bs.getAmt()), cluster.getTypeFactory().createSqlType(SqlTypeName.INTEGER), true); switch (bs.getDirection()) { case PRECEDING: if (amt == null) { rwb = RexWindowBound.create(SqlWindow.createUnboundedPreceding(pos), null); } else { sc = (SqlCall) SqlWindow.createPreceding(amt, pos); rwb = RexWindowBound.create(sc, cluster.getRexBuilder().makeCall(sc.getOperator(), amtLiteral)); } break; case CURRENT: rwb = RexWindowBound.create(SqlWindow.createCurrentRow(new SqlParserPos(1, 1)), null); break; case FOLLOWING: if (amt == null) { rwb = RexWindowBound.create(SqlWindow.createUnboundedFollowing(new SqlParserPos(1, 1)), null); } else { sc = (SqlCall) SqlWindow.createFollowing(amt, pos); rwb = RexWindowBound.create(sc, cluster.getRexBuilder().makeCall(sc.getOperator(), amtLiteral)); } break; } } return rwb; } private int getWindowSpecIndx(ASTNode wndAST) { int wi = wndAST.getChildCount() - 1; if (wi <= 0 || (wndAST.getChild(wi).getType() != HiveParser.TOK_WINDOWSPEC)) { wi = -1; } return wi; } private Pair<RexNode, TypeInfo> genWindowingProj(QB qb, WindowExpressionSpec wExpSpec, RelNode srcRel) throws SemanticException { RexNode w = null; TypeInfo wHiveRetType = null; if (wExpSpec instanceof WindowFunctionSpec) { WindowFunctionSpec wFnSpec = (WindowFunctionSpec) wExpSpec; ASTNode windowProjAst = wFnSpec.getExpression(); // TODO: do we need to get to child? int wndSpecASTIndx = getWindowSpecIndx(windowProjAst); // 2. Get Hive Aggregate Info AggInfo hiveAggInfo = getHiveAggInfo(windowProjAst, wndSpecASTIndx - 1, this.relToHiveRR.get(srcRel)); // 3. Get Calcite Return type for Agg Fn wHiveRetType = hiveAggInfo.m_returnType; RelDataType calciteAggFnRetType = TypeConverter.convert(hiveAggInfo.m_returnType, this.cluster.getTypeFactory()); // 4. Convert Agg Fn args to Calcite ImmutableMap<String, Integer> posMap = this.relToHiveColNameCalcitePosMap.get(srcRel); RexNodeConverter converter = new RexNodeConverter(this.cluster, srcRel.getRowType(), posMap, 0, false); Builder<RexNode> calciteAggFnArgsBldr = ImmutableList.<RexNode> builder(); Builder<RelDataType> calciteAggFnArgsTypeBldr = ImmutableList.<RelDataType> builder(); for (int i = 0; i < hiveAggInfo.m_aggParams.size(); i++) { calciteAggFnArgsBldr.add(converter.convert(hiveAggInfo.m_aggParams.get(i))); calciteAggFnArgsTypeBldr.add(TypeConverter.convert(hiveAggInfo.m_aggParams.get(i) .getTypeInfo(), this.cluster.getTypeFactory())); } ImmutableList<RexNode> calciteAggFnArgs = calciteAggFnArgsBldr.build(); ImmutableList<RelDataType> calciteAggFnArgsType = calciteAggFnArgsTypeBldr.build(); // 5. 
Get Calcite Agg Fn final SqlAggFunction calciteAggFn = SqlFunctionConverter.getCalciteAggFn( hiveAggInfo.m_udfName, hiveAggInfo.m_distinct, calciteAggFnArgsType, calciteAggFnRetType); // 6. Translate Window spec RowResolver inputRR = relToHiveRR.get(srcRel); WindowSpec wndSpec = ((WindowFunctionSpec) wExpSpec).getWindowSpec(); List<RexNode> partitionKeys = getPartitionKeys(wndSpec.getPartition(), converter, inputRR); List<RexFieldCollation> orderKeys = getOrderKeys(wndSpec.getOrder(), converter, inputRR); RexWindowBound upperBound = getBound(wndSpec.getWindowFrame().getStart(), converter); RexWindowBound lowerBound = getBound(wndSpec.getWindowFrame().getEnd(), converter); boolean isRows = wndSpec.getWindowFrame().getWindowType() == WindowType.ROWS; w = cluster.getRexBuilder().makeOver(calciteAggFnRetType, calciteAggFn, calciteAggFnArgs, partitionKeys, ImmutableList.<RexFieldCollation> copyOf(orderKeys), lowerBound, upperBound, isRows, true, false, hiveAggInfo.m_distinct); } else { // TODO: Convert to Semantic Exception throw new RuntimeException("Unsupported window Spec"); } return new Pair<RexNode, TypeInfo>(w, wHiveRetType); } private RelNode genSelectForWindowing(QB qb, RelNode srcRel, HashSet<ColumnInfo> newColumns) throws SemanticException { getQBParseInfo(qb); WindowingSpec wSpec = (!qb.getAllWindowingSpecs().isEmpty()) ? qb.getAllWindowingSpecs() .values().iterator().next() : null; if (wSpec == null) return null; // 1. Get valid Window Function Spec wSpec.validateAndMakeEffective(); List<WindowExpressionSpec> windowExpressions = wSpec.getWindowExpressions(); if (windowExpressions == null || windowExpressions.isEmpty()) return null; RowResolver inputRR = this.relToHiveRR.get(srcRel); // 2. Get RexNodes for original Projections from below List<RexNode> projsForWindowSelOp = new ArrayList<RexNode>( HiveCalciteUtil.getProjsFromBelowAsInputRef(srcRel)); // 3. Construct new Row Resolver with everything from below. RowResolver out_rwsch = new RowResolver(); if (!RowResolver.add(out_rwsch, inputRR)) { LOG.warn("Duplicates detected when adding columns to RR: see previous message"); } // 4. 
Walk through Window Expressions & Construct RexNodes for those, // Update out_rwsch final QBParseInfo qbp = getQBParseInfo(qb); final String selClauseName = qbp.getClauseNames().iterator().next(); final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty()); for (WindowExpressionSpec wExprSpec : windowExpressions) { if (!qbp.getDestToGroupBy().isEmpty()) { // Special handling of grouping function wExprSpec.setExpression(rewriteGroupingFunctionAST( getGroupByForClause(qbp, selClauseName), wExprSpec.getExpression(), !cubeRollupGrpSetPresent)); } if (out_rwsch.getExpression(wExprSpec.getExpression()) == null) { Pair<RexNode, TypeInfo> wtp = genWindowingProj(qb, wExprSpec, srcRel); projsForWindowSelOp.add(wtp.getKey()); // 6.2.2 Update Output Row Schema ColumnInfo oColInfo = new ColumnInfo( SemanticAnalyzer.getColumnInternalName(projsForWindowSelOp.size()), wtp.getValue(), null, false); out_rwsch.putExpression(wExprSpec.getExpression(), oColInfo); newColumns.add(oColInfo); } } return genSelectRelNode(projsForWindowSelOp, out_rwsch, srcRel, windowExpressions); } private RelNode genSelectRelNode(List<RexNode> calciteColLst, RowResolver out_rwsch, RelNode srcRel) throws CalciteSemanticException { return genSelectRelNode(calciteColLst, out_rwsch, srcRel, null); } private RelNode genSelectRelNode(List<RexNode> calciteColLst, RowResolver out_rwsch, RelNode srcRel, List<WindowExpressionSpec> windowExpressions) throws CalciteSemanticException { // 1. Build Column Names Set<String> colNamesSet = new HashSet<String>(); List<ColumnInfo> cInfoLst = out_rwsch.getRowSchema().getSignature(); ArrayList<String> columnNames = new ArrayList<String>(); Map<String,String> windowToAlias = null; if (windowExpressions != null ) { windowToAlias = new HashMap<String,String>(); for (WindowExpressionSpec wes : windowExpressions) { windowToAlias.put(wes.getExpression().toStringTree().toLowerCase(), wes.getAlias()); } } String[] qualifiedColNames; String tmpColAlias; for (int i = 0; i < calciteColLst.size(); i++) { ColumnInfo cInfo = cInfoLst.get(i); qualifiedColNames = out_rwsch.reverseLookup(cInfo.getInternalName()); /* * if (qualifiedColNames[0] != null && !qualifiedColNames[0].isEmpty()) * tmpColAlias = qualifiedColNames[0] + "." + qualifiedColNames[1]; else */ tmpColAlias = qualifiedColNames[1]; if (tmpColAlias.contains(".") || tmpColAlias.contains(":")) { tmpColAlias = cInfo.getInternalName(); } // Prepend column names with '_o_' if it starts with '_c' /* * Hive treats names that start with '_c' as internalNames; so change * the names so we don't run into this issue when converting back to * Hive AST. */ if (tmpColAlias.startsWith("_c")) { tmpColAlias = "_o_" + tmpColAlias; } else if (windowToAlias != null && windowToAlias.containsKey(tmpColAlias)) { tmpColAlias = windowToAlias.get(tmpColAlias); } int suffix = 1; while (colNamesSet.contains(tmpColAlias)) { tmpColAlias = qualifiedColNames[1] + suffix; suffix++; } colNamesSet.add(tmpColAlias); columnNames.add(tmpColAlias); } // 3 Build Calcite Rel Node for project using converted projections & col // names HiveRelNode selRel = HiveProject.create(srcRel, calciteColLst, columnNames); // 4. 
Keep track of colname-to-posmap && RR for new select this.relToHiveColNameCalcitePosMap .put(selRel, buildHiveToCalciteColumnMap(out_rwsch, selRel)); this.relToHiveRR.put(selRel, out_rwsch); return selRel; } private void setQueryHints(QB qb) throws SemanticException { QBParseInfo qbp = getQBParseInfo(qb); String selClauseName = qbp.getClauseNames().iterator().next(); Tree selExpr0 = qbp.getSelForClause(selClauseName).getChild(0); if (selExpr0.getType() != HiveParser.QUERY_HINT) return; String hint = ctx.getTokenRewriteStream().toString( selExpr0.getTokenStartIndex(), selExpr0.getTokenStopIndex()); LOG.debug("Handling query hints: " + hint); ParseDriver pd = new ParseDriver(); try { ASTNode hintNode = pd.parseHint(hint); qbp.setHints(hintNode); } catch (ParseException e) { throw new SemanticException("failed to parse query hint: "+e.getMessage(), e); } } /** * NOTE: there can only be one select caluse since we don't handle multi * destination insert. * * @throws SemanticException */ /** * @param qb * @param srcRel * @param starSrcRel * @param outerNameToPosMap * @param outerRR * @param isAllColRefRewrite * when it is true, it means that it is called from group by *, where we use * genSelectLogicalPlan to rewrite * * @return RelNode: the select relnode RowResolver: i.e., originalRR, the RR after select when there is an order by. * @throws SemanticException */ private Pair<RelNode,RowResolver> genSelectLogicalPlan(QB qb, RelNode srcRel, RelNode starSrcRel, ImmutableMap<String, Integer> outerNameToPosMap, RowResolver outerRR, boolean isAllColRefRewrite) throws SemanticException { // 0. Generate a Select Node for Windowing // Exclude the newly-generated select columns from */etc. resolution. HashSet<ColumnInfo> excludedColumns = new HashSet<ColumnInfo>(); RelNode selForWindow = genSelectForWindowing(qb, srcRel, excludedColumns); srcRel = (selForWindow == null) ? srcRel : selForWindow; ArrayList<ExprNodeDesc> col_list = new ArrayList<ExprNodeDesc>(); // 1. Get Select Expression List QBParseInfo qbp = getQBParseInfo(qb); String selClauseName = qbp.getClauseNames().iterator().next(); ASTNode selExprList = qbp.getSelForClause(selClauseName); // make sure if there is subquery it is top level expression SubQueryUtils.checkForTopLevelSubqueries(selExprList); final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty()); // 2.Row resolvers for input, output RowResolver out_rwsch = new RowResolver(); Integer pos = Integer.valueOf(0); // TODO: will this also fix windowing? try RowResolver inputRR = this.relToHiveRR.get(srcRel), starRR = inputRR; inputRR.setCheckForAmbiguity(true); if (starSrcRel != null) { starRR = this.relToHiveRR.get(starSrcRel); } // 3. Query Hints // TODO: Handle Query Hints; currently we ignore them boolean selectStar = false; int posn = 0; boolean hintPresent = (selExprList.getChild(0).getType() == HiveParser.QUERY_HINT); if (hintPresent) { posn++; } // 4. Bailout if select involves Transform boolean isInTransform = (selExprList.getChild(posn).getChild(0).getType() == HiveParser.TOK_TRANSFORM); if (isInTransform) { String msg = String.format("SELECT TRANSFORM is currently not supported in CBO," + " turn off cbo to use TRANSFORM."); LOG.debug(msg); throw new CalciteSemanticException(msg, UnsupportedFeature.Select_transform); } // 5. 
Check if select involves UDTF String udtfTableAlias = null; GenericUDTF genericUDTF = null; String genericUDTFName = null; ArrayList<String> udtfColAliases = new ArrayList<String>(); ASTNode expr = (ASTNode) selExprList.getChild(posn).getChild(0); int exprType = expr.getType(); if (exprType == HiveParser.TOK_FUNCTION || exprType == HiveParser.TOK_FUNCTIONSTAR) { String funcName = TypeCheckProcFactory.DefaultExprProcessor.getFunctionText(expr, true); FunctionInfo fi = FunctionRegistry.getFunctionInfo(funcName); if (fi != null && fi.getGenericUDTF() != null) { LOG.debug("Find UDTF " + funcName); genericUDTF = fi.getGenericUDTF(); genericUDTFName = funcName; if (!fi.isNative()) { unparseTranslator.addIdentifierTranslation((ASTNode) expr.getChild(0)); } if (genericUDTF != null && (selectStar = exprType == HiveParser.TOK_FUNCTIONSTAR)) { genColListRegex(".*", null, (ASTNode) expr.getChild(0), col_list, null, inputRR, starRR, pos, out_rwsch, qb.getAliases(), false); } } } if (genericUDTF != null) { // Only support a single expression when it's a UDTF if (selExprList.getChildCount() > 1) { throw new SemanticException(generateErrorMessage( (ASTNode) selExprList.getChild(1), ErrorMsg.UDTF_MULTIPLE_EXPR.getMsg())); } ASTNode selExpr = (ASTNode) selExprList.getChild(posn); // Get the column / table aliases from the expression. Start from 1 as // 0 is the TOK_FUNCTION // column names also can be inferred from result of UDTF for (int i = 1; i < selExpr.getChildCount(); i++) { ASTNode selExprChild = (ASTNode) selExpr.getChild(i); switch (selExprChild.getType()) { case HiveParser.Identifier: udtfColAliases.add(unescapeIdentifier(selExprChild.getText().toLowerCase())); unparseTranslator.addIdentifierTranslation(selExprChild); break; case HiveParser.TOK_TABALIAS: assert (selExprChild.getChildCount() == 1); udtfTableAlias = unescapeIdentifier(selExprChild.getChild(0) .getText()); qb.addAlias(udtfTableAlias); unparseTranslator.addIdentifierTranslation((ASTNode) selExprChild .getChild(0)); break; default: throw new SemanticException("Find invalid token type " + selExprChild.getType() + " in UDTF."); } } LOG.debug("UDTF table alias is " + udtfTableAlias); LOG.debug("UDTF col aliases are " + udtfColAliases); } // 6. Iterate over all expression (after SELECT) ASTNode exprList; if (genericUDTF != null) { exprList = expr; } else { exprList = selExprList; } // For UDTF's, skip the function name to get the expressions int startPosn = genericUDTF != null ? posn + 1 : posn; for (int i = startPosn; i < exprList.getChildCount(); ++i) { // 6.1 child can be EXPR AS ALIAS, or EXPR. ASTNode child = (ASTNode) exprList.getChild(i); boolean hasAsClause = (!isInTransform) && (child.getChildCount() == 2); // 6.2 EXPR AS (ALIAS,...) parses, but is only allowed for UDTF's // This check is not needed and invalid when there is a transform b/c // the // AST's are slightly different. 
if (genericUDTF == null && child.getChildCount() > 2) { throw new SemanticException(SemanticAnalyzer.generateErrorMessage( (ASTNode) child.getChild(2), ErrorMsg.INVALID_AS.getMsg())); } String tabAlias; String colAlias; if (genericUDTF != null) { tabAlias = null; colAlias = getAutogenColAliasPrfxLbl() + i; expr = child; } else { // 6.3 Get rid of TOK_SELEXPR expr = (ASTNode) child.getChild(0); String[] colRef = SemanticAnalyzer.getColAlias(child, getAutogenColAliasPrfxLbl(), inputRR, autogenColAliasPrfxIncludeFuncName(), i); tabAlias = colRef[0]; colAlias = colRef[1]; if (hasAsClause) { unparseTranslator.addIdentifierTranslation((ASTNode) child .getChild(1)); } } Map<ASTNode, RelNode> subQueryToRelNode = new HashMap<>(); boolean isSubQuery = genSubQueryRelNode(qb, expr, srcRel, false, subQueryToRelNode); if(isSubQuery) { ExprNodeDesc subQueryExpr = genExprNodeDesc(expr, relToHiveRR.get(srcRel), outerRR, subQueryToRelNode, true); col_list.add(subQueryExpr); ColumnInfo colInfo = new ColumnInfo(SemanticAnalyzer.getColumnInternalName(pos), subQueryExpr.getWritableObjectInspector(), tabAlias, false); if (!out_rwsch.putWithCheck(tabAlias, colAlias, null, colInfo)) { throw new CalciteSemanticException("Cannot add column to RR: " + tabAlias + "." + colAlias + " => " + colInfo + " due to duplication, see previous warnings", UnsupportedFeature.Duplicates_in_RR); } pos = Integer.valueOf(pos.intValue() + 1); } else { // 6.4 Build ExprNode corresponding to colums if (expr.getType() == HiveParser.TOK_ALLCOLREF) { pos = genColListRegex(".*", expr.getChildCount() == 0 ? null : SemanticAnalyzer .getUnescapedName((ASTNode) expr.getChild(0)).toLowerCase(), expr, col_list, excludedColumns, inputRR, starRR, pos, out_rwsch, qb.getAliases(), true); selectStar = true; } else if (expr.getType() == HiveParser.TOK_TABLE_OR_COL && !hasAsClause && !inputRR.getIsExprResolver() && SemanticAnalyzer.isRegex( SemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText()), conf)) { // In case the expression is a regex COL. // This can only happen without AS clause // We don't allow this for ExprResolver - the Group By case pos = genColListRegex(SemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getText()), null, expr, col_list, excludedColumns, inputRR, starRR, pos, out_rwsch, qb.getAliases(), true); } else if (expr.getType() == HiveParser.DOT && expr.getChild(0).getType() == HiveParser.TOK_TABLE_OR_COL && inputRR.hasTableAlias(SemanticAnalyzer.unescapeIdentifier(expr.getChild(0) .getChild(0).getText().toLowerCase())) && !hasAsClause && !inputRR.getIsExprResolver() && SemanticAnalyzer.isRegex( SemanticAnalyzer.unescapeIdentifier(expr.getChild(1).getText()), conf)) { // In case the expression is TABLE.COL (col can be regex). 
// This can only happen without AS clause // We don't allow this for ExprResolver - the Group By case pos = genColListRegex( SemanticAnalyzer.unescapeIdentifier(expr.getChild(1).getText()), SemanticAnalyzer.unescapeIdentifier(expr.getChild(0).getChild(0).getText() .toLowerCase()), expr, col_list, excludedColumns, inputRR, starRR, pos, out_rwsch, qb.getAliases(), true); } else if (ParseUtils.containsTokenOfType(expr, HiveParser.TOK_FUNCTIONDI) && !(srcRel instanceof HiveAggregate)) { // Likely a malformed query eg, select hash(distinct c1) from t1; throw new CalciteSemanticException("Distinct without an aggregation.", UnsupportedFeature.Distinct_without_an_aggreggation); } else { // Case when this is an expression TypeCheckCtx tcCtx = new TypeCheckCtx(inputRR); // We allow stateful functions in the SELECT list (but nowhere else) tcCtx.setAllowStatefulFunctions(true); if (!qbp.getDestToGroupBy().isEmpty()) { // Special handling of grouping function expr = rewriteGroupingFunctionAST(getGroupByForClause(qbp, selClauseName), expr, !cubeRollupGrpSetPresent); } ExprNodeDesc exp = genExprNodeDesc(expr, inputRR, tcCtx); String recommended = recommendName(exp, colAlias); if (recommended != null && out_rwsch.get(null, recommended) == null) { colAlias = recommended; } col_list.add(exp); ColumnInfo colInfo = new ColumnInfo(SemanticAnalyzer.getColumnInternalName(pos), exp.getWritableObjectInspector(), tabAlias, false); colInfo.setSkewedCol((exp instanceof ExprNodeColumnDesc) ? ((ExprNodeColumnDesc) exp) .isSkewedCol() : false); out_rwsch.put(tabAlias, colAlias, colInfo); pos = Integer.valueOf(pos.intValue() + 1); } } } selectStar = selectStar && exprList.getChildCount() == posn + 1; // 7. Convert Hive projections to Calcite List<RexNode> calciteColLst = new ArrayList<RexNode>(); RexNodeConverter rexNodeConv = new RexNodeConverter(cluster, srcRel.getRowType(), outerNameToPosMap, buildHiveColNameToInputPosMap(col_list, inputRR), relToHiveRR.get(srcRel), outerRR, 0, false, subqueryId); for (ExprNodeDesc colExpr : col_list) { calciteColLst.add(rexNodeConv.convert(colExpr)); } // 8. Build Calcite Rel RelNode outputRel = null; if (genericUDTF != null) { // The basic idea for CBO support of UDTF is to treat UDTF as a special // project. // In AST return path, as we just need to generate a SEL_EXPR, we just // need to remember the expressions and the alias. // In OP return path, we need to generate a SEL and then a UDTF // following old semantic analyzer. outputRel = genUDTFPlan(genericUDTF, genericUDTFName, udtfTableAlias, udtfColAliases, qb, calciteColLst, out_rwsch, srcRel); } else { String dest = qbp.getClauseNames().iterator().next(); ASTNode obAST = qbp.getOrderByForClause(dest); RowResolver originalRR = null; // We only support limited unselected column following by order by. // TODO: support unselected columns in genericUDTF and windowing functions. // We examine the order by in this query block and adds in column needed // by order by in select list. if (obAST != null && !(selForWindow != null && selExprList.getToken().getType() == HiveParser.TOK_SELECTDI) && !isAllColRefRewrite) { // 1. 
OB Expr sanity test // in strict mode, in the presence of order by, limit must be // specified Integer limit = qb.getParseInfo().getDestLimit(dest); if (limit == null) { String error = StrictChecks.checkNoLimit(conf); if (error != null) { throw new SemanticException(SemanticAnalyzer.generateErrorMessage(obAST, error)); } } List<RexNode> originalInputRefs = Lists.transform(srcRel.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() { @Override public RexNode apply(RelDataTypeField input) { return new RexInputRef(input.getIndex(), input.getType()); } }); originalRR = out_rwsch.duplicate(); for (int i = 0; i < inputRR.getColumnInfos().size(); i++) { ColumnInfo colInfo = new ColumnInfo(inputRR.getColumnInfos().get(i)); String internalName = SemanticAnalyzer.getColumnInternalName(out_rwsch.getColumnInfos() .size() + i); colInfo.setInternalName(internalName); // if there is any confict, then we do not generate it in the new select // otherwise, we add it into the calciteColLst and generate the new select if (!out_rwsch.putWithCheck(colInfo.getTabAlias(), colInfo.getAlias(), internalName, colInfo)) { LOG.trace("Column already present in RR. skipping."); } else { calciteColLst.add(originalInputRefs.get(i)); } } outputRel = genSelectRelNode(calciteColLst, out_rwsch, srcRel); // outputRel is the generated augmented select with extra unselected // columns, and originalRR is the original generated select return new Pair<RelNode, RowResolver>(outputRel, originalRR); } else { outputRel = genSelectRelNode(calciteColLst, out_rwsch, srcRel); } } // 9. Handle select distinct as GBY if there exist windowing functions if (selForWindow != null && selExprList.getToken().getType() == HiveParser.TOK_SELECTDI) { ImmutableBitSet groupSet = ImmutableBitSet.range(outputRel.getRowType().getFieldList().size()); outputRel = new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), outputRel, groupSet, null, new ArrayList<AggregateCall>()); RowResolver groupByOutputRowResolver = new RowResolver(); for (int i = 0; i < out_rwsch.getColumnInfos().size(); i++) { ColumnInfo colInfo = out_rwsch.getColumnInfos().get(i); ColumnInfo newColInfo = new ColumnInfo(colInfo.getInternalName(), colInfo.getType(), colInfo.getTabAlias(), colInfo.getIsVirtualCol()); groupByOutputRowResolver.put(colInfo.getTabAlias(), colInfo.getAlias(), newColInfo); } relToHiveColNameCalcitePosMap.put(outputRel, buildHiveToCalciteColumnMap(groupByOutputRowResolver, outputRel)); this.relToHiveRR.put(outputRel, groupByOutputRowResolver); } inputRR.setCheckForAmbiguity(false); return new Pair<RelNode, RowResolver>(outputRel, null); } private RelNode genUDTFPlan(GenericUDTF genericUDTF, String genericUDTFName, String outputTableAlias, ArrayList<String> colAliases, QB qb, List<RexNode> selectColLst, RowResolver selectRR, RelNode input) throws SemanticException { // No GROUP BY / DISTRIBUTE BY / SORT BY / CLUSTER BY QBParseInfo qbp = qb.getParseInfo(); if (!qbp.getDestToGroupBy().isEmpty()) { throw new SemanticException(ErrorMsg.UDTF_NO_GROUP_BY.getMsg()); } if (!qbp.getDestToDistributeBy().isEmpty()) { throw new SemanticException(ErrorMsg.UDTF_NO_DISTRIBUTE_BY.getMsg()); } if (!qbp.getDestToSortBy().isEmpty()) { throw new SemanticException(ErrorMsg.UDTF_NO_SORT_BY.getMsg()); } if (!qbp.getDestToClusterBy().isEmpty()) { throw new SemanticException(ErrorMsg.UDTF_NO_CLUSTER_BY.getMsg()); } if (!qbp.getAliasToLateralViews().isEmpty()) { throw new SemanticException(ErrorMsg.UDTF_LATERAL_VIEW.getMsg()); } LOG.debug("Table alias: " + 
outputTableAlias + " Col aliases: " + colAliases); // Use the RowResolver from the input operator to generate a input // ObjectInspector that can be used to initialize the UDTF. Then, the // resulting output object inspector can be used to make the RowResolver // for the UDTF operator ArrayList<ColumnInfo> inputCols = selectRR.getColumnInfos(); // Create the object inspector for the input columns and initialize the // UDTF ArrayList<String> colNames = new ArrayList<String>(); ObjectInspector[] colOIs = new ObjectInspector[inputCols.size()]; for (int i = 0; i < inputCols.size(); i++) { colNames.add(inputCols.get(i).getInternalName()); colOIs[i] = inputCols.get(i).getObjectInspector(); } StandardStructObjectInspector rowOI = ObjectInspectorFactory .getStandardStructObjectInspector(colNames, Arrays.asList(colOIs)); StructObjectInspector outputOI = genericUDTF.initialize(rowOI); int numUdtfCols = outputOI.getAllStructFieldRefs().size(); if (colAliases.isEmpty()) { // user did not specfied alias names, infer names from outputOI for (StructField field : outputOI.getAllStructFieldRefs()) { colAliases.add(field.getFieldName()); } } // Make sure that the number of column aliases in the AS clause matches // the number of columns output by the UDTF int numSuppliedAliases = colAliases.size(); if (numUdtfCols != numSuppliedAliases) { throw new SemanticException(ErrorMsg.UDTF_ALIAS_MISMATCH.getMsg("expected " + numUdtfCols + " aliases " + "but got " + numSuppliedAliases)); } // Generate the output column info's / row resolver using internal names. ArrayList<ColumnInfo> udtfCols = new ArrayList<ColumnInfo>(); Iterator<String> colAliasesIter = colAliases.iterator(); for (StructField sf : outputOI.getAllStructFieldRefs()) { String colAlias = colAliasesIter.next(); assert (colAlias != null); // Since the UDTF operator feeds into a LVJ operator that will rename // all the internal names, we can just use field name from the UDTF's OI // as the internal name ColumnInfo col = new ColumnInfo(sf.getFieldName(), TypeInfoUtils.getTypeInfoFromObjectInspector(sf.getFieldObjectInspector()), outputTableAlias, false); udtfCols.add(col); } // Create the row resolver for this operator from the output columns RowResolver out_rwsch = new RowResolver(); for (int i = 0; i < udtfCols.size(); i++) { out_rwsch.put(outputTableAlias, colAliases.get(i), udtfCols.get(i)); } // Add the UDTFOperator to the operator DAG RelTraitSet traitSet = TraitsUtil.getDefaultTraitSet(cluster); // Build row type from field <type, name> RelDataType retType = TypeConverter.getType(cluster, out_rwsch, null); Builder<RelDataType> argTypeBldr = ImmutableList.<RelDataType> builder(); RexBuilder rexBuilder = cluster.getRexBuilder(); RelDataTypeFactory dtFactory = rexBuilder.getTypeFactory(); RowSchema rs = selectRR.getRowSchema(); for (ColumnInfo ci : rs.getSignature()) { argTypeBldr.add(TypeConverter.convert(ci.getType(), dtFactory)); } SqlOperator calciteOp = SqlFunctionConverter.getCalciteOperator(genericUDTFName, genericUDTF, argTypeBldr.build(), retType); // Hive UDTF only has a single input List<RelNode> list = new ArrayList<>(); list.add(input); RexNode rexNode = cluster.getRexBuilder().makeCall(calciteOp, selectColLst); RelNode udtf = HiveTableFunctionScan.create(cluster, traitSet, list, rexNode, null, retType, null); // Add new rel & its RR to the maps relToHiveColNameCalcitePosMap.put(udtf, this.buildHiveToCalciteColumnMap(out_rwsch, udtf)); relToHiveRR.put(udtf, out_rwsch); return udtf; } private RelNode genLogicalPlan(QBExpr qbexpr) throws 
SemanticException { switch (qbexpr.getOpcode()) { case NULLOP: return genLogicalPlan(qbexpr.getQB(), false, null, null); case UNION: case INTERSECT: case INTERSECTALL: case EXCEPT: case EXCEPTALL: RelNode qbexpr1Ops = genLogicalPlan(qbexpr.getQBExpr1()); RelNode qbexpr2Ops = genLogicalPlan(qbexpr.getQBExpr2()); return genSetOpLogicalPlan(qbexpr.getOpcode(), qbexpr.getAlias(), qbexpr.getQBExpr1() .getAlias(), qbexpr1Ops, qbexpr.getQBExpr2().getAlias(), qbexpr2Ops); default: return null; } } private RelNode genLogicalPlan(QB qb, boolean outerMostQB, ImmutableMap<String, Integer> outerNameToPosMap, RowResolver outerRR) throws SemanticException { RelNode srcRel = null; RelNode filterRel = null; RelNode gbRel = null; RelNode gbHavingRel = null; RelNode selectRel = null; RelNode obRel = null; RelNode limitRel = null; // First generate all the opInfos for the elements in the from clause Map<String, RelNode> aliasToRel = new HashMap<String, RelNode>(); // 0. Check if we can handle the SubQuery; // canHandleQbForCbo returns null if the query can be handled. String reason = canHandleQbForCbo(queryProperties, conf, false, LOG.isDebugEnabled(), qb); if (reason != null) { String msg = "CBO can not handle Sub Query"; if (LOG.isDebugEnabled()) { LOG.debug(msg + " because it: " + reason); } throw new CalciteSemanticException(msg, UnsupportedFeature.Subquery); } // 1. Build Rel For Src (SubQuery, TS, Join) // 1.1. Recurse over the subqueries to fill the subquery part of the plan for (String subqAlias : qb.getSubqAliases()) { QBExpr qbexpr = qb.getSubqForAlias(subqAlias); RelNode relNode = genLogicalPlan(qbexpr); aliasToRel.put(subqAlias, relNode); if (qb.getViewToTabSchema().containsKey(subqAlias)) { if (relNode instanceof HiveProject) { if (this.viewProjectToTableSchema == null) { this.viewProjectToTableSchema = new LinkedHashMap<>(); } viewProjectToTableSchema.put((HiveProject) relNode, qb.getViewToTabSchema().get(subqAlias)); } else { throw new SemanticException("View " + subqAlias + " is corresponding to " + relNode.toString() + ", rather than a HiveProject."); } } } // 1.2 Recurse over all the source tables for (String tableAlias : qb.getTabAliases()) { RelNode op = genTableLogicalPlan(tableAlias, qb); aliasToRel.put(tableAlias, op); } if (aliasToRel.isEmpty()) { // // This may happen for queries like select 1; (no source table) qb.getMetaData().setSrcForAlias(DUMMY_TABLE, getDummyTable()); qb.addAlias(DUMMY_TABLE); qb.setTabAlias(DUMMY_TABLE, DUMMY_TABLE); RelNode op = genTableLogicalPlan(DUMMY_TABLE, qb); aliasToRel.put(DUMMY_TABLE, op); } // 1.3 process join // 1.3.1 process hints setQueryHints(qb); // 1.3.2 process the actual join if (qb.getParseInfo().getJoinExpr() != null) { srcRel = genJoinLogicalPlan(qb.getParseInfo().getJoinExpr(), aliasToRel); } else { // If no join then there should only be either 1 TS or 1 SubQuery Map.Entry<String, RelNode> uniqueAliasToRel = aliasToRel.entrySet().iterator().next(); srcRel = uniqueAliasToRel.getValue(); // If it contains a LV List<ASTNode> lateralViews = getQBParseInfo(qb).getAliasToLateralViews().get(uniqueAliasToRel.getKey()); if (lateralViews != null) { srcRel = genLateralViewPlans(Iterables.getLast(lateralViews), aliasToRel); } } // 2. Build Rel for where Clause filterRel = genFilterLogicalPlan(qb, srcRel, aliasToRel, outerNameToPosMap, outerRR, false); srcRel = (filterRel == null) ? srcRel : filterRel; RelNode starSrcRel = srcRel; // 3. Build Rel for GB Clause gbRel = genGBLogicalPlan(qb, srcRel); srcRel = (gbRel == null) ? srcRel : gbRel; // 4. 
Build Rel for GB Having Clause gbHavingRel = genGBHavingLogicalPlan(qb, srcRel, aliasToRel); srcRel = (gbHavingRel == null) ? srcRel : gbHavingRel; // 5. Build Rel for Select Clause Pair<RelNode, RowResolver> selPair = genSelectLogicalPlan(qb, srcRel, starSrcRel, outerNameToPosMap, outerRR, false); selectRel = selPair.getKey(); srcRel = (selectRel == null) ? srcRel : selectRel; // 6. Build Rel for OB Clause obRel = genOBLogicalPlan(qb, selPair, outerMostQB); srcRel = (obRel == null) ? srcRel : obRel; // 7. Build Rel for Limit Clause limitRel = genLimitLogicalPlan(qb, srcRel); srcRel = (limitRel == null) ? srcRel : limitRel; // 8. Incase this QB corresponds to subquery then modify its RR to point // to subquery alias. if (qb.getParseInfo().getAlias() != null) { RowResolver rr = this.relToHiveRR.get(srcRel); RowResolver newRR = new RowResolver(); String alias = qb.getParseInfo().getAlias(); for (ColumnInfo colInfo : rr.getColumnInfos()) { String name = colInfo.getInternalName(); String[] tmp = rr.reverseLookup(name); if ("".equals(tmp[0]) || tmp[1] == null) { // ast expression is not a valid column name for table tmp[1] = colInfo.getInternalName(); } ColumnInfo newCi = new ColumnInfo(colInfo); newCi.setTabAlias(alias); newRR.putWithCheck(alias, tmp[1], colInfo.getInternalName(), newCi); } relToHiveRR.put(srcRel, newRR); relToHiveColNameCalcitePosMap.put(srcRel, buildHiveToCalciteColumnMap(newRR, srcRel)); } if (LOG.isDebugEnabled()) { LOG.debug("Created Plan for Query Block " + qb.getId()); } setQB(qb); return srcRel; } private RelNode genGBHavingLogicalPlan(QB qb, RelNode srcRel, Map<String, RelNode> aliasToRel) throws SemanticException { RelNode gbFilter = null; QBParseInfo qbp = getQBParseInfo(qb); String destClauseName = qbp.getClauseNames().iterator().next(); ASTNode havingClause = qbp.getHavingForClause(qbp.getClauseNames().iterator().next()); if (havingClause != null) { if (!(srcRel instanceof HiveAggregate)) { // ill-formed query like select * from t1 having c1 > 0; throw new CalciteSemanticException("Having clause without any group-by.", UnsupportedFeature.Having_clause_without_any_groupby); } ASTNode targetNode = (ASTNode) havingClause.getChild(0); validateNoHavingReferenceToAlias(qb, targetNode); if (!qbp.getDestToGroupBy().isEmpty()) { final boolean cubeRollupGrpSetPresent = (!qbp.getDestRollups().isEmpty() || !qbp.getDestGroupingSets().isEmpty() || !qbp.getDestCubes().isEmpty()); // Special handling of grouping function targetNode = rewriteGroupingFunctionAST(getGroupByForClause(qbp, destClauseName), targetNode, !cubeRollupGrpSetPresent); } gbFilter = genFilterRelNode(qb, targetNode, srcRel, aliasToRel, null, null, true); } return gbFilter; } /* * Bail if having clause uses Select Expression aliases for Aggregation * expressions. We could do what Hive does. But this is non standard * behavior. Making sure this doesn't cause issues when translating through * Calcite is not worth it. */ private void validateNoHavingReferenceToAlias(QB qb, ASTNode havingExpr) throws CalciteSemanticException { QBParseInfo qbPI = qb.getParseInfo(); Map<ASTNode, String> exprToAlias = qbPI.getAllExprToColumnAlias(); /* * a mouthful, but safe: - a QB is guaranteed to have atleast 1 * destination - we don't support multi insert, so picking the first dest. 
*/ Set<String> aggExprs = qbPI.getDestToAggregationExprs().values().iterator().next().keySet(); for (Map.Entry<ASTNode, String> selExpr : exprToAlias.entrySet()) { ASTNode selAST = selExpr.getKey(); if (!aggExprs.contains(selAST.toStringTree().toLowerCase())) { continue; } final String aliasToCheck = selExpr.getValue(); final Set<Object> aliasReferences = new HashSet<Object>(); TreeVisitorAction action = new TreeVisitorAction() { @Override public Object pre(Object t) { if (ParseDriver.adaptor.getType(t) == HiveParser.TOK_TABLE_OR_COL) { Object c = ParseDriver.adaptor.getChild(t, 0); if (c != null && ParseDriver.adaptor.getType(c) == HiveParser.Identifier && ParseDriver.adaptor.getText(c).equals(aliasToCheck)) { aliasReferences.add(t); } } return t; } @Override public Object post(Object t) { return t; } }; new TreeVisitor(ParseDriver.adaptor).visit(havingExpr, action); if (aliasReferences.size() > 0) { String havingClause = ctx.getTokenRewriteStream().toString( havingExpr.getTokenStartIndex(), havingExpr.getTokenStopIndex()); String msg = String.format("Encountered Select alias '%s' in having clause '%s'" + " This non standard behavior is not supported with cbo on." + " Turn off cbo for these queries.", aliasToCheck, havingClause); LOG.debug(msg); throw new CalciteSemanticException(msg, UnsupportedFeature.Select_alias_in_having_clause); } } } private ImmutableMap<String, Integer> buildHiveToCalciteColumnMap(RowResolver rr, RelNode rNode) { ImmutableMap.Builder<String, Integer> b = new ImmutableMap.Builder<String, Integer>(); for (ColumnInfo ci : rr.getRowSchema().getSignature()) { b.put(ci.getInternalName(), rr.getPosition(ci.getInternalName())); } return b.build(); } private ImmutableMap<String, Integer> buildHiveColNameToInputPosMap( List<ExprNodeDesc> col_list, RowResolver inputRR) { // Build a map of Hive column Names (ExprNodeColumnDesc Name) // to the positions of those projections in the input Map<Integer, ExprNodeDesc> hashCodeTocolumnDescMap = new HashMap<Integer, ExprNodeDesc>(); ExprNodeDescUtils.getExprNodeColumnDesc(col_list, hashCodeTocolumnDescMap); ImmutableMap.Builder<String, Integer> hiveColNameToInputPosMapBuilder = new ImmutableMap.Builder<String, Integer>(); String exprNodecolName; for (ExprNodeDesc exprDesc : hashCodeTocolumnDescMap.values()) { exprNodecolName = ((ExprNodeColumnDesc) exprDesc).getColumn(); hiveColNameToInputPosMapBuilder.put(exprNodecolName, inputRR.getPosition(exprNodecolName)); } return hiveColNameToInputPosMapBuilder.build(); } private QBParseInfo getQBParseInfo(QB qb) throws CalciteSemanticException { return qb.getParseInfo(); } private List<String> getTabAliases(RowResolver inputRR) { List<String> tabAliases = new ArrayList<String>(); // TODO: this should be // unique for (ColumnInfo ci : inputRR.getColumnInfos()) { tabAliases.add(ci.getTabAlias()); } return tabAliases; } } private enum TableType { DRUID, NATIVE, JDBC } }
package edu.hm.hafner.analysis.parser.checkstyle;

import java.io.IOException;
import java.io.Reader;

import org.apache.commons.digester3.Digester;
import org.apache.commons.lang3.StringUtils;
import org.xml.sax.SAXException;

import edu.hm.hafner.analysis.IssueBuilder;
import edu.hm.hafner.analysis.IssueParser;
import edu.hm.hafner.analysis.ParsingException;
import edu.hm.hafner.analysis.ReaderFactory;
import edu.hm.hafner.analysis.Report;
import edu.hm.hafner.analysis.SecureDigester;
import edu.umd.cs.findbugs.annotations.Nullable;

/**
 * A parser for Checkstyle XML files.
 *
 * @author Ullrich Hafner
 */
public class CheckStyleParser extends IssueParser {
    private static final long serialVersionUID = -3187275729854832128L;

    @Override
    public Report parse(final ReaderFactory readerFactory) throws ParsingException {
        Digester digester = new SecureDigester(CheckStyleParser.class);

        String rootXPath = "checkstyle";
        digester.addObjectCreate(rootXPath, CheckStyle.class);
        digester.addSetProperties(rootXPath);

        String fileXPath = "checkstyle/file";
        digester.addObjectCreate(fileXPath, File.class);
        digester.addSetProperties(fileXPath);
        digester.addSetNext(fileXPath, "addFile", File.class.getName());

        String bugXPath = "checkstyle/file/error";
        digester.addObjectCreate(bugXPath, Error.class);
        digester.addSetProperties(bugXPath);
        digester.addSetNext(bugXPath, "addError", Error.class.getName());

        try (Reader reader = readerFactory.create()) {
            CheckStyle checkStyle = digester.parse(reader);
            if (checkStyle == null) {
                throw new ParsingException("Input stream is not a Checkstyle file.");
            }
            return convert(checkStyle);
        }
        catch (IOException | SAXException exception) {
            throw new ParsingException(exception);
        }
    }

    /**
     * Converts the internal Checkstyle structure to the issues API.
     *
     * @param collection
     *         the parsed Checkstyle XML structure
     *
     * @return a report containing one issue per Checkstyle error
     */
    private Report convert(final CheckStyle collection) {
        Report report = new Report();
        for (File file : collection.getFiles()) {
            if (isValidWarning(file)) {
                for (Error error : file.getErrors()) {
                    IssueBuilder builder = new IssueBuilder();
                    builder.guessSeverity(error.getSeverity());
                    String source = error.getSource();
                    builder.setType(getType(source));
                    builder.setCategory(getCategory(source));
                    builder.setMessage(error.getMessage());
                    builder.setLineStart(error.getLine());
                    builder.setFileName(file.getName());
                    builder.setColumnStart(error.getColumn());
                    report.add(builder.build());
                }
            }
        }
        return report;
    }

    private String getCategory(@Nullable final String source) {
        return StringUtils.capitalize(getType(StringUtils.substringBeforeLast(source, ".")));
    }

    private String getType(@Nullable final String source) {
        return StringUtils.substringAfterLast(source, ".");
    }

    /**
     * Returns {@code true} if the warnings in this file can be processed by the Checkstyle plug-in,
     * {@code false} otherwise.
     *
     * @param file
     *         the file to check
     *
     * @return {@code true} if the warnings in this file are valid
     */
    private boolean isValidWarning(final File file) {
        return !StringUtils.endsWith(file.getName(), "package.html");
    }
}
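// --- Illustrative usage sketch (not part of the original file above) ---
// A minimal example of how CheckStyleParser might be driven end to end. It
// assumes the analysis-model library provides FileReaderFactory taking a Path;
// that name and the result file path are illustrative assumptions.
import java.nio.file.Paths;

import edu.hm.hafner.analysis.FileReaderFactory;
import edu.hm.hafner.analysis.Report;
import edu.hm.hafner.analysis.parser.checkstyle.CheckStyleParser;

class CheckStyleParserDemo {
    public static void main(String[] args) {
        // Parse a Checkstyle XML result file into a Report of issues.
        Report report = new CheckStyleParser()
                .parse(new FileReaderFactory(Paths.get("checkstyle-result.xml")));
        // Print every parsed issue (file name, line, category, message, ...).
        report.forEach(issue -> System.out.println(issue));
    }
}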
package me.kevinnovak.voidteleport;

import java.util.ArrayList;
import java.util.List;

import org.bukkit.ChatColor;

/**
 * Translates '&'-style color codes into Bukkit ChatColor codes.
 */
public class ColorConverter {

    /** Converts a single string, replacing '&' color codes with ChatColor codes. */
    String convert(String toConvert) {
        return ChatColor.translateAlternateColorCodes('&', toConvert);
    }

    /** Converts each string in the list, replacing '&' color codes with ChatColor codes. */
    List<String> convert(List<String> toConvert) {
        List<String> translatedColors = new ArrayList<String>();
        for (String stringToTranslate : toConvert) {
            translatedColors.add(ChatColor.translateAlternateColorCodes('&', stringToTranslate));
        }
        return translatedColors;
    }
}
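// --- Illustrative usage sketch (not part of the original file above) ---
// Demonstrates ColorConverter on a single string and on a list. The sample
// strings are made up for illustration; the demo lives in the same package
// because the convert methods are package-private.
package me.kevinnovak.voidteleport;

import java.util.Arrays;
import java.util.List;

class ColorConverterDemo {
    public static void main(String[] args) {
        ColorConverter converter = new ColorConverter();
        // '&a' (green) and '&l' (bold) become the corresponding ChatColor codes.
        String colored = converter.convert("&aTeleported &lsuccessfully!");
        List<String> coloredLines = converter.convert(
                Arrays.asList("&cLine one", "&bLine two"));
        System.out.println(colored);
        coloredLines.forEach(System.out::println);
    }
}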
//package xxx.yyy.zzz.nanotificationcenter;

import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;

import android.content.ContentValues;

public enum NANotificationCenter {
    // Only instance
    INSTANCE;

    // Map of notifiable observers keyed by notification name
    private final Map<String, HashSet<NANotifiable>> notifiable;

    /*
     * Initializes the NotificationCenter.
     */
    NANotificationCenter() {
        notifiable = new HashMap<String, HashSet<NANotifiable>>();
    }

    /*
     * Registers a notifiable observer for the given notification name.
     */
    public void addNotifiable(NANotifiable observer, String forNotificationName) {
        if (notifiable.containsKey(forNotificationName)) {
            notifiable.get(forNotificationName).add(observer);
        } else {
            HashSet<NANotifiable> thisNotificationObservers = new HashSet<NANotifiable>();
            thisNotificationObservers.add(observer);
            notifiable.put(forNotificationName, thisNotificationObservers);
        }
    }

    /*
     * Removes a notifiable observer for the given notification name.
     */
    public void removeNotifiable(NANotifiable observer, String forNotificationName) {
        if (notifiable.containsKey(forNotificationName)) {
            notifiable.get(forNotificationName).remove(observer);
        }
    }

    /*
     * Notifies all observers registered for the given notification name.
     * The null check avoids a NullPointerException when no observer has
     * been registered for the notification.
     */
    public void postNotification(String notificationName, ContentValues message) {
        HashSet<NANotifiable> observers = notifiable.get(notificationName);
        if (observers == null) {
            return;
        }
        for (NANotifiable notifiableElement : observers) {
            notifiableElement.getNotification(notificationName, message);
        }
    }
}
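// --- Illustrative usage sketch (not part of the original file above) ---
// Shows the register/post/remove round trip. The NANotifiable interface is not
// included in the snippet above; its getNotification(String, ContentValues)
// shape is inferred from the postNotification(...) call site and is an
// assumption, as is the notification name used here.
import android.content.ContentValues;

class NANotificationCenterDemo implements NANotifiable {

    @Override
    public void getNotification(String notificationName, ContentValues message) {
        // React to the posted notification here.
        System.out.println("Received " + notificationName + ": " + message);
    }

    void run() {
        // Register this object for a named notification.
        NANotificationCenter.INSTANCE.addNotifiable(this, "user.loggedIn");

        // Post the notification with a payload; all registered observers fire.
        ContentValues payload = new ContentValues();
        payload.put("userId", 42);
        NANotificationCenter.INSTANCE.postNotification("user.loggedIn", payload);

        // Deregister when no longer interested.
        NANotificationCenter.INSTANCE.removeNotifiable(this, "user.loggedIn");
    }
}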
/*
 * Copyright 2000-2021 Vaadin Ltd.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package com.vaadin.flow.webcomponent;

import org.junit.Assert;
import org.junit.Test;
import org.openqa.selenium.By;

import com.vaadin.flow.testutil.ChromeBrowserTest;

public class DefaultValueInitializationIT extends ChromeBrowserTest
        implements HasById {
    @Override
    protected String getTestPath() {
        return Constants.PAGE_CONTEXT + "/defaultValue.html";
    }

    @Test
    public void defaultValues_areSetToCorrectValues_withCorrectUpdateCounts() {
        open();

        /*
         * The page contains three instances of
         * DefaultValueInitializationComponents (default-value-init). id:init-1
         * will have the default value set in Java, id:init-2 will have default
         * value set by an attribute, and id:init-3 will have a value updated by
         * property change
         */

        // Java default
        Assert.assertEquals("Java default value is correct", "1",
                value("init-1"));
        Assert.assertEquals("Java default's counter is correct", "1",
                counter("init-1"));
        // JS default
        Assert.assertEquals("JS default value is correct", "2",
                value("init-2"));
        Assert.assertEquals("JS default's counter is correct", "1",
                counter("init-2"));
        // Updated property
        Assert.assertEquals("Property updated default value is correct", "3",
                value("init-3"));
        Assert.assertEquals("Property updated default's counter is correct",
                "1", counter("init-3"));

        // Verify that counters actually update by clicking a button which
        // updates all the properties
        findElement(By.id("update-properties")).click();

        // Java default
        Assert.assertEquals("Java default's value is changed", "4",
                value("init-1"));
        Assert.assertEquals("Java default's counter increases", "2",
                counter("init-1"));
        // JS default
        Assert.assertEquals("JS default's value is changed", "4",
                value("init-2"));
        Assert.assertEquals("JS default's counter increases", "2",
                counter("init-2"));
        // Updated property
        Assert.assertEquals("Property updated default's value is changed", "4",
                value("init-3"));
        Assert.assertEquals("Property updated default's counter increases",
                "2", counter("init-3"));
    }

    private String value(String componentId) {
        return byId(componentId, "value").getText();
    }

    private String counter(String componentId) {
        return byId(componentId, "counter").getText();
    }
}
/*
 * The MIT License
 * Copyright © 2014-2021 Ilkka Seppälä
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
 * THE SOFTWARE.
 */
package com.iluwatar.featuretoggle;

import com.iluwatar.featuretoggle.pattern.Service;
import com.iluwatar.featuretoggle.pattern.propertiesversion.PropertiesFeatureToggleVersion;
import com.iluwatar.featuretoggle.pattern.tieredversion.TieredFeatureToggleVersion;
import com.iluwatar.featuretoggle.user.User;
import com.iluwatar.featuretoggle.user.UserGroup;
import lombok.extern.slf4j.Slf4j;

import java.util.Properties;

/**
 * The Feature Toggle pattern allows complete code paths to be turned on or off with ease. Features
 * can be controlled either by dynamic inputs such as {@link User} information or by
 * {@link Properties}. The App below contains two examples. First, the {@link Properties} version
 * of the feature toggle, where the enhanced, personalised version of the welcome message is turned
 * either on or off at instance creation. This method is not as dynamic as the {@link User} driven
 * version, where the personalised welcome message feature depends on the {@link UserGroup} the
 * {@link User} is in: if the user is a member of the {@link UserGroup#isPaid(User)} group, they
 * get an enhanced version of the welcome message.
 *
 * <p>Note that this pattern can easily introduce code complexity, and if not kept in check can
 * result in redundant unmaintained code within the codebase.
 */
@Slf4j
public class App {

  /**
   * Block 1 shows the {@link PropertiesFeatureToggleVersion} being run with {@link Properties}
   * setting the feature toggle to enabled.
   *
   * <p>Block 2 shows the {@link PropertiesFeatureToggleVersion} being run with {@link Properties}
   * setting the feature toggle to disabled. Notice the difference in the printed welcome message:
   * the username is not included.
   *
   * <p>Block 3 shows the {@link
   * com.iluwatar.featuretoggle.pattern.tieredversion.TieredFeatureToggleVersion} being set up with
   * two users: one on the free level, the other on the paid level. When {@link
   * Service#getWelcomeMessage(User)} is called with the paid {@link User}, note that the welcome
   * message contains their username, while the same service call with the free tier user is more
   * generic. No username is printed.
   *
   * @see User
   * @see UserGroup
   * @see Service
   * @see PropertiesFeatureToggleVersion
   * @see com.iluwatar.featuretoggle.pattern.tieredversion.TieredFeatureToggleVersion
   */
  public static void main(String[] args) {

    final var properties = new Properties();
    properties.put("enhancedWelcome", true);
    var service = new PropertiesFeatureToggleVersion(properties);
    final var welcomeMessage = service.getWelcomeMessage(new User("Jamie No Code"));
    LOGGER.info(welcomeMessage);

    // ---------------------------------------------

    final var turnedOff = new Properties();
    turnedOff.put("enhancedWelcome", false);
    var turnedOffService = new PropertiesFeatureToggleVersion(turnedOff);
    final var welcomeMessageTurnedOff = turnedOffService.getWelcomeMessage(new User("Jamie No Code"));
    LOGGER.info(welcomeMessageTurnedOff);

    // --------------------------------------------

    var service2 = new TieredFeatureToggleVersion();

    final var paidUser = new User("Jamie Coder");
    final var freeUser = new User("Alan Defect");

    UserGroup.addUserToPaidGroup(paidUser);
    UserGroup.addUserToFreeGroup(freeUser);

    final var welcomeMessagePaidUser = service2.getWelcomeMessage(paidUser);
    final var welcomeMessageFreeUser = service2.getWelcomeMessage(freeUser);
    LOGGER.info(welcomeMessageFreeUser);
    LOGGER.info(welcomeMessagePaidUser);
  }
}
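/*
 * A minimal sketch of how a properties-driven toggle like the
 * PropertiesFeatureToggleVersion used above might look internally. This is an
 * illustration only, not the project's actual implementation: the property
 * key "enhancedWelcome" is taken from the App class, but the class and field
 * names here are hypothetical.
 */
import java.util.Properties;

class PropertiesToggleSketch {

    private final boolean enhanced;

    PropertiesToggleSketch(Properties properties) {
        // Read the toggle once at construction; the feature state is then
        // fixed for the lifetime of this service instance, which is why the
        // properties-driven variant is less dynamic than the tiered one.
        this.enhanced = Boolean.TRUE.equals(properties.get("enhancedWelcome"));
    }

    String getWelcomeMessage(String userName) {
        // The toggle simply selects between two complete code paths.
        return enhanced
                ? "Welcome " + userName + ". You're using the enhanced welcome message."
                : "Welcome to the application.";
    }
}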
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package freemarker.core; /** * Represents the CSS output format (MIME type "text/css", name "CSS"). This format doesn't support escaping. * * @since 2.3.24 */ public class CSSOutputFormat extends OutputFormat { /** * The only instance (singleton) of this {@link OutputFormat}. */ public static final CSSOutputFormat INSTANCE = new CSSOutputFormat(); private CSSOutputFormat() { // Only to decrease visibility } @Override public String getName() { return "CSS"; } @Override public String getMimeType() { return "text/css"; } @Override public boolean isOutputFormatMixingAllowed() { return false; } }
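/*
 * A minimal usage sketch, assuming FreeMarker 2.3.24+ (the version in which
 * OutputFormat support was introduced): selecting CSSOutputFormat as the
 * default output format of a Configuration. Since CSS has no escaping, no
 * auto-escaping takes place for templates rendered with this format.
 */
import freemarker.core.CSSOutputFormat;
import freemarker.template.Configuration;

public class CssOutputFormatExample {
    public static void main(String[] args) {
        Configuration cfg = new Configuration(Configuration.VERSION_2_3_24);
        // Use the singleton instance directly; templates rendered through
        // this configuration will report the MIME type "text/css".
        cfg.setOutputFormat(CSSOutputFormat.INSTANCE);
        System.out.println(cfg.getOutputFormat().getName()); // CSS
    }
}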
/**
 * iBizSys 5.0 robot-generated code (do not modify this code directly)
 * http://www.ibizsys.net
 */
package com.sa.unip.app.srv.wf.ctrlmodel;

import java.util.ArrayList;
import java.util.List;

import net.ibizsys.paas.util.StringHelper;
import net.ibizsys.paas.web.AjaxActionResult;
import net.ibizsys.paas.web.WebContext;
import net.ibizsys.paas.util.GlobalContext;
import net.ibizsys.paas.core.IDEDataSetCond;
import net.ibizsys.paas.core.DEDataSetCond;
import net.ibizsys.paas.core.DEDataSetFetchContext;
import net.ibizsys.paas.db.DBFetchResult;
import net.ibizsys.paas.util.DataTypeHelper;
import net.ibizsys.paas.data.IDataObject;
import net.ibizsys.paas.datamodel.DataItemModel;
import net.ibizsys.paas.datamodel.DataItemParamModel;
import net.ibizsys.paas.entity.EntityFieldError;
import net.ibizsys.paas.entity.EntityError;
import net.ibizsys.paas.demodel.IDataEntityModel;
import net.ibizsys.paas.demodel.DEModelGlobal;
import net.ibizsys.paas.control.drctrl.DRCtrlItem;
import net.ibizsys.paas.control.drctrl.DRCtrlRootItem;

/**
 * Data relation bar [drbar] control model for entity [workflow candidate user policy]
 */
public class WFUCPolicyDefaultDRBarModel extends net.ibizsys.paas.ctrlmodel.DRBarModelBase {

    public WFUCPolicyDefaultDRBarModel() {
        super();
    }

    @Override
    protected void onInit() throws Exception {
        super.onInit();
    }

    private net.ibizsys.psrt.srv.wf.demodel.WFUCPolicyDEModel wFUCPolicyDEModel;

    protected net.ibizsys.psrt.srv.wf.demodel.WFUCPolicyDEModel getWFUCPolicyDEModel() {
        if (this.wFUCPolicyDEModel == null) {
            try {
                this.wFUCPolicyDEModel = (net.ibizsys.psrt.srv.wf.demodel.WFUCPolicyDEModel) DEModelGlobal.getDEModel("net.ibizsys.psrt.srv.wf.demodel.WFUCPolicyDEModel");
            } catch (Exception ex) {
                // A model lookup failure is tolerated here; null is returned to the caller.
            }
        }
        return this.wFUCPolicyDEModel;
    }

    @Override
    public IDataEntityModel getDEModel() {
        return this.getWFUCPolicyDEModel();
    }

    /**
     * Prepare the data relation root node
     * @param drCtrlRootItem
     * @throws Exception
     */
    @Override
    protected void onPrepareRootItem(DRCtrlRootItem drCtrlRootItem) throws Exception {
        // Add: workflow candidate user policy
        DRCtrlItem drCtrlItem0 = drCtrlRootItem.addItem("form", "");
        drCtrlItem0.setText("工作流候选用户策略");
        drCtrlItem0.setDRViewId("");
        drCtrlItem0.setExpanded(true);
        drCtrlItem0.setTextLanResTag("");
        drCtrlItem0.setIconPath("");
        drCtrlItem0.setIconCls("");
        drCtrlItem0.setEnableMode("ALL");
    }
}
package com.clanassist.model.player; import com.clanassist.model.search.results.PlayerResult; import java.util.Map; /** * Created by Harrison on 4/15/2015. */ public class PlayerGraphs { private Map<String, Double> battlesPerClass; // pie private Map<String, Double> averageWN8PerClass; private Map<String, Integer> tanksPerClass; private Map<Integer, Double> battlesPerTier; // bar private Map<Integer, Double> averageWN8PerTier; // bar private Map<Integer, Integer> tanksPerTier; private Map<String, Double> battlesPerNation; // hBar private Map<String, Double> averageWN8PerNation; private Map<String, Integer> tanksPerNation; private Map<String, Integer> bestWN8PerNation; private Map<String, Integer> bestWN8PerNationID; private Map<Integer, Integer> bestWN8PerTier; private Map<Integer, Integer> bestWN8PerTierID; private Map<String, Integer> bestWN8PerClassType; private Map<String, Integer> bestWN8PerClassTypeID; @Override public String toString() { return "PlayerGraphs{" + "battlesPerClass=" + battlesPerClass + ", averageWN8PerClass=" + averageWN8PerClass + ", battlesPerTier=" + battlesPerTier + ", averageWN8PerTier=" + averageWN8PerTier + ", battlesPerNation=" + battlesPerNation + ", averageWN8PerNation=" + averageWN8PerNation + ", bestWN8PerNation=" + bestWN8PerNation + ", bestWN8PerNationID=" + bestWN8PerNationID + ", bestWN8PerTier=" + bestWN8PerTier + ", bestWN8PerTierID=" + bestWN8PerTierID + ", bestWN8PerClassType=" + bestWN8PerClassType + ", bestWN8PerClassTypeID=" + bestWN8PerClassTypeID + ", tanksPerClass=" + tanksPerClass + ", tanksPerTier=" + tanksPerTier + ", tanksPerNation=" + tanksPerNation + '}'; } public Map<String, Double> getBattlesPerClass() { return battlesPerClass; } public void setBattlesPerClass(Map<String, Double> battlesPerClass) { this.battlesPerClass = battlesPerClass; } public Map<String, Double> getAverageWN8PerClass() { return averageWN8PerClass; } public void setAverageWN8PerClass(Map<String, Double> averageWN8PerClass) { this.averageWN8PerClass = averageWN8PerClass; } public Map<Integer, Double> getBattlesPerTier() { return battlesPerTier; } public void setBattlesPerTier(Map<Integer, Double> battlesPerTier) { this.battlesPerTier = battlesPerTier; } public Map<Integer, Double> getAverageWN8PerTier() { return averageWN8PerTier; } public void setAverageWN8PerTier(Map<Integer, Double> averageWN8PerTier) { this.averageWN8PerTier = averageWN8PerTier; } public Map<String, Double> getBattlesPerNation() { return battlesPerNation; } public void setBattlesPerNation(Map<String, Double> battlesPerNation) { this.battlesPerNation = battlesPerNation; } public Map<String, Double> getAverageWN8PerNation() { return averageWN8PerNation; } public void setAverageWN8PerNation(Map<String, Double> averageWN8PerNation) { this.averageWN8PerNation = averageWN8PerNation; } public Map<String, Integer> getBestWN8PerNation() { return bestWN8PerNation; } public void setBestWN8PerNation(Map<String, Integer> bestWN8PerNation) { this.bestWN8PerNation = bestWN8PerNation; } public Map<String, Integer> getBestWN8PerNationID() { return bestWN8PerNationID; } public void setBestWN8PerNationID(Map<String, Integer> bestWN8PerNationID) { this.bestWN8PerNationID = bestWN8PerNationID; } public Map<Integer, Integer> getBestWN8PerTier() { return bestWN8PerTier; } public void setBestWN8PerTier(Map<Integer, Integer> bestWN8PerTier) { this.bestWN8PerTier = bestWN8PerTier; } public Map<Integer, Integer> getBestWN8PerTierID() { return bestWN8PerTierID; } public void setBestWN8PerTierID(Map<Integer, Integer> 
bestWN8PerTierID) { this.bestWN8PerTierID = bestWN8PerTierID; } public Map<String, Integer> getBestWN8PerClassType() { return bestWN8PerClassType; } public void setBestWN8PerClassType(Map<String, Integer> bestWN8PerClassType) { this.bestWN8PerClassType = bestWN8PerClassType; } public Map<String, Integer> getBestWN8PerClassTypeID() { return bestWN8PerClassTypeID; } public void setBestWN8PerClassTypeID(Map<String, Integer> bestWN8PerClassTypeID) { this.bestWN8PerClassTypeID = bestWN8PerClassTypeID; } public Map<String, Integer> getTanksPerClass() { return tanksPerClass; } public void setTanksPerClass(Map<String, Integer> tanksPerClass) { this.tanksPerClass = tanksPerClass; } public Map<Integer, Integer> getTanksPerTier() { return tanksPerTier; } public void setTanksPerTier(Map<Integer, Integer> tanksPerTier) { this.tanksPerTier = tanksPerTier; } public Map<String, Integer> getTanksPerNation() { return tanksPerNation; } public void setTanksPerNation(Map<String, Integer> tanksPerNation) { this.tanksPerNation = tanksPerNation; } }
/** * <copyright> * </copyright> * * $Id$ */ package com.hundsun.ares.studio.usermacro; import org.eclipse.emf.common.util.EList; /** * <!-- begin-user-doc --> * A representation of the model object '<em><b>User Macro</b></em>'. * <!-- end-user-doc --> * * <p> * The following features are supported: * <ul> * <li>{@link com.hundsun.ares.studio.usermacro.UserMacro#getMacroItems <em>Macro Items</em>}</li> * </ul> * </p> * * @see com.hundsun.ares.studio.usermacro.UserMacroPackage#getUserMacro() * @model * @generated */ public interface UserMacro extends com.hundsun.ares.studio.core.model.JRESResourceInfo { /** * Returns the value of the '<em><b>Macro Items</b></em>' containment reference list. * The list contents are of type {@link com.hundsun.ares.studio.usermacro.UserMacroItem}. * <!-- begin-user-doc --> * <p> * If the meaning of the '<em>Macro Items</em>' containment reference list isn't clear, * there really should be more of a description here... * </p> * <!-- end-user-doc --> * @return the value of the '<em>Macro Items</em>' containment reference list. * @see com.hundsun.ares.studio.usermacro.UserMacroPackage#getUserMacro_MacroItems() * @model containment="true" * @generated */ EList<UserMacroItem> getMacroItems(); } // UserMacro
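/*
 * A brief usage sketch for the generated accessor above. In EMF, a
 * containment reference list is never null (only possibly empty), and the
 * returned EList is live: mutating it updates the model. UserMacroItem is
 * the item type named in the interface's Javadoc; the helper class below is
 * illustrative only.
 */
import org.eclipse.emf.common.util.EList;

import com.hundsun.ares.studio.usermacro.UserMacro;
import com.hundsun.ares.studio.usermacro.UserMacroItem;

public class UserMacroDump {
    static void printMacroItems(UserMacro macro) {
        // Safe to call without a null check; an empty model yields an empty list.
        EList<UserMacroItem> items = macro.getMacroItems();
        System.out.println("macro item count: " + items.size());
        for (UserMacroItem item : items) {
            System.out.println(item);
        }
    }
}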
/** * UserDomainTargetingErrorReason.java * * This file was auto-generated from WSDL * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter. */ package com.google.api.ads.dfp.axis.v201602; public class UserDomainTargetingErrorReason implements java.io.Serializable { private java.lang.String _value_; private static java.util.HashMap _table_ = new java.util.HashMap(); // Constructor protected UserDomainTargetingErrorReason(java.lang.String value) { _value_ = value; _table_.put(_value_,this); } public static final java.lang.String _INVALID_DOMAIN_NAMES = "INVALID_DOMAIN_NAMES"; public static final java.lang.String _UNKNOWN = "UNKNOWN"; public static final UserDomainTargetingErrorReason INVALID_DOMAIN_NAMES = new UserDomainTargetingErrorReason(_INVALID_DOMAIN_NAMES); public static final UserDomainTargetingErrorReason UNKNOWN = new UserDomainTargetingErrorReason(_UNKNOWN); public java.lang.String getValue() { return _value_;} public static UserDomainTargetingErrorReason fromValue(java.lang.String value) throws java.lang.IllegalArgumentException { UserDomainTargetingErrorReason enumeration = (UserDomainTargetingErrorReason) _table_.get(value); if (enumeration==null) throw new java.lang.IllegalArgumentException(); return enumeration; } public static UserDomainTargetingErrorReason fromString(java.lang.String value) throws java.lang.IllegalArgumentException { return fromValue(value); } public boolean equals(java.lang.Object obj) {return (obj == this);} public int hashCode() { return toString().hashCode();} public java.lang.String toString() { return _value_;} public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);} public static org.apache.axis.encoding.Serializer getSerializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.EnumSerializer( _javaType, _xmlType); } public static org.apache.axis.encoding.Deserializer getDeserializer( java.lang.String mechType, java.lang.Class _javaType, javax.xml.namespace.QName _xmlType) { return new org.apache.axis.encoding.ser.EnumDeserializer( _javaType, _xmlType); } // Type metadata private static org.apache.axis.description.TypeDesc typeDesc = new org.apache.axis.description.TypeDesc(UserDomainTargetingErrorReason.class); static { typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v201602", "UserDomainTargetingError.Reason")); } /** * Return type metadata object */ public static org.apache.axis.description.TypeDesc getTypeDesc() { return typeDesc; } }
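/*
 * A short usage sketch for the Axis-generated typesafe enum above. This is
 * the classic pre-Java-5 enum idiom: every instance is interned in the
 * static table at construction, so fromValue/fromString return the canonical
 * constant and identity comparison is safe.
 */
import com.google.api.ads.dfp.axis.v201602.UserDomainTargetingErrorReason;

public class ReasonLookupExample {
    public static void main(String[] args) {
        UserDomainTargetingErrorReason reason =
                UserDomainTargetingErrorReason.fromString("INVALID_DOMAIN_NAMES");
        // The interned constant is returned, so reference equality holds.
        System.out.println(reason == UserDomainTargetingErrorReason.INVALID_DOMAIN_NAMES); // true
        System.out.println(reason.getValue()); // INVALID_DOMAIN_NAMES
        // An unrecognized value raises IllegalArgumentException.
        try {
            UserDomainTargetingErrorReason.fromValue("NO_SUCH_REASON");
        } catch (IllegalArgumentException expected) {
            System.out.println("unknown value rejected");
        }
    }
}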
// ============================================================================
//
// Copyright (C) 2006-2018 Talend Inc. - www.talend.com
//
// This source code is available under agreement available at
// %InstallDIR%\features\org.talend.rcp.branding.%PRODUCTNAME%\%PRODUCTNAME%license.txt
//
// You should have received a copy of the agreement
// along with this program; if not, write to Talend SA
// 9 rue Pages 92150 Suresnes, France
//
// ============================================================================
package org.talend.hadoop.distribution.cdh4mr1.test;

import org.junit.Test;
import org.talend.hadoop.distribution.cdh4mr1.CDH4MR1Distribution;
import org.talend.hadoop.distribution.component.HadoopComponent;
import org.talend.hadoop.distribution.test.hive.AbstractVersionTest4HiveMetadataHelper;

/**
 * DOC ggu class global comment. Tests the Hive metadata helper for the CDH4 MR1 distribution.
 */
public class CDH4MR1HiveMetadataHelperTest extends AbstractVersionTest4HiveMetadataHelper {

    @Override
    protected Class<? extends HadoopComponent> getHadoopComponentClass() {
        return CDH4MR1Distribution.class;
    }

    @Test
    public void testHiveMode_CDH4MR1_WithAll() {
        doTestGetHiveModesDisplay(getDistributionVersion(), new String[0]);
    }

    @Test
    public void testHiveServer_CDH4MR1_WithAll() {
        doTestGetHiveServersDisplay(getDistributionVersion(), new String[0]);
    }
}
/* * Copyright (c) 2014, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package org.openjdk.bench.java.lang.invoke; import org.openjdk.jmh.annotations.Benchmark; import org.openjdk.jmh.annotations.BenchmarkMode; import org.openjdk.jmh.annotations.Mode; import org.openjdk.jmh.annotations.OutputTimeUnit; import org.openjdk.jmh.annotations.Scope; import org.openjdk.jmh.annotations.Setup; import org.openjdk.jmh.annotations.State; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodType; import java.util.concurrent.TimeUnit; /** * Benchmark assesses MethodHandles.catchException() performance */ @BenchmarkMode(Mode.AverageTime) @OutputTimeUnit(TimeUnit.NANOSECONDS) @State(Scope.Thread) public class MethodHandlesCatchException { /** * Implementation notes: * - emulating instance method handles because of current issue with instance methods * - exception is cached to harness the MH code, not exception instantiation * - measuring two modes: * a) always going through normal code path; * b) always going through exceptional one * - baselines do the same thing in pure Java */ private static final MyException MY_EXCEPTION = new MyException(); private int i1; private int i2; private static MethodHandle methNormal; private static MethodHandle methExceptional; @Setup public void setup() throws Throwable { MethodHandle bodyNormal = MethodHandles.lookup() .findStatic(MethodHandlesCatchException.class, "doWorkNormal", MethodType.methodType(void.class, MethodHandlesCatchException.class)); MethodHandle bodyExceptional = MethodHandles.lookup() .findStatic(MethodHandlesCatchException.class, "doWorkExceptional", MethodType.methodType(void.class, MethodHandlesCatchException.class)); MethodHandle fallback = MethodHandles.lookup() .findStatic(MethodHandlesCatchException.class, "fallback", MethodType.methodType(void.class, MyException.class, MethodHandlesCatchException.class)); methNormal = MethodHandles.catchException(bodyNormal, MyException.class, fallback); methExceptional = MethodHandles.catchException(bodyExceptional, MyException.class, fallback); } @Benchmark public void baselineNormal() { try { doWorkNormal(this); } catch (MyException e) { fallback(e, this); } } @Benchmark public void baselineExceptional() { try { doWorkExceptional(this); } catch (MyException e) { fallback(e, this); } } @Benchmark public void testNormal() throws Throwable { methNormal.invokeExact(this); } @Benchmark public void testExceptional() throws Throwable { methExceptional.invokeExact(this); } public static void 
doWorkNormal(MethodHandlesCatchException inst) throws MyException { inst.i1++; } public static void doWorkExceptional(MethodHandlesCatchException inst) throws MyException { inst.i1++; throw MY_EXCEPTION; } public static void fallback(MyException ex, MethodHandlesCatchException inst) { inst.i2++; } public static class MyException extends Exception { } }
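/*
 * A minimal, self-contained sketch of MethodHandles.catchException(), the
 * API the benchmark above measures. The divide/handler pair here is an
 * illustrative stand-in, not part of the benchmark: the combinator wraps a
 * target handle so that a matching exception is routed to a handler handle
 * instead of propagating.
 */
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles;
import java.lang.invoke.MethodType;

public class CatchExceptionExample {

    static int divide(int a, int b) {
        return a / b;
    }

    // The handler must accept the exception followed by (a prefix of) the
    // target's arguments, and must match the target's return type.
    static int onArithmetic(ArithmeticException e, int a, int b) {
        return 0;
    }

    public static void main(String[] args) throws Throwable {
        MethodHandles.Lookup lookup = MethodHandles.lookup();
        MethodHandle target = lookup.findStatic(CatchExceptionExample.class, "divide",
                MethodType.methodType(int.class, int.class, int.class));
        MethodHandle handler = lookup.findStatic(CatchExceptionExample.class, "onArithmetic",
                MethodType.methodType(int.class, ArithmeticException.class, int.class, int.class));
        MethodHandle guarded = MethodHandles.catchException(target, ArithmeticException.class, handler);

        System.out.println((int) guarded.invokeExact(10, 2)); // 5
        System.out.println((int) guarded.invokeExact(10, 0)); // 0 (handler ran)
    }
}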
package org.apereo.cas.support.events.dao; import org.apereo.cas.support.events.CasEventRepositoryFilter; import org.apereo.cas.support.events.dao.filter.GroovyCasEventRepositoryFilter; import org.junit.jupiter.api.Tag; import org.springframework.boot.test.context.TestConfiguration; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Import; import org.springframework.core.io.ClassPathResource; /** * This is {@link GroovyCasEventRepositoryFilterTests}. * * @author Misagh Moayyed * @since 6.2.0 */ @Tag("Groovy") @Import(GroovyCasEventRepositoryFilterTests.GroovyCasEventRepositoryFilterTestConfiguration.class) public class GroovyCasEventRepositoryFilterTests extends InMemoryCasEventRepositoryTests { @TestConfiguration("GroovyCasEventRepositoryFilterTestConfiguration") public static class GroovyCasEventRepositoryFilterTestConfiguration { @Bean public CasEventRepositoryFilter casEventRepositoryFilter() { return new GroovyCasEventRepositoryFilter(new ClassPathResource("GroovyCasEventRepositoryFilter.groovy")); } } }
/***** BEGIN LICENSE BLOCK ***** * Version: CPL 1.0/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Common Public * License Version 1.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a copy of * the License at http://www.eclipse.org/legal/cpl-v10.html * * Software distributed under the License is distributed on an "AS * IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or * implied. See the License for the specific language governing * rights and limitations under the License. * * Copyright (C) 2006 Ola Bini <ola@ologix.com> * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the CPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the CPL, the GPL or the LGPL. ***** END LICENSE BLOCK *****/ /** * $Id$ */ package org.jruby.util; import java.io.File; import java.io.FileFilter; import java.io.FilenameFilter; import java.io.IOException; import org.jruby.Ruby; import jnr.posix.JavaSecuredFile; import org.jruby.platform.Platform; /** * <p>This file acts as an alternative to NormalizedFile, due to the problems with current working * directory.</p> * */ public class JRubyFile extends JavaSecuredFile implements FileResource { private static final long serialVersionUID = 435364547567567L; public static JRubyFile create(String cwd, String pathname) { return createNoUnicodeConversion(cwd, pathname); } public static FileResource createResource(String cwd, String pathname) { if (pathname.indexOf('!') > 0) { return JarFileResource.load(pathname); } else { // Regular file is fine return create(cwd, pathname); } } public static String normalizeSeps(String path) { if (Platform.IS_WINDOWS) { return path.replace(File.separatorChar, '/'); } else { return path; } } private static JRubyFile createNoUnicodeConversion(String cwd, String pathname) { if (pathname == null || pathname.equals("") || Ruby.isSecurityRestricted()) { return JRubyNonExistentFile.NOT_EXIST; } File internal = new JavaSecuredFile(pathname); if(!internal.isAbsolute()) { internal = new JavaSecuredFile(cwd, pathname); if(!internal.isAbsolute()) { throw new IllegalArgumentException("Neither current working directory ("+cwd+") nor pathname ("+pathname+") led to an absolute path"); } } return new JRubyFile(internal); } public static String getFileProperty(String property) { return normalizeSeps(SafePropertyAccessor.getProperty(property, "/")); } private JRubyFile(File file) { this(file.getAbsolutePath()); } protected JRubyFile(String filename) { super(filename); } @Override public String getAbsolutePath() { return normalizeSeps(new File(super.getPath()).getAbsolutePath()); } @Override public String getCanonicalPath() throws IOException { try { return normalizeSeps(super.getCanonicalPath()); } catch (IOException e) { // usually IOExceptions don't tell us anything about the path, // so add an extra wrapper to 
give more debugging help. throw (IOException) new IOException("Unable to canonicalize path: " + getAbsolutePath()).initCause(e); } } @Override public String getPath() { return normalizeSeps(super.getPath()); } @Override public String toString() { return normalizeSeps(super.toString()); } @Override public File getAbsoluteFile() { return new JRubyFile(getAbsolutePath()); } @Override public File getCanonicalFile() throws IOException { return new JRubyFile(getCanonicalPath()); } @Override public String getParent() { String par = super.getParent(); if (par != null) { par = normalizeSeps(par); } return par; } @Override public File getParentFile() { String par = getParent(); if (par == null) { return this; } else { return new JRubyFile(par); } } public static File[] listRoots() { File[] roots = File.listRoots(); JRubyFile[] smartRoots = new JRubyFile[roots.length]; for(int i = 0, j = roots.length; i < j; i++) { smartRoots[i] = new JRubyFile(roots[i].getPath()); } return smartRoots; } public static File createTempFile(String prefix, String suffix, File directory) throws IOException { return new JRubyFile(File.createTempFile(prefix, suffix,directory)); } public static File createTempFile(String prefix, String suffix) throws IOException { return new JRubyFile(File.createTempFile(prefix, suffix)); } @Override public String[] list(FilenameFilter filter) { String[] files = super.list(filter); if (files == null) { return null; } String[] smartFiles = new String[files.length]; for (int i = 0; i < files.length; i++) { smartFiles[i] = normalizeSeps(files[i]); } return smartFiles; } @Override public File[] listFiles() { File[] files = super.listFiles(); if (files == null) { return null; } JRubyFile[] smartFiles = new JRubyFile[files.length]; for (int i = 0, j = files.length; i < j; i++) { smartFiles[i] = createNoUnicodeConversion(super.getAbsolutePath(), files[i].getPath()); } return smartFiles; } @Override public File[] listFiles(final FileFilter filter) { final File[] files = super.listFiles(filter); if (files == null) { return null; } JRubyFile[] smartFiles = new JRubyFile[files.length]; for (int i = 0,j = files.length; i < j; i++) { smartFiles[i] = createNoUnicodeConversion(super.getAbsolutePath(), files[i].getPath()); } return smartFiles; } @Override public File[] listFiles(final FilenameFilter filter) { final File[] files = super.listFiles(filter); if (files == null) { return null; } JRubyFile[] smartFiles = new JRubyFile[files.length]; for (int i = 0,j = files.length; i < j; i++) { smartFiles[i] = createNoUnicodeConversion(super.getAbsolutePath(), files[i].getPath()); } return smartFiles; } }
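/*
 * A small standalone illustration of the separator-normalization idea used
 * throughout JRubyFile above: on Windows, backslashes are rewritten to
 * forward slashes so the rest of the runtime can treat paths uniformly.
 * This sketch checks File.separatorChar directly instead of JRuby's
 * Platform.IS_WINDOWS flag; it is not JRuby code.
 */
import java.io.File;

public class NormalizeSepsExample {

    static String normalizeSeps(String path) {
        // Only rewrite on platforms whose native separator is the backslash.
        return File.separatorChar == '\\' ? path.replace('\\', '/') : path;
    }

    public static void main(String[] args) {
        // On Windows this prints C:/tmp/app.rb; elsewhere the input is unchanged.
        System.out.println(normalizeSeps("C:\\tmp\\app.rb"));
    }
}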
/** * Copyright (c) The openTCS Authors. * * This program is free software and subject to the MIT license. (For details, * see the licensing information (LICENSE.txt) you should have received with * this copy of the software.) */ package org.opentcs.guing.application.menus.menubar; import static java.util.Objects.requireNonNull; import javax.inject.Inject; import javax.swing.JMenu; import javax.swing.JMenuItem; import org.opentcs.guing.application.OperationMode; import org.opentcs.guing.application.action.ViewActionMap; import org.opentcs.guing.application.action.view.AddDrawingViewAction; import org.opentcs.guing.application.action.view.AddTransportOrderSequenceViewAction; import org.opentcs.guing.application.action.view.AddTransportOrderViewAction; import org.opentcs.guing.application.action.view.RestoreDockingLayoutAction; import org.opentcs.guing.util.I18nPlantOverviewOperating; import org.opentcs.thirdparty.jhotdraw.util.ResourceBundleUtil; /** * The application's menu for view-related operations. * * @author Stefan Walter (Fraunhofer IML) */ public class ViewMenu extends JMenu { /** * A menu item for adding a drawing view. */ private final JMenuItem menuAddDrawingView; /** * A menu item for adding a transport order view. */ private final JMenuItem menuTransportOrderView; /** * A menu item for adding an order sequence view. */ private final JMenuItem menuOrderSequenceView; /** * A menu for showing/hiding plugin panels. */ private final ViewPluginPanelsMenu menuPluginPanels; /** * A menu item for restoring the default GUI layout. */ private final JMenuItem menuItemRestoreDockingLayout; /** * Creates a new instance. * * @param actionMap The application's action map. * @param menuPluginPanels A menu for showing/hiding plugin panels. */ @Inject public ViewMenu(ViewActionMap actionMap, ViewPluginPanelsMenu menuPluginPanels) { requireNonNull(actionMap, "actionMap"); requireNonNull(menuPluginPanels, "menuPluginPanels"); final ResourceBundleUtil labels = ResourceBundleUtil.getBundle(I18nPlantOverviewOperating.MENU_PATH); this.setText(labels.getString("viewMenu.text")); this.setToolTipText(labels.getString("viewMenu.tooltipText")); this.setMnemonic('V'); // Menu item View -> Add course view menuAddDrawingView = new JMenuItem(actionMap.get(AddDrawingViewAction.ID)); add(menuAddDrawingView); // Menu item View -> Add transport order view menuTransportOrderView = new JMenuItem(actionMap.get(AddTransportOrderViewAction.ID)); add(menuTransportOrderView); // Menu item View -> Add transport order sequence view menuOrderSequenceView = new JMenuItem(actionMap.get(AddTransportOrderSequenceViewAction.ID)); add(menuOrderSequenceView); addSeparator(); // Menu item View -> Plugins this.menuPluginPanels = menuPluginPanels; menuPluginPanels.setOperationMode(OperationMode.OPERATING); add(menuPluginPanels); // Menu item View -> Restore docking layout menuItemRestoreDockingLayout = new JMenuItem(actionMap.get(RestoreDockingLayoutAction.ID)); menuItemRestoreDockingLayout.setText(labels.getString("viewMenu.menuItem_restoreWindowArrangement.text")); add(menuItemRestoreDockingLayout); } }
package duelistmod.cards.pools.aqua; import com.megacrit.cardcrawl.cards.AbstractCard; import com.megacrit.cardcrawl.characters.AbstractPlayer; import com.megacrit.cardcrawl.core.CardCrawlGame; import com.megacrit.cardcrawl.dungeons.AbstractDungeon; import com.megacrit.cardcrawl.localization.CardStrings; import com.megacrit.cardcrawl.monsters.AbstractMonster; import duelistmod.DuelistMod; import duelistmod.abstracts.DuelistCard; import duelistmod.actions.common.ModifyTributeAction; import duelistmod.helpers.Util; import duelistmod.patches.AbstractCardEnum; import duelistmod.powers.*; import duelistmod.variables.Tags; public class GiantRedSeasnake extends DuelistCard { // TEXT DECLARATION private static final CardStrings cardStrings = getCardStrings(); public static final String NAME = cardStrings.NAME; public static final String DESCRIPTION = cardStrings.DESCRIPTION; public static final String UPGRADE_DESCRIPTION = cardStrings.UPGRADE_DESCRIPTION; // /TEXT DECLARATION/ // STAT DECLARATION private static final CardRarity RARITY = CardRarity.UNCOMMON; private static final CardTarget TARGET = CardTarget.ENEMY; private static final CardType TYPE = CardType.ATTACK; public static final CardColor COLOR = AbstractCardEnum.DUELIST_MONSTERS; private static final int COST = 2; // /STAT DECLARATION/ public GiantRedSeasnake() { super(getCARDID(), NAME, getIMG(), COST, DESCRIPTION, TYPE, COLOR, RARITY, TARGET); this.tags.add(Tags.MONSTER); this.tags.add(Tags.AQUA); this.tags.add(Tags.GIANT); this.tags.add(Tags.EXEMPT); this.misc = 0; this.specialCanUseLogic = true; this.useTributeCanUse = true; this.originalName = this.name; this.damage = this.baseDamage = 55; this.baseTributes = this.tributes = 16; this.magicNumber = this.baseMagicNumber = 1; } @Override public void onIncrementWhileInHand(int amount, int newMaxSummons) { if (this.tributes > 0 && amount > 0) { this.modifyTributes(-this.magicNumber); } } @Override public void onIncrementWhileInDraw(int amount, int newMaxSummons) { if (this.tributes > 0 && amount > 0) { this.modifyTributes(-this.magicNumber); } } @Override public void onIncrementWhileInDiscard(int amount, int newMaxSummons) { if (this.tributes > 0 && amount > 0) { this.modifyTributes(-this.magicNumber); } } // Actions the card should do. @Override public void use(AbstractPlayer p, AbstractMonster m) { tribute(); attack(m); if (this.tributes == 0) { AbstractDungeon.actionManager.addToBottom(new ModifyTributeAction(this, 16 - this.tributes, true)); this.rawDescription = this.originalDescription; this.initializeDescription(); } else if (this.tributes != 16) { AbstractDungeon.actionManager.addToBottom(new ModifyTributeAction(this, 16 - this.tributes, true)); } } // Which card to return when making a copy of this card. @Override public AbstractCard makeCopy() { return new GiantRedSeasnake(); } // Upgraded stats. 
@Override public void upgrade() { if (!this.upgraded) { if (this.timesUpgraded > 0) { this.upgradeName(NAME + "+" + this.timesUpgraded); } else { this.upgradeName(NAME + "+"); } this.upgradeDamage(10); this.rawDescription = UPGRADE_DESCRIPTION; this.initializeDescription(); } } @Override public void onTribute(DuelistCard tributingCard) { } @Override public void onResummon(int summons) { // TODO Auto-generated method stub } @Override public void summonThis(int summons, DuelistCard c, int var) { // TODO Auto-generated method stub } @Override public void summonThis(int summons, DuelistCard c, int var, AbstractMonster m) { // TODO Auto-generated method stub } @Override public String getID() { return getCARDID(); } @Override public void optionSelected(AbstractPlayer arg0, AbstractMonster arg1, int arg2) { // TODO Auto-generated method stub } // AUTOSETUP - ID/IMG - Id, Img name, and class name all must match to use this public static String getCARDID() { return DuelistMod.makeID(getCurClassName()); } public static CardStrings getCardStrings() { return CardCrawlGame.languagePack.getCardStrings(getCARDID()); } public static String getIMG() { return DuelistMod.makeCardPath(getCurClassName() + ".png"); } public static String getCurClassName() { return (new CurClassNameGetter()).getClassName(); } public static class CurClassNameGetter extends SecurityManager{ public String getClassName(){ return getClassContext()[1].getSimpleName(); } } // END AUTOSETUP }
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v2/enums/distance_bucket.proto package com.google.ads.googleads.v2.enums; public final class DistanceBucketProto { private DistanceBucketProto() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistryLite registry) { } public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { registerAllExtensions( (com.google.protobuf.ExtensionRegistryLite) registry); } static final com.google.protobuf.Descriptors.Descriptor internal_static_google_ads_googleads_v2_enums_DistanceBucketEnum_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_ads_googleads_v2_enums_DistanceBucketEnum_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n3google/ads/googleads/v2/enums/distance" + "_bucket.proto\022\035google.ads.googleads.v2.e" + "nums\032\034google/api/annotations.proto\"\255\004\n\022D" + "istanceBucketEnum\"\226\004\n\016DistanceBucket\022\017\n\013" + "UNSPECIFIED\020\000\022\013\n\007UNKNOWN\020\001\022\017\n\013WITHIN_700" + "M\020\002\022\016\n\nWITHIN_1KM\020\003\022\016\n\nWITHIN_5KM\020\004\022\017\n\013W" + "ITHIN_10KM\020\005\022\017\n\013WITHIN_15KM\020\006\022\017\n\013WITHIN_" + "20KM\020\007\022\017\n\013WITHIN_25KM\020\010\022\017\n\013WITHIN_30KM\020\t" + "\022\017\n\013WITHIN_35KM\020\n\022\017\n\013WITHIN_40KM\020\013\022\017\n\013WI" + "THIN_45KM\020\014\022\017\n\013WITHIN_50KM\020\r\022\017\n\013WITHIN_5" + "5KM\020\016\022\017\n\013WITHIN_60KM\020\017\022\017\n\013WITHIN_65KM\020\020\022" + "\017\n\013BEYOND_65KM\020\021\022\023\n\017WITHIN_0_7MILES\020\022\022\020\n" + "\014WITHIN_1MILE\020\023\022\021\n\rWITHIN_5MILES\020\024\022\022\n\016WI" + "THIN_10MILES\020\025\022\022\n\016WITHIN_15MILES\020\026\022\022\n\016WI" + "THIN_20MILES\020\027\022\022\n\016WITHIN_25MILES\020\030\022\022\n\016WI" + "THIN_30MILES\020\031\022\022\n\016WITHIN_35MILES\020\032\022\022\n\016WI" + "THIN_40MILES\020\033\022\022\n\016BEYOND_40MILES\020\034B\350\001\n!c" + "om.google.ads.googleads.v2.enumsB\023Distan" + "ceBucketProtoP\001ZBgoogle.golang.org/genpr" + "oto/googleapis/ads/googleads/v2/enums;en" + "ums\242\002\003GAA\252\002\035Google.Ads.GoogleAds.V2.Enum" + "s\312\002\035Google\\Ads\\GoogleAds\\V2\\Enums\352\002!Goog" + "le::Ads::GoogleAds::V2::Enumsb\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor .internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.AnnotationsProto.getDescriptor(), }); internal_static_google_ads_googleads_v2_enums_DistanceBucketEnum_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_ads_googleads_v2_enums_DistanceBucketEnum_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_ads_googleads_v2_enums_DistanceBucketEnum_descriptor, new java.lang.String[] { }); com.google.api.AnnotationsProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
package com.github.wz2cool.dynamic;

import com.github.wz2cool.dynamic.model.ExampleModel;
import org.junit.Test;

import java.math.BigDecimal;
import java.sql.Timestamp;

import static com.github.wz2cool.dynamic.builder.DynamicQueryBuilderHelper.greaterThanOrEqual;
import static com.github.wz2cool.dynamic.builder.DynamicQueryBuilderHelper.isEqual;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
 * Created with IntelliJ IDEA.
 * User: Frank
 * Date: 7/21/2017
 * Time: 3:44 PM
 */
public class FilterGroupDescriptorTest {

    @Test
    public void addFiltersTest() {
        FilterDescriptor idFilter = new FilterDescriptor(FilterCondition.AND, "id", FilterOperator.EQUAL, 1);
        FilterGroupDescriptor groupDescriptor = new FilterGroupDescriptor();
        groupDescriptor.addFilters(idFilter);
        assertTrue(groupDescriptor.getFilters().length > 0);
    }

    @Test
    public void removeFiltersTest() {
        FilterDescriptor idFilter = new FilterDescriptor(FilterCondition.AND, "id", FilterOperator.EQUAL, 1);
        FilterGroupDescriptor groupDescriptor = new FilterGroupDescriptor();
        groupDescriptor.addFilters(idFilter);
        assertTrue(groupDescriptor.getFilters().length > 0);
        groupDescriptor.removeFilters(idFilter);
        assertEquals(0, groupDescriptor.getFilters().length);
    }

    @Test
    public void testAndBigDecimal() {
        FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>();
        filterGroupDescriptor.and(ExampleModel::getP1, isEqual(BigDecimal.ONE));
        FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0];
        assertEquals(FilterCondition.AND, filterDescriptor.getCondition());
        assertEquals("p1", filterDescriptor.getPropertyName());
        assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator());
        assertEquals(BigDecimal.ONE, filterDescriptor.getValue());
    }

    @Test
    public void testOrBigDecimal() {
        FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>();
        filterGroupDescriptor.or(ExampleModel::getP1, isEqual(BigDecimal.ONE));
        FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0];
        assertEquals(FilterCondition.OR, filterDescriptor.getCondition());
        assertEquals("p1", filterDescriptor.getPropertyName());
        assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator());
        assertEquals(BigDecimal.ONE, filterDescriptor.getValue());
    }

    @Test
    public void testAndByte() {
        byte filterValue = 1;
        FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>();
        filterGroupDescriptor.and(ExampleModel::getP2, isEqual(filterValue));
        FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0];
        assertEquals(FilterCondition.AND, filterDescriptor.getCondition());
        assertEquals("p2", filterDescriptor.getPropertyName());
        assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator());
        assertEquals(filterValue, filterDescriptor.getValue());
    }

    @Test
    public void testOrByte() {
        byte filterValue = 1;
        FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>();
        filterGroupDescriptor.or(ExampleModel::getP2, isEqual(filterValue));
        FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0];
        assertEquals(FilterCondition.OR, filterDescriptor.getCondition());
        assertEquals("p2", filterDescriptor.getPropertyName());
        assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator());
        assertEquals(filterValue,
filterDescriptor.getValue()); } @Test public void testAndDate() { Timestamp filterValue = new Timestamp(System.currentTimeMillis()); FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP3, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p3", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrDate() { Timestamp filterValue = new Timestamp(System.currentTimeMillis()); FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP3, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p3", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndDouble() { Double filterValue = 1.2; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP4, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p4", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrDouble() { Double filterValue = 1.2; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP4, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p4", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndFloat() { Float filterValue = 1.2f; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP5, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p5", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrFloat() { Float filterValue = 1.2f; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP5, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p5", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndInteger() { Integer 
filterValue = 1; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP6, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p6", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrInteger() { Integer filterValue = 1; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP6, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p6", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndLong() { Long filterValue = 1L; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP7, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p7", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrLong() { Long filterValue = 1L; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP7, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p7", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndShort() { Short filterValue = 1; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP8, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p8", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrShort() { Short filterValue = 1; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP8, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p8", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndString() { String filterValue = "frank"; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP9, 
isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p9", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrString() { String filterValue = "frank"; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(ExampleModel::getP9, isEqual(filterValue)); FilterDescriptor filterDescriptor = (FilterDescriptor) filterGroupDescriptor.getFilters()[0]; assertEquals(FilterCondition.OR, filterDescriptor.getCondition()); assertEquals("p9", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testAndGroupBegin() { BigDecimal filterValue = BigDecimal.ONE; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and((group) -> group.and(ExampleModel::getP1, isEqual(filterValue))); FilterGroupDescriptor<ExampleModel> internalFilterGroup = (FilterGroupDescriptor<ExampleModel>) filterGroupDescriptor.getFilters()[0]; FilterDescriptor filterDescriptor = (FilterDescriptor) internalFilterGroup.getFilters()[0]; assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p1", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testOrGroupBegin() { BigDecimal filterValue = BigDecimal.ONE; FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or((group) -> group.and(ExampleModel::getP1, isEqual(filterValue))); FilterGroupDescriptor<ExampleModel> internalFilterGroup = (FilterGroupDescriptor<ExampleModel>) filterGroupDescriptor.getFilters()[0]; FilterDescriptor filterDescriptor = (FilterDescriptor) internalFilterGroup.getFilters()[0]; assertEquals(FilterCondition.OR, internalFilterGroup.getCondition()); assertEquals(FilterCondition.AND, filterDescriptor.getCondition()); assertEquals("p1", filterDescriptor.getPropertyName()); assertEquals(FilterOperator.EQUAL, filterDescriptor.getOperator()); assertEquals(filterValue, filterDescriptor.getValue()); } @Test public void testDisableFilter() { FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(ExampleModel::getP1, isEqual(BigDecimal.ONE)) .and(false, ExampleModel::getP1, greaterThanOrEqual(BigDecimal.TEN)); assertEquals(1, filterGroupDescriptor.getFilters().length); } @Test public void testDisableAndFilterGroup() { FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.and(false, (group) -> group.and(ExampleModel::getP1, greaterThanOrEqual(BigDecimal.TEN))); assertEquals(0, filterGroupDescriptor.getFilters().length); } @Test public void testDisableOrFilterGroup() { FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = new FilterGroupDescriptor<>(); filterGroupDescriptor.or(false, (group) -> group.and(ExampleModel::getP1, greaterThanOrEqual(BigDecimal.TEN))); assertEquals(0, filterGroupDescriptor.getFilters().length); } @Test public void testDisableOrFilterGroup2() { FilterGroupDescriptor<ExampleModel> filterGroupDescriptor = 
new FilterGroupDescriptor<>(); filterGroupDescriptor.and((group) -> group.and(ExampleModel::getP1, isEqual(BigDecimal.TEN))); assertEquals(1, filterGroupDescriptor.getFilters().length); } }
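/*
 * A combined usage sketch of the API exercised by the tests above: chained
 * and/or filters, a nested group, and the boolean "enable" overload that
 * silently drops a filter when false. Only call shapes demonstrated in the
 * tests are used; how the resulting descriptor tree is executed against a
 * database is out of scope here.
 */
import java.math.BigDecimal;

import com.github.wz2cool.dynamic.FilterGroupDescriptor;
import com.github.wz2cool.dynamic.model.ExampleModel;

import static com.github.wz2cool.dynamic.builder.DynamicQueryBuilderHelper.greaterThanOrEqual;
import static com.github.wz2cool.dynamic.builder.DynamicQueryBuilderHelper.isEqual;

public class FilterGroupUsageExample {
    public static void main(String[] args) {
        boolean includePriceFloor = false; // e.g. driven by user input
        FilterGroupDescriptor<ExampleModel> group = new FilterGroupDescriptor<>();
        group.and(ExampleModel::getP9, isEqual("frank"))
                // A nested group combined with OR, as in testOrGroupBegin.
                .or(g -> g.and(ExampleModel::getP1, isEqual(BigDecimal.ONE)))
                // A disabled filter is simply never added, as in testDisableFilter.
                .and(includePriceFloor, ExampleModel::getP1, greaterThanOrEqual(BigDecimal.TEN));
        System.out.println(group.getFilters().length); // 2
    }
}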
package com.autodoc.impl;

import com.autodoc.contract.ManufacturerService;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

import static org.junit.jupiter.api.Assertions.assertNotNull;

class ManufacturerServiceImplTest extends HelperTest {

    private ManufacturerService service;

    @BeforeEach
    void init() {
        this.token = getToken();
        service = new ManufacturerServiceImpl();
    }

    @Test
    void getObjectClass() {
        // TODO: not yet implemented
    }

    @Test
    void getById() {
        assertNotNull(service.getById(token, 2));
    }
}
/* * Copyright (c) 2013, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * */ package com.example.lenovo.editorimageandtext.helper; /** * SpringSystemListener provides an interface for listening to events before and after each Physics * solving loop the BaseSpringSystem runs. */ public interface SpringSystemListener { /** * Runs before each pass through the physics integration loop providing an opportunity to do any * setup or alterations to the Physics state before integrating. * @param springSystem the BaseSpringSystem listened to */ void onBeforeIntegrate(BaseSpringSystem springSystem); /** * Runs after each pass through the physics integration loop providing an opportunity to do any * setup or alterations to the Physics state after integrating. * @param springSystem the BaseSpringSystem listened to */ void onAfterIntegrate(BaseSpringSystem springSystem); }
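/*
 * A minimal sketch of implementing the SpringSystemListener interface above,
 * here to time each physics pass. The logging is illustrative only;
 * BaseSpringSystem is assumed to be the companion class referenced in the
 * interface's Javadoc.
 */
import com.example.lenovo.editorimageandtext.helper.BaseSpringSystem;
import com.example.lenovo.editorimageandtext.helper.SpringSystemListener;

public class LoggingSpringSystemListener implements SpringSystemListener {

    private long startNanos;

    @Override
    public void onBeforeIntegrate(BaseSpringSystem springSystem) {
        // Called before the integration pass; a good place for setup.
        startNanos = System.nanoTime();
    }

    @Override
    public void onAfterIntegrate(BaseSpringSystem springSystem) {
        // Called after the integration pass; a good place for measurement
        // or for altering the physics state before the next frame.
        System.out.println("integration took " + (System.nanoTime() - startNanos) + " ns");
    }
}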
/* * Copyright (c) 1997-2018 Oracle and/or its affiliates. All rights reserved. * Copyright 2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.el; /** * Thrown when a method could not be found while evaluating a * {@link MethodExpression}. * * @see MethodExpression * @since JSP 2.1 */ public class MethodNotFoundException extends ELException { /** * Creates a <code>MethodNotFoundException</code> with no detail message. */ public MethodNotFoundException() { super (); } /** * Creates a <code>MethodNotFoundException</code> with the provided * detail message. * * @param message the detail message */ public MethodNotFoundException(String message) { super (message); } /** * Creates a <code>MethodNotFoundException</code> with the given root * cause. * * @param exception the originating cause of this exception */ public MethodNotFoundException(Throwable exception) { super (exception); } /** * Creates a <code>MethodNotFoundException</code> with the given detail * message and root cause. * * @param pMessage the detail message * @param pRootCause the originating cause of this exception */ public MethodNotFoundException(String pMessage, Throwable pRootCause) { super (pMessage, pRootCause); } }
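/*
 * A short sketch of where this exception typically surfaces: invoking a
 * MethodExpression whose referenced method cannot be resolved on the base
 * object. The helper below is illustrative only; in a real application the
 * ELContext and MethodExpression come from the hosting JSP/EL environment.
 */
import javax.el.ELContext;
import javax.el.MethodExpression;
import javax.el.MethodNotFoundException;

public class MethodExpressionInvoker {

    static Object invokeOrNull(MethodExpression expression, ELContext context) {
        try {
            // Passing null params is allowed when the target method takes none.
            return expression.invoke(context, null);
        } catch (MethodNotFoundException e) {
            // The named method does not exist (or is not visible) at invoke time.
            return null;
        }
    }
}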
package org.pushingpixels.demo.flamingo.svg.filetypes.transcoded; import java.awt.*; import java.awt.geom.*; import java.awt.image.BufferedImage; import java.io.*; import java.lang.ref.WeakReference; import java.util.Base64; import java.util.Stack; import javax.imageio.ImageIO; import javax.swing.SwingUtilities; import javax.swing.plaf.UIResource; import org.pushingpixels.neon.api.icon.ResizableIcon; import org.pushingpixels.neon.api.icon.ResizableIconUIResource; /** * This class has been automatically generated using <a * href="https://github.com/kirill-grouchnikov/radiance">Photon SVG transcoder</a>. */ public class ext_wmv implements ResizableIcon { private Shape shape = null; private GeneralPath generalPath = null; private Paint paint = null; private Stroke stroke = null; private Shape clip = null; private Stack<AffineTransform> transformsStack = new Stack<>(); private void _paint0(Graphics2D g,float origAlpha) { transformsStack.push(g.getTransform()); // g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(0.009999999776482582f, 0.0f, 0.0f, 0.009999999776482582f, 0.13999999687075615f, -0.0f)); // _0 g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f)); // _0_0 if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(45.2f, 1.0f); generalPath.lineTo(72.2f, 27.7f); generalPath.lineTo(72.2f, 99.0f); generalPath.lineTo(0.2f, 99.0f); generalPath.lineTo(0.2f, 1.0f); generalPath.lineTo(45.2f, 1.0f); generalPath.closePath(); shape = generalPath; paint = new LinearGradientPaint(new Point2D.Double(36.20000076293945, 101.0), new Point2D.Double(36.20000076293945, 3.005000114440918), new float[] {0.0f,0.17f,0.313f,0.447f,0.575f,0.698f,0.819f,0.934f,1.0f}, new Color[] {new Color(226, 205, 228, 255),new Color(224, 202, 226, 255),new Color(219, 192, 221, 255),new Color(210, 177, 212, 255),new Color(199, 157, 199, 255),new Color(186, 132, 185, 255),new Color(171, 104, 169, 255),new Color(156, 69, 152, 255),new Color(147, 42, 142, 255)}, MultipleGradientPaint.CycleMethod.NO_CYCLE, MultipleGradientPaint.ColorSpaceType.SRGB, new AffineTransform(1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 102.0f)); g.setPaint(paint); g.fill(shape); g.setTransform(transformsStack.pop()); g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f)); // _0_1 if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(45.2f, 1.0f); generalPath.lineTo(72.2f, 27.7f); generalPath.lineTo(72.2f, 99.0f); generalPath.lineTo(0.2f, 99.0f); generalPath.lineTo(0.2f, 1.0f); generalPath.lineTo(45.2f, 1.0f); generalPath.closePath(); shape = generalPath; paint = new Color(0, 0, 0, 0); g.setPaint(paint); g.fill(shape); paint = new Color(136, 35, 131, 255); stroke = new BasicStroke(2.0f,0,0,4.0f,null,0.0f); if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(45.2f, 1.0f); generalPath.lineTo(72.2f, 27.7f); generalPath.lineTo(72.2f, 99.0f); generalPath.lineTo(0.2f, 99.0f); generalPath.lineTo(0.2f, 1.0f); generalPath.lineTo(45.2f, 1.0f); generalPath.closePath(); shape = generalPath; g.setPaint(paint); g.setStroke(stroke); g.draw(shape); g.setTransform(transformsStack.pop()); 
g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f)); // _0_2 if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(9.1f, 91.1f); generalPath.lineTo(4.7f, 72.5f); generalPath.lineTo(8.6f, 72.5f); generalPath.lineTo(11.400001f, 85.3f); generalPath.lineTo(14.800001f, 72.5f); generalPath.lineTo(19.300001f, 72.5f); generalPath.lineTo(22.6f, 85.5f); generalPath.lineTo(25.5f, 72.5f); generalPath.lineTo(29.3f, 72.5f); generalPath.lineTo(24.699999f, 91.1f); generalPath.lineTo(20.699999f, 91.1f); generalPath.lineTo(17.0f, 77.2f); generalPath.lineTo(13.3f, 91.1f); generalPath.lineTo(9.1f, 91.1f); generalPath.closePath(); generalPath.moveTo(31.2f, 91.1f); generalPath.lineTo(31.2f, 72.5f); generalPath.lineTo(36.9f, 72.5f); generalPath.lineTo(40.300003f, 85.2f); generalPath.lineTo(43.700005f, 72.5f); generalPath.lineTo(49.400005f, 72.5f); generalPath.lineTo(49.400005f, 91.1f); generalPath.lineTo(45.900005f, 91.1f); generalPath.lineTo(45.900005f, 76.4f); generalPath.lineTo(42.200005f, 91.1f); generalPath.lineTo(38.500004f, 91.1f); generalPath.lineTo(34.800003f, 76.4f); generalPath.lineTo(34.800003f, 91.1f); generalPath.lineTo(31.200003f, 91.1f); generalPath.closePath(); generalPath.moveTo(57.9f, 91.1f); generalPath.lineTo(51.2f, 72.5f); generalPath.lineTo(55.3f, 72.5f); generalPath.lineTo(60.1f, 86.3f); generalPath.lineTo(64.7f, 72.5f); generalPath.lineTo(68.7f, 72.5f); generalPath.lineTo(62.0f, 91.1f); generalPath.lineTo(57.9f, 91.1f); generalPath.closePath(); shape = generalPath; paint = new Color(255, 255, 255, 255); g.setPaint(paint); g.fill(shape); g.setTransform(transformsStack.pop()); g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f)); // _0_3 if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(64.3f, 55.5f); generalPath.curveTo(62.600002f, 55.3f, 60.9f, 55.2f, 59.200005f, 55.2f); generalPath.curveTo(51.900005f, 55.100002f, 45.900005f, 56.8f, 40.400005f, 58.9f); generalPath.curveTo(34.900005f, 61.000004f, 29.6f, 63.6f, 23.3f, 64.0f); generalPath.curveTo(19.9f, 64.2f, 15.999999f, 63.4f, 14.799999f, 61.6f); generalPath.curveTo(13.999999f, 60.3f, 13.999999f, 58.1f, 13.799999f, 55.899998f); generalPath.curveTo(13.199999f, 50.199997f, 12.199999f, 44.199997f, 11.4f, 38.6f); generalPath.curveTo(12.2f, 37.699997f, 13.5f, 37.3f, 14.799999f, 36.899998f); generalPath.curveTo(15.199999f, 37.999996f, 14.999999f, 39.6f, 15.4f, 40.699997f); generalPath.curveTo(22.5f, 41.399998f, 29.0f, 40.299995f, 35.4f, 39.199997f); generalPath.curveTo(41.7f, 38.1f, 47.800003f, 36.999996f, 54.800003f, 36.6f); generalPath.curveTo(58.200005f, 36.399998f, 61.700005f, 36.399998f, 65.100006f, 36.6f); generalPath.moveTo(55.200005f, 51.899998f); generalPath.curveTo(55.700005f, 51.699997f, 56.300003f, 51.6f, 57.100006f, 51.699997f); generalPath.curveTo(57.300007f, 47.999996f, 57.400005f, 44.399998f, 57.400005f, 40.499996f); generalPath.curveTo(51.200005f, 40.699997f, 45.500008f, 41.399998f, 40.400005f, 42.699997f); generalPath.curveTo(40.600006f, 46.699997f, 40.800007f, 50.499996f, 40.700005f, 54.699997f); generalPath.curveTo(44.700005f, 53.6f, 48.400005f, 52.199997f, 53.300003f, 51.999996f); generalPath.moveTo(55.300003f, 39.899994f); generalPath.lineTo(56.4f, 39.899994f); 
generalPath.curveTo(56.800003f, 39.499992f, 56.600002f, 38.699993f, 56.600002f, 37.999992f); generalPath.curveTo(55.100002f, 37.399994f, 54.800003f, 38.999992f, 55.300003f, 39.899994f); generalPath.closePath(); generalPath.moveTo(59.200005f, 39.699993f); generalPath.lineTo(60.700005f, 39.699993f); generalPath.lineTo(60.700005f, 38.0f); generalPath.lineTo(59.400005f, 38.0f); generalPath.curveTo(59.400005f, 38.7f, 59.000004f, 38.9f, 59.200005f, 39.7f); generalPath.closePath(); generalPath.moveTo(63.200005f, 39.699993f); generalPath.curveTo(63.700005f, 39.599995f, 64.00001f, 39.699993f, 64.3f, 39.899994f); generalPath.curveTo(64.700005f, 39.599995f, 64.5f, 38.699993f, 64.5f, 37.999992f); generalPath.lineTo(63.2f, 37.999992f); generalPath.lineTo(63.2f, 39.699993f); generalPath.closePath(); generalPath.moveTo(51.700005f, 39.999992f); generalPath.lineTo(52.600006f, 39.999992f); generalPath.curveTo(53.000008f, 39.699993f, 52.800007f, 38.79999f, 52.800007f, 38.09999f); generalPath.curveTo(51.400005f, 37.69999f, 51.20001f, 39.29999f, 51.70001f, 39.999992f); generalPath.closePath(); generalPath.moveTo(47.700005f, 40.399994f); generalPath.curveTo(48.400005f, 40.599995f, 48.500004f, 40.099995f, 49.200005f, 40.199993f); generalPath.lineTo(49.200005f, 38.499992f); generalPath.curveTo(47.700005f, 38.09999f, 47.500004f, 39.09999f, 47.700005f, 40.399994f); generalPath.closePath(); generalPath.moveTo(44.100006f, 39.299995f); generalPath.curveTo(44.100006f, 39.899994f, 44.000008f, 40.699997f, 44.300007f, 40.999996f); generalPath.curveTo(44.800007f, 41.099995f, 44.800007f, 40.599995f, 45.400005f, 40.799995f); generalPath.curveTo(45.200005f, 40.199997f, 45.900005f, 38.799995f, 45.000004f, 38.899994f); generalPath.curveTo(44.900005f, 39.299995f, 44.200005f, 38.999992f, 44.100002f, 39.299995f); generalPath.closePath(); generalPath.moveTo(12.600006f, 40.099995f); generalPath.curveTo(13.000006f, 39.999996f, 13.7000065f, 40.699993f, 13.900006f, 40.099995f); generalPath.curveTo(13.400006f, 40.099995f, 13.800006f, 39.299995f, 13.7000065f, 38.999996f); generalPath.curveTo(13.000007f, 39.199997f, 12.400006f, 39.299995f, 12.600006f, 40.099995f); generalPath.closePath(); generalPath.moveTo(40.900005f, 39.699993f); generalPath.curveTo(40.600006f, 39.999992f, 41.100006f, 40.79999f, 40.900005f, 41.599995f); generalPath.curveTo(41.500004f, 41.799995f, 41.500004f, 41.299995f, 42.000004f, 41.399994f); generalPath.curveTo(41.800003f, 40.799995f, 42.500004f, 39.399994f, 41.600002f, 39.499992f); generalPath.curveTo(41.500004f, 39.79999f, 41.2f, 39.699993f, 40.9f, 39.699993f); generalPath.closePath(); generalPath.moveTo(37.400005f, 42.499992f); generalPath.curveTo(37.900005f, 42.399994f, 38.300007f, 42.29999f, 38.700005f, 42.09999f); generalPath.curveTo(38.900005f, 41.29999f, 38.300003f, 41.19999f, 38.500004f, 40.39999f); generalPath.lineTo(37.600002f, 40.39999f); generalPath.curveTo(37.300003f, 40.69999f, 37.500004f, 41.69999f, 37.4f, 42.49999f); generalPath.closePath(); generalPath.moveTo(64.3f, 40.699993f); generalPath.curveTo(62.200005f, 40.599995f, 61.000004f, 40.499992f, 58.800003f, 40.499992f); generalPath.curveTo(58.300003f, 43.899994f, 58.800003f, 48.29999f, 58.300003f, 51.699993f); generalPath.curveTo(60.700005f, 51.699993f, 61.9f, 51.79999f, 64.100006f, 51.999992f); generalPath.moveTo(33.4f, 41.6f); generalPath.curveTo(33.9f, 41.8f, 33.5f, 42.8f, 33.600002f, 43.3f); generalPath.curveTo(34.100002f, 43.2f, 34.7f, 43.1f, 35.100002f, 42.899998f); generalPath.curveTo(35.7f, 40.999996f, 34.2f, 40.499996f, 33.4f, 41.6f); 
generalPath.closePath(); generalPath.moveTo(28.7f, 42.199997f); generalPath.lineTo(28.7f, 44.1f); generalPath.curveTo(29.6f, 44.3f, 29.900002f, 43.899998f, 30.6f, 43.899998f); generalPath.curveTo(30.5f, 43.199997f, 30.800001f, 42.199997f, 30.4f, 41.8f); generalPath.curveTo(29.9f, 42.0f, 29.1f, 41.899998f, 28.699999f, 42.2f); generalPath.closePath(); generalPath.moveTo(23.400002f, 42.799995f); generalPath.curveTo(23.7f, 43.299995f, 23.400002f, 44.399994f, 23.800001f, 44.899994f); generalPath.curveTo(24.500002f, 44.999992f, 24.6f, 44.499992f, 25.300001f, 44.699993f); generalPath.curveTo(25.2f, 43.999992f, 25.000002f, 43.499992f, 25.1f, 42.599995f); generalPath.curveTo(24.300001f, 42.399994f, 24.2f, 42.899994f, 23.4f, 42.799995f); generalPath.closePath(); generalPath.moveTo(15.900002f, 44.799995f); generalPath.lineTo(17.0f, 44.799995f); generalPath.curveTo(17.2f, 43.899994f, 16.6f, 43.599995f, 16.8f, 42.699997f); generalPath.curveTo(16.4f, 42.799995f, 15.599999f, 42.399998f, 15.499999f, 42.899998f); generalPath.curveTo(16.099998f, 43.1f, 15.399999f, 44.6f, 15.899999f, 44.8f); generalPath.closePath(); generalPath.moveTo(19.300001f, 45.799995f); generalPath.curveTo(19.400002f, 49.899994f, 20.2f, 55.099995f, 20.7f, 59.499996f); generalPath.curveTo(28.7f, 59.599995f, 33.800003f, 56.799995f, 39.9f, 54.999996f); generalPath.curveTo(39.4f, 51.099995f, 40.0f, 46.299995f, 39.2f, 42.799995f); generalPath.curveTo(33.0f, 44.399994f, 27.1f, 45.999996f, 19.300001f, 45.799995f); generalPath.closePath(); generalPath.moveTo(19.800001f, 44.999996f); generalPath.lineTo(20.900002f, 44.999996f); generalPath.curveTo(21.300001f, 44.499996f, 20.7f, 43.799995f, 20.900002f, 42.899998f); generalPath.lineTo(19.400002f, 42.899998f); generalPath.curveTo(19.500002f, 43.6f, 19.500002f, 44.499996f, 19.800001f, 44.999996f); generalPath.closePath(); generalPath.moveTo(14.400002f, 52.799995f); generalPath.curveTo(14.600001f, 52.799995f, 14.700002f, 52.999996f, 14.800001f, 53.199997f); generalPath.curveTo(14.400002f, 52.499996f, 14.100001f, 53.699997f, 14.600001f, 53.799995f); generalPath.curveTo(14.700002f, 53.599995f, 14.600001f, 53.399994f, 14.800001f, 53.399994f); generalPath.curveTo(15.100001f, 53.899994f, 14.000001f, 54.099995f, 14.600001f, 54.199993f); generalPath.curveTo(15.300001f, 53.699993f, 15.900002f, 52.999992f, 17.000002f, 52.699993f); generalPath.curveTo(16.900002f, 54.199993f, 17.400002f, 55.099995f, 17.400002f, 56.499992f); generalPath.curveTo(16.7f, 56.999992f, 15.700002f, 57.199993f, 15.500002f, 58.199993f); generalPath.curveTo(16.700003f, 58.899994f, 18.000002f, 59.399994f, 19.7f, 59.499992f); generalPath.curveTo(19.0f, 54.59999f, 18.6f, 50.699993f, 18.1f, 45.79999f); generalPath.curveTo(15.900001f, 46.09999f, 14.1f, 44.999992f, 13.0f, 44.89999f); generalPath.curveTo(13.9f, 45.69999f, 13.6f, 47.39999f, 13.8f, 48.49999f); generalPath.curveTo(13.8f, 48.299988f, 13.8f, 48.099987f, 14.0f, 48.099987f); generalPath.curveTo(13.9f, 48.799988f, 14.1f, 49.799988f, 13.8f, 50.199986f); generalPath.curveTo(14.5f, 50.499985f, 14.3f, 49.999985f, 14.2f, 51.099987f); generalPath.moveTo(58.8f, 54.299988f); generalPath.lineTo(59.899998f, 54.299988f); generalPath.curveTo(60.199997f, 53.99999f, 60.1f, 53.19999f, 60.1f, 52.599987f); generalPath.lineTo(58.8f, 52.599987f); generalPath.lineTo(58.8f, 54.299988f); generalPath.closePath(); generalPath.moveTo(54.8f, 52.899986f); generalPath.lineTo(54.8f, 54.199986f); generalPath.curveTo(55.2f, 54.599987f, 55.5f, 53.999985f, 56.3f, 54.199986f); generalPath.lineTo(56.3f, 52.699986f); 
generalPath.curveTo(55.7f, 52.699986f, 55.1f, 52.699986f, 54.8f, 52.899986f); generalPath.closePath(); generalPath.moveTo(62.399998f, 54.299988f); generalPath.lineTo(63.699997f, 54.299988f); generalPath.lineTo(63.699997f, 52.799988f); generalPath.lineTo(62.399998f, 52.799988f); generalPath.curveTo(62.499996f, 53.299988f, 62.399998f, 53.799988f, 62.399998f, 54.299988f); generalPath.closePath(); generalPath.moveTo(51.399998f, 53.299988f); generalPath.lineTo(51.399998f, 54.599987f); generalPath.lineTo(52.499996f, 54.599987f); generalPath.curveTo(52.799995f, 54.299988f, 52.899998f, 52.899986f, 52.299995f, 52.899986f); generalPath.curveTo(52.199997f, 53.299988f, 51.499996f, 52.999985f, 51.399994f, 53.299988f); generalPath.closePath(); generalPath.moveTo(47.8f, 53.69999f); generalPath.curveTo(47.899998f, 54.299988f, 47.5f, 55.39999f, 48.2f, 55.39999f); generalPath.curveTo(48.2f, 55.09999f, 48.7f, 55.19999f, 49.100002f, 55.19999f); generalPath.curveTo(48.9f, 54.69999f, 49.500004f, 53.39999f, 48.7f, 53.49999f); generalPath.curveTo(48.600002f, 53.799988f, 48.100002f, 53.69999f, 47.8f, 53.69999f); generalPath.closePath(); generalPath.moveTo(44.399998f, 54.69999f); generalPath.lineTo(44.399998f, 56.19999f); generalPath.curveTo(45.1f, 56.39999f, 44.999996f, 55.799988f, 45.699997f, 55.99999f); generalPath.curveTo(45.499996f, 55.49999f, 46.1f, 54.19999f, 45.299995f, 54.299988f); generalPath.curveTo(45.199997f, 54.599987f, 44.499996f, 54.49999f, 44.399994f, 54.69999f); generalPath.closePath(); generalPath.moveTo(15.0f, 57.0f); generalPath.curveTo(15.7f, 56.5f, 16.3f, 55.3f, 15.2f, 54.7f); generalPath.curveTo(14.5f, 55.100002f, 14.4f, 56.3f, 15.0f, 57.0f); generalPath.closePath(); generalPath.moveTo(41.1f, 55.7f); generalPath.curveTo(41.0f, 56.4f, 41.5f, 56.5f, 41.3f, 57.2f); generalPath.curveTo(42.2f, 57.2f, 42.5f, 56.600002f, 42.399998f, 55.5f); generalPath.curveTo(41.999996f, 55.0f, 41.6f, 55.6f, 41.1f, 55.7f); generalPath.closePath(); generalPath.moveTo(38.1f, 58.4f); generalPath.curveTo(39.1f, 58.4f, 39.3f, 57.600002f, 39.199997f, 56.5f); generalPath.lineTo(38.299995f, 56.5f); generalPath.curveTo(37.999996f, 56.9f, 38.199997f, 57.8f, 38.099995f, 58.4f); generalPath.closePath(); generalPath.moveTo(34.5f, 58.0f); generalPath.lineTo(34.5f, 59.7f); generalPath.curveTo(35.1f, 59.600002f, 35.8f, 59.5f, 36.0f, 58.9f); generalPath.curveTo(35.4f, 58.9f, 36.3f, 57.300003f, 35.4f, 57.600002f); generalPath.curveTo(35.4f, 58.000004f, 34.7f, 57.7f, 34.5f, 58.000004f); generalPath.closePath(); generalPath.moveTo(16.0f, 60.8f); generalPath.curveTo(15.6f, 60.1f, 15.8f, 58.8f, 14.7f, 58.899998f); generalPath.curveTo(14.9f, 59.6f, 14.9f, 61.6f, 16.0f, 60.8f); generalPath.closePath(); generalPath.moveTo(29.8f, 59.899998f); generalPath.curveTo(30.3f, 59.899998f, 29.9f, 60.8f, 30.0f, 61.199997f); generalPath.curveTo(30.8f, 61.299995f, 31.2f, 60.999996f, 31.7f, 60.799995f); generalPath.lineTo(31.7f, 59.099995f); generalPath.curveTo(30.800001f, 58.999996f, 30.1f, 59.199993f, 29.800001f, 59.899994f); generalPath.closePath(); generalPath.moveTo(25.099998f, 60.499996f); generalPath.curveTo(25.099998f, 61.299995f, 24.999998f, 62.199997f, 25.499998f, 62.399998f); generalPath.curveTo(25.499998f, 61.899998f, 26.299997f, 62.3f, 26.599998f, 62.199997f); generalPath.curveTo(26.899998f, 61.899998f, 26.399998f, 61.1f, 26.599998f, 60.299995f); generalPath.curveTo(25.899998f, 60.099995f, 25.599998f, 60.399994f, 25.099998f, 60.499996f); generalPath.closePath(); generalPath.moveTo(19.0f, 62.3f); generalPath.lineTo(19.0f, 60.6f); 
generalPath.curveTo(18.5f, 60.6f, 18.4f, 60.199997f, 17.7f, 60.399998f); generalPath.curveTo(17.6f, 61.499996f, 17.7f, 62.499996f, 19.0f, 62.3f); generalPath.closePath(); generalPath.moveTo(21.5f, 62.5f); generalPath.lineTo(22.8f, 62.5f); generalPath.curveTo(23.0f, 61.6f, 22.5f, 61.4f, 22.599998f, 60.6f); generalPath.lineTo(21.3f, 60.6f); generalPath.curveTo(21.199999f, 61.5f, 21.5f, 61.8f, 21.5f, 62.5f); generalPath.closePath(); shape = generalPath; paint = new LinearGradientPaint(new Point2D.Double(11.51099967956543, 51.715999603271484), new Point2D.Double(65.21099853515625, 51.715999603271484), new float[] {0.005f,1.0f}, new Color[] {new Color(150, 52, 145, 255),new Color(112, 19, 107, 255)}, MultipleGradientPaint.CycleMethod.NO_CYCLE, MultipleGradientPaint.ColorSpaceType.SRGB, new AffineTransform(1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 102.0f)); g.setPaint(paint); g.fill(shape); g.setTransform(transformsStack.pop()); g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f)); // _0_4 if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(45.2f, 1.0f); generalPath.lineTo(72.2f, 27.7f); generalPath.lineTo(45.199997f, 27.7f); generalPath.lineTo(45.199997f, 1.0f); generalPath.closePath(); shape = generalPath; paint = new LinearGradientPaint(new Point2D.Double(45.26900100708008, 74.20600128173828), new Point2D.Double(58.76900100708008, 87.70600128173828), new float[] {0.0f,0.378f,0.515f,0.612f,0.69f,0.757f,0.817f,0.871f,0.921f,0.965f,1.0f}, new Color[] {new Color(249, 239, 246, 255),new Color(248, 237, 245, 255),new Color(243, 230, 241, 255),new Color(236, 219, 235, 255),new Color(227, 204, 226, 255),new Color(215, 184, 215, 255),new Color(202, 161, 201, 255),new Color(188, 136, 187, 255),new Color(174, 108, 171, 255),new Color(159, 77, 155, 255),new Color(147, 42, 142, 255)}, MultipleGradientPaint.CycleMethod.NO_CYCLE, MultipleGradientPaint.ColorSpaceType.SRGB, new AffineTransform(1.0f, 0.0f, 0.0f, -1.0f, 0.0f, 102.0f)); g.setPaint(paint); g.fill(shape); g.setTransform(transformsStack.pop()); g.setComposite(AlphaComposite.getInstance(3, 1.0f * origAlpha)); transformsStack.push(g.getTransform()); g.transform(new AffineTransform(1.0f, 0.0f, 0.0f, 1.0f, 0.0f, 0.0f)); // _0_5 if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(45.2f, 1.0f); generalPath.lineTo(72.2f, 27.7f); generalPath.lineTo(45.199997f, 27.7f); generalPath.lineTo(45.199997f, 1.0f); generalPath.closePath(); shape = generalPath; paint = new Color(0, 0, 0, 0); g.setPaint(paint); g.fill(shape); paint = new Color(136, 35, 131, 255); stroke = new BasicStroke(2.0f,0,2,4.0f,null,0.0f); if (generalPath == null) { generalPath = new GeneralPath(); } else { generalPath.reset(); } generalPath.moveTo(45.2f, 1.0f); generalPath.lineTo(72.2f, 27.7f); generalPath.lineTo(45.199997f, 27.7f); generalPath.lineTo(45.199997f, 1.0f); generalPath.closePath(); shape = generalPath; g.setPaint(paint); g.setStroke(stroke); g.draw(shape); g.setTransform(transformsStack.pop()); g.setTransform(transformsStack.pop()); g.setTransform(transformsStack.pop()); } @SuppressWarnings("unused") private void innerPaint(Graphics2D g) { float origAlpha = 1.0f; Composite origComposite = g.getComposite(); if (origComposite instanceof AlphaComposite) { AlphaComposite origAlphaComposite = (AlphaComposite)origComposite; if (origAlphaComposite.getRule() == 
AlphaComposite.SRC_OVER) { origAlpha = origAlphaComposite.getAlpha(); } } _paint0(g, origAlpha); shape = null; generalPath = null; paint = null; stroke = null; clip = null; transformsStack.clear(); } /** * Returns the X of the bounding box of the original SVG image. * * @return The X of the bounding box of the original SVG image. */ public static double getOrigX() { return 0.13199996948242188; } /** * Returns the Y of the bounding box of the original SVG image. * * @return The Y of the bounding box of the original SVG image. */ public static double getOrigY() { return 0.0; } /** * Returns the width of the bounding box of the original SVG image. * * @return The width of the bounding box of the original SVG image. */ public static double getOrigWidth() { return 0.7400000095367432; } /** * Returns the height of the bounding box of the original SVG image. * * @return The height of the bounding box of the original SVG image. */ public static double getOrigHeight() { return 1.0; } /** The current width of this resizable icon. */ private int width; /** The current height of this resizable icon. */ private int height; /** * Creates a new transcoded SVG image. This is marked as private to indicate that app * code should be using the {@link #of(int, int)} method to obtain a pre-configured instance. */ private ext_wmv() { this.width = (int) getOrigWidth(); this.height = (int) getOrigHeight(); } @Override public int getIconHeight() { return height; } @Override public int getIconWidth() { return width; } @Override public synchronized void setDimension(Dimension newDimension) { this.width = newDimension.width; this.height = newDimension.height; } @Override public synchronized void paintIcon(Component c, Graphics g, int x, int y) { Graphics2D g2d = (Graphics2D) g.create(); g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); g2d.translate(x, y); double coef1 = (double) this.width / getOrigWidth(); double coef2 = (double) this.height / getOrigHeight(); double coef = Math.min(coef1, coef2); g2d.clipRect(0, 0, this.width, this.height); g2d.scale(coef, coef); g2d.translate(-getOrigX(), -getOrigY()); if (coef1 != coef2) { if (coef1 < coef2) { int extraDy = (int) ((getOrigWidth() - getOrigHeight()) / 2.0); g2d.translate(0, extraDy); } else { int extraDx = (int) ((getOrigHeight() - getOrigWidth()) / 2.0); g2d.translate(extraDx, 0); } } Graphics2D g2ForInner = (Graphics2D) g2d.create(); innerPaint(g2ForInner); g2ForInner.dispose(); g2d.dispose(); } /** * Returns a new instance of this icon with specified dimensions. * * @param width Required width of the icon * @param height Required height of the icon * @return A new instance of this icon with specified dimensions. */ public static ResizableIcon of(int width, int height) { ext_wmv base = new ext_wmv(); base.width = width; base.height = height; return base; } /** * Returns a new {@link UIResource} instance of this icon with specified dimensions. * * @param width Required width of the icon * @param height Required height of the icon * @return A new {@link UIResource} instance of this icon with specified dimensions. */ public static ResizableIconUIResource uiResourceOf(int width, int height) { ext_wmv base = new ext_wmv(); base.width = width; base.height = height; return new ResizableIconUIResource(base); } /** * Returns a factory that returns instances of this icon on demand. 
* * @return Factory that returns instances of this icon on demand. */ public static Factory factory() { return ext_wmv::new; } }
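A short usage sketch for the generated icon: since the constructor is private, instances come from the of/uiResourceOf factories. The Swing frame setup below is illustrative.

import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.SwingUtilities;
import org.pushingpixels.neon.api.icon.ResizableIcon;

public class ExtWmvDemo {
    public static void main(String[] args) {
        SwingUtilities.invokeLater(() -> {
            // Obtain a pre-sized instance; ResizableIcon is a Swing Icon.
            ResizableIcon icon = ext_wmv.of(64, 64);
            JFrame frame = new JFrame("wmv file type icon");
            frame.add(new JLabel(icon));
            frame.pack();
            frame.setDefaultCloseOperation(JFrame.DISPOSE_ON_CLOSE);
            frame.setVisible(true);
        });
    }
}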
package de.kittybot.backend.utils; import de.kittybot.backend.objects.data.LavalinkNode; import de.kittybot.backend.objects.exceptions.MissingConfigValuesException; import net.dv8tion.jda.api.utils.data.DataObject; import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.util.Base64; import java.util.HashSet; import java.util.Set; public class Config{ public static String BOT_TOKEN; public static long BOT_ID; public static String BOT_SECRET; public static Set<Long> DEV_IDS; public static Long TEST_GUILD; public static int BACKEND_PORT; public static String BACKEND_HOST; public static int PROMETHEUS_PORT; public static String HASTEBIN_URL; public static String ORIGIN_URL; public static String REDIRECT_URL; public static String LOG_WEBHOOK_URL; public static String SUPPORT_GUILD_INVITE_URL; public static String BOT_INVITE_URL; public static String TWITCH_CLIENT_ID; public static String TWITCH_CLIENT_SECRET; public static String SIGNING_KEY; public static String SPOTIFY_CLIENT_ID; public static String SPOTIFY_CLIENT_SECRET; public static String TOP_GG_TOKEN; public static String DISCORD_EXTREME_LIST_TOKEN; public static String DISCORD_BOATS_TOKEN; public static String DISCORD_BOTS_TOKEN; public static String BOTLIST_SPACE_TOKEN; public static String BOTS_FOR_DISCORD_TOKEN; public static String DB_HOST; public static String DB_PORT; public static String DB_DATABASE; public static String DB_USER; public static String DB_PASSWORD; public static Set<LavalinkNode> LAVALINK_NODES; private Config(){} public static void init(String path) throws IOException, MissingConfigValuesException{ File config = new File(path); if(!config.exists()){ throw new IOException("Config file not found"); } var json = DataObject.fromJson(Files.readAllBytes(config.toPath())); checkMandatoryValues(json, "bot_token", "dev_ids", "db_host", "db_port", "db_database", "db_user", "db_password", "signing_key", "backend_port", "origin_url", "redirect_url"); BOT_TOKEN = json.getString("bot_token", ""); if(BOT_TOKEN.isBlank()){ BOT_ID = -1; } else{ BOT_ID = getIdFromToken(); } BOT_SECRET = json.getString("bot_secret", ""); var ownerIds = json.optArray("dev_ids"); DEV_IDS = new HashSet<>(); if(ownerIds.isPresent()){ var val = ownerIds.get(); for(var i = 0; i < val.length(); i++){ DEV_IDS.add(val.getLong(i, -1)); } } TEST_GUILD = json.getLong("test_guild", -1); BACKEND_PORT = json.getInt("backend_port", -1); BACKEND_HOST = json.getString("backend_host", "0.0.0.0"); PROMETHEUS_PORT = json.getInt("prometheus_port", -1); REDIRECT_URL = json.getString("redirect_url", ""); ORIGIN_URL = json.getString("origin_url", ""); HASTEBIN_URL = json.getString("hastebin_url", ""); LOG_WEBHOOK_URL = json.getString("log_webhook_url", ""); SUPPORT_GUILD_INVITE_URL = json.getString("support_guild_invite_url", ""); BOT_INVITE_URL = json.getString("bot_invite_url", ""); TWITCH_CLIENT_ID = json.getString("twitch_client_id", ""); TWITCH_CLIENT_SECRET = json.getString("twitch_client_secret", ""); SIGNING_KEY = json.getString("signing_key", ""); SPOTIFY_CLIENT_ID = json.getString("spotify_client_id", ""); SPOTIFY_CLIENT_SECRET = json.getString("spotify_client_secret", ""); DISCORD_BOTS_TOKEN = json.getString("discord_bots_token", ""); TOP_GG_TOKEN = json.getString("top_gg_token", ""); DISCORD_EXTREME_LIST_TOKEN = json.getString("discord_extreme_list_token", ""); DISCORD_BOATS_TOKEN = json.getString("discord_boats_token", ""); BOTLIST_SPACE_TOKEN = json.getString("botlist_space_token", ""); BOTS_FOR_DISCORD_TOKEN = 
json.getString("bots_for_discord_token", ""); DB_HOST = json.getString("db_host", ""); DB_PORT = json.getString("db_port", ""); DB_DATABASE = json.getString("db_database", ""); DB_USER = json.getString("db_user", ""); DB_PASSWORD = json.getString("db_password", ""); var lavalinkNodes = json.optArray("lavalink_nodes"); LAVALINK_NODES = new HashSet<>(); if(lavalinkNodes.isPresent()){ var val = lavalinkNodes.get(); for(var i = 0; i < val.length(); i++){ LAVALINK_NODES.add(new LavalinkNode(val.getObject(i))); } } } private static void checkMandatoryValues(DataObject config, String... keys) throws MissingConfigValuesException{ var missingKeys = new HashSet<String>(); for(var key : keys){ if(!config.hasKey(key)){ missingKeys.add(key); } } if(!missingKeys.isEmpty()){ throw new MissingConfigValuesException(missingKeys); } } private static long getIdFromToken(){ return Long.parseLong( new String( Base64.getDecoder().decode( BOT_TOKEN.split("\\.")[0] ) ) ); } }
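A minimal startup sketch, assuming a config.json in the working directory that carries the mandatory keys validated by checkMandatoryValues; the Launcher class name and path are hypothetical.

import java.io.IOException;
import de.kittybot.backend.objects.exceptions.MissingConfigValuesException;
import de.kittybot.backend.utils.Config;

public class Launcher {
    public static void main(String[] args) {
        try {
            Config.init("config.json"); // path is an assumption
            System.out.println("Backend will listen on port " + Config.BACKEND_PORT);
        } catch (IOException | MissingConfigValuesException e) {
            System.err.println("Failed to load config: " + e.getMessage());
            System.exit(1);
        }
    }
}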
package com.longluo.android.ui.activity; import android.view.Gravity; import android.view.View; import com.bumptech.glide.Glide; import com.hjq.http.EasyHttp; import com.hjq.http.listener.HttpCallback; import com.longluo.android.R; import com.longluo.android.aop.SingleClick; import com.longluo.android.app.AppActivity; import com.longluo.android.http.api.LogoutApi; import com.longluo.android.http.model.HttpData; import com.longluo.android.manager.ActivityManager; import com.longluo.android.manager.CacheDataManager; import com.longluo.android.manager.ThreadPoolManager; import com.longluo.android.other.AppConfig; import com.longluo.android.ui.dialog.MenuDialog; import com.longluo.android.ui.dialog.SafeDialog; import com.longluo.android.ui.dialog.UpdateDialog; import io.github.longluo.base.BaseDialog; import io.github.longluo.ui.layout.SettingBar; import io.github.longluo.ui.view.SwitchButton; /** * Settings screen */ public final class SettingActivity extends AppActivity implements SwitchButton.OnCheckedChangeListener { private SettingBar mLanguageView; private SettingBar mPhoneView; private SettingBar mPasswordView; private SettingBar mCleanCacheView; private SwitchButton mAutoSwitchView; @Override protected int getLayoutId() { return R.layout.setting_activity; } @Override protected void initView() { mLanguageView = findViewById(R.id.sb_setting_language); mPhoneView = findViewById(R.id.sb_setting_phone); mPasswordView = findViewById(R.id.sb_setting_password); mCleanCacheView = findViewById(R.id.sb_setting_cache); mAutoSwitchView = findViewById(R.id.sb_setting_switch); // Register the listener for the switch button mAutoSwitchView.setOnCheckedChangeListener(this); setOnClickListener(R.id.sb_setting_language, R.id.sb_setting_update, R.id.sb_setting_phone, R.id.sb_setting_password, R.id.sb_setting_agreement, R.id.sb_setting_about, R.id.sb_setting_cache, R.id.sb_setting_auto, R.id.sb_setting_exit); } @Override protected void initData() { // Get the application cache size mCleanCacheView.setRightText(CacheDataManager.getTotalCacheSize(this)); mLanguageView.setRightText("简体中文"); mPhoneView.setRightText("181****1413"); mPasswordView.setRightText("密码强度较低"); } @SingleClick @Override public void onClick(View view) { int viewId = view.getId(); if (viewId == R.id.sb_setting_language) { // Bottom selection dialog new MenuDialog.Builder(this) // Keep the dialog open after a button click //.setAutoDismiss(false) .setList(R.string.setting_language_simple, R.string.setting_language_complex) .setListener((MenuDialog.OnListener<String>) (dialog, position, string) -> { mLanguageView.setRightText(string); BrowserActivity.start(getActivity(), "https://github.com/longluo/Android_Dev_Quick"); }) .setGravity(Gravity.BOTTOM) .setAnimStyle(BaseDialog.ANIM_BOTTOM) .show(); } else if (viewId == R.id.sb_setting_update) { // Compare the local version code with the server's if (20 > AppConfig.getVersionCode()) { new UpdateDialog.Builder(this) .setVersionName("2.0") .setForceUpdate(false) .setUpdateLog("修复Bug\n优化用户体验") .setDownloadUrl("https://down.qq.com/qqweb/QQ_1/android_apk/Android_8.5.0.5025_537066738.apk") .setFileMd5("560017dc94e8f9b65f4ca997c7feb326") .show(); } else { toast(R.string.update_no_update); } } else if (viewId == R.id.sb_setting_phone) { new SafeDialog.Builder(this) .setListener((dialog, phone, code) -> PhoneResetActivity.start(getActivity(), code)) .show(); } else if (viewId == R.id.sb_setting_password) { new SafeDialog.Builder(this) .setListener((dialog, phone, code) -> PasswordResetActivity.start(getActivity(), phone, code)) .show(); } else if (viewId == R.id.sb_setting_agreement) { BrowserActivity.start(this, "https://github.com/getActivity/Donate"); } else if (viewId == R.id.sb_setting_about) { startActivity(AboutActivity.class); } else if (viewId == R.id.sb_setting_auto) { // Auto-login mAutoSwitchView.setChecked(!mAutoSwitchView.isChecked()); } else if (viewId == R.id.sb_setting_cache) { // Clear the in-memory cache (must run on the main thread) Glide.get(getActivity()).clearMemory(); ThreadPoolManager.getInstance().execute(() -> { CacheDataManager.clearAllCache(this); // Clear the disk cache (must run on a background thread) Glide.get(getActivity()).clearDiskCache(); post(() -> { // Re-fetch the application cache size mCleanCacheView.setRightText(CacheDataManager.getTotalCacheSize(getActivity())); }); }); } else if (viewId == R.id.sb_setting_exit) { // Debug short-circuit: always takes this branch and skips the logout API call below if (true) { startActivity(LoginActivity.class); // Memory optimization: finish every activity except the login page ActivityManager.getInstance().finishAllActivities(LoginActivity.class); return; } // Log out EasyHttp.post(this) .api(new LogoutApi()) .request(new HttpCallback<HttpData<Void>>(this) { @Override public void onSucceed(HttpData<Void> data) { startActivity(LoginActivity.class); // Memory optimization: finish every activity except the login page ActivityManager.getInstance().finishAllActivities(LoginActivity.class); } }); } } /** * {@link SwitchButton.OnCheckedChangeListener} */ @Override public void onCheckedChanged(SwitchButton button, boolean checked) { toast(checked); } }
package ual.dra.gamesapi; import org.springframework.web.filter.GenericFilterBean; import javax.servlet.*; import javax.servlet.http.HttpServletResponse; import java.io.IOException; /** * CORS Filter * * This filter is an implementation of W3C's CORS * (Cross-Origin Resource Sharing) specification, * which is a mechanism that enables cross-origin * requests from web browsers to be declared and handled. */ public class CORSFilter extends GenericFilterBean implements Filter { @Override public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException { HttpServletResponse httpResponse = (HttpServletResponse) response; httpResponse.setHeader("Access-Control-Allow-Origin", "*"); httpResponse.setHeader("Access-Control-Allow-Methods", "*"); httpResponse.setHeader("Access-Control-Allow-Headers", "*"); httpResponse.setHeader("Access-Control-Allow-Credentials", "false"); httpResponse.setHeader("Access-Control-Max-Age", "3600"); // Note: this logs on every request that passes through the filter System.out.println("********** CORS Configuration Completed **********"); chain.doFilter(request, response); } }
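One hedged way to wire this filter, assuming the application runs on Spring Boot; the FilterRegistrationBean approach, the URL pattern, and the order below are assumptions rather than the project's actual wiring.

import org.springframework.boot.web.servlet.FilterRegistrationBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class CorsFilterConfig {
    @Bean
    public FilterRegistrationBean<CORSFilter> corsFilterRegistration() {
        FilterRegistrationBean<CORSFilter> registration =
                new FilterRegistrationBean<>(new CORSFilter());
        registration.addUrlPatterns("/*"); // apply to every request (assumption)
        registration.setOrder(0);          // run ahead of other filters (assumption)
        return registration;
    }
}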
/* * Licensed to Apereo under one or more contributor license * agreements. See the NOTICE file distributed with this work * for additional information regarding copyright ownership. * Apereo licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file * except in compliance with the License. You may obtain a * copy of the License at the following location: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.jasig.cas.util; import java.io.ByteArrayOutputStream; import java.io.InputStream; import java.security.KeyFactory; import java.security.PrivateKey; import java.security.spec.PKCS8EncodedKeySpec; import org.springframework.beans.factory.config.AbstractFactoryBean; import org.springframework.core.io.Resource; import javax.validation.constraints.NotNull; /** * Factory Bean for creating a private key from a file. * * @author Scott Battaglia * @since 3.1 * */ public final class PrivateKeyFactoryBean extends AbstractFactoryBean<PrivateKey> { @NotNull private Resource location; @NotNull private String algorithm; @Override protected PrivateKey createInstance() throws Exception { try (InputStream privKey = this.location.getInputStream()) { // Read the stream fully; available() is only an estimate and a single // read() call is not guaranteed to fill the buffer. final ByteArrayOutputStream buffer = new ByteArrayOutputStream(); final byte[] chunk = new byte[4096]; int read; while ((read = privKey.read(chunk)) != -1) { buffer.write(chunk, 0, read); } final PKCS8EncodedKeySpec privSpec = new PKCS8EncodedKeySpec(buffer.toByteArray()); final KeyFactory factory = KeyFactory.getInstance(this.algorithm); return factory.generatePrivate(privSpec); } } @Override public Class<?> getObjectType() { return PrivateKey.class; } public void setLocation(final Resource location) { this.location = location; } public void setAlgorithm(final String algorithm) { this.algorithm = algorithm; } }
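A programmatic usage sketch; in a CAS deployment this bean is normally declared in Spring configuration, and the key path below is illustrative. Calling afterPropertiesSet() first lets AbstractFactoryBean initialize and cache the singleton before getObject() is used.

import java.security.PrivateKey;
import org.springframework.core.io.FileSystemResource;

public class PrivateKeyLoadDemo {
    public static void main(String[] args) throws Exception {
        PrivateKeyFactoryBean factory = new PrivateKeyFactoryBean();
        factory.setLocation(new FileSystemResource("/etc/cas/keys/private.p8")); // illustrative path
        factory.setAlgorithm("RSA"); // must match how the key was generated
        factory.afterPropertiesSet();
        PrivateKey key = factory.getObject();
        System.out.println("Loaded a " + key.getAlgorithm() + " private key");
    }
}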
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.atmosphere; import java.lang.reflect.Method; import org.apache.wicket.Component; import org.apache.wicket.WicketRuntimeException; import org.apache.wicket.behavior.Behavior; import com.google.common.base.Objects; import com.google.common.base.Predicate; import com.google.common.base.Predicates; /** * The subscription of a method on a component to certain events. This is used by {@link EventBus} * to track the subscriptions. * * @author papegaaij */ public class EventSubscription { private final String componentPath; private final Integer behaviorIndex; private final String methodName; private final Predicate<AtmosphereEvent> filter; private final Predicate<AtmosphereEvent> contextAwareFilter; /** * Construct. * * @param component * @param behavior * @param method */ public EventSubscription(Component component, Behavior behavior, Method method) { componentPath = component.getPageRelativePath(); behaviorIndex = behavior == null ? null : component.getBehaviorId(behavior); Class<?> eventType = method.getParameterTypes()[1]; Subscribe subscribe = method.getAnnotation(Subscribe.class); filter = Predicates.and(payloadOfType(eventType), createFilter(subscribe.filter())); contextAwareFilter = createFilter(subscribe.contextAwareFilter()); methodName = method.getName(); } /** * Construct. * * @param component * @param behavior * @param method * @param filter * @param contextAwareFilter */ public EventSubscription(Component component, Behavior behavior, Method method, Predicate<AtmosphereEvent> filter, Predicate<AtmosphereEvent> contextAwareFilter) { componentPath = component.getPageRelativePath(); behaviorIndex = behavior == null ? null : component.getBehaviorId(behavior); this.filter = filter == null ? new NoFilterPredicate() : filter; this.contextAwareFilter = contextAwareFilter == null ? new NoFilterPredicate() : contextAwareFilter; methodName = method.getName(); } private static Predicate<AtmosphereEvent> payloadOfType(final Class<?> type) { return new Predicate<AtmosphereEvent>() { @Override public boolean apply(AtmosphereEvent input) { return type.isInstance(input.getPayload()); } }; } private static Predicate<AtmosphereEvent> createFilter( Class<? 
extends Predicate<AtmosphereEvent>> filterClass) { try { return filterClass.newInstance(); } catch (InstantiationException e) { throw new WicketRuntimeException(e); } catch (IllegalAccessException e) { throw new WicketRuntimeException(e); } } /** * @return The path of the subscribed component */ public String getComponentPath() { return componentPath; } /** * @return The index of the subscribed behavior, or null if the subscription is for the * component itself */ public Integer getBehaviorIndex() { return behaviorIndex; } /** * @return The filter on incoming events, a combination of the type and the * {@link Subscribe#filter()} parameter. */ public Predicate<AtmosphereEvent> getFilter() { return filter; } /** * @return The context aware filter on incoming events, constructed from the * {@link Subscribe#contextAwareFilter()} parameter. */ public Predicate<AtmosphereEvent> getContextAwareFilter() { return contextAwareFilter; } /** * @return The name of the subscribed method */ public String getMethodName() { return methodName; } @Override public int hashCode() { return Objects.hashCode(componentPath, behaviorIndex, methodName); } @Override public boolean equals(Object obj) { if (obj instanceof EventSubscription) { EventSubscription other = (EventSubscription)obj; return Objects.equal(componentPath, other.getComponentPath()) && Objects.equal(behaviorIndex, other.getBehaviorIndex()) && Objects.equal(methodName, other.getMethodName()); } return false; } }
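A hedged sketch of the subscription side this class tracks: a Wicket component method annotated with @Subscribe, whose second parameter type is what payloadOfType matches (method.getParameterTypes()[1] above). ChatMessage is a hypothetical payload class, defined inline to keep the sketch self-contained.

import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.atmosphere.Subscribe;
import org.apache.wicket.markup.html.basic.Label;

public class ChatLabel extends Label {
    /** Hypothetical payload type delivered through the EventBus. */
    public static final class ChatMessage {
        private final String text;
        public ChatMessage(String text) { this.text = text; }
        @Override public String toString() { return text; }
    }

    public ChatLabel(String id) {
        super(id, "waiting for messages...");
        setOutputMarkupId(true);
    }

    // The second parameter selects the event type for this subscription.
    @Subscribe
    public void onMessage(AjaxRequestTarget target, ChatMessage message) {
        setDefaultModelObject(message.toString());
        target.add(this);
    }
}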
// Copyright 2016 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.bazel.repository.skylark; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Optional; import com.google.common.base.Strings; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.devtools.build.lib.actions.FileValue; import com.google.devtools.build.lib.bazel.debug.WorkspaceRuleEvent; import com.google.devtools.build.lib.bazel.repository.DecompressorDescriptor; import com.google.devtools.build.lib.bazel.repository.DecompressorValue; import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache; import com.google.devtools.build.lib.bazel.repository.cache.RepositoryCache.KeyType; import com.google.devtools.build.lib.bazel.repository.downloader.HttpDownloader; import com.google.devtools.build.lib.bazel.repository.downloader.HttpUtils; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.events.ExtendedEventHandler.FetchProgress; import com.google.devtools.build.lib.events.Location; import com.google.devtools.build.lib.packages.Attribute; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.StructImpl; import com.google.devtools.build.lib.packages.StructProvider; import com.google.devtools.build.lib.rules.repository.RepositoryFunction; import com.google.devtools.build.lib.rules.repository.RepositoryFunction.RepositoryFunctionException; import com.google.devtools.build.lib.rules.repository.WorkspaceAttributeMapper; import com.google.devtools.build.lib.skylarkbuildapi.repository.SkylarkRepositoryContextApi; import com.google.devtools.build.lib.syntax.EvalException; import com.google.devtools.build.lib.syntax.EvalUtils; import com.google.devtools.build.lib.syntax.Runtime; import com.google.devtools.build.lib.syntax.SkylarkDict; import com.google.devtools.build.lib.syntax.SkylarkList; import com.google.devtools.build.lib.syntax.SkylarkType; import com.google.devtools.build.lib.util.OsUtils; import com.google.devtools.build.lib.util.StringUtilities; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import com.google.devtools.build.lib.vfs.RootedPath; import com.google.devtools.build.lib.vfs.Symlinks; import com.google.devtools.build.skyframe.SkyFunction.Environment; import com.google.devtools.build.skyframe.SkyFunctionException.Transience; import com.google.devtools.build.skyframe.SkyKey; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.net.MalformedURLException; import java.net.URL; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.List; import java.util.Map; /** Skylark API for the repository_rule's context. 
*/ public class SkylarkRepositoryContext implements SkylarkRepositoryContextApi<RepositoryFunctionException> { private final Rule rule; private final Path outputDirectory; private final StructImpl attrObject; private final SkylarkOS osObject; private final Environment env; private final HttpDownloader httpDownloader; private final double timeoutScaling; private final Map<String, String> markerData; /** * Create a new context (repository_ctx) object for a skylark repository rule ({@code rule} * argument). */ SkylarkRepositoryContext( Rule rule, Path outputDirectory, Environment environment, Map<String, String> env, HttpDownloader httpDownloader, double timeoutScaling, Map<String, String> markerData) throws EvalException { this.rule = rule; this.outputDirectory = outputDirectory; this.env = environment; this.osObject = new SkylarkOS(env); this.httpDownloader = httpDownloader; this.timeoutScaling = timeoutScaling; this.markerData = markerData; WorkspaceAttributeMapper attrs = WorkspaceAttributeMapper.of(rule); ImmutableMap.Builder<String, Object> attrBuilder = new ImmutableMap.Builder<>(); for (String name : attrs.getAttributeNames()) { if (!name.equals("$local")) { Object val = attrs.getObject(name); attrBuilder.put( Attribute.getSkylarkName(name), val == null ? Runtime.NONE // Attribute values should be type safe : SkylarkType.convertToSkylark(val, (com.google.devtools.build.lib.syntax.Environment) null)); } } attrObject = StructProvider.STRUCT.create(attrBuilder.build(), "No such attribute '%s'"); } @Override public String getName() { return rule.getName(); } @Override public StructImpl getAttr() { return attrObject; } @Override public SkylarkPath path(Object path) throws EvalException, InterruptedException { return getPath("path()", path); } private SkylarkPath getPath(String method, Object path) throws EvalException, InterruptedException { if (path instanceof String) { PathFragment pathFragment = PathFragment.create(path.toString()); return new SkylarkPath(pathFragment.isAbsolute() ? outputDirectory.getFileSystem().getPath(path.toString()) : outputDirectory.getRelative(pathFragment)); } else if (path instanceof Label) { return getPathFromLabel((Label) path); } else if (path instanceof SkylarkPath) { return (SkylarkPath) path; } else { throw new EvalException(Location.BUILTIN, method + " can only take a string or a label."); } } @Override public void reportProgress(String status) { final String message = status == null ? 
"" : status; final String id = "@" + getName(); env.getListener() .post( new FetchProgress() { @Override public String getResourceIdentifier() { return id; } @Override public String getProgress() { return message; } @Override public boolean isFinished() { return false; } }); } @Override public void symlink(Object from, Object to, Location location) throws RepositoryFunctionException, EvalException, InterruptedException { SkylarkPath fromPath = getPath("symlink()", from); SkylarkPath toPath = getPath("symlink()", to); WorkspaceRuleEvent w = WorkspaceRuleEvent.newSymlinkEvent( fromPath.toString(), toPath.toString(), rule.getLabel().toString(), location); env.getListener().post(w); try { checkInOutputDirectory("write", toPath); makeDirectories(toPath.getPath()); toPath.getPath().createSymbolicLink(fromPath.getPath()); } catch (IOException e) { throw new RepositoryFunctionException( new IOException( "Could not create symlink from " + fromPath + " to " + toPath + ": " + e.getMessage(), e), Transience.TRANSIENT); } } private void checkInOutputDirectory(String operation, SkylarkPath path) throws RepositoryFunctionException { if (!path.getPath().getPathString().startsWith(outputDirectory.getPathString())) { throw new RepositoryFunctionException( new EvalException( Location.fromFile(path.getPath()), "Cannot " + operation + " outside of the repository directory for path " + path), Transience.PERSISTENT); } } @Override public void createFile( Object path, String content, Boolean executable, Boolean legacyUtf8, Location location) throws RepositoryFunctionException, EvalException, InterruptedException { SkylarkPath p = getPath("file()", path); byte[] contentBytes; if (legacyUtf8) { contentBytes = content.getBytes(StandardCharsets.UTF_8); } else { contentBytes = content.getBytes(StandardCharsets.ISO_8859_1); } WorkspaceRuleEvent w = WorkspaceRuleEvent.newFileEvent( p.toString(), content, executable, rule.getLabel().toString(), location); env.getListener().post(w); try { checkInOutputDirectory("write", p); makeDirectories(p.getPath()); p.getPath().delete(); try (OutputStream stream = p.getPath().getOutputStream()) { stream.write(contentBytes); } if (executable) { p.getPath().setExecutable(true); } } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } @Override public void createFileFromTemplate( Object path, Object template, SkylarkDict<String, String> substitutions, Boolean executable, Location location) throws RepositoryFunctionException, EvalException, InterruptedException { SkylarkPath p = getPath("template()", path); SkylarkPath t = getPath("template()", template); WorkspaceRuleEvent w = WorkspaceRuleEvent.newTemplateEvent( p.toString(), t.toString(), substitutions, executable, rule.getLabel().toString(), location); env.getListener().post(w); try { checkInOutputDirectory("write", p); makeDirectories(p.getPath()); String tpl = FileSystemUtils.readContent(t.getPath(), StandardCharsets.UTF_8); for (Map.Entry<String, String> substitution : substitutions.entrySet()) { tpl = StringUtilities.replaceAllLiteral(tpl, substitution.getKey(), substitution.getValue()); } p.getPath().delete(); try (OutputStream stream = p.getPath().getOutputStream()) { stream.write(tpl.getBytes(StandardCharsets.UTF_8)); } if (executable) { p.getPath().setExecutable(true); } } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } @Override public String readFile(Object path, Location location) throws RepositoryFunctionException, EvalException, 
InterruptedException { SkylarkPath p = getPath("read()", path); WorkspaceRuleEvent w = WorkspaceRuleEvent.newReadEvent(p.toString(), rule.getLabel().toString(), location); env.getListener().post(w); try { checkInOutputDirectory("read", p); return FileSystemUtils.readContent(p.getPath(), StandardCharsets.ISO_8859_1); } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } // Create parent directories for the given path private void makeDirectories(Path path) throws IOException { Path parent = path.getParentDirectory(); if (parent != null) { parent.createDirectoryAndParents(); } } @Override public SkylarkOS getOS(Location location) { WorkspaceRuleEvent w = WorkspaceRuleEvent.newOsEvent(rule.getLabel().toString(), location); env.getListener().post(w); return osObject; } private void createDirectory(Path directory) throws RepositoryFunctionException { try { if (!directory.exists()) { makeDirectories(directory); directory.createDirectory(); } } catch (IOException e) { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } @Override public SkylarkExecutionResult execute( SkylarkList<Object> arguments, Integer timeout, SkylarkDict<String, String> environment, boolean quiet, String workingDirectory, Location location) throws EvalException, RepositoryFunctionException, InterruptedException { WorkspaceRuleEvent w = WorkspaceRuleEvent.newExecuteEvent( arguments, timeout, osObject.getEnvironmentVariables(), environment, outputDirectory.getPathString(), quiet, rule.getLabel().toString(), location); env.getListener().post(w); createDirectory(outputDirectory); Path workingDirectoryPath = outputDirectory; if (workingDirectory != null && !workingDirectory.isEmpty()) { workingDirectoryPath = getPath("execute()", workingDirectory).getPath(); } createDirectory(workingDirectoryPath); return SkylarkExecutionResult.builder(osObject.getEnvironmentVariables()) .addArguments(arguments) .setDirectory(workingDirectoryPath.getPathFile()) .addEnvironmentVariables(environment) .setTimeout(Math.round(timeout.longValue() * 1000 * timeoutScaling)) .setQuiet(quiet) .execute(); } @Override public SkylarkPath which(String program, Location location) throws EvalException { WorkspaceRuleEvent w = WorkspaceRuleEvent.newWhichEvent(program, rule.getLabel().toString(), location); env.getListener().post(w); if (program.contains("/") || program.contains("\\")) { throw new EvalException( Location.BUILTIN, "Program argument of which() may not contain a / or a \\ ('" + program + "' given)"); } try { SkylarkPath commandPath = findCommandOnPath(program); if (commandPath != null) { return commandPath; } if (!program.endsWith(OsUtils.executableExtension())) { program += OsUtils.executableExtension(); return findCommandOnPath(program); } } catch (IOException e) { // An IOException while checking the executable file means we cannot read it, so we // cannot execute it either; swallow the exception. } return null; } private SkylarkPath findCommandOnPath(String program) throws IOException { for (String p : getPathEnvironment()) { PathFragment fragment = PathFragment.create(p); if (fragment.isAbsolute()) { // We ignore relative paths as they don't mean much here (relative to where? the workspace // root?).
Path path = outputDirectory.getFileSystem().getPath(fragment).getChild(program); if (path.exists() && path.isFile(Symlinks.FOLLOW) && path.isExecutable()) { return new SkylarkPath(path); } } } return null; } private void warnAboutSha256Error(List<URL> urls, String sha256) { // Inform the user immediately, even though the file will still be downloaded. // This cannot be done by a regular error event, as all regular events are recorded // and only shown once the execution of the repository rule is finished. // So we have to provide the information as an update on the progress reporting. String url = "(unknown)"; if (urls.size() > 0) { url = urls.get(0).toString(); } reportProgress("Will fail after download of " + url + ". Invalid SHA256 '" + sha256 + "'"); } @Override public StructImpl download( Object url, Object output, String sha256, Boolean executable, Boolean allowFail, Location location) throws RepositoryFunctionException, EvalException, InterruptedException { List<URL> urls = getUrls(url, /* ensureNonEmpty= */ !allowFail); RepositoryFunctionException sha256Validation = validateSha256(sha256, location); if (sha256Validation != null) { warnAboutSha256Error(urls, sha256); sha256 = ""; } SkylarkPath outputPath = getPath("download()", output); WorkspaceRuleEvent w = WorkspaceRuleEvent.newDownloadEvent( urls, output.toString(), sha256, executable, rule.getLabel().toString(), location); env.getListener().post(w); Path downloadedPath; try { checkInOutputDirectory("write", outputPath); makeDirectories(outputPath.getPath()); downloadedPath = httpDownloader.download( urls, sha256, Optional.<String>absent(), outputPath.getPath(), env.getListener(), osObject.getEnvironmentVariables(), getName()); if (executable) { outputPath.getPath().setExecutable(true); } } catch (InterruptedException e) { throw new RepositoryFunctionException( new IOException("thread interrupted"), Transience.TRANSIENT); } catch (IOException e) { if (allowFail) { SkylarkDict<String, Object> dict = SkylarkDict.of(null, "success", false); return StructProvider.STRUCT.createStruct(dict, null); } else { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } if (sha256Validation != null) { throw sha256Validation; } String finalSha256; try { finalSha256 = calculateSha256(sha256, downloadedPath); } catch (IOException e) { throw new RepositoryFunctionException( new IOException( "Couldn't hash downloaded file (" + downloadedPath.getPathString() + ")", e), Transience.PERSISTENT); } SkylarkDict<String, Object> dict = SkylarkDict.of(null, "sha256", finalSha256, "success", true); return StructProvider.STRUCT.createStruct(dict, null); } @Override public void extract(Object archive, Object output, String stripPrefix, Location location) throws RepositoryFunctionException, InterruptedException, EvalException { SkylarkPath archivePath = getPath("extract()", archive); if (!archivePath.exists()) { throw new RepositoryFunctionException( new EvalException( Location.fromFile(archivePath.getPath()), String.format("Archive path '%s' does not exist.", archivePath.toString())), Transience.TRANSIENT); } SkylarkPath outputPath = getPath("extract()", output); checkInOutputDirectory("write", outputPath); WorkspaceRuleEvent w = WorkspaceRuleEvent.newExtractEvent( archive.toString(), output.toString(), stripPrefix, rule.getLabel().toString(), location); env.getListener().post(w); DecompressorValue.decompress( DecompressorDescriptor.builder() .setTargetKind(rule.getTargetKind()) .setTargetName(rule.getName()) .setArchivePath(archivePath.getPath())
.setRepositoryPath(outputPath.getPath()) .setPrefix(stripPrefix) .build()); } @Override public StructImpl downloadAndExtract( Object url, Object output, String sha256, String type, String stripPrefix, Boolean allowFail, Location location) throws RepositoryFunctionException, InterruptedException, EvalException { List<URL> urls = getUrls(url, /* ensureNonEmpty= */ !allowFail); RepositoryFunctionException sha256Validation = validateSha256(sha256, location); if (sha256Validation != null) { warnAboutSha256Error(urls, sha256); sha256 = ""; } WorkspaceRuleEvent w = WorkspaceRuleEvent.newDownloadAndExtractEvent( urls, output.toString(), sha256, type, stripPrefix, rule.getLabel().toString(), location); // Download to outputDirectory and delete it after extraction SkylarkPath outputPath = getPath("download_and_extract()", output); checkInOutputDirectory("write", outputPath); createDirectory(outputPath.getPath()); Path downloadedPath; try { downloadedPath = httpDownloader.download( urls, sha256, Optional.of(type), outputPath.getPath(), env.getListener(), osObject.getEnvironmentVariables(), getName()); } catch (InterruptedException e) { env.getListener().post(w); throw new RepositoryFunctionException( new IOException("thread interrupted"), Transience.TRANSIENT); } catch (IOException e) { env.getListener().post(w); if (allowFail) { SkylarkDict<String, Object> dict = SkylarkDict.of(null, "success", false); return StructProvider.STRUCT.createStruct(dict, null); } else { throw new RepositoryFunctionException(e, Transience.TRANSIENT); } } if (sha256Validation != null) { throw sha256Validation; } env.getListener().post(w); DecompressorValue.decompress( DecompressorDescriptor.builder() .setTargetKind(rule.getTargetKind()) .setTargetName(rule.getName()) .setArchivePath(downloadedPath) .setRepositoryPath(outputPath.getPath()) .setPrefix(stripPrefix) .build()); String finalSha256 = null; try { finalSha256 = calculateSha256(sha256, downloadedPath); } catch (IOException e) { throw new RepositoryFunctionException( new IOException( "Couldn't hash downloaded file (" + downloadedPath.getPathString() + ")", e), Transience.PERSISTENT); } try { if (downloadedPath.exists()) { downloadedPath.delete(); } } catch (IOException e) { throw new RepositoryFunctionException( new IOException( "Couldn't delete temporary file (" + downloadedPath.getPathString() + ")", e), Transience.TRANSIENT); } SkylarkDict<String, Object> dict = SkylarkDict.of(null, "sha256", finalSha256, "success", true); return StructProvider.STRUCT.createStruct(dict, null); } private String calculateSha256(String originalSha, Path path) throws IOException { if (!Strings.isNullOrEmpty(originalSha)) { // The sha is checked on download, so if we got here, the user provided sha is good return originalSha; } return RepositoryCache.getChecksum(KeyType.SHA256, path); } private RepositoryFunctionException validateSha256(String sha256, Location loc) { if (!sha256.isEmpty() && !KeyType.SHA256.isValid(sha256)) { return new RepositoryFunctionException( new EvalException( loc, "Definition of repository " + rule.getName() + ": Syntactically invalid SHA256 checksum: '" + sha256 + "' at " + rule.getLocation()), Transience.PERSISTENT); } return null; } private static ImmutableList<String> checkAllUrls(Iterable<?> urlList) throws EvalException { ImmutableList.Builder<String> result = ImmutableList.builder(); for (Object o : urlList) { if (!(o instanceof String)) { throw new EvalException( null, String.format( "Expected a string or sequence of strings for 'url' argument, " + "but 
got '%s' item in the sequence",
                EvalUtils.getDataTypeName(o)));
      }
      result.add((String) o);
    }
    return result.build();
  }

  private static List<URL> getUrls(Object urlOrList)
      throws RepositoryFunctionException, EvalException {
    return getUrls(urlOrList, /* ensureNonEmpty= */ true);
  }

  private static List<URL> getUrls(Object urlOrList, boolean ensureNonEmpty)
      throws RepositoryFunctionException, EvalException {
    List<String> urlStrings;
    if (urlOrList instanceof String) {
      urlStrings = ImmutableList.of((String) urlOrList);
    } else {
      urlStrings = checkAllUrls((Iterable<?>) urlOrList);
    }
    if (ensureNonEmpty && urlStrings.isEmpty()) {
      throw new RepositoryFunctionException(new IOException("urls not set"), Transience.PERSISTENT);
    }
    List<URL> urls = new ArrayList<>();
    for (String urlString : urlStrings) {
      URL url;
      try {
        url = new URL(urlString);
      } catch (MalformedURLException e) {
        throw new RepositoryFunctionException(
            new IOException("Bad URL: " + urlString), Transience.PERSISTENT);
      }
      if (!HttpUtils.isUrlSupportedByDownloader(url)) {
        throw new RepositoryFunctionException(
            new IOException("Unsupported protocol: " + url.getProtocol()), Transience.PERSISTENT);
      }
      urls.add(url);
    }
    return urls;
  }

  // This exists just for tests, to override the path environment.
  private static ImmutableList<String> pathEnv = null;

  @VisibleForTesting
  static void setPathEnvironment(String... pathEnv) {
    SkylarkRepositoryContext.pathEnv = ImmutableList.<String>copyOf(pathEnv);
  }

  private ImmutableList<String> getPathEnvironment() {
    if (pathEnv != null) {
      return pathEnv;
    }
    String pathEnviron = osObject.getEnvironmentVariables().get("PATH");
    if (pathEnviron == null) {
      return ImmutableList.of();
    }
    return ImmutableList.copyOf(pathEnviron.split(File.pathSeparator));
  }

  @Override
  public String toString() {
    return "repository_ctx[" + rule.getLabel() + "]";
  }

  // Resolve the label given by value into a file path.
  private SkylarkPath getPathFromLabel(Label label) throws EvalException, InterruptedException {
    RootedPath rootedPath = RepositoryFunction.getRootedPathFromLabel(label, env);
    SkyKey fileSkyKey = FileValue.key(rootedPath);
    FileValue fileValue = null;
    try {
      fileValue = (FileValue) env.getValueOrThrow(fileSkyKey, IOException.class);
    } catch (IOException e) {
      throw new EvalException(Location.BUILTIN, e);
    }
    if (fileValue == null) {
      throw RepositoryFunction.restart();
    }
    if (!fileValue.isFile() || fileValue.isSpecialFile()) {
      throw new EvalException(
          Location.BUILTIN, "Not a regular file: " + rootedPath.asPath().getPathString());
    }

    // A label does not contain spaces, so it is safe to use as a key.
    try {
      markerData.put("FILE:" + label, RepositoryFunction.fileValueToMarkerValue(fileValue));
    } catch (IOException e) {
      throw new EvalException(Location.BUILTIN, e);
    }
    return new SkylarkPath(rootedPath.asPath());
  }

  /**
   * Try to compute the paths of all attributes that are labels, including labels in list
   * arguments.
   *
   * <p>The value is ignored, but any missing information from the environment is detected (and an
   * exception thrown). In this way, we can enforce that all arguments are evaluated before we
   * start potentially more expensive operations.
   */
  public void enforceLabelAttributes() throws EvalException, InterruptedException {
    StructImpl attr = getAttr();
    for (String name : attr.getFieldNames()) {
      Object value = attr.getValue(name);
      if (value instanceof Label) {
        getPathFromLabel((Label) value);
      }
      if (value instanceof SkylarkList) {
        for (Object entry : (SkylarkList) value) {
          if (entry instanceof Label) {
            getPathFromLabel((Label) entry);
          }
        }
      }
    }
  }
}
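// ---------------------------------------------------------------------------
// A usage sketch of the downloadAndExtract() contract implemented above, seen
// from the repository-rule author's side. This is illustrative Starlark kept
// inside a comment; the exact attribute spellings (e.g. allow_fail) are
// assumptions, while the returned struct fields "success" and "sha256" come
// straight from the method body above.
//
//   result = repository_ctx.download_and_extract(
//       url = "https://example.com/archive-1.0.tar.gz",  # illustrative URL
//       output = ".",
//       sha256 = "",                 # empty sha256 skips checksum validation
//       type = "tar.gz",
//       stripPrefix = "archive-1.0",
//       allow_fail = True,
//   )
//   if not result.success:
//       fail("download failed")
//   print(result.sha256)             # checksum computed after the download
// ---------------------------------------------------------------------------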
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.wb.selenium.model.persps; import org.jboss.arquillian.graphene.Graphene; import org.jboss.arquillian.graphene.page.Page; import org.kie.wb.selenium.model.PageObject; import org.kie.wb.selenium.model.PrimaryNavbar; import org.kie.wb.selenium.util.Waits; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; import org.openqa.selenium.support.FindBy; import static org.kie.wb.selenium.util.ObstructedClickExceptionHandler.retryClickUntilNotObstructed; public abstract class AbstractPerspective extends PageObject { @Page private PrimaryNavbar navbar; @FindBy(css = "input[type='submit']") private WebElement loginAgainButton; public PrimaryNavbar getNavbar() { return navbar; } public void logout() { navbar.logout(); // Click 'Login again' to get back to login page Graphene.waitModel().until().element(loginAgainButton).is().present(); loginAgainButton.click(); } public abstract boolean isDisplayed(); /** * Waiting for the perspective to be fully loaded. No-op by default. */ public void waitForLoaded() { } public void click(By locatorOfThingToClick) { WebElement thingToClick = Waits.elementPresent(locatorOfThingToClick); retryClickUntilNotObstructed(thingToClick); } }
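// A minimal sketch of a concrete perspective built on AbstractPerspective
// above. The class name and the "home-perspective" element id are assumptions
// for illustration, not taken from the real page objects.
package org.kie.wb.selenium.model.persps;

import org.kie.wb.selenium.util.Waits;
import org.openqa.selenium.By;

public class HomePerspectiveSketch extends AbstractPerspective {

    // Hypothetical root element that marks the perspective as rendered.
    private static final By ROOT = By.id("home-perspective");

    @Override
    public boolean isDisplayed() {
        // A present, visible root element is taken as "perspective displayed".
        return Waits.elementPresent(ROOT).isDisplayed();
    }

    @Override
    public void waitForLoaded() {
        // Override the no-op default: block until the root element appears.
        Waits.elementPresent(ROOT);
    }
}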
package de.dlyt.yanndroid.oneui.sesl.recyclerview; import android.graphics.Canvas; import android.os.Build; import android.view.View; import androidx.core.view.ViewCompat; import de.dlyt.yanndroid.oneui.R; import de.dlyt.yanndroid.oneui.view.RecyclerView; class ItemTouchUIUtilImpl implements ItemTouchUIUtil { static final ItemTouchUIUtil INSTANCE = new ItemTouchUIUtilImpl(); @Override public void onDraw(Canvas c, RecyclerView recyclerView, View view, float dX, float dY, int actionState, boolean isCurrentlyActive) { if (Build.VERSION.SDK_INT >= 21) { if (isCurrentlyActive) { Object originalElevation = view.getTag(R.id.item_touch_helper_previous_elevation); if (originalElevation == null) { originalElevation = ViewCompat.getElevation(view); float newElevation = 1f + findMaxElevation(recyclerView, view); ViewCompat.setElevation(view, newElevation); view.setTag(R.id.item_touch_helper_previous_elevation, originalElevation); } } } view.setTranslationX(dX); view.setTranslationY(dY); } private static float findMaxElevation(RecyclerView recyclerView, View itemView) { final int childCount = recyclerView.getChildCount(); float max = 0; for (int i = 0; i < childCount; i++) { final View child = recyclerView.getChildAt(i); if (child == itemView) { continue; } final float elevation = ViewCompat.getElevation(child); if (elevation > max) { max = elevation; } } return max; } @Override public void onDrawOver(Canvas c, RecyclerView recyclerView, View view, float dX, float dY, int actionState, boolean isCurrentlyActive) { } @Override public void clearView(View view) { if (Build.VERSION.SDK_INT >= 21) { final Object tag = view.getTag(R.id.item_touch_helper_previous_elevation); if (tag instanceof Float) { ViewCompat.setElevation(view, (Float) tag); } view.setTag(R.id.item_touch_helper_previous_elevation, null); } view.setTranslationX(0f); view.setTranslationY(0f); } @Override public void onSelected(View view) { } }
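// A framework-free sketch of the save/raise/restore elevation pattern used in
// ItemTouchUIUtilImpl above: remember the original value exactly once, lift it
// above the highest sibling, and put it back when the drag ends. The field
// names and sample values are illustrative assumptions.
class ElevationPatternSketch {

    private Float savedElevation; // plays the role of the view tag
    private float elevation = 2f;

    void onDragStart(float maxSiblingElevation) {
        if (savedElevation == null) {
            savedElevation = elevation;           // save once per drag
            elevation = maxSiblingElevation + 1f; // float above all siblings
        }
    }

    void onDragEnd() {
        if (savedElevation != null) {
            elevation = savedElevation; // restore the remembered value
            savedElevation = null;      // clear the "tag" for the next drag
        }
    }
}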
package uk.gov.hmcts.reform.notificationservice;

import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import com.warrenstrange.googleauth.GoogleAuthenticator;
import io.restassured.RestAssured;
import io.restassured.response.Response;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import uk.gov.hmcts.reform.logging.appinsights.SyntheticHeaders;

import java.io.IOException;
import java.util.Map;

import static org.assertj.core.api.Assertions.assertThat;
import static uk.gov.hmcts.reform.notificationservice.Configuration.TEST_S2S_SECRET;
import static uk.gov.hmcts.reform.notificationservice.Configuration.TEST_S2S_URL;
import static uk.gov.hmcts.reform.notificationservice.Configuration.TEST_URL;

final class RestAssuredHelper {

    private static final String SERVICE_AUTH_HEADER = "ServiceAuthorization";
    private static final String SYNTHETIC_HEADER_VALUE = "Reform Scan Notification Service functional test";
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private RestAssuredHelper() {
        // utility class constructor
    }

    static String s2sSignIn(String microservice) {
        Map<String, Object> params = ImmutableMap.of(
            "microservice", microservice,
            "oneTimePassword", new GoogleAuthenticator().getTotpPassword(TEST_S2S_SECRET)
        );

        Response response = RestAssured
            .given()
            .relaxedHTTPSValidation()
            .baseUri(TEST_S2S_URL)
            .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
            .header(SyntheticHeaders.SYNTHETIC_TEST_SOURCE, SYNTHETIC_HEADER_VALUE)
            .body(params)
            .when()
            .post("/lease")
            .andReturn();

        assertThat(response.getStatusCode())
            .as("Signed into S2S successfully")
            .isEqualTo(200);

        return response
            .getBody()
            .asString();
    }

    static JsonNode getNotification(String s2sToken, String zipFilename) {
        Response response = RestAssured
            .given()
            .relaxedHTTPSValidation()
            .baseUri(TEST_URL)
            .header(SERVICE_AUTH_HEADER, "Bearer " + s2sToken)
            .header(HttpHeaders.CONTENT_TYPE, MediaType.APPLICATION_JSON_VALUE)
            .header(SyntheticHeaders.SYNTHETIC_TEST_SOURCE, SYNTHETIC_HEADER_VALUE)
            .queryParam("file_name", zipFilename)
            .when()
            .get("/notifications")
            .andReturn();

        assertThat(response.getStatusCode())
            .as("Notifications received from service")
            .isEqualTo(200);

        try {
            return MAPPER.readTree(response.getBody().asByteArray());
        } catch (IOException exception) {
            // Preserve the original exception as the cause instead of swallowing it.
            throw new RuntimeException(
                "Unable to read body from notification endpoint. Body: " + response.getBody().prettyPrint(),
                exception
            );
        }
    }
}
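// A minimal usage sketch composing the two helpers above in a functional
// test. The microservice name and zip file name are illustrative assumptions.
package uk.gov.hmcts.reform.notificationservice;

import com.fasterxml.jackson.databind.JsonNode;

class NotificationFlowSketch {

    JsonNode fetchNotificationsFor(String zipFilename) {
        // Lease an S2S token first, then present it as a Bearer token when
        // querying the notification endpoint for the given zip file.
        String token = RestAssuredHelper.s2sSignIn("reform_scan_notification_tests");
        return RestAssuredHelper.getNotification(token, zipFilename);
    }
}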
package org.springnext.builder.generator;

import java.util.List;

import org.springnext.builder.entity.Class;
import org.springnext.builder.entity.Column;
import org.springnext.builder.entity.Field;
import org.springnext.builder.entity.Method;
import org.springnext.builder.entity.Parameter;
import org.springnext.builder.entity.ProjectInfo;
import org.springnext.builder.entity.Table;
import org.springnext.builder.entity.Type;
import org.springnext.builder.resolver.TypeResolver;

/**
 * Plain Ordinary Java Object (POJO) generator.
 *
 * @author HyDe
 */
public class POJOGenerator extends Generator {

    /**
     * Type of the class (its class name).
     */
    protected Type classType;

    /**
     * <strong>Description:</strong>
     * <pre>
     * Constructor that initializes the instance.
     * </pre>
     *
     * @param table the table object
     */
    public POJOGenerator(Table table, ProjectInfo projectInfo) {
        // Call the superclass constructor.
        super(table, projectInfo);
        // Initialize the type of the class (its class name).
        this.classType = new Type(projectInfo.getPackageName() + "." + projectInfo.getModelName()
                + ".entity." + getTableName());
    }

    /**
     * <strong>Description:</strong>
     * <pre>
     * Generate the code.
     * </pre>
     *
     * @return {@code core.file.java.Class} - the generated POJO class
     */
    @Override
    public Class generate() {
        // Create a new class.
        Class class_ = new Class();
        // Set the type of the class.
        class_.setType(classType);
        // Set the access modifier of the class.
        class_.setVisibility("public ");
        // Get all columns of the table.
        List<Column> columns = table.getColumns();
        // Iterate over the columns.
        for (Column column : columns) {
            // Create a new field.
            Field field = new Field();
            // Set the access modifier of the field.
            field.setVisibility("private ");
            // Set the type of the field.
            field.setType(new TypeResolver().resolve(column.getType()));
            // Set the field name.
            field.setName(getColumnName(column));
            // Add the field to the class.
            class_.addField(field);
            // Add the type the class needs to import.
            class_.addImport(field.getType());
            // Generate the getter method.
            generateGetterMethod(class_, field);
            // Generate the setter method.
            generateSetterMethod(class_, field);
        }
        // Return the POJO class.
        return class_;
    }

    /**
     * <strong>Description:</strong>
     * <pre>
     * Generate a getter method for a field.
     * </pre>
     *
     * @param class_ the class
     * @param field the field
     */
    protected void generateGetterMethod(Class class_, Field field) {
        // Get the type of the field.
        Type type = field.getType();
        // Get the field name.
        String property = field.getName();
        // Create a new method.
        Method method = new Method();
        // Set the access modifier.
        method.setVisibility("public ");
        // Set the return type.
        method.setType(type);
        // Set the method name.
        method.setName(getGetterMethodName(property, type));
        // Build the method statement.
        StringBuilder statement = new StringBuilder();
        // Append the return keyword.
        statement.append("return ");
        // Append the field name.
        statement.append(property);
        // Append the semicolon.
        statement.append(";");
        // Add the statement to the method.
        method.addStatement(statement.toString());
        // Add the method to the class.
        class_.addMethod(method);
    }

    /**
     * <strong>Description:</strong>
     * <pre>
     * Get the name of the getter method.
     * </pre>
     *
     * @param property the field name
     * @param type the type of the field
     * @return {@code java.lang.String} - the method name
     */
    private String getGetterMethodName(String property, Type type) {
        // Build the method name.
        StringBuilder methodName = new StringBuilder();
        // Start from the field name.
        methodName.append(property);
        // If the first character of the method name is lower case
        if (Character.isLowerCase(methodName.charAt(0))) {
            // and the name has only one character, or the second character is not upper case,
            if (methodName.length() == 1 || !Character.isUpperCase(methodName.charAt(1))) {
                // turn the first character into upper case.
                methodName.setCharAt(0, Character.toUpperCase(methodName.charAt(0)));
            }
        }
        // If the type of the field is boolean
        if (type.equals(new Type("boolean"))) {
            // prefix the method name with "is".
            methodName.insert(0, "is");
        } else { // Otherwise
            // prefix the method name with "get".
            methodName.insert(0, "get");
        }
        // Return the method name.
        return methodName.toString();
    }

    /**
     * <strong>Description:</strong>
     * <pre>
     * Generate a setter method for a field.
     * </pre>
     *
     * @param class_ the class
     * @param field the field
     */
    protected void generateSetterMethod(Class class_, Field field) {
        // Get the type of the field.
        Type type = field.getType();
        // Get the field name.
        String property = field.getName();
        // Create a new method.
        Method method = new Method();
        // Set the access modifier of the method.
        method.setVisibility("public ");
        // Set the method name.
        method.setName(getSetterMethodName(property));
        // Add a parameter to the method.
        method.addParameter(new Parameter(type, property));
        // Build the method statement.
        StringBuilder statement = new StringBuilder();
        // Append the assignment.
        statement.append("this.");
        statement.append(property);
        statement.append(" = ");
        statement.append(property);
        statement.append(';');
        // Add the statement to the method.
        method.addStatement(statement.toString());
        // Add the method to the class.
        class_.addMethod(method);
    }

    /**
     * <strong>Description:</strong>
     * <pre>
     * Get the name of the setter method.
     * </pre>
     *
     * @param property the field name
     * @return {@code java.lang.String} - the method name
     */
    private static String getSetterMethodName(String property) {
        // Build the method name.
        StringBuilder methodName = new StringBuilder();
        // Start from the field name.
        methodName.append(property);
        // If the first character of the method name is lower case
        if (Character.isLowerCase(methodName.charAt(0))) {
            // and the name has only one character, or the second character is not upper case,
            if (methodName.length() == 1 || !Character.isUpperCase(methodName.charAt(1))) {
                // turn the first character into upper case.
                methodName.setCharAt(0, Character.toUpperCase(methodName.charAt(0)));
            }
        }
        // Prefix the method name with "set".
        methodName.insert(0, "set");
        // Return the method name.
        return methodName.toString();
    }
}
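// A sketch of the output the generator above would produce for a table with a
// String column "name" and a boolean column "active" (both column names are
// illustrative assumptions). Note the naming rule from getGetterMethodName():
// boolean fields get an "is" prefix, everything else a "get" prefix.
public class UserSketch {

    private String name;
    private boolean active;

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public boolean isActive() {
        return active;
    }

    public void setActive(boolean active) {
        this.active = active;
    }
}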
package es.uniovi.miw.monitora.server.core.impl.informeConsulta;

import es.uniovi.miw.monitora.server.core.impl.Command;
import es.uniovi.miw.monitora.server.model.InformeConsulta;
import es.uniovi.miw.monitora.server.model.exceptions.BusinessException;
import es.uniovi.miw.monitora.server.persistence.util.Jpa;

public class UpdateInformeConsulta implements Command {

    private InformeConsulta informeConsulta;

    public UpdateInformeConsulta(InformeConsulta informeConsulta) {
        this.informeConsulta = informeConsulta;
    }

    @Override
    public Object execute() throws BusinessException {
        Jpa.getManager().merge(informeConsulta);
        return null;
    }
}
package militaryElite.interfaces; import java.util.HashSet; public interface Commando extends Soldier { HashSet<Mission> getMissions(); }
package com.atguigu.gulimall.coupon.controller;

import java.util.Arrays;
import java.util.Map;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.atguigu.gulimall.coupon.entity.SkuLadderEntity;
import com.atguigu.gulimall.coupon.service.SkuLadderService;
import com.atguigu.common.utils.PageUtils;
import com.atguigu.common.utils.R;

/**
 * Tiered (ladder) prices for products
 *
 * @author ZD
 * @email sunlightcs@gmail.com
 * @date 2020-05-25 14:51:37
 */
@RestController
@RequestMapping("coupon/skuladder")
public class SkuLadderController {
    @Autowired
    private SkuLadderService skuLadderService;

    /**
     * List
     */
    @RequestMapping("/list")
    //@RequiresPermissions("coupon:skuladder:list")
    public R list(@RequestParam Map<String, Object> params){
        PageUtils page = skuLadderService.queryPage(params);

        return R.ok().put("page", page);
    }

    /**
     * Info
     */
    @RequestMapping("/info/{id}")
    //@RequiresPermissions("coupon:skuladder:info")
    public R info(@PathVariable("id") Long id){
        SkuLadderEntity skuLadder = skuLadderService.getById(id);

        return R.ok().put("skuLadder", skuLadder);
    }

    /**
     * Save
     */
    @RequestMapping("/save")
    //@RequiresPermissions("coupon:skuladder:save")
    public R save(@RequestBody SkuLadderEntity skuLadder){
        skuLadderService.save(skuLadder);

        return R.ok();
    }

    /**
     * Update
     */
    @RequestMapping("/update")
    //@RequiresPermissions("coupon:skuladder:update")
    public R update(@RequestBody SkuLadderEntity skuLadder){
        skuLadderService.updateById(skuLadder);

        return R.ok();
    }

    /**
     * Delete
     */
    @RequestMapping("/delete")
    //@RequiresPermissions("coupon:skuladder:delete")
    public R delete(@RequestBody Long[] ids){
        skuLadderService.removeByIds(Arrays.asList(ids));

        return R.ok();
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.resources.domainobjects; import java.io.IOException; import java.io.InputStream; import java.text.DateFormat; import java.text.SimpleDateFormat; import java.util.List; import java.util.Map; import java.util.Map.Entry; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.ResponseBuilder; import com.google.common.base.Charsets; import com.google.common.collect.Lists; import com.google.common.io.ByteStreams; import org.codehaus.jackson.JsonParseException; import org.codehaus.jackson.map.JsonMappingException; import uk.co.objectconnexions.expressiveobjects.applib.annotation.ActionSemantics; import uk.co.objectconnexions.expressiveobjects.applib.annotation.Where; import uk.co.objectconnexions.expressiveobjects.core.commons.authentication.AuthenticationSession; import uk.co.objectconnexions.expressiveobjects.core.metamodel.adapter.ObjectAdapter; import uk.co.objectconnexions.expressiveobjects.core.metamodel.adapter.version.Version; import uk.co.objectconnexions.expressiveobjects.core.metamodel.consent.Consent; import uk.co.objectconnexions.expressiveobjects.core.metamodel.facets.object.value.ValueFacet; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.ObjectSpecification; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.ObjectAction; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.ObjectActionParameter; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.ObjectAssociation; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.ObjectAssociationFilters; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.ObjectMember; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.OneToManyAssociation; import uk.co.objectconnexions.expressiveobjects.core.metamodel.spec.feature.OneToOneAssociation; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.applib.JsonRepresentation; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.applib.RepresentationType; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.applib.RestfulResponse.HttpStatusCode; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.applib.util.JsonMapper; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.applib.util.UrlEncodingUtils; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.ResourceContext; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.RestfulObjectsApplicationException; import 
uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.representations.RendererFactory; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.representations.RendererFactoryRegistry; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.resources.ResourceAbstract; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.resources.ResourceAbstract.Caching; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.resources.domainobjects.JsonValueEncoder.ExpectedStringRepresentingValueException; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.util.OidUtils; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.util.UrlDecoderUtils; import uk.co.objectconnexions.expressiveobjects.viewer.restfulobjects.viewer.util.UrlParserUtils; public final class DomainResourceHelper { private static final DateFormat ETAG_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); private final ResourceContext resourceContext; private ObjectAdapterLinkTo adapterLinkTo; private final ObjectAdapter objectAdapter; public DomainResourceHelper(final ResourceContext resourceContext, final ObjectAdapter objectAdapter) { this.resourceContext = resourceContext; this.objectAdapter = objectAdapter; using(new DomainObjectLinkTo()); } public DomainResourceHelper using(final ObjectAdapterLinkTo linkTo) { adapterLinkTo = linkTo; adapterLinkTo.usingResourceContext(resourceContext).with(objectAdapter); return this; } // ////////////////////////////////////////////////////////////// // multiple properties (persist or multi-property update) // ////////////////////////////////////////////////////////////// static boolean copyOverProperties(final ResourceContext resourceContext, final ObjectAdapter objectAdapter, final JsonRepresentation propertiesList) { final ObjectSpecification objectSpec = objectAdapter.getSpecification(); final List<ObjectAssociation> properties = objectSpec.getAssociations(ObjectAssociationFilters.PROPERTIES); boolean allOk = true; for (final ObjectAssociation association : properties) { final OneToOneAssociation property = (OneToOneAssociation) association; final ObjectSpecification propertySpec = property.getSpecification(); final String id = property.getId(); final JsonRepresentation propertyRepr = propertiesList.getRepresentation("[id=%s]", id); if (propertyRepr == null) { if (property.isMandatory()) { throw new IllegalArgumentException(String.format("Mandatory field %s missing", property.getName())); } continue; } final JsonRepresentation valueRepr = propertyRepr.getRepresentation("value"); final Consent usable = property.isUsable(resourceContext.getAuthenticationSession() , objectAdapter, resourceContext.getWhere()); if (usable.isVetoed()) { propertyRepr.mapPut("invalidReason", usable.getReason()); allOk = false; continue; } final ObjectAdapter valueAdapter = objectAdapterFor(resourceContext, propertySpec, valueRepr); final Consent consent = property.isAssociationValid(objectAdapter, valueAdapter); if (consent.isAllowed()) { try { property.set(objectAdapter, valueAdapter); } catch (final IllegalArgumentException ex) { propertyRepr.mapPut("invalidReason", ex.getMessage()); allOk = false; } } else { propertyRepr.mapPut("invalidReason", consent.getReason()); allOk = false; } } return allOk; } // ////////////////////////////////////////////////////////////// // propertyDetails // ////////////////////////////////////////////////////////////// public 
Response objectRepresentation() { final RendererFactory rendererFactory = getRendererFactoryRegistry().find(RepresentationType.DOMAIN_OBJECT); final DomainObjectReprRenderer renderer = (DomainObjectReprRenderer) rendererFactory.newRenderer(resourceContext, null, JsonRepresentation.newMap()); renderer.with(objectAdapter).includesSelf(); final ResponseBuilder respBuilder = ResourceAbstract.responseOfOk(renderer, Caching.NONE); final Version version = objectAdapter.getVersion(); if (version != null && version.getTime() != null) { respBuilder.tag(ETAG_FORMAT.format(version.getTime())); } return respBuilder.build(); } // ////////////////////////////////////////////////////////////// // propertyDetails // ////////////////////////////////////////////////////////////// public enum MemberMode { NOT_MUTATING { @Override public void apply(final AbstractObjectMemberReprRenderer<?, ?> renderer) { renderer.asStandalone(); } }, MUTATING { @Override public void apply(final AbstractObjectMemberReprRenderer<?, ?> renderer) { renderer.asMutated(); } }; public abstract void apply(AbstractObjectMemberReprRenderer<?, ?> renderer); } Response propertyDetails(final ObjectAdapter objectAdapter, final String propertyId, final MemberMode memberMode, final Caching caching, Where where) { final OneToOneAssociation property = getPropertyThatIsVisibleAndUsable(propertyId, Intent.ACCESS, where); final RendererFactory factory = getRendererFactoryRegistry().find(RepresentationType.OBJECT_PROPERTY); final ObjectPropertyReprRenderer renderer = (ObjectPropertyReprRenderer) factory.newRenderer(resourceContext, null, JsonRepresentation.newMap()); renderer.with(new ObjectAndProperty(objectAdapter, property)).usingLinkTo(adapterLinkTo); memberMode.apply(renderer); return ResourceAbstract.responseOfOk(renderer, caching).build(); } // ////////////////////////////////////////////////////////////// // collectionDetails // ////////////////////////////////////////////////////////////// Response collectionDetails(final ObjectAdapter objectAdapter, final String collectionId, final MemberMode memberMode, final Caching caching, Where where) { final OneToManyAssociation collection = getCollectionThatIsVisibleAndUsable(collectionId, Intent.ACCESS, where); final RendererFactory factory = RendererFactoryRegistry.instance.find(RepresentationType.OBJECT_COLLECTION); final ObjectCollectionReprRenderer renderer = (ObjectCollectionReprRenderer) factory.newRenderer(resourceContext, null, JsonRepresentation.newMap()); renderer.with(new ObjectAndCollection(objectAdapter, collection)).usingLinkTo(adapterLinkTo); memberMode.apply(renderer); return ResourceAbstract.responseOfOk(renderer, caching).build(); } // ////////////////////////////////////////////////////////////// // action Prompt // ////////////////////////////////////////////////////////////// Response actionPrompt(final String actionId, Where where) { final ObjectAction action = getObjectActionThatIsVisibleAndUsable(actionId, Intent.ACCESS, where); final RendererFactory factory = getRendererFactoryRegistry().find(RepresentationType.OBJECT_ACTION); final ObjectActionReprRenderer renderer = (ObjectActionReprRenderer) factory.newRenderer(resourceContext, null, JsonRepresentation.newMap()); renderer.with(new ObjectAndAction(objectAdapter, action)).usingLinkTo(adapterLinkTo).asStandalone(); return ResourceAbstract.responseOfOk(renderer, Caching.NONE).build(); } // ////////////////////////////////////////////////////////////// // invoke action // 
////////////////////////////////////////////////////////////// enum Intent { ACCESS, MUTATE; public boolean isMutate() { return this == MUTATE; } } Response invokeActionQueryOnly(final String actionId, final JsonRepresentation arguments, Where where) { final ObjectAction action = getObjectActionThatIsVisibleAndUsable(actionId, Intent.ACCESS, where); final ActionSemantics.Of actionSemantics = action.getSemantics(); if (actionSemantics != ActionSemantics.Of.SAFE) { throw RestfulObjectsApplicationException.create(HttpStatusCode.METHOD_NOT_ALLOWED, "Method not allowed; action '%s' is not query only", action.getId()); } return invokeActionUsingAdapters(action, arguments); } Response invokeActionIdempotent(final String actionId, final InputStream body, Where where) { final ObjectAction action = getObjectActionThatIsVisibleAndUsable(actionId, Intent.MUTATE, where); final ActionSemantics.Of actionSemantics = action.getSemantics(); if (!actionSemantics.isIdempotentInNature()) { throw RestfulObjectsApplicationException.create(HttpStatusCode.METHOD_NOT_ALLOWED, "Method not allowed; action '%s' is not idempotent", action.getId()); } final String bodyAsString = asStringUtf8(body); final JsonRepresentation arguments = readAsMap(bodyAsString); return invokeActionUsingAdapters(action, arguments); } Response invokeAction(final String actionId, final InputStream body, Where where) { final ObjectAction action = getObjectActionThatIsVisibleAndUsable(actionId, Intent.MUTATE, where); final String bodyAsString = asStringUtf8(body); final JsonRepresentation arguments = readAsMap(bodyAsString); return invokeActionUsingAdapters(action, arguments); } Response invokeActionUsingAdapters(final ObjectAction action, final JsonRepresentation arguments) { final List<ObjectAdapter> argAdapters = parseArguments(action, arguments); // validate individual args final List<ObjectActionParameter> parameters = action.getParameters(); for (int i = 0; i < parameters.size(); i++) { final ObjectActionParameter parameter = parameters.get(i); final ObjectAdapter argAdapter = argAdapters.get(i); if (argAdapter == null) { // can only happen if this is an optional parameter; nothing to // do continue; } if (argAdapter.getSpecification().containsFacet(ValueFacet.class)) { final Object arg = argAdapter.getObject(); final String reasonNotValid = parameter.isValid(objectAdapter, arg, null); if (reasonNotValid != null) { throw RestfulObjectsApplicationException.create(HttpStatusCode.NOT_ACCEPTABLE, reasonNotValid); } } } // validate all args final ObjectAdapter[] argArray = argAdapters.toArray(new ObjectAdapter[0]); final Consent consent = action.isProposedArgumentSetValid(objectAdapter, argArray); if (consent.isVetoed()) { throw RestfulObjectsApplicationException.create(HttpStatusCode.NOT_ACCEPTABLE, consent.getReason()); } // invoke final ObjectAdapter returnedAdapter = action.execute(objectAdapter, argArray); // response (void) final RendererFactory factory = getRendererFactoryRegistry().find(RepresentationType.ACTION_RESULT); final ActionResultReprRenderer renderer = (ActionResultReprRenderer) factory.newRenderer(resourceContext, null, JsonRepresentation.newMap()); renderer.with(new ObjectAndActionInvocation(objectAdapter, action, arguments, returnedAdapter)).using(adapterLinkTo); final ResponseBuilder respBuilder = ResourceAbstract.responseOfOk(renderer, Caching.NONE); final Version version = objectAdapter.getVersion(); ResourceAbstract.addLastModifiedAndETagIfAvailable(respBuilder, version); return respBuilder.build(); } /** * * @param 
resourceContext * @param objectSpec * - the {@link ObjectSpecification} to interpret the object as. * @param node * - expected to be either a String or a Map (ie from within a * List, built by parsing a JSON structure). */ private static ObjectAdapter objectAdapterFor(final ResourceContext resourceContext, final ObjectSpecification objectSpec, final JsonRepresentation representation) { if (representation == null) { return null; } // value (encodable) if (objectSpec.isEncodeable()) { return new JsonValueEncoder().asAdapter(objectSpec, representation); } // reference if (!representation.isLink()) { throw new ExpectedMapRepresentingLinkException(); } final JsonRepresentation argLink = representation.asLink(); final String oidFromHref = UrlParserUtils.oidFromLink(argLink); if (oidFromHref == null) { throw new ExpectedMapRepresentingLinkException(); } final ObjectAdapter objectAdapter = OidUtils.getObjectAdapter(resourceContext, oidFromHref); if (objectAdapter == null) { throw new UnknownOidException(oidFromHref); } return objectAdapter; } /** * Similar to * {@link #objectAdapterFor(ResourceContext, ObjectSpecification, Object)}, * however the object being interpreted is a String holding URL encoded JSON * (rather than having already been parsed into a Map representation). * * @throws IOException * @throws JsonMappingException * @throws JsonParseException */ ObjectAdapter objectAdapterFor(final ObjectSpecification spec, final String urlEncodedJson) throws JsonParseException, JsonMappingException, IOException { final String json = UrlDecoderUtils.urlDecode(urlEncodedJson); final JsonRepresentation representation = JsonMapper.instance().read(json); return objectAdapterFor(resourceContext, spec, representation); } private static class ExpectedMapRepresentingLinkException extends IllegalArgumentException { private static final long serialVersionUID = 1L; } private static class UnknownOidException extends IllegalArgumentException { private static final long serialVersionUID = 1L; public UnknownOidException(final String oid) { super(UrlDecoderUtils.urlDecode(oid)); } } // /////////////////////////////////////////////////////////////////// // get{MemberType}ThatIsVisibleAndUsable // /////////////////////////////////////////////////////////////////// protected OneToOneAssociation getPropertyThatIsVisibleAndUsable(final String propertyId, final Intent intent, Where where) { final ObjectAssociation association = objectAdapter.getSpecification().getAssociation(propertyId); if (association == null || !association.isOneToOneAssociation()) { throwNotFoundException(propertyId, MemberType.PROPERTY); } final OneToOneAssociation property = (OneToOneAssociation) association; return memberThatIsVisibleAndUsable(property, MemberType.PROPERTY, intent, where); } protected OneToManyAssociation getCollectionThatIsVisibleAndUsable(final String collectionId, final Intent intent, Where where) { final ObjectAssociation association = objectAdapter.getSpecification().getAssociation(collectionId); if (association == null || !association.isOneToManyAssociation()) { throwNotFoundException(collectionId, MemberType.COLLECTION); } final OneToManyAssociation collection = (OneToManyAssociation) association; return memberThatIsVisibleAndUsable(collection, MemberType.COLLECTION, intent, where); } protected ObjectAction getObjectActionThatIsVisibleAndUsable(final String actionId, final Intent intent, Where where) { final ObjectAction action = objectAdapter.getSpecification().getObjectAction(actionId); if (action == null) { 
throwNotFoundException(actionId, MemberType.ACTION); } return memberThatIsVisibleAndUsable(action, MemberType.ACTION, intent, where); } protected <T extends ObjectMember> T memberThatIsVisibleAndUsable(final T objectMember, final MemberType memberType, final Intent intent, Where where) { final String memberId = objectMember.getId(); final AuthenticationSession authenticationSession = resourceContext.getAuthenticationSession(); if (objectMember.isVisible(authenticationSession, objectAdapter, where).isVetoed()) { throwNotFoundException(memberId, memberType); } if (intent.isMutate()) { final Consent usable = objectMember.isUsable(authenticationSession, objectAdapter, where); if (usable.isVetoed()) { final String memberTypeStr = memberType.name().toLowerCase(); throw RestfulObjectsApplicationException.create(HttpStatusCode.NOT_ACCEPTABLE, "%s is not usable: '%s' (%s)", memberTypeStr, memberId, usable.getReason()); } } return objectMember; } protected static void throwNotFoundException(final String memberId, final MemberType memberType) { final String memberTypeStr = memberType.name().toLowerCase(); throw RestfulObjectsApplicationException.create(HttpStatusCode.NOT_FOUND, "%s '%s' either does not exist or is not visible", memberTypeStr, memberId); } // /////////////////////////////////////////////////////////////////// // parseBody // /////////////////////////////////////////////////////////////////// /** * * @param objectSpec * @param bodyAsString * - as per {@link #asStringUtf8(InputStream)} * @return */ ObjectAdapter parseAsMapWithSingleValue(final ObjectSpecification objectSpec, final String bodyAsString) { final JsonRepresentation arguments = readAsMap(bodyAsString); return parseAsMapWithSingleValue(objectSpec, arguments); } ObjectAdapter parseAsMapWithSingleValue(final ObjectSpecification objectSpec, final JsonRepresentation arguments) { final JsonRepresentation representation = arguments.getRepresentation("value"); if (arguments.size() != 1 || representation == null) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, "Body should be a map with a single key 'value' whose value represents an instance of type '%s'", resourceFor(objectSpec)); } return objectAdapterFor(resourceContext, objectSpec, representation); } private List<ObjectAdapter> parseArguments(final ObjectAction action, final JsonRepresentation arguments) { return parseArguments(resourceContext, action, arguments); } public static List<ObjectAdapter> parseArguments(final ResourceContext resourceContext, final ObjectAction action, final JsonRepresentation arguments) { final List<JsonRepresentation> argList = argListFor(action, arguments); final List<ObjectAdapter> argAdapters = Lists.newArrayList(); final List<ObjectActionParameter> parameters = action.getParameters(); for (int i = 0; i < argList.size(); i++) { final String paramName = parameters.get(i).getName(); final JsonRepresentation arg = argList.get(i); final ObjectSpecification paramSpec = parameters.get(i).getSpecification(); try { final ObjectAdapter objectAdapter = objectAdapterFor(resourceContext, paramSpec, arg); argAdapters.add(objectAdapter); } catch (final ExpectedStringRepresentingValueException e) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, "Action '%s', argument %s should be a URL encoded string representing a value of type %s", action.getId(), paramName, resourceFor(paramSpec)); } catch (final ExpectedMapRepresentingLinkException e) { throw 
RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, "Action '%s', argument %s should be a map representing a link to reference of type %s", action.getId(), paramName, resourceFor(paramSpec)); } } return argAdapters; } private static List<JsonRepresentation> argListFor(final ObjectAction action, final JsonRepresentation arguments) { final List<JsonRepresentation> argList = Lists.newArrayList(); // ensure that we have no arguments that are not parameters for (final Entry<String, JsonRepresentation> arg : arguments.mapIterable()) { final String argName = arg.getKey(); if (action.getParameterById(argName) == null) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, "Action '%s' does not have a parameter %s but an argument of that name was provided", action.getId(), argName); } } // ensure that an argument value has been provided for all non-optional // parameters final List<ObjectActionParameter> parameters = action.getParameters(); for (final ObjectActionParameter param : parameters) { final String paramId = param.getId(); final JsonRepresentation argRepr = arguments.getRepresentation(paramId); if (argRepr == null && !param.isOptional()) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, "Action '%s', no argument found for (mandatory) parameter '%s'", action.getId(), paramId); } argList.add(argRepr); } return argList; } public static JsonRepresentation readParameterMapAsMap(final Map<String, String[]> parameterMap) { final JsonRepresentation map = JsonRepresentation.newMap(); for (final Map.Entry<String, String[]> parameter : parameterMap.entrySet()) { map.mapPut(parameter.getKey(), parameter.getValue()[0]); } return map; } public static JsonRepresentation readQueryStringAsMap(final String queryString) { if (queryString == null) { return JsonRepresentation.newMap(); } final String queryStringTrimmed = queryString.trim(); if (queryStringTrimmed.isEmpty()) { return JsonRepresentation.newMap(); } final String queryStringUrlDecoded = UrlEncodingUtils.urlDecode(queryStringTrimmed); if (queryStringUrlDecoded.isEmpty()) { return JsonRepresentation.newMap(); } return read(queryStringUrlDecoded, "query string"); } public static JsonRepresentation readAsMap(final String body) { if (body == null) { return JsonRepresentation.newMap(); } final String bodyTrimmed = body.trim(); if (bodyTrimmed.isEmpty()) { return JsonRepresentation.newMap(); } return read(bodyTrimmed, "body"); } private static JsonRepresentation read(final String args, final String argsNature) { try { final JsonRepresentation jsonRepr = JsonMapper.instance().read(args); if (!jsonRepr.isMap()) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, "could not read %s as a JSON map", argsNature); } return jsonRepr; } catch (final JsonParseException e) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, e, "could not parse %s", argsNature); } catch (final JsonMappingException e) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, e, "could not read %s as JSON", argsNature); } catch (final IOException e) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, e, "could not parse %s", argsNature); } } public static String asStringUtf8(final InputStream body) { try { final byte[] byteArray = ByteStreams.toByteArray(body); return new String(byteArray, Charsets.UTF_8); } catch (final IOException e) { throw RestfulObjectsApplicationException.create(HttpStatusCode.BAD_REQUEST, e, 
"could not read body"); } } // ////////////////////////////////////////////////////////////// // misc // ////////////////////////////////////////////////////////////// private static String resourceFor(final ObjectSpecification objectSpec) { // TODO: should return a string in the form // http://localhost:8080/types/xxx return objectSpec.getFullIdentifier(); } // ////////////////////////////////////////////////////////////// // dependencies // ////////////////////////////////////////////////////////////// protected RendererFactoryRegistry getRendererFactoryRegistry() { // TODO: yuck return RendererFactoryRegistry.instance; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.vector.ptf; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector; import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.ColumnVector.Type; import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression; import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef; /** * This class evaluates rank() for a PTF group. * * Rank starts at 1; the same rank is streamed to the output column as repeated; after the last * group row, the rank is increased by the number of group rows. */ public class VectorPTFEvaluatorRank extends VectorPTFEvaluatorBase { private static final long serialVersionUID = 1L; private static final String CLASS_NAME = VectorPTFEvaluatorRank.class.getName(); private static final Log LOG = LogFactory.getLog(CLASS_NAME); private int rank; private int groupCount; public VectorPTFEvaluatorRank(WindowFrameDef windowFrameDef, VectorExpression inputVecExpr, int outputColumnNum) { super(windowFrameDef, inputVecExpr, outputColumnNum); resetEvaluator(); } public void evaluateGroupBatch(VectorizedRowBatch batch, boolean isLastGroupBatch) { evaluateInputExpr(batch); LongColumnVector longColVector = (LongColumnVector) batch.cols[outputColumnNum]; longColVector.isRepeating = true; longColVector.noNulls = true; longColVector.isNull[0] = false; longColVector.vector[0] = rank; groupCount += batch.size; if (isLastGroupBatch) { rank += groupCount; groupCount = 0; } } public boolean streamsResult() { // No group value. return true; } @Override public Type getResultColumnVectorType() { return Type.LONG; } @Override public void resetEvaluator() { rank = 1; groupCount = 0; } }
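// A standalone simulation of the rank/groupCount bookkeeping performed by
// VectorPTFEvaluatorRank above, independent of the Hive vector classes. The
// group sizes are illustrative assumptions.
public class RankSimulationSketch {

    public static void main(String[] args) {
        int rank = 1;
        int[] groupSizes = {3, 1, 2};
        for (int size : groupSizes) {
            // Every row of a group streams the same repeated rank value.
            System.out.println("group of " + size + " rows -> rank " + rank);
            // After the last batch of a group, advance by the group size,
            // producing the gaps characteristic of SQL RANK(): 1, 4, 5, ...
            rank += size;
        }
    }
}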