gt
stringclasses
1 value
context
stringlengths
2.05k
161k
package fortunedog.mail.reflow;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.LinkedList;
import java.util.ResourceBundle;
import java.util.Vector;

import javax.naming.Context;
import javax.naming.InitialContext;
import javax.naming.NamingException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import fortunedog.mail.proxy.MailClient;
import fortunedog.mail.proxy.servlet.SetCharacterEncodingFilter;
import fortunedog.util.Utils;

/**
 * {@link ContentPager} that previews RAR/ZIP mail attachments.
 *
 * The attachment is copied to a temp file, extracted with an external winrar
 * binary, and the extracted entries are mirrored into an {@link InternalHierFile}
 * tree. Directory nodes render an HTML listing with download/preview buttons;
 * file nodes delegate to the pager matching their extension. Nested archives
 * get their own RarZipPager whose extracted tree is spliced into the outer one.
 */
public class RarZipPager extends ContentPager {

    /** Path of the winrar executable, resolved from the JNDI entry "winrarPath". */
    public static String winrarPath;

    static Logger log = LoggerFactory.getLogger(RarZipPager.class);

    static {
        Context env;
        try {
            env = (Context) new InitialContext().lookup("java:comp/env");
            winrarPath = (String) env.lookup("winrarPath");
        } catch (NamingException e) {
            // No JNDI configuration available: fall back to resolving "winrar"
            // through the system PATH instead of printing the stack trace.
            log.warn("winrarPath JNDI lookup failed, falling back to 'winrar'", e);
            winrarPath = "winrar";
        }
    }

    /**
     * One node (file or directory) of the extracted archive tree.
     */
    class InternalHierFile {
        private String _filePath;           // absolute path on disk after extraction
        private String _fileName;           // display name
        private String _sizeStr = "";       // lazily computed human-readable size cache
        private boolean _dirFlag = false;   // true when this node is a directory
        private ContentPager _pager = null; // lazily created pager for previewing this node
        private int _idxInParent = -1;      // position inside the parent's children list
        private InternalHierFile _parent = null;
        // Used for an embedded compressed file: after it is extracted, the newly
        // generated hierarchical file list is linked here.
        private InternalHierFile _linkedFile = null;
        public Vector<InternalHierFile> _children = new Vector<InternalHierFile>();

        public InternalHierFile(String filePath, String fileName, InternalHierFile parent,
                                int idx, boolean dirFlag) {
            _fileName = fileName;
            _filePath = filePath;
            _parent = parent;
            _idxInParent = idx;
            _dirFlag = dirFlag;
        }

        /**
         * Returns the cached human-readable size of this file; empty for
         * directories and on error.
         */
        public String getFileSizeStr() {
            if (_dirFlag)
                return _sizeStr;
            if (!_sizeStr.equals(""))
                return _sizeStr;
            File file = new File(_filePath);
            try {
                // FIX: use File.length() for the size. The previous code opened a
                // FileInputStream and used available(), which is not guaranteed to
                // equal the file size and leaked the stream when
                // getHumanReadableSize threw.
                _sizeStr = getHumanReadableSize((int) file.length());
            } catch (Exception e) {
                log.warn("compute size of " + _filePath, e);
                _sizeStr = "";
            }
            return _sizeStr;
        }

        /**
         * Lazily creates and returns the pager used to preview this node.
         * Directories render an HTML listing; files delegate to the pager
         * matching their file name.
         */
        public ContentPager getPager(HttpSession session) {
            if (_pager == null) {
                if (_dirFlag) { // "cd" into the folder: render a listing page
                    HtmlPager htPager = new HtmlPager();
                    StringBuffer strBuf = new StringBuffer();
                    String lang = Utils.getClientLocale(session).getLanguage();
                    fillHeader(strBuf, lang);
                    htPager.initWithoutFilter("<p>" + strBuf.toString()
                            + "</p><table border=\"1\" cellpadding=\"2\" cellspacing=\"0\">"
                            + getChildrenList(lang) + "</table>");
                    _pager = (ContentPager) htPager;
                } else { // preview a plain file
                    try {
                        _pager = PagerFactory.getPager(_fileName, session);
                        if (_pager instanceof RarZipPager) {
                            // Nested archive: pass the mail coordinates down so it
                            // can build links, and remember where it hangs in the
                            // current tree.
                            RarZipPager tmpPager = (RarZipPager) _pager;
                            tmpPager.setUid(_uid);
                            tmpPager.setFolderName(_folderName);
                            tmpPager.setAttachIdx(_attachIdx);
                            tmpPager._fileBeforeExtract = this;
                        }
                        _pager.init(new File(_filePath));
                    } catch (Exception e) {
                        log.warn("create zip pager:", e);
                    }
                    assert (_pager != null);
                }
            }
            return _pager;
        }

        /**
         * Renders the HTML table rows for this directory's children.
         *
         * FIX: returns an empty string when there are no children. The previous
         * version returned null, which the caller concatenated straight into the
         * HTML, producing the literal text "null" in the page.
         */
        private String getChildrenList(String clientLang) {
            if (_children.isEmpty())
                return "";
            StringBuffer strBuf = new StringBuffer();
            ResourceBundle rb = Utils.getResourceBundle(SetCharacterEncodingFilter.getCurrentRequest());
            for (int i = 0; i < _children.size(); i++) {
                strBuf.append("<tr bgColor=\"#cccccc\"><td>");
                InternalHierFile child = _children.get(i);
                if (child._dirFlag) {
                    // Directories are rendered as a navigation link only.
                    strBuf.append(child.getFileItem(clientLang) + "</td></tr>");
                    continue;
                } else {
                    strBuf.append(child._fileName);
                }
                String internalPath = child.getInternalPath();
                strBuf.append("</td><td>" + child.getFileSizeStr() + "</td><td>");
                strBuf.append("<button style=\"height:35;width:85\" ");
                strBuf.append("onClick=\"window.cmail.downloadAttachment");
                strBuf.append("(" + getAttachIdx() + ",'" + child._fileName + "','" + internalPath
                        + "')\">" + rb.getString("download") + "</button>");
                if (PagerFactory.canPreview(child._fileName)) { // add preview button
                    strBuf.append("<button style=\"height:35;width:85\" ");
                    strBuf.append("onClick=\"window.cmail.openAttachment");
                    strBuf.append("(" + getAttachIdx() + ",'" + child._fileName + "','" + internalPath
                            + "')\">" + rb.getString("preview") + "</button>");
                }
                strBuf.append("</td></tr>");
            }
            return strBuf.toString();
        }

        /**
         * Builds an anchor element pointing at this node's shared URL, using
         * {@code linkStr} as the link text.
         */
        public String getFileItem(String linkStr, String clientLang) {
            StringBuffer strBuf = new StringBuffer();
            strBuf.append("<a href=\"");
            strBuf.append(getSharedUrl(clientLang));
            String internalPath = getInternalPath();
            if (internalPath != null && !internalPath.equals(""))
                strBuf.append("&internalPath=" + Utils.encodeUrlParam(internalPath));
            strBuf.append("\">" + linkStr + "</a>");
            return strBuf.toString();
        }

        /** Builds an anchor for this node, labelled with its own file name. */
        public String getFileItem(String clientLang) {
            return getFileItem(_fileName, clientLang);
        }

        /**
         * Returns the hierarchical dot-separated index path of this node
         * (e.g. "2.0.5"), or "" for the root.
         */
        public String getInternalPath() {
            if (_parent == null || _idxInParent < 0)
                return "";
            String parentPath = _parent.getInternalPath();
            if (parentPath == null || parentPath.equals(""))
                return "" + _idxInParent;
            else
                return parentPath + "." + _idxInParent;
        }

        /** URL of the ViewPart servlet for this attachment, page 0. */
        private String getSharedUrl(String clientLang) {
            return "/MailProxy2/ViewPart?uid=" + Utils.encodeUrlParam(getUid())
                    + "&folderName=" + Utils.encodeUrlParam(getFolderName())
                    + "&index=" + getAttachIdx() + "&pageNo=0"
                    + "&lang=" + clientLang;
        }

        /** Appends the "/"-separated breadcrumb path inside the archive to strBuf. */
        public void fillHeader(StringBuffer strBuf, String clientLang) {
            if (_parent != null) {
                _parent.fillHeader(strBuf, clientLang);
                strBuf.append("/ ");
            }
            strBuf.append(getFileItem(clientLang));
        }
    }

    private File tempDownLoadFile;
    private InternalHierFile _rootFile = null;
    // For an embedded compressed file: the InternalHierFile this archive was
    // before extraction; after extraction the new root is spliced in its place.
    private InternalHierFile _fileBeforeExtract = null;
    private String _uid = null;
    private String _folderName = null;
    private int _attachIdx = -1;
    // Dot-separated index path of the internal file currently being previewed.
    private String _curInternalPath = null;

    public void setUid(String uid) {
        _uid = uid;
    }

    public String getUid() {
        return _uid;
    }

    public void setAttachIdx(int attachIdx) {
        _attachIdx = attachIdx;
    }

    public int getAttachIdx() {
        return _attachIdx;
    }

    public String getCurInternalPath() {
        return _curInternalPath;
    }

    public void setCurInternalPath(String internalPath) {
        _curInternalPath = internalPath;
    }

    @Override
    public int getPageCount() {
        return getPager(getInternalFileIndexArray()).getPageCount();
    }

    /** Parses {@code _curInternalPath} ("a.b.c") into an int array; empty for root. */
    private int[] getInternalFileIndexArray() {
        int idxArray[] = null;
        if (_curInternalPath == null || _curInternalPath.equals("")) {
            idxArray = new int[0];
        } else {
            String idxStrArray[] = _curInternalPath.split("\\.");
            idxArray = new int[idxStrArray.length];
            for (int i = 0; i < idxArray.length; i++)
                idxArray[i] = Integer.parseInt(idxStrArray[i]);
        }
        return idxArray;
    }

    @Override
    public String renderPage(int pageNo) {
        return getPager(getInternalFileIndexArray()).renderPage(pageNo);
    }

    /**
     * Copies the attachment stream to a temp file and extracts it.
     * The caller owns (and closes) {@code is}.
     */
    @Override
    public void init(InputStream is, String charset) throws IOException {
        try {
            tempDownLoadFile = File.createTempFile("mail",
                    "." + getProperty(ContentPager.PROP_FULL_FILE_EXT, "rar"));
            tempDownLoadFile.deleteOnExit();
            // FIX: close the output stream in finally; the previous version
            // leaked it when read/write threw.
            FileOutputStream os = new FileOutputStream(tempDownLoadFile);
            try {
                byte[] buffer = new byte[1024 * 1024];
                int len;
                while ((len = is.read(buffer)) >= 0) {
                    os.write(buffer, 0, len);
                }
            } finally {
                os.close();
            }
            doExtract();
        } catch (IOException e) {
            log.warn("save attachment to temp file:", e);
        }
    }

    /**
     * Extracts the downloaded archive with the external winrar binary into a
     * directory named after the temp file, then mirrors the extraction
     * directory into the {@link InternalHierFile} tree breadth-first.
     */
    private void doExtract() {
        String filePath = tempDownLoadFile.getAbsolutePath();
        // Target directory = temp file path minus extension; note the
        // Windows-style trailing separator (winrar is expected on Windows here).
        String targetDir = filePath.substring(0, filePath.lastIndexOf(".")) + "\\";
        // x = extract with paths, -ibck/-inul = run in background without UI,
        // -pnull = dummy password so protected archives fail instead of prompting,
        // -y = assume yes on queries.
        String[] cmds = { winrarPath, "x", "-ibck", "-inul", "-pnull", "-y", filePath, targetDir };
        try {
            Process p = Runtime.getRuntime().exec(cmds);
            int result = p.waitFor();
            log.debug("winrar exit code: {}", result);
            if (result != 0)
                return;
            String orgFileName = getProperty(ContentPager.PROP_FILE_NAME, "");
            if (_fileBeforeExtract == null)
                _rootFile = new InternalHierFile(targetDir, orgFileName, null, -1, true);
            else { // embedded compressed file: splice the new tree into the outer one
                _rootFile = new InternalHierFile(targetDir, orgFileName,
                        _fileBeforeExtract._parent, _fileBeforeExtract._idxInParent, true);
                _fileBeforeExtract._linkedFile = _rootFile;
            }
            // Breadth-first walk of the extraction directory.
            LinkedList<File> list = new LinkedList<File>();
            LinkedList<InternalHierFile> interDirList = new LinkedList<InternalHierFile>();
            File dir = new File(targetDir);
            File file[] = dir.listFiles();
            // FIX: guard against listFiles() returning null (I/O error or the
            // target not being a directory); the previous version NPE'd here.
            if (file == null)
                return;
            for (int i = 0; i < file.length; i++) {
                boolean dirFlag = file[i].isDirectory();
                InternalHierFile tmpHFile = new InternalHierFile(file[i].getAbsolutePath(),
                        file[i].getName(), _rootFile, i, dirFlag);
                _rootFile._children.add(tmpHFile);
                if (dirFlag) {
                    list.add(file[i]);
                    interDirList.add(tmpHFile);
                } else
                    log.debug("extracted: {}", file[i].getAbsolutePath());
            }
            File dirFile;
            InternalHierFile dirHFile;
            while (!list.isEmpty() && !interDirList.isEmpty()) {
                dirFile = list.removeFirst();
                dirHFile = interDirList.removeFirst();
                file = dirFile.listFiles();
                if (file == null)
                    continue;
                for (int i = 0; i < file.length; i++) {
                    boolean dirFlag = file[i].isDirectory();
                    InternalHierFile tmpHFile = new InternalHierFile(file[i].getAbsolutePath(),
                            file[i].getName(), dirHFile, i, dirFlag);
                    dirHFile._children.add(tmpHFile);
                    if (dirFlag) {
                        list.add(file[i]);
                        interDirList.add(tmpHFile);
                    } else
                        log.debug("extracted: {}", file[i].getAbsolutePath());
                }
            }
        } catch (Exception ex) {
            log.warn("extract archive:", ex);
        }
    }

    /**
     * Walks the tree by the given index path. When a node is an already
     * extracted nested archive (empty children but a linked file), the walk
     * continues in the linked tree.
     */
    private InternalHierFile getInternalFile(int fileIdxs[]) {
        InternalHierFile curFile = _rootFile;
        for (int i = 0; i < fileIdxs.length; i++) {
            if (curFile._children.isEmpty() && curFile._linkedFile != null)
                curFile = curFile._linkedFile;
            assert (fileIdxs[i] < curFile._children.size());
            curFile = curFile._children.get(fileIdxs[i]);
        }
        return curFile;
    }

    public ContentPager getPager(int fileIdxs[]) {
        return getInternalFile(fileIdxs).getPager(getSession());
    }

    /** Best-effort cleanup of the extraction directory and temp file. */
    protected void finalize() throws Throwable {
        if (_rootFile != null)
            org.apache.commons.io.FileUtils.deleteQuietly(new File(_rootFile._filePath));
        tempDownLoadFile.delete();
        super.finalize();
    }

    public void visit(PageVisitor visitor) {
        visitor.visitPager(this);
    }

    /** Resolves a dot-separated internal path to the extracted file's disk path. */
    public String getFilePath(String internalPath) {
        String idxStrArray[] = internalPath.split("\\.");
        int idxArray[] = new int[idxStrArray.length];
        for (int i = 0; i < idxArray.length; i++)
            idxArray[i] = Integer.parseInt(idxStrArray[i]);
        return getInternalFile(idxArray)._filePath;
    }

    public void setNavBar(String pageNavBar) {
        getPager(getInternalFileIndexArray()).setNavBar(pageNavBar);
    }

    public void setFolderName(String folderName) {
        _folderName = folderName;
    }

    public String getFolderName() {
        return _folderName;
    }
}
package com.github.vatbub.common.internet;

/*-
 * #%L
 * FOKProjects Common
 * %%
 * Copyright (C) 2016 Frederik Kammel
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import com.github.vatbub.common.core.Common;
import com.github.vatbub.common.core.logging.FOKLogger;
import org.apache.commons.lang.SystemUtils;
import org.apache.commons.lang.exception.ExceptionUtils;

import javax.mail.Message;
import javax.mail.MessagingException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeMessage;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.logging.Level;

/**
 * All custom common.internet functions
 *
 * @author Frederik Kammel
 */
@SuppressWarnings("SameParameterValue")
public class Internet {

    private Internet() {
        throw new IllegalStateException("Class may not be instantiated");
    }

    /**
     * Sends an event to the IFTTT Maker Channel. See
     * <a href="https://maker.ifttt.com/use/">https://maker.ifttt.com/use/</a>
     * for more information.
     *
     * @param IFTTTMakerChannelApiKey Your Maker API Key. Get yours on
     *                                <a href="https://ifttt.com/maker">https://ifttt.com/maker</a>
     * @param eventName               The name of the event to trigger.
     * @return The response text from IFTTT
     * @throws IOException Should actually never be thrown but occurs if something is
     *                     wrong with the connection (e. g. not connected)
     */
    public static String sendEventToIFTTTMakerChannel(String IFTTTMakerChannelApiKey, String eventName)
            throws IOException {
        return sendEventToIFTTTMakerChannel(IFTTTMakerChannelApiKey, eventName, "");
    }

    /**
     * Sends an event to the IFTTT Maker Channel. See
     * <a href="https://maker.ifttt.com/use/">https://maker.ifttt.com/use/</a>
     * for more information.
     *
     * @param IFTTTMakerChannelApiKey Your Maker API Key. Get yours on
     *                                <a href="https://ifttt.com/maker">https://ifttt.com/maker</a>
     * @param eventName               The name of the event to trigger.
     * @param details1                You can send up to three additional fields to the Maker
     *                                channel which you can use then as IFTTT ingredients.
     * @return The response text from IFTTT
     * @throws IOException Should actually never be thrown but occurs if something is
     *                     wrong with the connection (e. g. not connected)
     */
    public static String sendEventToIFTTTMakerChannel(String IFTTTMakerChannelApiKey, String eventName,
                                                      String details1) throws IOException {
        return sendEventToIFTTTMakerChannel(IFTTTMakerChannelApiKey, eventName, details1, "");
    }

    /**
     * Sends an event to the IFTTT Maker Channel. See
     * <a href="https://maker.ifttt.com/use/">https://maker.ifttt.com/use/</a>
     * for more information.
     *
     * @param IFTTTMakerChannelApiKey Your Maker API Key. Get yours on
     *                                <a href="https://ifttt.com/maker">https://ifttt.com/maker</a>
     * @param eventName               The name of the event to trigger.
     * @param details1                You can send up to three additional fields to the Maker
     *                                channel which you can use then as IFTTT ingredients.
     * @param details2                The second additional parameter.
     * @return The response text from IFTTT
     * @throws IOException Should actually never be thrown but occurs if something is
     *                     wrong with the connection (e. g. not connected)
     */
    public static String sendEventToIFTTTMakerChannel(String IFTTTMakerChannelApiKey, String eventName,
                                                      String details1, String details2) throws IOException {
        return sendEventToIFTTTMakerChannel(IFTTTMakerChannelApiKey, eventName, details1, details2, "");
    }

    /**
     * Sends an event to the IFTTT Maker Channel. See
     * <a href="https://maker.ifttt.com/use/">https://maker.ifttt.com/use/</a>
     * for more information.
     *
     * @param IFTTTMakerChannelApiKey Your Maker API Key. Get yours on
     *                                <a href="https://ifttt.com/maker">https://ifttt.com/maker</a>
     * @param eventName               The name of the event to trigger.
     * @param details1                You can send up to three additional fields to the Maker
     *                                channel which you can use then as IFTTT ingredients.
     * @param details2                The second additional parameter.
     * @param details3                The third additional parameter.
     * @return The response text from IFTTT
     * @throws IOException Should actually never be thrown but occurs if something is
     *                     wrong with the connection (e. g. not connected)
     */
    public static String sendEventToIFTTTMakerChannel(String IFTTTMakerChannelApiKey, String eventName,
                                                      String details1, String details2, String details3)
            throws IOException {
        HttpURLConnection connection;
        StringBuilder response = new StringBuilder();
        URL url;
        try {
            url = new URL("https://maker.ifttt.com/trigger/" + eventName + "/with/key/" + IFTTTMakerChannelApiKey);
            // NOTE(review): the details values are inserted into the JSON body
            // without escaping; a quote or backslash in them breaks the payload.
            String postData = "{ \"value1\" : \"" + details1 + "\", \"value2\" : \"" + details2
                    + "\", \"value3\" : \"" + details3 + "\" }";
            byte[] postData2 = postData.getBytes(StandardCharsets.UTF_8);
            connection = (HttpURLConnection) url.openConnection();
            connection.setDoOutput(true);
            connection.setInstanceFollowRedirects(false);
            connection.setRequestMethod("POST");
            connection.setRequestProperty("Content-Type", "application/json");
            connection.setRequestProperty("charset", "utf-8");
            // FIX: close the request stream (it leaked before).
            try (DataOutputStream wr = new DataOutputStream(connection.getOutputStream())) {
                wr.write(postData2);
            }
            connection.connect();
            // FIX: close the response reader (it leaked before).
            try (Reader in = new BufferedReader(
                    new InputStreamReader(connection.getInputStream(), StandardCharsets.UTF_8))) {
                for (int c; (c = in.read()) >= 0; ) {
                    response.append((char) c);
                }
            }
            return response.toString();
        } catch (MalformedURLException e) {
            FOKLogger.log(Internet.class.getName(), Level.SEVERE, "An error occurred!", e);
            return "";
        }
    }

    /**
     * Sends a GET request to url and retrieves the server response
     *
     * @param url The url to call.
     * @return The server response
     * @throws IOException No Internet connection
     */
    @SuppressWarnings("UnusedReturnValue")
    public static String webread(URL url) throws IOException {
        StringBuilder result = new StringBuilder();
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        // FIX: try-with-resources so the reader is closed even if readLine throws.
        // NOTE(review): no charset is given, so the platform default is used —
        // kept as-is to preserve existing behavior.
        try (BufferedReader rd = new BufferedReader(new InputStreamReader(conn.getInputStream()))) {
            String line;
            while ((line = rd.readLine()) != null) {
                result.append(line);
            }
        }
        return result.toString();
    }

    /**
     * Checks if the computer is connected to the common.internet by calling
     * {@code https://www.google.com}
     *
     * @return {@code true} if the computer is connected to the common.internet,
     * {@code false} otherwise.
     */
    public static boolean isConnected() {
        try {
            return isConnected(new URL("https://www.google.com"));
        } catch (MalformedURLException e) {
            // Just in case I made a typo in the hardcoded value...
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Checks if the computer is connected to the common.internet by calling
     * {@code urlToTest}
     *
     * @param urlToTest The url to be used to test the connection. It is recommended
     *                  to use a reliable server (like Google) for this to ensure that
     *                  the server is always online.
     * @return {@code true} if the computer is connected to the common.internet,
     * {@code false} otherwise.
     */
    public static boolean isConnected(URL urlToTest) {
        try {
            webread(urlToTest);
            return true;
        } catch (IOException e) {
            return false;
        }
    }

    /**
     * Sends an error message via gMail. This method requires a gMail account to send emails from.
     * Get a new account <a href="https://accounts.google.com/SignUp?continue=https%3A%2F%2Fwww.google.com%2F%3Fgfe_rd%3Dcr%26ei%3D30aJWLDMDrP08Af3oLrwDg%26gws_rd%3Dssl&hl=en">here</a>
     *
     * @param phase         The phase in which the error occurred. This can be any string that helps
     *                      you to identify the part of the code where the exception happened
     * @param e             The exception that occurred
     * @param gMailUsername The username of the gMail-account to use to send the mail from,
     *                      including {@code {@literal @}gmail.com}
     * @param gMailPassword The password of the gMail-account
     */
    public static void sendErrorMail(String phase, Throwable e, String gMailUsername, String gMailPassword) {
        sendErrorMail(phase, null, e, gMailUsername, gMailPassword);
    }

    /**
     * Sends an error message via gMail. This method requires a gMail account to send emails from.
     * Get a new account <a href="https://accounts.google.com/SignUp?continue=https%3A%2F%2Fwww.google.com%2F%3Fgfe_rd%3Dcr%26ei%3D30aJWLDMDrP08Af3oLrwDg%26gws_rd%3Dssl&hl=en">here</a>
     *
     * @param phase         The phase in which the error occurred. This can be any string that helps
     *                      you to identify the part of the code where the exception happened.
     * @param requestBody   The body of the http request that caused the exception
     * @param e             The exception that occurred
     * @param gMailUsername The username of the gMail-account to use to send the mail from,
     *                      including {@code {@literal @}gmail.com}
     * @param gMailPassword The password of the gMail-account
     */
    public static void sendErrorMail(String phase, String requestBody, Throwable e, String gMailUsername,
                                     String gMailPassword) {
        final String toAddress = "vatbub123+automatederrorreports@gmail.com";
        Properties props = new Properties();
        props.put("mail.smtp.auth", "true");
        props.put("mail.smtp.starttls.enable", "true");
        props.put("mail.smtp.host", "smtp.gmail.com");
        props.put("mail.smtp.port", "587");
        Session session = Session.getInstance(props, new javax.mail.Authenticator() {
            @Override
            protected javax.mail.PasswordAuthentication getPasswordAuthentication() {
                return new javax.mail.PasswordAuthentication(gMailUsername, gMailPassword);
            }
        });
        try {
            Message message = new MimeMessage(session);
            message.setFrom(new InternetAddress(gMailUsername));
            message.setRecipients(Message.RecipientType.TO, InternetAddress.parse(toAddress));
            message.setSubject("[" + Common.getInstance().getAppName() + "] An error occurred in your application");
            String messageText = "Exception occurred in phase: " + phase;
            if (requestBody != null) {
                messageText = messageText + "\n\nRequest that caused the exception:\n" + requestBody;
            }
            messageText = messageText + "\n\nStacktrace of the exception:\n" + ExceptionUtils.getFullStackTrace(e);
            message.setText(messageText);
            Transport.send(message);
            System.out.println("Sent email with error message to " + toAddress);
        } catch (MessagingException e2) {
            throw new RuntimeException(e2);
        }
    }

    /**
     * Returns the reason for the given http code as a human-readable string.
     * Reasons taken from <a href="https://de.wikipedia.org/wiki/HTTP-Statuscode">Wikipedia (German)</a>
     *
     * @param httpCode The http code to get the reason for
     * @return The reason for the given http code as a human-readable string or {@code null},
     * if the given http code is unknown
     */
    public static String getReasonForHTTPCode(int httpCode) {
        switch (httpCode) {
            case 100: return "Continue";
            case 101: return "Switching Protocols";
            case 102: return "Processing";
            case 200: return "OK";
            case 201: return "Created";
            case 202: return "Accepted";
            case 203: return "Non-Authoritative Information";
            case 204: return "No Content";
            case 205: return "Reset Content";
            case 206: return "Partial Content";
            case 207: return "Multi-Status";
            case 208: return "Already Reported";
            case 226: return "IM Used";
            case 300: return "Multiple Choices";
            case 301: return "Moved Permanently";
            case 302: return "Moved Temporarily";
            case 303: return "See Other";
            case 304: return "Not Modified";
            case 305: return "Use Proxy";
            case 307: return "Temporary Redirect";
            case 308: return "Permanent Redirect";
            case 400: return "Bad Request";
            case 401: return "Unauthorized";
            case 402: return "Payment Required";
            case 403: return "Forbidden";
            case 404: return "Not Found";
            case 405: return "Method Not Allowed";
            case 406: return "Not Acceptable";
            case 407: return "Proxy Authentication Required";
            case 408: return "Request Time-out";
            case 409: return "Conflict";
            case 410: return "Gone";
            case 411: return "Length required";
            case 412: return "Precondition failed";
            case 413: return "Request Entity Too Large";
            case 414: return "Request-URL Too Long";
            // FIX: was "UnsupportedMedia Type" (missing space in the standard
            // reason phrase "Unsupported Media Type").
            case 415: return "Unsupported Media Type";
            case 416: return "Requested range not satisfiable";
            case 417: return "Expectation failed";
            case 418: return "I'm a teapot";
            case 420: return "Policy Not Fulfilled";
            case 421: return "Misdirected Request";
            case 422: return "Unprocessable Entity";
            case 423: return "Locked";
            case 424: return "Failed Dependency";
            case 425: return "Unordered Collection";
            case 426: return "Upgrade Required";
            case 428: return "Precondition Required";
            case 429: return "Too Many Requests";
            case 431: return "Request Header Fields Too Large";
            case 451: return "Unavailable For Legal Reasons";
            case 500: return "Internal Server Error";
            case 501: return "Not Implemented";
            case 502: return "Bad Gateway";
            case 503: return "Service Unavailable";
            case 504: return "Gateway Time-out";
            case 505: return "HTTP Version not supported";
            case 506: return "Variant Also Negotiates";
            case 507: return "Insufficient Storage";
            case 508: return "Loop Detected";
            case 509: return "Bandwidth Limit Exceeded";
            case 510: return "Not Extended";
            case 511: return "Network Authentication Required";
            default: return null;
        }
    }

    /**
     * Opens the specified url in the default browser.
     *
     * @param url The url to open
     * @throws IOException If the URL cannot be opened
     */
    public static void openInDefaultBrowser(URL url) throws IOException {
        Runtime rt = Runtime.getRuntime();
        if (SystemUtils.IS_OS_WINDOWS) {
            rt.exec("rundll32 url.dll,FileProtocolHandler " + url);
        } else if (SystemUtils.IS_OS_MAC) {
            // FIX: was "open" + url — without the space the command was
            // e.g. "openhttps://..." and could never launch the browser.
            rt.exec("open " + url);
        } else {
            // Unix: try a list of known browsers until one succeeds.
            String[] browsers = {"epiphany", "firefox", "mozilla", "konqueror", "netscape", "opera",
                    "links", "lynx"};
            StringBuilder cmd = new StringBuilder();
            for (int i = 0; i < browsers.length; i++)
                cmd.append(i == 0 ? "" : " || ").append(browsers[i]).append(" \"").append(url).append("\" ");
            rt.exec(new String[]{"sh", "-c", cmd.toString()});
        }
    }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
 * Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.codepipeline.model;

import java.io.Serializable;

/**
 * <p>
 * The Amazon S3 location where artifacts are stored for the pipeline. If this
 * Amazon S3 bucket is created manually, it must meet the requirements for AWS
 * CodePipeline. For more information, see the <ulink url=
 * "http://docs.aws.amazon.com/codepipeline/latest/UserGuide/concepts.html"
 * >Concepts</ulink>.
 * </p>
 */
public class ArtifactStore implements Serializable, Cloneable {

    /** The type of the artifact store, such as S3. */
    private String type;

    /** The location (e.g. an S3 bucket or folder) where pipeline artifacts are stored. */
    private String location;

    /**
     * The AWS KMS key used to encrypt the data in the artifact store; when
     * undefined, the Amazon S3 default key is used.
     */
    private EncryptionKey encryptionKey;

    /**
     * Sets the type of the artifact store, such as S3.
     *
     * @param type the artifact store type
     * @see ArtifactStoreType
     */
    public void setType(String type) {
        this.type = type;
    }

    /**
     * Returns the type of the artifact store, such as S3.
     *
     * @return the artifact store type
     * @see ArtifactStoreType
     */
    public String getType() {
        return this.type;
    }

    /**
     * Sets the type of the artifact store and returns this object for chaining.
     *
     * @param type the artifact store type
     * @return this object
     * @see ArtifactStoreType
     */
    public ArtifactStore withType(String type) {
        setType(type);
        return this;
    }

    /**
     * Sets the type of the artifact store from the enum value.
     *
     * @param type the artifact store type
     * @see ArtifactStoreType
     */
    public void setType(ArtifactStoreType type) {
        this.type = type.toString();
    }

    /**
     * Sets the type of the artifact store from the enum value and returns this
     * object for chaining.
     *
     * @param type the artifact store type
     * @return this object
     * @see ArtifactStoreType
     */
    public ArtifactStore withType(ArtifactStoreType type) {
        setType(type);
        return this;
    }

    /**
     * Sets the location for storing the artifacts for a pipeline, such as an
     * S3 bucket or folder.
     *
     * @param location the artifact location
     */
    public void setLocation(String location) {
        this.location = location;
    }

    /**
     * Returns the location for storing the artifacts for a pipeline, such as
     * an S3 bucket or folder.
     *
     * @return the artifact location
     */
    public String getLocation() {
        return this.location;
    }

    /**
     * Sets the artifact location and returns this object for chaining.
     *
     * @param location the artifact location
     * @return this object
     */
    public ArtifactStore withLocation(String location) {
        setLocation(location);
        return this;
    }

    /**
     * Sets the AWS KMS key used to encrypt the data in the artifact store.
     * When undefined, the default key for Amazon S3 is used.
     *
     * @param encryptionKey the KMS key
     */
    public void setEncryptionKey(EncryptionKey encryptionKey) {
        this.encryptionKey = encryptionKey;
    }

    /**
     * Returns the AWS KMS key used to encrypt the data in the artifact store,
     * or {@code null} when the default key for Amazon S3 is used.
     *
     * @return the KMS key
     */
    public EncryptionKey getEncryptionKey() {
        return this.encryptionKey;
    }

    /**
     * Sets the KMS key and returns this object for chaining.
     *
     * @param encryptionKey the KMS key
     * @return this object
     */
    public ArtifactStore withEncryptionKey(EncryptionKey encryptionKey) {
        setEncryptionKey(encryptionKey);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging.
     *
     * @return a string representation of this object
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getType() != null) {
            sb.append("Type: ").append(getType()).append(",");
        }
        if (getLocation() != null) {
            sb.append("Location: ").append(getLocation()).append(",");
        }
        if (getEncryptionKey() != null) {
            sb.append("EncryptionKey: ").append(getEncryptionKey());
        }
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ArtifactStore)) {
            return false;
        }
        ArtifactStore other = (ArtifactStore) obj;
        return nullSafeEquals(getType(), other.getType())
                && nullSafeEquals(getLocation(), other.getLocation())
                && nullSafeEquals(getEncryptionKey(), other.getEncryptionKey());
    }

    /** Null-tolerant equality check used by {@link #equals(Object)}. */
    private static boolean nullSafeEquals(Object a, Object b) {
        return a == null ? b == null : a.equals(b);
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as before, so hash values are unchanged.
        final int prime = 31;
        int hashCode = 1;
        hashCode = prime * hashCode + (getType() == null ? 0 : getType().hashCode());
        hashCode = prime * hashCode + (getLocation() == null ? 0 : getLocation().hashCode());
        hashCode = prime * hashCode + (getEncryptionKey() == null ? 0 : getEncryptionKey().hashCode());
        return hashCode;
    }

    @Override
    public ArtifactStore clone() {
        try {
            return (ArtifactStore) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
package org.jellycastle.maven;

import org.w3c.dom.*;

/**
 * Presents a single name/value pair as a read-only {@link org.w3c.dom.Element}
 * so it can be handed to DOM-consuming APIs (presumably an XML serializer
 * building Maven POM output — the hard-coded namespace below points that way).
 * Only the accessors a linear tree walk needs are backed by real data; the
 * mutators are deliberate no-ops, and all other navigation/editing methods
 * throw {@link UnsupportedOperationException}.
 *
 * @author Christoph Deppisch
 * @since 1.0
 */
public class PropertyElement implements Element {

    // Tag name of the element; also reused as node name and local name.
    private String name;
    // Text content; exposed via getNodeValue(), getTextContent() and the
    // synthetic text child returned by getFirstChild().
    private String value;

    /**
     * @param name  tag/node name of the property element
     * @param value text value of the property element
     */
    public PropertyElement(String name, String value) {
        this.name = name;
        this.value = value;
    }

    @Override
    public String getTagName() { return name; }

    @Override
    public String getAttribute(String name) { throw new UnsupportedOperationException(); }

    // NOTE(review): attribute mutators are silent no-ops while attribute
    // accessors throw — confirm callers never rely on a set taking effect.
    @Override
    public void setAttribute(String name, String value) throws DOMException { }

    @Override
    public void removeAttribute(String name) throws DOMException { }

    @Override
    public Attr getAttributeNode(String name) { throw new UnsupportedOperationException(); }

    @Override
    public Attr setAttributeNode(Attr newAttr) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public Attr removeAttributeNode(Attr oldAttr) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public NodeList getElementsByTagName(String name) { throw new UnsupportedOperationException(); }

    @Override
    public String getAttributeNS(String namespaceURI, String localName) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public void setAttributeNS(String namespaceURI, String qualifiedName, String value) throws DOMException { }

    @Override
    public void removeAttributeNS(String namespaceURI, String localName) throws DOMException { }

    @Override
    public Attr getAttributeNodeNS(String namespaceURI, String localName) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public Attr setAttributeNodeNS(Attr newAttr) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public NodeList getElementsByTagNameNS(String namespaceURI, String localName) throws DOMException { throw new UnsupportedOperationException(); }

    // This element never carries attributes, so both lookups report absence.
    @Override
    public boolean hasAttribute(String name) { return false; }

    @Override
    public boolean hasAttributeNS(String namespaceURI, String localName) throws DOMException { return false; }

    @Override
    public TypeInfo getSchemaTypeInfo() { throw new UnsupportedOperationException(); }

    @Override
    public void setIdAttribute(String name, boolean isId) throws DOMException { }

    @Override
    public void setIdAttributeNS(String namespaceURI, String localName, boolean isId) throws DOMException { }

    @Override
    public void setIdAttributeNode(Attr idAttr, boolean isId) throws DOMException { }

    @Override
    public String getNodeName() { return name; }

    @Override
    public String getNodeValue() throws DOMException { return value; }

    @Override
    public void setNodeValue(String nodeValue) throws DOMException { }

    @Override
    public short getNodeType() { return Node.ELEMENT_NODE; }

    @Override
    public Node getParentNode() { throw new UnsupportedOperationException(); }

    @Override
    public NodeList getChildNodes() { throw new UnsupportedOperationException(); }

    // Sole child: a synthetic text node wrapping the property value.
    @Override
    public Node getFirstChild() { return new PropertyTextValue(value); }

    // NOTE(review): returns null although hasChildNodes() reports true and
    // getFirstChild() returns a text node — fine if the consumer only walks
    // getFirstChild()/getNextSibling(), but confirm no caller uses getLastChild().
    @Override
    public Node getLastChild() { return null; }

    @Override
    public Node getPreviousSibling() { throw new UnsupportedOperationException(); }

    @Override
    public Node getNextSibling() { throw new UnsupportedOperationException(); }

    // No attributes: report an always-empty NamedNodeMap rather than null,
    // since DOM consumers commonly dereference the map without a null check.
    @Override
    public NamedNodeMap getAttributes() { return new EmptyAttributes(); }

    @Override
    public Document getOwnerDocument() { throw new UnsupportedOperationException(); }

    @Override
    public Node insertBefore(Node newChild, Node refChild) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public Node replaceChild(Node newChild, Node oldChild) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public Node removeChild(Node oldChild) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public Node appendChild(Node newChild) throws DOMException { throw new UnsupportedOperationException(); }

    @Override
    public boolean hasChildNodes() { return true; }

    @Override
    public Node cloneNode(boolean deep) { throw new UnsupportedOperationException(); }

    @Override
    public void normalize() { }

    @Override
    public boolean isSupported(String feature, String version) { throw new UnsupportedOperationException(); }

    // Hard-coded Maven POM 4.0.0 namespace for every property element.
    @Override
    public String getNamespaceURI() { return "http://maven.apache.org/POM/4.0.0"; }

    @Override
    public String getPrefix() { throw new UnsupportedOperationException(); }

    @Override
    public void setPrefix(String prefix) throws DOMException { }

    @Override
    public String getLocalName() { return name; }

    @Override
    public boolean hasAttributes() { return false; }

    @Override
    public String getBaseURI() { throw new UnsupportedOperationException(); }

    @Override
    public short compareDocumentPosition(Node other) throws DOMException { return 0; }

    @Override
    public String getTextContent() throws DOMException { return value; }

    @Override
    public void setTextContent(String textContent) throws DOMException { }

    @Override
    public boolean isSameNode(Node other) { throw new UnsupportedOperationException(); }

    @Override
    public String lookupPrefix(String namespaceURI) { throw new UnsupportedOperationException(); }

    @Override
    public boolean isDefaultNamespace(String namespaceURI) { throw new UnsupportedOperationException(); }

    @Override
    public String lookupNamespaceURI(String prefix) { throw new UnsupportedOperationException(); }

    @Override
    public boolean isEqualNode(Node arg) { throw new UnsupportedOperationException(); }

    @Override
    public Object getFeature(String feature, String version) { throw new UnsupportedOperationException(); }

    @Override
    public Object setUserData(String key, Object data, UserDataHandler handler) { throw new UnsupportedOperationException(); }

    @Override
    public Object getUserData(String key) { throw new UnsupportedOperationException(); }

    /**
     * Always-empty {@link NamedNodeMap}: getLength() is 0 and every item
     * access throws. Non-static inner class (keeps an enclosing-instance
     * reference, harmless at this scale).
     */
    private class EmptyAttributes implements NamedNodeMap {
        @Override
        public Node getNamedItem(String name) { throw new UnsupportedOperationException(); }

        @Override
        public Node setNamedItem(Node arg) throws DOMException { throw new UnsupportedOperationException(); }

        @Override
        public Node removeNamedItem(String name) throws DOMException { throw new UnsupportedOperationException(); }

        @Override
        public Node item(int index) { throw new UnsupportedOperationException(); }

        @Override
        public int getLength() { return 0; }

        @Override
        public Node getNamedItemNS(String namespaceURI, String localName) throws DOMException { throw new UnsupportedOperationException(); }

        @Override
        public Node setNamedItemNS(Node arg) throws DOMException { throw new UnsupportedOperationException(); }

        @Override
        public Node removeNamedItemNS(String namespaceURI, String localName) throws DOMException { throw new UnsupportedOperationException(); }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.directory.server.core.partition.tree;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.File;

import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.api.ldap.model.name.Dn;
import org.apache.directory.api.ldap.model.name.Rdn;
import org.apache.directory.api.ldap.model.schema.SchemaManager;
import org.apache.directory.api.ldap.schema.extractor.SchemaLdifExtractor;
import org.apache.directory.api.ldap.schema.extractor.impl.DefaultSchemaLdifExtractor;
import org.apache.directory.api.ldap.schema.loader.LdifSchemaLoader;
import org.apache.directory.api.ldap.schema.manager.impl.DefaultSchemaManager;
import org.apache.directory.api.ldap.util.tree.DnNode;
import org.apache.directory.api.util.exception.Exceptions;
import org.apache.directory.server.core.api.CacheService;
import org.apache.directory.server.core.api.DnFactory;
import org.apache.directory.server.core.api.partition.Partition;
import org.apache.directory.server.core.partition.impl.btree.jdbm.DupsContainerCursorTest;
import org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition;
import org.apache.directory.server.core.shared.DefaultDnFactory;
import org.junit.BeforeClass;
import org.junit.Test;


/**
 * Test the partition tree manipulations.
 *
 * @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
 */
public class PartitionTreeTest
{
    /** Schema shared by all tests; loaded once in init(). */
    private static SchemaManager schemaManager;

    /** Dn factory backed by the schema manager and a Dn cache. */
    private static DnFactory dnFactory;


    /**
     * Extracts (or reuses) the schema LDIF repository under the working
     * directory, loads all enabled schemas, and builds the Dn factory.
     */
    @BeforeClass
    public static void init() throws Exception
    {
        String workingDirectory = System.getProperty( "workingDirectory" );

        if ( workingDirectory == null )
        {
            // Derive <module>/target from this test class' own resource path.
            String path = DupsContainerCursorTest.class.getResource( "" ).getPath();
            int targetPos = path.indexOf( "target" );
            workingDirectory = path.substring( 0, targetPos + 6 );
        }

        File schemaRepository = new File( workingDirectory, "schema" );
        SchemaLdifExtractor extractor = new DefaultSchemaLdifExtractor( new File( workingDirectory ) );
        extractor.extractOrCopy( true );
        LdifSchemaLoader loader = new LdifSchemaLoader( schemaRepository );
        schemaManager = new DefaultSchemaManager( loader );

        boolean loaded = schemaManager.loadAllEnabled();

        if ( !loaded )
        {
            fail( "Schema load failed : " + Exceptions.printErrors( schemaManager.getErrors() ) );
        }

        CacheService cacheService = new CacheService();
        cacheService.initialize( null );

        dnFactory = new DefaultDnFactory( schemaManager, cacheService.getCache( "dnCache" ) );
    }


    /**
     * Test the addition of a single partition: the suffix Dn must be split
     * into one tree level per Rdn, with the partition stored at the leaf.
     */
    @Test
    public void testNewPartitionTree() throws LdapException
    {
        /** A structure to hold all the partitions */
        DnNode<Partition> partitionLookupTree = new DnNode<Partition>();

        Dn suffix = new Dn( schemaManager, "dc=example, dc=com" );
        Partition partition = new JdbmPartition( schemaManager, dnFactory );
        partition.setSuffixDn( suffix );

        partitionLookupTree.add( suffix, partition );

        assertNotNull( partitionLookupTree );
        assertTrue( partitionLookupTree.hasChildren() );
        assertTrue( partitionLookupTree.contains( new Rdn( schemaManager, "dc=com" ) ) );

        DnNode<Partition> child = partitionLookupTree.getChild( new Rdn( schemaManager, "dc=com" ) );
        assertTrue( child.hasChildren() );
        assertTrue( child.contains( new Rdn( schemaManager, "dc=example" ) ) );

        child = child.getChild( new Rdn( schemaManager, "dc=example" ) );
        assertEquals( "dc=example, dc=com", child.getElement().getSuffixDn().getName() );
    }


    /**
     * Test the addition of a two disjointed partition: the tree root must
     * grow one independent branch per suffix.
     */
    @Test
    public void testNewPartitionTree2Nodes() throws LdapException
    {
        /** A structure to hold all the partitions */
        DnNode<Partition> partitionLookupTree = new DnNode<Partition>();

        Dn suffix1 = new Dn( schemaManager, "dc=example, dc=com" );
        Partition partition1 = new JdbmPartition( schemaManager, dnFactory );
        partition1.setSuffixDn( suffix1 );

        partitionLookupTree.add( suffix1, partition1 );

        Dn suffix2 = new Dn( schemaManager, "ou=system" );
        Partition partition2 = new JdbmPartition( schemaManager, dnFactory );
        partition2.setSuffixDn( suffix2 );

        partitionLookupTree.add( suffix2, partition2 );

        assertNotNull( partitionLookupTree );
        assertTrue( partitionLookupTree.hasChildren() );
        assertTrue( partitionLookupTree.contains( new Rdn( schemaManager, "ou=system" ) ) );
        assertTrue( partitionLookupTree.contains( new Rdn( schemaManager, "dc=com" ) ) );

        DnNode<Partition> child = partitionLookupTree.getChild( new Rdn( schemaManager, "ou=system" ) );
        assertTrue( child.isLeaf() );
        assertEquals( "ou=system", child.getElement().getSuffixDn().getName() );

        child = partitionLookupTree.getChild( new Rdn( schemaManager, "dc=com" ) );
        assertTrue( child.hasChildren() );
        assertTrue( child.contains( new Rdn( schemaManager, "dc=example" ) ) );

        child = child.getChild( new Rdn( schemaManager, "dc=example" ) );
        assertTrue( child.isLeaf() );
        assertEquals( "dc=example, dc=com", child.getElement().getSuffixDn().getName() );
    }


    /**
     * Test the addition of a two partitions with the same root: both must
     * hang off the single shared "dc=com" node.
     */
    @Test
    public void testNewPartitionTree2NodesWithSameRoot() throws LdapException
    {
        /** A structure to hold all the partitions */
        DnNode<Partition> partitionLookupTree = new DnNode<Partition>();

        Dn suffix1 = new Dn( schemaManager, "dc=example1, dc=com" );
        Partition partition1 = new JdbmPartition( schemaManager, dnFactory );
        partition1.setSuffixDn( suffix1 );

        partitionLookupTree.add( suffix1, partition1 );

        Dn suffix2 = new Dn( schemaManager, "dc=example2, dc=com" );
        Partition partition2 = new JdbmPartition( schemaManager, dnFactory );
        partition2.setSuffixDn( suffix2 );

        partitionLookupTree.add( suffix2, partition2 );

        assertNotNull( partitionLookupTree );
        assertTrue( partitionLookupTree.hasChildren() );
        assertTrue( partitionLookupTree.contains( new Rdn( schemaManager, "dc=com" ) ) );

        DnNode<Partition> child = partitionLookupTree.getChild( new Rdn( schemaManager, "dc=com" ) );
        assertTrue( child.hasChildren() );
        assertTrue( child.contains( new Rdn( schemaManager, "dc=example1" ) ) );
        assertTrue( child.contains( new Rdn( schemaManager, "dc=example2" ) ) );

        DnNode<Partition> child1 = child.getChild( new Rdn( schemaManager, "dc=example1" ) );
        assertTrue( child1.isLeaf() );
        assertEquals( "dc=example1, dc=com", child1.getElement().getSuffixDn().getName() );

        // BUG FIX: this previously fetched "dc=example1" again and asserted
        // the first partition's suffix, so the second partition was never
        // actually verified. It must check the "dc=example2" branch.
        DnNode<Partition> child2 = child.getChild( new Rdn( schemaManager, "dc=example2" ) );
        assertTrue( child2.isLeaf() );
        assertEquals( "dc=example2, dc=com", child2.getElement().getSuffixDn().getName() );
    }
}
/*
 * Copyright 2014 Matti Tahvonen.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.vaadin.viritin;

import org.vaadin.viritin.ListContainer;
import org.vaadin.viritin.testdomain.Person;

import com.vaadin.data.Container;
import com.vaadin.data.Item;
import com.vaadin.data.util.BeanItemContainer;
import com.vaadin.ui.Table;

import java.lang.management.ManagementFactory;
import java.lang.management.MemoryUsage;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Random;
import java.util.logging.Logger;

import junit.framework.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.vaadin.viritin.testdomain.Service;

/**
 * Compares {@link ListContainer} behavior and memory footprint against core
 * Vaadin's {@link BeanItemContainer}, and exercises edge cases around empty
 * lists and item navigation.
 */
public class DynaBeanBasedContainer {

    // Fixed seed so generated ages are reproducible across runs.
    Random r = new Random(0);

    final static int amount = 1000000;

    private List<Person> persons = Service.getListOfPersons(amount);

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    /**
     * Verifies that both container types expose bean properties even when
     * constructed from an empty list, except for the List-only ListContainer
     * constructor, which has no bean class to introspect and therefore
     * reports zero properties.
     */
    @Test
    public void testEmptyList() {
        List<Person> l = new ArrayList<Person>();

        // Test with BeanItemContainer
        System.out.println("BeanItemContainer with empty list");
        BeanItemContainer<Person> bc = new BeanItemContainer<Person>(
                Person.class, l);
        System.out.println(" container size=" + bc.size());
        System.out.print("Properties: ");
        for (String p : bc.getContainerPropertyIds()) {
            System.out.print(p + " ");
        }

        // Test ListContainer with setCollection call
        System.out.println("\n\nListContainer with empty list via setCollection");
        ListContainer<Person> lc = new ListContainer<Person>(Person.class);
        lc.setCollection(l);
        System.out.println(" container size=" + lc.size());
        System.out.print("Properties: ");
        for (String p : lc.getContainerPropertyIds()) {
            System.out.print(p + " ");
        }

        // Test ListContainer with setCollection call
        System.out.println(
                "\n\nListContainer with Class<T>, Collection<T> constructor");
        lc = new ListContainer<Person>(Person.class, l);
        System.out.println(" container size=" + lc.size());
        System.out.print("Properties: ");
        for (String p : lc.getContainerPropertyIds()) {
            System.out.print(p + " ");
        }

        Person per = new Person();
        per.setFirstName("First");
        per.setLastName("Lastname");
        per.setAge(r.nextInt(100));
        lc.addItem(per);
        System.out.println("\n container size after addItem = " + lc.size());

        // Mutating the backing list directly must also be reflected.
        Person per2 = new Person();
        per2.setFirstName("Firs");
        per2.setLastName("Lastnam");
        per2.setAge(r.nextInt(100));
        l.add(per2);
        System.out.println(" container size after add = " + lc.size());

        // Test ListContainer with constructor that takes the List -- empty List
        // will cause zarro properties
        System.out.println(
                "\n\nListContainer with empty list via Collection<T> constructor");
        l = new ArrayList<Person>();
        lc = new ListContainer<Person>(l);
        System.out.println(" container size=" + lc.size());
        System.out.println("Properties: none should print due to exception");
        Assert.assertEquals(0, lc.getContainerPropertyIds().size());
    }

    /**
     * Rough memory/speed benchmark of ListContainer: creation, per-item
     * property access, and retained size (printed, not asserted).
     */
    @Test
    public void testMemoryUsage() {
        System.out.println("\n Testing List container from Maddon");
        long initial = reportMemoryUsage();

        long ms = System.currentTimeMillis();
        ListContainer<Person> lc = new ListContainer<Person>(persons);
        System.out.println(
                "After creation (took " + (System.currentTimeMillis() - ms) + ")");
        long after = reportMemoryUsage();
        System.err.println("Delta (bytes)" + (after - initial));

        ms = System.currentTimeMillis();
        for (int i = 0; i < amount; i++) {
            Item item = lc.getItem(persons.get(i));
            // Force property materialization; result intentionally unused.
            String str;
            str = item.getItemProperty("firstName").toString();
        }
        System.out.println(
                "After loop (took " + (System.currentTimeMillis() - ms) + ")");
        after = reportMemoryUsage();
        System.err.println("Delta (bytes)" + (after - initial));

        // call to avoid GC:n the whole container
        lc.getItemIds();
        System.out.println("After GC");
        after = reportMemoryUsage();
        System.err.println("Delta (bytes)" + (after - initial));
    }

    /**
     * Same benchmark as {@link #testMemoryUsage()} but against core Vaadin's
     * BeanItemContainer, for comparison.
     */
    @Test
    public void testMemoryUsageStd() {
        System.out.println("\n Testing BeanItemContainer from core Vaadin");
        long initial = reportMemoryUsage();

        long ms = System.currentTimeMillis();
        BeanItemContainer<Person> lc = new BeanItemContainer<Person>(persons);
        System.out.println(
                "After creation (took " + (System.currentTimeMillis() - ms) + ")");
        long after = reportMemoryUsage();
        System.err.println("Delta (bytes)" + (after - initial));

        ms = System.currentTimeMillis();
        for (int i = 0; i < amount; i++) {
            Item item = lc.getItem(persons.get(i));
            // Force property materialization; result intentionally unused.
            String str;
            str = item.getItemProperty("firstName").toString();
        }
        System.out.println(
                "After loop (took " + (System.currentTimeMillis() - ms) + ")");
        after = reportMemoryUsage();
        System.err.println("Delta (bytes)" + (after - initial));

        // call to avoid GC:n the whole container
        lc.getItemIds();
        System.out.println("After GC");
        after = reportMemoryUsage();
        System.err.println("Delta (bytes)" + (after - initial));
    }

    /**
     * Prints current heap usage after nudging the garbage collector.
     *
     * @return heap bytes in use (best effort; GC is only a hint)
     */
    private long reportMemoryUsage() {
        try {
            // Repeated gc+sleep rounds to encourage a full collection.
            System.gc();
            Thread.sleep(100);
            System.gc();
            Thread.sleep(100);
            System.gc();
            Thread.sleep(100);
            System.gc();
        } catch (InterruptedException ex) {
            // BUG FIX: was silently swallowed; restore the interrupt status
            // so callers can still observe the interruption.
            Thread.currentThread().interrupt();
        }
        MemoryUsage mu = ManagementFactory.getMemoryMXBean().
                getHeapMemoryUsage();
        System.out.println("Memory used (M):" + mu.getUsed() / 1000000);
        return ManagementFactory.getMemoryMXBean().
                getHeapMemoryUsage().getUsed();
    }

    /**
     * nextItemId/prevItemId must return null past either end of the list and
     * the adjacent item otherwise.
     */
    @Test
    public void ensureNullFromNextAndPrevId() {
        final List<Person> persons = Service.getListOfPersons(2);
        ListContainer<Person> lc = new ListContainer<Person>(persons);
        Assert.assertNull(lc.prevItemId(persons.get(0)));
        Assert.assertEquals(persons.get(0), lc.prevItemId(persons.get(1)));
        Assert.assertEquals(persons.get(1), lc.nextItemId(persons.get(0)));
        Assert.assertNull(lc.nextItemId(persons.get(1)));
    }
}
/*
 * Copyright 2009-2012 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.ibatis.jdbc;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.ibatis.io.ResolverUtil;
import org.apache.ibatis.io.Resources;
import org.apache.ibatis.mapping.BoundSql;
import org.apache.ibatis.session.Configuration;
import org.apache.ibatis.type.TypeHandler;
import org.apache.ibatis.type.TypeHandlerRegistry;

/**
 * Thin helper for running ad-hoc SQL over a single JDBC {@link Connection},
 * mapping each result row to an upper-cased-column-name Map.
 *
 * @author Clinton Begin
 */
public class SqlRunner {

  /** Sentinel returned by {@link #insert} when no generated key is available. */
  public static final int NO_GENERATED_KEY = Integer.MIN_VALUE + 1001;

  private Connection connection;
  private TypeHandlerRegistry typeHandlerRegistry;
  private boolean useGeneratedKeySupport;

  public SqlRunner(Connection connection) {
    this.connection = connection;
    this.typeHandlerRegistry = new TypeHandlerRegistry();
  }

  public void setUseGeneratedKeySupport(boolean useGeneratedKeySupport) {
    this.useGeneratedKeySupport = useGeneratedKeySupport;
  }

  /**
   * Executes a SELECT statement that returns one row.
   *
   * @param sql The SQL
   * @param args The arguments to be set on the statement.
   * @return The single result row.
   * @throws SQLException If zero or more than one row is returned
   */
  public Map<String, Object> selectOne(String sql, Object... args) throws SQLException {
    List<Map<String, Object>> results = selectAll(sql, args);
    if (results.size() != 1) {
      throw new SQLException("Statement returned " + results.size() + " results where exactly one (1) was expected.");
    }
    return results.get(0);
  }

  /**
   * Executes a SELECT statement that returns multiple rows.
   *
   * @param sql The SQL
   * @param args The arguments to be set on the statement.
   * @return One Map per row, keyed by upper-cased column label.
   * @throws SQLException If statement preparation or execution fails
   */
  public List<Map<String, Object>> selectAll(String sql, Object... args) throws SQLException {
    PreparedStatement ps = connection.prepareStatement(sql);
    try {
      setParameters(ps, args);
      ResultSet rs = ps.executeQuery();
      // getResults() closes the ResultSet in its own finally block.
      return getResults(rs);
    } finally {
      try {
        ps.close();
      } catch (SQLException e) {
        // ignore
      }
    }
  }

  /**
   * Executes an INSERT statement.
   *
   * @param sql The SQL
   * @param args The arguments to be set on the statement.
   * @return The generated key if exactly one numeric key was produced,
   *         otherwise {@link #NO_GENERATED_KEY}.
   * @throws SQLException If statement preparation or execution fails
   */
  public int insert(String sql, Object... args) throws SQLException {
    PreparedStatement ps;
    if (useGeneratedKeySupport) {
      ps = connection.prepareStatement(sql, Statement.RETURN_GENERATED_KEYS);
    } else {
      ps = connection.prepareStatement(sql);
    }

    try {
      setParameters(ps, args);
      ps.executeUpdate();
      if (useGeneratedKeySupport) {
        List<Map<String, Object>> keys = getResults(ps.getGeneratedKeys());
        if (keys.size() == 1) {
          Map<String, Object> key = keys.get(0);
          Iterator<Object> i = key.values().iterator();
          if (i.hasNext()) {
            Object genkey = i.next();
            if (genkey != null) {
              try {
                return Integer.parseInt(genkey.toString());
              } catch (NumberFormatException e) {
                // ignore, no numeric key support
              }
            }
          }
        }
      }
      return NO_GENERATED_KEY;
    } finally {
      try {
        ps.close();
      } catch (SQLException e) {
        // ignore
      }
    }
  }

  /**
   * Executes an UPDATE statement.
   *
   * @param sql The SQL
   * @param args The arguments to be set on the statement.
   * @return The number of rows impacted.
   * @throws SQLException If statement preparation or execution fails
   */
  public int update(String sql, Object... args) throws SQLException {
    PreparedStatement ps = connection.prepareStatement(sql);
    try {
      setParameters(ps, args);
      return ps.executeUpdate();
    } finally {
      try {
        ps.close();
      } catch (SQLException e) {
        // ignore
      }
    }
  }

  /**
   * Executes a DELETE statement (delegates to {@link #update}).
   *
   * @param sql The SQL
   * @param args The arguments to be set on the statement.
   * @return The number of rows impacted.
   * @throws SQLException If statement preparation or execution fails
   */
  public int delete(String sql, Object... args) throws SQLException {
    return update(sql, args);
  }

  /**
   * Executes any string as a JDBC Statement. Good for DDL.
   *
   * @param sql The SQL
   * @throws SQLException If statement preparation or execution fails
   */
  public void run(String sql) throws SQLException {
    Statement stmt = connection.createStatement();
    try {
      stmt.execute(sql);
    } finally {
      try {
        stmt.close();
      } catch (SQLException e) {
        // ignore
      }
    }
  }

  public void closeConnection() {
    try {
      connection.close();
    } catch (SQLException e) {
      // ignore
    }
  }

  /**
   * Binds arguments to the statement. Typed nulls must be passed as Null
   * instances; bare Java null is rejected.
   */
  private void setParameters(PreparedStatement ps, Object... args) throws SQLException {
    for (int i = 0, n = args.length; i < n; i++) {
      if (args[i] == null) {
        throw new SQLException("SqlRunner requires an instance of Null to represent typed null values for JDBC compatibility");
      } else if (args[i] instanceof Null) {
        ((Null) args[i]).getTypeHandler().setParameter(ps, i + 1, null, ((Null) args[i]).getJdbcType());
      } else {
        // Raw TypeHandler on purpose: the runtime class is only known here.
        TypeHandler typeHandler = typeHandlerRegistry.getTypeHandler(args[i].getClass());
        if (typeHandler == null) {
          throw new SQLException("SqlRunner could not find a TypeHandler instance for " + args[i].getClass());
        } else {
          typeHandler.setParameter(ps, i + 1, args[i], null);
        }
      }
    }
  }

  /**
   * Drains the ResultSet into a List of row Maps (column label upper-cased
   * via Locale.ENGLISH), picking a TypeHandler per column up front. Always
   * closes the ResultSet.
   */
  private List<Map<String, Object>> getResults(ResultSet rs) throws SQLException {
    try {
      List<Map<String, Object>> list = new ArrayList<Map<String, Object>>();
      List<String> columns = new ArrayList<String>();
      List<TypeHandler<?>> typeHandlers = new ArrayList<TypeHandler<?>>();
      ResultSetMetaData rsmd = rs.getMetaData();
      for (int i = 0, n = rsmd.getColumnCount(); i < n; i++) {
        columns.add(rsmd.getColumnLabel(i + 1));
        try {
          Class<?> type = Resources.classForName(rsmd.getColumnClassName(i + 1));
          TypeHandler<?> typeHandler = typeHandlerRegistry.getTypeHandler(type);
          if (typeHandler == null) {
            typeHandler = typeHandlerRegistry.getTypeHandler(Object.class);
          }
          typeHandlers.add(typeHandler);
        } catch (Exception e) {
          // Unknown column class: fall back to the generic Object handler.
          typeHandlers.add(typeHandlerRegistry.getTypeHandler(Object.class));
        }
      }
      while (rs.next()) {
        Map<String, Object> row = new HashMap<String, Object>();
        for (int i = 0, n = columns.size(); i < n; i++) {
          String name = columns.get(i);
          TypeHandler<?> handler = typeHandlers.get(i);
          row.put(name.toUpperCase(Locale.ENGLISH), handler.getResult(rs, name));
        }
        list.add(row);
      }
      return list;
    } finally {
      try {
        if (rs != null) rs.close();
      } catch (Exception e) {
        // ignore
      }
    }
  }

  /**
   * Reads the column list (and, when present, primary-key info) of a table
   * via SHOW COLUMNS.
   *
   * @param connection connection to query on
   * @param boundSql unused here; kept for interface compatibility
   * @param table table name (upper-cased before querying)
   * @return map with Configuration.FIELD (comma-joined column names) and,
   *         if a PRI column was seen, Configuration.COLUMN_KEY
   * @throws SQLException If statement preparation or execution fails
   */
  public static Map<String, Object> findByTablefield(Connection connection, BoundSql boundSql, String table) throws SQLException {
    PreparedStatement countStmt = null;
    ResultSet rs = null;
    // StringBuilder instead of repeated String concatenation in the loop.
    StringBuilder field = new StringBuilder();
    Map<String, Object> map = new HashMap<String, Object>();
    try {
      String sql = Configuration.SHOW_COLUMNS_FROM + table.toUpperCase();
      countStmt = connection.prepareStatement(sql);
      rs = countStmt.executeQuery();
      while (rs.next()) {
        field.append(rs.getString("Field")).append(",");
        String column_key = rs.getString("Key");
        if (StringUtils.isNotBlank(column_key) && "PRI".equals(column_key)) {
          // NOTE(review): this stores ALL columns accumulated so far, not
          // just the primary-key column — preserved as-is, but confirm the
          // intended semantics of Configuration.COLUMN_KEY.
          map.put(Configuration.COLUMN_KEY, ResolverUtil.trimComma(field.toString()));
        }
      }
      map.put(Configuration.FIELD, ResolverUtil.trimComma(field.toString()));
    } finally {
      // Null-guarded closes: previously a failed prepare/execute made the
      // finally block throw (and swallow) NullPointerExceptions.
      if (rs != null) {
        try {
          rs.close();
        } catch (Exception e) {
          // ignore
        }
      }
      if (countStmt != null) {
        try {
          countStmt.close();
        } catch (Exception e) {
          // ignore
        }
      }
    }
    return map;
  }
}
package android.nized.org.orgnized;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.nized.org.api.APIWrapper;
import android.nized.org.domain.Person;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.text.Editable;
import android.text.TextWatcher;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;

import com.loopj.android.http.JsonHttpResponseHandler;
import com.loopj.android.http.RequestParams;

import org.apache.http.Header;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import java.util.concurrent.CountDownLatch;

/**
 * Dialog that lets the signed-in user change their password.
 * Collects old/new/confirm passwords, validates that new and confirm match,
 * then submits the change to the backend via {@link APIWrapper} on a
 * background {@link AsyncTask}.  The host activity must implement
 * {@link NoticeDialogListener} to be notified of a successful change.
 *
 * Created by greg on 4/22/15.
 */
public class ChangePasswordDialogFragment extends DialogFragment {
    // Use this instance of the interface to deliver action events
    NoticeDialogListener mListener;
    private EditText mConfirmPassword;
    private EditText mOldPassword;
    private EditText mNewPassword;
    private AlertDialog mBuild;
    private ChangePasswordTask mAuthTask;
    private View mLoginFormView;
    private View mProgressView;

    /** Callback contract the host activity must implement. */
    public interface NoticeDialogListener {
        public void onDialogPositiveClick(DialogFragment dialog);
    }

    // Override the Fragment.onAttach() method to instantiate the NoticeDialogListener
    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        // Verify that the host activity implements the callback interface
        try {
            // Instantiate the NoticeDialogListener so we can send events to the host
            mListener = (NoticeDialogListener) activity;
        } catch (ClassCastException e) {
            // The activity doesn't implement the interface, throw exception
            throw new ClassCastException(activity.toString() + " must implement NoticeDialogListener");
        }
    }

    /**
     * Shows the progress UI and hides the login form.
     *
     * @param show true to show the spinner and hide the form, false for the reverse
     */
    @TargetApi(Build.VERSION_CODES.HONEYCOMB_MR2)
    public void showProgress(final boolean show) {
        // On Honeycomb MR2 we have the ViewPropertyAnimator APIs, which allow
        // for very easy animations. If available, use these APIs to fade-in
        // the progress spinner.
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB_MR2) {
            int shortAnimTime = getResources().getInteger(android.R.integer.config_shortAnimTime);
            mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
            // Fade the form out (or in) and pin its final visibility when the
            // animation completes.
            mLoginFormView.animate().setDuration(shortAnimTime).alpha(
                    show ? 0 : 1).setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
                }
            });
            mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
            mProgressView.animate().setDuration(shortAnimTime).alpha(
                    show ? 1 : 0).setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
                }
            });
        } else {
            // The ViewPropertyAnimator APIs are not available, so simply show
            // and hide the relevant UI components.
            mProgressView.setVisibility(show ? View.VISIBLE : View.GONE);
            mLoginFormView.setVisibility(show ? View.GONE : View.VISIBLE);
        }
    }

    /**
     * Builds the dialog: inflates the change-password layout, wires the
     * cancel button, live-validates that the two new-password fields match,
     * and installs the positive-button handler via setOnShowListener so the
     * dialog is NOT auto-dismissed on click (the default AlertDialog positive
     * handler always dismisses; deferring lets us keep it open on validation
     * failure).
     */
    @Override
    public Dialog onCreateDialog(Bundle savedInstanceState) {
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        // Get the layout inflater
        LayoutInflater inflater = getActivity().getLayoutInflater();
        final View layout = inflater.inflate(R.layout.dialog_changepassword, null);
        // Inflate and set the layout for the dialog
        // Pass null as the parent view because its going in the dialog layout
        builder.setView(layout)
                // Add action buttons
                .setPositiveButton(R.string.submit_button, null)
                .setNegativeButton(R.string.cancel, new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        ChangePasswordDialogFragment.this.getDialog().cancel();
                    }
                });
        mOldPassword = (EditText) layout.findViewById(R.id.prevPasswordET);
        mNewPassword = (EditText) layout.findViewById(R.id.newPasswordET);
        mConfirmPassword = (EditText) layout.findViewById(R.id.confirmPasswordET);
        mLoginFormView = layout.findViewById(R.id.change_password_form);
        mProgressView = layout.findViewById(R.id.change_password_progress);
        // Re-validate password equality on every keystroke in the confirm field.
        mConfirmPassword.addTextChangedListener(new TextWatcher() {
            public void afterTextChanged(Editable s) {
                mConfirmPassword.setError(null);
                String strPass1 = mNewPassword.getText().toString();
                String strPass2 = mConfirmPassword.getText().toString();
                if (!strPass1.equals(strPass2)) {
                    mConfirmPassword.setError(getString(R.string.settings_pwd_not_equal));
                }
            }

            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }
        });
        mBuild = builder.create();
        mBuild.setOnShowListener(new DialogInterface.OnShowListener() {
            @Override
            public void onShow(DialogInterface dialog) {
                Button b = mBuild.getButton(AlertDialog.BUTTON_POSITIVE);
                b.setOnClickListener(new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        // TODO Do something
                        String oldPassword = mOldPassword.getText().toString();
                        String newPassword = mNewPassword.getText().toString();
                        String confirmPassword = mConfirmPassword.getText().toString();
                        // reset passwords
                        mOldPassword.setError(null);
                        mNewPassword.setError(null);
                        mConfirmPassword.setError(null);
                        if (newPassword.equals(confirmPassword)) {
                            // create request to change password here
                            showProgress(true);
                            mAuthTask = new ChangePasswordTask(ProfileFragment.mPerson.getEmail(), oldPassword, newPassword);
                            mAuthTask.execute((Void) null);
                        }
                    }
                });
            }
        });
        return mBuild;
    }

    /**
     * Background task posting the password change.  The async HTTP callback
     * is bridged to the synchronous AsyncTask body with a CountDownLatch:
     * doInBackground blocks until one of the response callbacks counts down.
     * NOTE(review): the String-body onSuccess overload does NOT count down the
     * latch — if the server ever responds with a plain string the task would
     * hang; confirm against the API's actual response types.
     */
    public class ChangePasswordTask extends AsyncTask<Void, Void, Boolean> {
        private final String taskEmail;
        private final String taskOldPassword;
        private final String taskNewPassword;

        ChangePasswordTask(String email, String oldPassword, String newPassword) {
            taskEmail = email;
            taskOldPassword = oldPassword;
            taskNewPassword = newPassword;
        }

        @Override
        protected Boolean doInBackground(Void... params) {
            // TODO: attempt authentication against a network service.
            RequestParams requestParams = new RequestParams();
            requestParams.put("email", taskEmail);
            requestParams.put("old_password", taskOldPassword);
            requestParams.put("new_password", taskNewPassword);
            Log.i("change password", requestParams.toString());
            final CountDownLatch latch = new CountDownLatch(1);
            // Single-element array so the anonymous callbacks can write the outcome.
            final boolean[] result = new boolean[1];
            APIWrapper.post(APIWrapper.CHANGE_PASSWORD, requestParams, new JsonHttpResponseHandler() {
                @Override
                public void onSuccess(int statusCode, Header[] headers, JSONObject person) {
                    // If the response is JSONObject instead of expected JSONArray
                    Log.i("Change person success", "");
                    result[0] = true;
                    APIWrapper.setLoggedInPerson((Person) APIWrapper.parseJSONOjbect(person, Person.class));
                    latch.countDown();
                }

                @Override
                public void onSuccess(int statusCode, Header[] headers, JSONArray people) {
                    // Pull out the first one
                    try {
                        Log.i("Change person success", "");
                        APIWrapper.setLoggedInPerson((Person) APIWrapper.parseJSONOjbect(
                                people.getJSONObject(0), Person.class));
                        result[0] = true;
                        latch.countDown();
                    } catch (JSONException e) {
                        Log.i("Change password failure", "password incorrect");
                        result[0] = false;
                        latch.countDown();
                    }
                }

                @Override
                public void onSuccess(int statusCode, org.apache.http.Header[] headers, java.lang.String responseString) {
                    Log.i("Change person success", "");
                }

                @Override
                public void onFailure(int statusCode, Header[] headers, String responseString, Throwable throwable) {
                    Log.i("Change person failure", responseString);
                    result[0] = false;
                    latch.countDown();
                }

                @Override
                public void onFailure(int statusCode, org.apache.http.Header[] headers, java.lang.Throwable throwable, org.json.JSONObject errorResponse) {
                    if (errorResponse != null) {
                        Log.i("change person failure", "Response: " + errorResponse.toString());
                    }
                    result[0] = false;
                    latch.countDown();
                }
            });
            try {
                latch.await(); // Wait for countDown() in the UI thread.
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            // TODO: register the new account here.
            return result[0];
        }

        @Override
        protected void onPostExecute(final Boolean success) {
            mAuthTask = null;
            showProgress(false);
            if (success) {
                //Dismiss once everything is OK.
                Log.i("dismiss", "");
                mBuild.dismiss();
                mListener.onDialogPositiveClick(ChangePasswordDialogFragment.this);
            } else {
                // Wrong old password is the assumed failure mode here.
                mOldPassword.setError(getString(R.string.error_incorrect_password));
                mOldPassword.requestFocus();
            }
        }

        @Override
        protected void onCancelled() {
            mAuthTask = null;
            showProgress(false);
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.thrift.server;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import io.prestosql.plugin.thrift.api.PrestoThriftBlock;
import io.prestosql.plugin.thrift.api.PrestoThriftId;
import io.prestosql.plugin.thrift.api.PrestoThriftNullableToken;
import io.prestosql.plugin.thrift.api.PrestoThriftPageResult;
import io.prestosql.plugin.thrift.api.PrestoThriftSchemaTableName;
import io.prestosql.plugin.thrift.api.PrestoThriftServiceException;
import io.prestosql.plugin.thrift.api.PrestoThriftSplit;
import io.prestosql.plugin.thrift.api.PrestoThriftSplitBatch;
import io.prestosql.spi.connector.ConnectorPageSource;
import io.prestosql.spi.connector.RecordPageSource;
import io.prestosql.spi.connector.RecordSet;
import io.prestosql.spi.type.Type;
import io.prestosql.split.MappedRecordSet;
import io.prestosql.testing.tpch.TpchIndexedData;
import io.prestosql.testing.tpch.TpchIndexedData.IndexedTable;
import io.prestosql.testing.tpch.TpchScaledTable;

import java.util.ArrayList;
import java.util.List;
import java.util.Set;

import static com.google.common.base.Preconditions.checkArgument;
import static io.prestosql.plugin.thrift.server.SplitInfo.indexSplit;
import static io.prestosql.testing.AbstractTestIndexedQueries.INDEX_SPEC;
import static java.lang.String.format;
import static java.nio.charset.StandardCharsets.UTF_8;

/**
 * Thrift TPCH test service variant that supports index-based (key lookup)
 * access: lookup keys arriving as a thrift page are partitioned into a fixed
 * number of splits, and each split is served by probing pre-built in-memory
 * indexes ({@link TpchIndexedData}).
 */
public class ThriftIndexedTpchService
        extends ThriftTpchService
{
    // Lookup keys are always divided among exactly this many splits.
    private static final int NUMBER_OF_INDEX_SPLITS = 2;
    private final TpchIndexedData indexedData = new TpchIndexedData(INDEX_SPEC);

    /**
     * Returns the sets of columns for which an index exists on the given
     * scaled TPCH table (scale factor derived from the schema name).
     */
    @Override
    protected List<Set<String>> getIndexableKeys(String schemaName, String tableName)
    {
        TpchScaledTable tpchScaledTable = new TpchScaledTable(tableName, schemaNameToScaleFactor(schemaName));
        return ImmutableList.copyOf(INDEX_SPEC.getColumnIndexes(tpchScaledTable));
    }

    /**
     * Partitions the incoming lookup keys into NUMBER_OF_INDEX_SPLITS contiguous
     * ranges and encodes each range into a split's id.  Continuation tokens are
     * not supported, and empty ranges are dropped.
     */
    @Override
    protected PrestoThriftSplitBatch getIndexSplitsSync(
            PrestoThriftSchemaTableName schemaTableName,
            List<String> indexColumnNames,
            PrestoThriftPageResult keys,
            int maxSplitCount,
            PrestoThriftNullableToken nextToken)
            throws PrestoThriftServiceException
    {
        checkArgument(NUMBER_OF_INDEX_SPLITS <= maxSplitCount, "maxSplitCount for lookup splits is too low");
        checkArgument(nextToken.getToken() == null, "no continuation is supported for lookup splits");
        int totalKeys = keys.getRowCount();
        int partSize = totalKeys / NUMBER_OF_INDEX_SPLITS;
        List<PrestoThriftSplit> splits = new ArrayList<>(NUMBER_OF_INDEX_SPLITS);
        for (int splitIndex = 0; splitIndex < NUMBER_OF_INDEX_SPLITS; splitIndex++) {
            int begin = partSize * splitIndex;
            int end = partSize * (splitIndex + 1);
            if (splitIndex + 1 == NUMBER_OF_INDEX_SPLITS) {
                // add remainder to the last split
                end = totalKeys;
            }
            if (begin == end) {
                // split is empty, skip it
                continue;
            }
            SplitInfo splitInfo = indexSplit(
                    schemaTableName.getSchemaName(),
                    schemaTableName.getTableName(),
                    indexColumnNames,
                    thriftPageToList(keys, begin, end));
            splits.add(new PrestoThriftSplit(new PrestoThriftId(SPLIT_INFO_CODEC.toJsonBytes(splitInfo)), ImmutableList.of()));
        }
        return new PrestoThriftSplitBatch(splits, null);
    }

    /**
     * Serves one lookup split: finds the matching in-memory index, probes it
     * with the keys carried in the split, and returns the requested output
     * columns as a page source.
     */
    @Override
    protected ConnectorPageSource createLookupPageSource(SplitInfo splitInfo, List<String> outputColumnNames)
    {
        IndexedTable indexedTable = indexedData.getIndexedTable(
                splitInfo.getTableName(),
                schemaNameToScaleFactor(splitInfo.getSchemaName()),
                ImmutableSet.copyOf(splitInfo.getLookupColumnNames()))
                .orElseThrow(() -> new IllegalArgumentException(format("No such index: %s%s", splitInfo.getTableName(), splitInfo.getLookupColumnNames())));
        List<Type> lookupColumnTypes = types(splitInfo.getTableName(), splitInfo.getLookupColumnNames());
        RecordSet keyRecordSet = new ListBasedRecordSet(splitInfo.getKeys(), lookupColumnTypes);
        RecordSet outputRecordSet = lookupIndexKeys(keyRecordSet, indexedTable, outputColumnNames, splitInfo.getLookupColumnNames());
        return new RecordPageSource(outputRecordSet);
    }

    /**
     * Get lookup result and re-map output columns based on requested order.
     */
    private static RecordSet lookupIndexKeys(RecordSet keys, IndexedTable table, List<String> outputColumnNames, List<String> lookupColumnNames)
    {
        // Remap the incoming keys to the order the index expects, probe,
        // then remap the index's full output to the requested column order.
        RecordSet allColumnsOutputRecordSet = table.lookupKeys(
                new MappedRecordSet(
                        keys,
                        computeRemap(lookupColumnNames, table.getKeyColumns())));
        List<Integer> outputRemap = computeRemap(table.getOutputColumns(), outputColumnNames);
        return new MappedRecordSet(allColumnsOutputRecordSet, outputRemap);
    }

    /**
     * Converts the [begin, end) row interval of a thrift page into a
     * column-major list of string values (one list per column block).
     */
    private static List<List<String>> thriftPageToList(PrestoThriftPageResult page, int begin, int end)
    {
        checkArgument(begin <= end, "invalid interval");
        if (begin == end) {
            // empty interval
            return ImmutableList.of();
        }
        List<PrestoThriftBlock> blocks = page.getColumnBlocks();
        List<List<String>> result = new ArrayList<>(blocks.size());
        for (PrestoThriftBlock block : blocks) {
            result.add(blockAsList(block, begin, end));
        }
        return result;
    }

    /**
     * Renders rows [begin, end) of one thrift block as strings (null entries
     * preserved as null).  Supports bigint, integer and varchar blocks only.
     * For varchar the byte buffer is shared by all rows, so the byte offset of
     * row 'begin' must first be computed by summing the sizes of all preceding
     * non-null rows.
     */
    private static List<String> blockAsList(PrestoThriftBlock block, int begin, int end)
    {
        List<String> result = new ArrayList<>(end - begin);
        if (block.getBigintData() != null) {
            boolean[] nulls = block.getBigintData().getNulls();
            long[] longs = block.getBigintData().getLongs();
            for (int index = begin; index < end; index++) {
                if (nulls != null && nulls[index]) {
                    result.add(null);
                }
                else {
                    checkArgument(longs != null, "block structure is incorrect");
                    result.add(String.valueOf(longs[index]));
                }
            }
        }
        else if (block.getIntegerData() != null) {
            boolean[] nulls = block.getIntegerData().getNulls();
            int[] ints = block.getIntegerData().getInts();
            for (int index = begin; index < end; index++) {
                if (nulls != null && nulls[index]) {
                    result.add(null);
                }
                else {
                    checkArgument(ints != null, "block structure is incorrect");
                    result.add(String.valueOf(ints[index]));
                }
            }
        }
        else if (block.getVarcharData() != null) {
            boolean[] nulls = block.getVarcharData().getNulls();
            int[] sizes = block.getVarcharData().getSizes();
            byte[] bytes = block.getVarcharData().getBytes();
            int startOffset = 0;
            // calculate cumulative offset before the starting position
            if (sizes != null) {
                for (int index = 0; index < begin; index++) {
                    if (nulls == null || !nulls[index]) {
                        startOffset += sizes[index];
                    }
                }
            }
            for (int index = begin; index < end; index++) {
                if (nulls != null && nulls[index]) {
                    result.add(null);
                }
                else {
                    checkArgument(sizes != null, "block structure is incorrect");
                    if (sizes[index] == 0) {
                        result.add("");
                    }
                    else {
                        checkArgument(bytes != null);
                        result.add(new String(bytes, startOffset, sizes[index], UTF_8));
                        startOffset += sizes[index];
                    }
                }
            }
        }
        else {
            throw new IllegalArgumentException("Only bigint, integer and varchar blocks are supported");
        }
        return result;
    }

    /**
     * For each column name in endSchema, finds its index in startSchema.
     * Throws if a name in endSchema is missing from startSchema.
     */
    private static List<Integer> computeRemap(List<String> startSchema, List<String> endSchema)
    {
        ImmutableList.Builder<Integer> builder = ImmutableList.builder();
        for (String columnName : endSchema) {
            int index = startSchema.indexOf(columnName);
            checkArgument(index != -1, "Column name in end that is not in the start: %s", columnName);
            builder.add(index);
        }
        return builder.build();
    }
}
/*
 * Copyright Debezium Authors.
 *
 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
 */
package io.debezium.transforms;

import static org.apache.kafka.connect.transforms.util.Requirements.requireStruct;

import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.kafka.common.cache.Cache;
import org.apache.kafka.common.cache.LRUCache;
import org.apache.kafka.common.cache.SynchronizedCache;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.connect.connector.ConnectRecord;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.errors.ConnectException;
import org.apache.kafka.connect.transforms.Transformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import io.debezium.config.Configuration;
import io.debezium.config.Field;
import io.debezium.data.Envelope;
import io.debezium.util.AvroValidator;

/**
 * A logical table consists of one or more physical tables with the same schema. A common use case is sharding -- the
 * two physical tables `db_shard1.my_table` and `db_shard2.my_table` together form one logical table.
 *
 * This Transformation allows us to change a record's topic name and send change events from multiple physical tables to
 * one topic. For instance, we might choose to send the two tables from the above example to the topic
 * `db_shard.my_table`. The config options {@link #TOPIC_REGEX} and {@link #TOPIC_REPLACEMENT} are used
 * to change the record's topic.
 *
 * Now that multiple physical tables can share a topic, the event's key may need to be augmented to include fields other
 * than just those for the record's primary/unique key, since these are not guaranteed to be unique across tables. We
 * need some identifier added to the key that distinguishes the different physical tables. The field name specified by
 * the config option {@link #KEY_FIELD_NAME} is added to the key schema for this purpose. By default, its value will
 * be the old topic name, but if a custom value is desired, the config options {@link #KEY_FIELD_REGEX} and
 * {@link #KEY_FIELD_REPLACEMENT} may be used to change it. For instance, in our above example, we might choose to
 * make the identifier `db_shard1` and `db_shard2` respectively.
 *
 * @author David Leibovic
 * @param <R> the subtype of {@link ConnectRecord} on which this transformation will operate
 */
public class ByLogicalTableRouter<R extends ConnectRecord<R>> implements Transformation<R> {

    private final Logger logger = LoggerFactory.getLogger(getClass());

    private Pattern topicRegex;
    private String topicReplacement;
    private Pattern keyFieldRegex;
    private String keyFieldReplacement;
    private String keyFieldName;
    // Caches are keyed by the incoming schema instance; bounded LRU so that
    // schema evolution does not grow them without limit.
    private Cache<Schema, Schema> keySchemaUpdateCache;
    private Cache<Schema, Schema> envelopeSchemaUpdateCache;
    private final AvroValidator schemaNameValidator = AvroValidator.create(logger);

    private static final Field TOPIC_REGEX = Field.create("topic.regex")
            .withDisplayName("Topic regex")
            .withType(ConfigDef.Type.STRING)
            .withWidth(ConfigDef.Width.LONG)
            .withImportance(ConfigDef.Importance.LOW)
            .withValidation(Field::isRequired, Field::isRegex)
            .withDescription("The regex used for extracting the name of the logical table from the original topic name.");

    private static final Field TOPIC_REPLACEMENT = Field.create("topic.replacement")
            .withDisplayName("Topic replacement")
            .withType(ConfigDef.Type.STRING)
            .withWidth(ConfigDef.Width.LONG)
            .withImportance(ConfigDef.Importance.LOW)
            .withValidation(Field::isRequired)
            .withDescription("The replacement string used in conjunction with " + TOPIC_REGEX.name() +
                    ". This will be used to create the new topic name.");

    private static final Field KEY_FIELD_REGEX = Field.create("key.field.regex")
            .withDisplayName("Key field regex")
            .withType(ConfigDef.Type.STRING)
            .withWidth(ConfigDef.Width.LONG)
            .withImportance(ConfigDef.Importance.LOW)
            .withValidation(Field::isRegex)
            .withDescription("The regex used for extracting the physical table identifier from the original topic " +
                    "name. Now that multiple physical tables can share a topic, the event's key may need to be augmented " +
                    "to include fields other than just those for the record's primary/unique key, since these are not " +
                    "guaranteed to be unique across tables. We need some identifier added to the key that distinguishes " +
                    "the different physical tables.");

    private static final Field KEY_FIELD_REPLACEMENT = Field.create("key.field.replacement")
            .withDisplayName("Key field replacement")
            .withType(ConfigDef.Type.STRING)
            .withWidth(ConfigDef.Width.LONG)
            .withImportance(ConfigDef.Importance.LOW)
            .withValidation(ByLogicalTableRouter::validateKeyFieldReplacement)
            .withDescription("The replacement string used in conjunction with " + KEY_FIELD_REGEX.name() +
                    ". This will be used to create the physical table identifier in the record's key.");

    private static final Field KEY_FIELD_NAME = Field.create("key.field.name")
            .withDisplayName("Key field name")
            .withType(ConfigDef.Type.STRING)
            .withWidth(ConfigDef.Width.LONG)
            .withImportance(ConfigDef.Importance.LOW)
            // Default is prefixed with "__dbz__" to minimize the likelihood of a conflict with an existing key field name.
            .withDefault("__dbz__physicalTableIdentifier")
            .withDescription("Each record's key schema will be augmented with this field name. The purpose of this " +
                    "field is to distinguish the different physical tables that can now share a single topic. Make " +
                    "sure not to configure a field name that is at risk of conflict with existing key schema field " +
                    "names.");

    /**
     * Validates and captures the configuration.
     *
     * @throws ConnectException if any required option is missing or invalid
     */
    @Override
    public void configure(Map<String, ?> props) {
        Configuration config = Configuration.from(props);
        final Field.Set configFields = Field.setOf(TOPIC_REGEX, TOPIC_REPLACEMENT, KEY_FIELD_REGEX, KEY_FIELD_REPLACEMENT);
        if (!config.validateAndRecord(configFields, logger::error)) {
            throw new ConnectException("Unable to validate config.");
        }

        topicRegex = Pattern.compile(config.getString(TOPIC_REGEX));
        topicReplacement = config.getString(TOPIC_REPLACEMENT);

        String keyFieldRegexString = config.getString(KEY_FIELD_REGEX);
        if (keyFieldRegexString != null) {
            keyFieldRegexString = keyFieldRegexString.trim();
        }
        // FIX: was `keyFieldRegexString != ""`, a reference comparison that is true
        // for any non-interned empty string (e.g. the result of trimming "  "),
        // so a blank config value would have been compiled as a regex.
        if (keyFieldRegexString != null && !keyFieldRegexString.isEmpty()) {
            keyFieldRegex = Pattern.compile(config.getString(KEY_FIELD_REGEX));
            keyFieldReplacement = config.getString(KEY_FIELD_REPLACEMENT);
        }
        keyFieldName = config.getString(KEY_FIELD_NAME);
        keySchemaUpdateCache = new SynchronizedCache<>(new LRUCache<Schema, Schema>(16));
        envelopeSchemaUpdateCache = new SynchronizedCache<>(new LRUCache<Schema, Schema>(16));
    }

    /**
     * Routes the record to the logical-table topic (if the topic regex matches)
     * and augments its key with the physical-table identifier field.
     * Records whose topic does not match are returned unchanged.
     */
    @Override
    public R apply(R record) {
        final String oldTopic = record.topic();
        final String newTopic = determineNewTopic(oldTopic);

        if (newTopic == null) {
            return record;
        }

        logger.debug("Applying topic name transformation from " + oldTopic + " to " + newTopic + ".");

        final Struct oldKey = requireStruct(record.key(), "Updating schema");
        final Schema newKeySchema = updateKeySchema(oldKey.schema(), newTopic);
        final Struct newKey = updateKey(newKeySchema, oldKey, oldTopic);

        if (record.value() == null) {
            // Value will be null in the case of a delete event tombstone
            return record.newRecord(newTopic, record.kafkaPartition(), newKeySchema, newKey, record.valueSchema(),
                    record.value(), record.timestamp());
        }

        final Struct oldEnvelope = requireStruct(record.value(), "Updating schema");
        final Schema newEnvelopeSchema = updateEnvelopeSchema(oldEnvelope.schema(), newTopic);
        final Struct newEnvelope = updateEnvelope(newEnvelopeSchema, oldEnvelope);
        return record.newRecord(newTopic, record.kafkaPartition(), newKeySchema, newKey, newEnvelopeSchema, newEnvelope,
                record.timestamp());
    }

    @Override
    public void close() {
        keySchemaUpdateCache = null;
        envelopeSchemaUpdateCache = null;
    }

    @Override
    public ConfigDef config() {
        ConfigDef config = new ConfigDef();
        Field.group(config, null, TOPIC_REGEX, TOPIC_REPLACEMENT, KEY_FIELD_REGEX, KEY_FIELD_REPLACEMENT);
        return config;
    }

    /**
     * Determine the new topic name.
     *
     * @param oldTopic the name of the old topic
     * @return return the new topic name, if the regex applies. Otherwise, return null.
     */
    private String determineNewTopic(String oldTopic) {
        final Matcher matcher = topicRegex.matcher(oldTopic);
        if (matcher.matches()) {
            return matcher.replaceFirst(topicReplacement);
        }
        return null;
    }

    /**
     * Builds (and caches) the augmented key schema: a copy of the old key schema
     * renamed for the new topic, with the physical-table-identifier field appended.
     */
    private Schema updateKeySchema(Schema oldKeySchema, String newTopicName) {
        Schema newKeySchema = keySchemaUpdateCache.get(oldKeySchema);
        if (newKeySchema != null) {
            return newKeySchema;
        }

        final SchemaBuilder builder = copySchemaExcludingName(oldKeySchema, SchemaBuilder.struct());
        builder.name(schemaNameValidator.validate(newTopicName + ".Key"));

        // Now that multiple physical tables can share a topic, the event's key may need to be augmented to include
        // fields other than just those for the record's primary/unique key, since these are not guaranteed to be unique
        // across tables. We need some identifier added to the key that distinguishes the different physical tables.
        builder.field(keyFieldName, Schema.STRING_SCHEMA);

        newKeySchema = builder.build();
        keySchemaUpdateCache.put(oldKeySchema, newKeySchema);
        return newKeySchema;
    }

    /**
     * Copies the old key's fields into the new schema and sets the identifier
     * field: the old topic name by default, or the key.field.regex/replacement
     * rewrite of it when configured.
     */
    private Struct updateKey(Schema newKeySchema, Struct oldKey, String oldTopic) {
        final Struct newKey = new Struct(newKeySchema);
        for (org.apache.kafka.connect.data.Field field : oldKey.schema().fields()) {
            newKey.put(field.name(), oldKey.get(field));
        }

        String physicalTableIdentifier = oldTopic;
        if (keyFieldRegex != null) {
            final Matcher matcher = keyFieldRegex.matcher(oldTopic);
            if (matcher.matches()) {
                physicalTableIdentifier = matcher.replaceFirst(keyFieldReplacement);
            }
        }

        newKey.put(keyFieldName, physicalTableIdentifier);
        return newKey;
    }

    /**
     * Builds (and caches) the renamed envelope schema for the new topic: the
     * before/after value schemas are renamed to "&lt;newTopic&gt;.Value" and all
     * other envelope fields are carried over unchanged.
     */
    private Schema updateEnvelopeSchema(Schema oldEnvelopeSchema, String newTopicName) {
        Schema newEnvelopeSchema = envelopeSchemaUpdateCache.get(oldEnvelopeSchema);
        if (newEnvelopeSchema != null) {
            return newEnvelopeSchema;
        }

        final Schema oldValueSchema = oldEnvelopeSchema.field(Envelope.FieldName.BEFORE).schema();
        final SchemaBuilder valueBuilder = copySchemaExcludingName(oldValueSchema, SchemaBuilder.struct());
        valueBuilder.name(schemaNameValidator.validate(newTopicName + ".Value"));
        final Schema newValueSchema = valueBuilder.build();

        final SchemaBuilder envelopeBuilder = copySchemaExcludingName(oldEnvelopeSchema, SchemaBuilder.struct(), false);
        for (org.apache.kafka.connect.data.Field field : oldEnvelopeSchema.fields()) {
            final String fieldName = field.name();
            Schema fieldSchema = field.schema();
            // FIX: was `fieldName == Envelope.FieldName.BEFORE || ...` — reference
            // comparison on Strings; works only if field.name() happens to return
            // the interned constant. Use equals() for correct value comparison.
            if (Envelope.FieldName.BEFORE.equals(fieldName) || Envelope.FieldName.AFTER.equals(fieldName)) {
                fieldSchema = newValueSchema;
            }
            envelopeBuilder.field(fieldName, fieldSchema);
        }
        envelopeBuilder.name(schemaNameValidator.validate(newTopicName + ".Envelope"));

        newEnvelopeSchema = envelopeBuilder.build();
        envelopeSchemaUpdateCache.put(oldEnvelopeSchema, newEnvelopeSchema);
        return newEnvelopeSchema;
    }

    /**
     * Re-creates the envelope struct against the new envelope schema, rebuilding
     * the non-null before/after value structs against the renamed value schema.
     */
    private Struct updateEnvelope(Schema newEnvelopeSchema, Struct oldEnvelope) {
        final Struct newEnvelope = new Struct(newEnvelopeSchema);
        final Schema newValueSchema = newEnvelopeSchema.field(Envelope.FieldName.BEFORE).schema();
        for (org.apache.kafka.connect.data.Field field : oldEnvelope.schema().fields()) {
            final String fieldName = field.name();
            Object fieldValue = oldEnvelope.get(field);
            // FIX: same as in updateEnvelopeSchema — compare field names with
            // equals(), not ==.
            if ((Envelope.FieldName.BEFORE.equals(fieldName) || Envelope.FieldName.AFTER.equals(fieldName))
                    && fieldValue != null) {
                fieldValue = updateValue(newValueSchema, requireStruct(fieldValue, "Updating schema"));
            }
            newEnvelope.put(fieldName, fieldValue);
        }
        return newEnvelope;
    }

    /** Copies every field of oldValue into a struct of the new value schema. */
    private Struct updateValue(Schema newValueSchema, Struct oldValue) {
        final Struct newValue = new Struct(newValueSchema);
        for (org.apache.kafka.connect.data.Field field : oldValue.schema().fields()) {
            newValue.put(field.name(), oldValue.get(field));
        }
        return newValue;
    }

    private SchemaBuilder copySchemaExcludingName(Schema source, SchemaBuilder builder) {
        return copySchemaExcludingName(source, builder, true);
    }

    /**
     * Copies version, doc, parameters, optionality and (optionally) fields from
     * source into builder — everything except the schema name, which the caller
     * sets afterwards.
     */
    private SchemaBuilder copySchemaExcludingName(Schema source, SchemaBuilder builder, boolean copyFields) {
        builder.version(source.version());
        builder.doc(source.doc());

        Map<String, String> params = source.parameters();
        if (params != null) {
            builder.parameters(params);
        }

        if (source.isOptional()) {
            builder.optional();
        }
        else {
            builder.required();
        }

        if (copyFields) {
            for (org.apache.kafka.connect.data.Field field : source.fields()) {
                builder.field(field.name(), field.schema());
            }
        }

        return builder;
    }

    /**
     * Config validator: key.field.replacement must be present whenever
     * key.field.regex is set to a non-blank value.
     *
     * @return the number of validation problems recorded (0 or 1)
     */
    private static int validateKeyFieldReplacement(Configuration config, Field field, Field.ValidationOutput problems) {
        String keyFieldRegex = config.getString(KEY_FIELD_REGEX);
        if (keyFieldRegex != null) {
            keyFieldRegex = keyFieldRegex.trim();
        }

        String keyFieldReplacement = config.getString(KEY_FIELD_REPLACEMENT);
        if (keyFieldReplacement != null) {
            keyFieldReplacement = keyFieldReplacement.trim();
        }

        // FIX: was `keyFieldRegex != ""` / `keyFieldReplacement == ""` — reference
        // comparisons that silently mis-detect emptiness for non-interned strings
        // (e.g. the new instances produced by trim()). Use isEmpty() instead.
        if (keyFieldRegex != null && !keyFieldRegex.isEmpty()) {
            if (keyFieldReplacement == null || keyFieldReplacement.isEmpty()) {
                problems.accept(KEY_FIELD_REPLACEMENT, keyFieldReplacement,
                        KEY_FIELD_REPLACEMENT.name() + " must be specified if " + KEY_FIELD_REGEX.name() + " is specified");
                return 1;
            }
        }
        return 0;
    }
}
package uk.ac.ebi.ddi.annotation.service.synonyms;

import com.fasterxml.jackson.databind.JsonNode;
import org.apache.http.HttpResponse;
import org.apache.http.client.config.RequestConfig;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClientBuilder;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.client.RestClientException;
import uk.ac.ebi.ddi.annotation.model.DatasetTobeEnriched;
import uk.ac.ebi.ddi.annotation.model.EnrichedDataset;
import uk.ac.ebi.ddi.annotation.utils.Constants;
import uk.ac.ebi.ddi.extservices.annotator.client.BioOntologyClient;
import uk.ac.ebi.ddi.extservices.annotator.config.BioOntologyWsConfigProd;
import uk.ac.ebi.ddi.extservices.annotator.model.AnnotatedOntologyQuery;
import uk.ac.ebi.ddi.extservices.annotator.model.Annotation;
import uk.ac.ebi.ddi.extservices.annotator.model.SynonymQuery;
import uk.ac.ebi.ddi.service.db.model.enrichment.DatasetEnrichmentInfo;
import uk.ac.ebi.ddi.service.db.model.enrichment.Synonym;
import uk.ac.ebi.ddi.service.db.model.enrichment.WordInField;
import uk.ac.ebi.ddi.service.db.service.enrichment.EnrichmentInfoService;
import uk.ac.ebi.ddi.service.db.service.enrichment.SynonymsService;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.UnsupportedEncodingException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.regex.Pattern;

/**
 * Provides synonym annotation for datasets: text fields are sent to the BioPortal
 * recommender/annotator web services, and the recognised ontology terms plus their
 * synonyms are used to build enriched attribute strings. Results are cached in
 * MongoDB via {@link SynonymsService} and {@link EnrichmentInfoService}.
 *
 * NOTE(review): this bean keeps mutable state ({@code cachedSynonymUrlForWords}, a
 * plain HashMap) — presumably it is used single-threaded; confirm before sharing
 * one instance across threads.
 *
 * @author Mingze
 */
@SuppressWarnings("UnusedAssignment")
public class DDIAnnotationService {

    private static final Logger LOGGER = LoggerFactory.getLogger(DDIAnnotationService.class);

    @Autowired
    SynonymsService synonymsService;

    @Autowired
    EnrichmentInfoService enrichmentInfoService;

    // Client for the BioPortal recommender/annotator web service (production endpoint config).
    BioOntologyClient recommenderClient = new BioOntologyClient(new BioOntologyWsConfigProd());

    // word (lower-cased) -> BioPortal class URL, filled while annotating so that a later
    // synonym lookup can skip the annotator call. NOTE(review): the cache read path is
    // currently disabled (see getSynonymsForWordFromWS below).
    Map<String, String> cachedSynonymUrlForWords = new HashMap<>();

    /**
     * Enrichment on the dataset, includes title, abstraction, sample protocol, data protocol.
     *
     * Re-uses the previous enrichment for any attribute whose text is unchanged; only
     * re-annotates (and re-persists) when something changed or {@code overwrite} is set.
     *
     * @param datasetTobeEnriched the dataset to be enriched by the service
     * @param overwrite           when true, ignore any previous enrichment and re-annotate everything
     * @return an enriched dataset (attribute name -> enriched synonym text)
     */
    public EnrichedDataset enrichment(DatasetTobeEnriched datasetTobeEnriched, boolean overwrite) throws Exception {
        String accession = datasetTobeEnriched.getAccession();
        String database = datasetTobeEnriched.getDatabase();
        EnrichedDataset enrichedDataset = new EnrichedDataset(accession, database);
        DatasetEnrichmentInfo datasetEnrichmentInfo = new DatasetEnrichmentInfo(accession, database);
        DatasetEnrichmentInfo prevDs = enrichmentInfoService.getLatest(accession, database);
        Map<String, List<WordInField>> synonyms = new HashMap<>();
        boolean hasChange = false;
        if (prevDs == null || overwrite) {
            // First time (or forced): annotate every attribute.
            synonyms = getWordsInFiledFromWS(datasetTobeEnriched.getAttributes());
            hasChange = true;
        } else {
            for (String key : datasetTobeEnriched.getAttributes().keySet()) {
                // Attribute text unchanged since the last run -> reuse the stored words.
                if (prevDs.getSynonyms() != null && prevDs.getSynonyms().containsKey(key)
                        && prevDs.getOriginalAttributes().get(key)
                        .equals(datasetTobeEnriched.getAttributes().get(key))) {
                    synonyms.put(key, prevDs.getSynonyms().get(key));
                } else {
                    List<WordInField> words = getWordsInFiledFromWS(datasetTobeEnriched.getAttributes().get(key));
                    if (words != null && !words.isEmpty()) {
                        synonyms.put(key, words);
                        hasChange = true;
                    }
                }
            }
        }
        datasetEnrichmentInfo.setSynonyms(synonyms);
        datasetEnrichmentInfo.setEnrichTime(new Date());
        datasetEnrichmentInfo.setOriginalAttributes(datasetTobeEnriched.getAttributes());
        if (hasChange) { // Only save into db when there is some change
            enrichmentInfoService.insert(datasetEnrichmentInfo);
        }
        // Expand each attribute's word list into the final enriched string.
        Map<String, String> fields = new HashMap<>();
        for (Map.Entry<String, List<WordInField>> entry : synonyms.entrySet()) {
            fields.put(entry.getKey(), EnrichField(entry.getValue()));
        }
        enrichedDataset.setEnrichedAttributes(fields);
        return enrichedDataset;
    }

    /**
     * Transfer the words found in a field to the synonyms String.
     *
     * Output shape: per word, its synonyms joined by ", ", words separated by "; ",
     * terminated by "." — e.g. "a1, a2; b1, b2."
     *
     * @param wordsInField the words provided by the service
     * @return the final string of the enrichment, or null for an empty/missing word list
     */
    private String EnrichField(List<WordInField> wordsInField) throws JSONException,
            UnsupportedEncodingException, RestClientException {
        if (wordsInField == null || wordsInField.isEmpty()) {
            return null;
        }
        StringBuilder enrichedField = new StringBuilder();
        for (WordInField word : wordsInField) {
            List<String> synonymsForWord = getSynonymsForWord(word.getText());
            if (synonymsForWord != null) {
                for (String synonym : synonymsForWord) {
                    enrichedField.append(synonym).append(", ");
                }
                if (enrichedField.length() > 0) {
                    enrichedField = new StringBuilder(
                            enrichedField.substring(0, enrichedField.length() - 2)); // remove the last comma
                    enrichedField.append("; ");
                }
            }
        }
        if (enrichedField.length() > 0) {
            enrichedField = new StringBuilder(
                    enrichedField.substring(0, enrichedField.length() - 2)); // remove the last comma
            enrichedField.append(".");
        }
        return enrichedField.toString();
    }

    // (removed: a large commented-out pre-BioOntologyClient implementation of
    // getWordsInFiledFromWS that parsed the recommender JSON by hand via getFromWSAPI;
    // superseded by the JsonNode-based version below — see VCS history if needed)

    /**
     * Get the biology-related words in one field from the web service at bioontology.org.
     *
     * @param fieldText a field text
     * @return the words identified in the fieldText by the recommender API from
     *         bioontology.org; never null (empty list when nothing to annotate)
     */
    private List<WordInField> getWordsInFiledFromWS(String fieldText) throws Exception {
        List<WordInField> matchedWords = new ArrayList<>();
        if (fieldText == null || fieldText.equals(Constants.NOT_AVAILABLE)) {
            return matchedWords;
        }
        fieldText = fieldText.replace("%", " ").trim(); // to avoid malformed error
        if (fieldText.isEmpty()) {
            return matchedWords;
        }
        JsonNode recommends = recommenderClient.getAnnotatedSynonyms(fieldText);
        // word span -> union of all synonyms reported for that span
        Map<WordInField, Set<String>> synonymsMap = new HashMap<>();
        for (JsonNode annotations: recommends) {
            if (annotations.get("annotatedClass") != null && annotations.get("annotations") != null) {
                Set<String> synonyms = new HashSet<>();
                if (annotations.get("annotatedClass") != null) {
                    if (annotations.get("annotatedClass").get("synonym") != null) {
                        for (JsonNode synonym: annotations.get("annotatedClass").get("synonym")) {
                            synonyms.add(synonym.textValue());
                        }
                    }
                }
                for (JsonNode annotationValue: annotations.get("annotations")) {
                    String actualWord = annotationValue.get("text").textValue();
                    int from = annotationValue.get("from").intValue();
                    int to = annotationValue.get("to").intValue();
                    WordInField wordInField = new WordInField(actualWord, from, to);
                    // Merge with synonyms already collected for the same span.
                    if (synonymsMap.containsKey(wordInField)) {
                        synonyms.addAll(synonymsMap.get(wordInField));
                    }
                    synonymsMap.put(wordInField, synonyms);
                }
            }
        }
        matchedWords.addAll(getDistinctWordList(synonymsMap));
        Collections.sort(matchedWords);
        return matchedWords;
    }

    /**
     * Annotates every attribute of the dataset.
     *
     * @param fields attribute name -> attribute text
     * @return attribute name -> identified words; attributes with no matches are omitted
     */
    private Map<String, List<WordInField>> getWordsInFiledFromWS(Map<String, String> fields) throws Exception {
        ConcurrentHashMap<String, List<WordInField>> results = new ConcurrentHashMap<>();
        if (fields == null || fields.isEmpty()) {
            return results;
        }
        for (Map.Entry<String, String> entry : fields.entrySet()) {
            List<WordInField> matchedWords = getWordsInFiledFromWS(entry.getValue());
            if (!matchedWords.isEmpty()) {
                results.put(entry.getKey(), matchedWords);
            }
        }
        return results;
    }

    /**
     * Get all synonyms for a word from mongoDB. If this word is not in the DB, then get its
     * synonyms from the Web Service and insert them into the mongoDB. One assumption:
     * if word1 == word2 and word2 == word3, then word1 == word3, where == means synonym.
     *
     * @param word to retrieve the given synonyms
     * @return the list of synonyms
     */
    public List<String> getSynonymsForWord(String word) throws JSONException, RestClientException {
        List<String> synonyms;
        if (synonymsService.isWordExist(word)) {
            synonyms = synonymsService.getAllSynonyms(word);
        } else {
            synonyms = getSynonymsForWordFromWS(word);
            // Persist, then prefer the stored list (insert may normalise/merge).
            Synonym synonym = synonymsService.insert(word, synonyms);
            if (synonym != null && synonym.getSynonyms() != null) {
                synonyms = synonym.getSynonyms();
            }
        }
        return synonyms;
    }

    /**
     * Get synonyms for a word from the BioPortal web service API.
     *
     * @param word the word to look for synonyms
     * @return synonyms of the word via the annotator API from bioontology.org; may
     *         contain the NOT_ANNOTATION_FOUND sentinel, or be null on WS failure
     */
    protected ArrayList<String> getSynonymsForWordFromWS(String word) throws JSONException, RestClientException {
        String lowerWord = word.toLowerCase();
        ArrayList<String> synonyms = new ArrayList<>();
        // TODO(Mingze): the cache system is not working — the cachedSynonymUrlForWords
        // lookup that used to short-circuit this call is disabled, so we always hit the
        // annotator. (Disabled cache branch removed; see VCS history.)
        AnnotatedOntologyQuery[] annotatedTerms = recommenderClient.getAnnotatedTerms(lowerWord, Constants.OBO_ONTOLOGIES);
        if (annotatedTerms == null) {
            return null;
        }
        if (annotatedTerms.length == 0) {
            synonyms.add(Constants.NOT_ANNOTATION_FOUND);
            return synonyms;
        }
        Annotation[] annotations = annotatedTerms[0].getAnnotations();
        Annotation annotation = annotations[0];
        int startPos = annotation.getFromPosition();
        // Match must start at the beginning of the word, otherwise treat as not found.
        if (startPos > 1) {
            synonyms.add(Constants.NOT_ANNOTATION_FOUND);
            return synonyms;
        }
        String matchedWord =
annotation.getText().toLowerCase(); JSONArray matchedClasses = findBioOntologyMatchclasses(matchedWord, annotatedTerms); for (int i = 0; i < matchedClasses.length(); i++) { JSONObject matchedClass = (JSONObject) matchedClasses.get(i); String wordId = matchedClass.getString(Constants.WORD_ID); String ontologyName = matchedClass.getString(Constants.ONTOLOGY_NAME); SynonymQuery output = recommenderClient.getAllSynonyms(ontologyName, wordId); if (output == null) { return null; } String[] synonymsInCls = output.getSynonyms(); Collections.addAll(synonyms, synonymsInCls); } // } return synonyms; } // @Deprecated // protected ArrayList<String> getSynonymsForWordFromWS(String word) throws JSONException, // UnsupportedEncodingException { // String lowerWord = word.toLowerCase(); // ArrayList<String> synonyms = new ArrayList<>(); // // String wordDetailUrl = cachedSynonymUrlForWords.get(lowerWord); // if (wordDetailUrl != null) { // String output = getFromWSAPI(wordDetailUrl + Constants.OBO_API_KEY); // if (output == null) // return null; // // JSONObject wordDetailsInCls = new JSONObject(output); // JSONArray synonymsInCls = wordDetailsInCls.getJSONArray(Constants.SYNONYM); // // for (int i = 0; i < synonymsInCls.length(); i++) { // String synonymInCls = synonymsInCls.getString(i); // synonyms.add(synonymInCls); // } // } else { // String annotationPreUrl = Constants.OBO_LONG_URL; // String annotatorUrl = annotationPreUrl + URLEncoder.encode(lowerWord, "UTF-8"); // String output = ""; // output = getFromWSAPI(annotatorUrl); // if (output == null) // return null; // // // JSONArray annotationResults = new JSONArray(output); // // if (annotationResults.length() == 0) { // synonyms.add(Constants.NOT_ANNOTATION_FOUND); // return synonyms; // } // // JSONArray annotations = annotationResults.getJSONObject(0).getJSONArray(Constants.ANNOTATIONS); // JSONObject annotation = annotations.getJSONObject(0); // // String matchType = annotation.getString(Constants.MATCH_TYPE); // int 
startPos = annotation.getInt(Constants.FROM); // // if (startPos > 1) { // synonyms.add(Constants.NOT_ANNOTATION_FOUND); // return synonyms; // } // // String matchedWord = annotation.getString(Constants.TEXT).toLowerCase(); // // JSONArray matchedClasses = findBioOntologyMatchclasses(matchedWord, annotationResults); // //// synonyms.add(lowerWord); // for (int i = 0; i < matchedClasses.length(); i++) { // JSONObject matchedClass = (JSONObject) matchedClasses.get(i); // String wordId = matchedClass.getString(Constants.WORD_ID); // String ontologyName = matchedClass.getString(Constants.ONTOLOGY_NAME); // // wordDetailUrl = Constants.OBO_URL + ontologyName + Constants.CLASSES + wordId + Constants.OBO_API_KEY; // output = getFromWSAPI(wordDetailUrl); // if (output == null) // return null; // // JSONObject wordDetailsInCls = new JSONObject(output); // JSONArray synonymsInCls = wordDetailsInCls.getJSONArray(Constants.SYNONYM); // // for (i = 0; i < synonymsInCls.length(); i++) { // String synonymInCls = synonymsInCls.getString(i); // synonyms.add(synonymInCls); // } // } // } // // return synonyms; // // // } /** * get WebService output from bioportal * * @param url the url to retrieve the information form the web service * @return access url by HTTP client */ private String getFromWSAPI(String url) { String output = null; try { //Todo: This function is not working properly final RequestConfig params = RequestConfig.custom() .setConnectTimeout(60 * 1000).setSocketTimeout(60 * 1000).build(); CloseableHttpClient httpClient = HttpClientBuilder.create().build(); LOGGER.debug("Getting from: " + url); HttpGet getRequest = new HttpGet(url); getRequest.setConfig(params); getRequest.addHeader("accept", "text/html, application/json;"); HttpResponse response; response = httpClient.execute(getRequest); BufferedReader br = new BufferedReader( new InputStreamReader((response.getEntity().getContent()))); if (response.getStatusLine().getStatusCode() != 200) { LOGGER.error("Failed: HTTP 
error code:" + response.getStatusLine().toString() + " at " + url); } else { output = br.readLine(); } } catch (IOException e) { LOGGER.error("Failed: HTTP error code:" + e.getMessage() + " at " + url); } return output; } @Deprecated private JSONArray findBioOntologyMatchclasses(String matchedWord, JSONArray annotationResults) throws JSONException { JSONArray matchedClasses = new JSONArray(); for (int i = 0; i < annotationResults.length(); i++) { JSONObject annotationResult = annotationResults.getJSONObject(i); JSONArray annotations = annotationResult.getJSONArray(Constants.ANNOTATIONS); JSONObject annotation = annotations.getJSONObject(0); String matchedWordHere = annotation.getString(Constants.TEXT).toLowerCase(); if (!matchedWordHere.equals(matchedWord)) { continue; } String wordIdString = annotationResult.getJSONObject(Constants.ANNOTATION_CLASS).getString(Constants.ANNOTATION_ID); if (Pattern.matches("http:\\/\\/purl\\.bioontology\\.org\\/ontology\\/(.*?)\\/(.*?)", wordIdString)) { String ontologyName = wordIdString.replaceAll("http:\\/\\/purl\\.bioontology\\.org\\/ontology\\/(.*)\\/(.*)", "$1"); String wordId = wordIdString.replaceAll("http:\\/\\/purl\\.bioontology\\.org\\/ontology\\/(.*)\\/(.*)", "$2"); JSONObject matchedClass = new JSONObject(); matchedClass.put(Constants.WORD_ID, wordId); matchedClass.put(Constants.ONTOLOGY_NAME, ontologyName); matchedClasses.put(matchedClass); LOGGER.debug(Constants.WORD_ID + " " + matchedClass.get(Constants.WORD_ID)); } } return matchedClasses; } /** * get the clasess which has the same matched word as matchedWord * * @param matchedWord chosen from the first annotation result from annotator API as the matched ontology word * @param annotationResults annotation results from annotator API, may contain multiple matched classes * @return a JSONArray with all the terms and annotations */ private JSONArray findBioOntologyMatchclasses(String matchedWord, AnnotatedOntologyQuery[] annotationResults) throws JSONException { 
JSONArray matchedClasses = new JSONArray(); // (body of findBioOntologyMatchclasses(String, AnnotatedOntologyQuery[]))
        for (AnnotatedOntologyQuery annotationResult : annotationResults) {
            Annotation[] annotations = annotationResult.getAnnotations();
            Annotation annotation = annotations[0];
            String matchedWordHere = annotation.getText().toLowerCase();
            if (!matchedWordHere.equals(matchedWord)) {
                continue;
            }
            String wordIdString = annotationResult.getAnnotatedClass().getId();
            // Split "http://purl.bioontology.org/ontology/<ontology>/<id>" into its parts.
            if (Pattern.matches("http:\\/\\/purl\\.bioontology\\.org\\/ontology\\/(.*?)\\/(.*?)", wordIdString)) {
                String ontologyName =
                        wordIdString.replaceAll("http:\\/\\/purl\\.bioontology\\.org\\/ontology\\/(.*)\\/(.*)", "$1");
                String wordId =
                        wordIdString.replaceAll("http:\\/\\/purl\\.bioontology\\.org\\/ontology\\/(.*)\\/(.*)", "$2");
                JSONObject matchedClass = new JSONObject();
                matchedClass.put(Constants.WORD_ID, wordId);
                matchedClass.put(Constants.ONTOLOGY_NAME, ontologyName);
                matchedClasses.put(matchedClass);
                LOGGER.debug(Constants.WORD_ID + " " + matchedClass.get(Constants.WORD_ID));
            }
        }
        return matchedClasses;
    }

    /**
     * @param matchedTerms got from annotation results, which may overlap with other terms
     * @return matchedWords chosen word, which is the longest term in the overlapped terms
     */
    @Deprecated
    private List<WordInField> getDistinctWordList(JSONArray matchedTerms) throws JSONException {
        List<WordInField> matchedWords = new ArrayList<>();
        for (int i = 0; i < matchedTerms.length(); i++) {
            JSONObject matchedTerm = (JSONObject) matchedTerms.get(i);
            String text = (String) matchedTerm.get(Constants.TEXT);
            int from = (int) matchedTerm.get(Constants.FROM);
            int to = (int) matchedTerm.get(Constants.TO);
            WordInField word = new WordInField(text.toLowerCase(), from, to);
            WordInField overlappedWordInList = findOverlappedWordInList(word, matchedWords);
            if (null == overlappedWordInList) {
                matchedWords.add(word);
                // Remember the BioPortal class URL for later synonym lookup.
                if (!synonymsService.isWordExist(word.getText())) {
                    JSONObject annotatedClass = (JSONObject) matchedTerm.get(Constants.ANNOTATEDCLASS);
                    JSONObject links = (JSONObject) annotatedClass.get(Constants.LINKS);
                    String wordUrl = (String) links.get(Constants.SELF);
                    cachedSynonymUrlForWords.put(word.getText().toLowerCase(), wordUrl);
                }
            } else {
                // Overlap: keep only the longer of the two spans.
                modifyWordList(word, overlappedWordInList, matchedWords);
            }
        }
        return matchedWords;
    }

    /**
     * De-duplicates overlapping annotation spans, keeping the longest span for each
     * overlap group (Annotation-array variant).
     */
    private List<WordInField> getDistinctWordList(Annotation[] matchedTerms) {
        List<WordInField> matchedWords = new ArrayList<>();
        if (matchedTerms != null && matchedTerms.length > 0) {
            for (Annotation matchedTerm : matchedTerms) {
                String text = matchedTerm.getText();
                int from = matchedTerm.getFromPosition();
                int to = matchedTerm.getToPosition();
                WordInField word = new WordInField(text.toLowerCase(), from, to);
                WordInField overlappedWordInList = findOverlappedWordInList(word, matchedWords);
                if (null == overlappedWordInList) {
                    matchedWords.add(word);
                    if (!synonymsService.isWordExist(word.getText())) {
                        if (matchedTerm.getAnnotatedClass() != null
                                && matchedTerm.getAnnotatedClass().getLinks() != null
                                && matchedTerm.getAnnotatedClass().getLinks().getSelf() != null) {
                            String wordUrl = matchedTerm.getAnnotatedClass().getLinks().getSelf();
                            cachedSynonymUrlForWords.put(word.getText().toLowerCase(), wordUrl);
                        }
                    }
                } else {
                    modifyWordList(word, overlappedWordInList, matchedWords);
                }
            }
        }
        return matchedWords;
    }

    /**
     * De-duplicates overlapping annotation spans (map variant) and pushes each word's
     * synonym set into the synonyms store.
     *
     * Fix: iterate with a typed Map.Entry instead of the raw type, removing the
     * unchecked casts of key and value.
     */
    private List<WordInField> getDistinctWordList(Map<WordInField, Set<String>> synonyms) {
        List<WordInField> matchedWords = new ArrayList<>();
        if (synonyms != null && synonyms.size() > 0) {
            for (Map.Entry<WordInField, Set<String>> matchedTerm : synonyms.entrySet()) {
                WordInField key = matchedTerm.getKey();
                WordInField word = new WordInField(key.getText().toLowerCase(), key.getFrom(), key.getTo());
                WordInField overlappedWordInList = findOverlappedWordInList(word, matchedWords);
                if (null == overlappedWordInList) {
                    matchedWords.add(word);
                } else {
                    modifyWordList(word, overlappedWordInList, matchedWords);
                }
                // Persist the synonym set regardless of de-duplication outcome (as before).
                synonymsService.update(
                        new Synonym(word.getText(), new ArrayList<>(matchedTerm.getValue())));
            }
        }
        return matchedWords;
    }
/**
     * Chooses the longer of {@code word} and the overlapping word already present,
     * replacing the stored one in {@code matchedWords} when {@code word} covers it.
     *
     * @param word                 newly found word
     * @param overlappedWordInList word already in the list that overlaps it
     * @param matchedWords         list updated in place
     */
    private void modifyWordList(WordInField word, WordInField overlappedWordInList,
                                List<WordInField> matchedWords) {
        // Identical span: nothing to change.
        if (word.getFrom() == overlappedWordInList.getFrom()
                && word.getTo() == overlappedWordInList.getTo()) {
            return;
        }
        // The new word fully covers the stored one: keep the longer span.
        boolean coversExisting = word.getFrom() <= overlappedWordInList.getFrom()
                && word.getTo() >= overlappedWordInList.getTo();
        if (coversExisting) {
            matchedWords.set(matchedWords.indexOf(overlappedWordInList), word);
        }
    }

    /**
     * Returns the first entry of {@code matchedWords} whose span equals or overlaps
     * {@code word}'s span, or null when none does.
     *
     * @param word         span to test
     * @param matchedWords previously accepted words
     * @return the overlapping entry, or null
     */
    private WordInField findOverlappedWordInList(WordInField word, List<WordInField> matchedWords) {
        for (WordInField existing : matchedWords) {
            // Exact same span.
            if (word.getFrom() == existing.getFrom() && word.getTo() == existing.getTo()) {
                LOGGER.debug("find same word for '" + word + "':" + existing);
                return existing;
            }
            // word starts before (or at) existing's end and extends past it.
            if (word.getFrom() <= existing.getTo() && word.getTo() >= existing.getTo()) {
                LOGGER.debug("find an overlapped word for '" + word + "':" + existing);
                return existing;
            }
            // word ends inside existing's span.
            if (word.getTo() >= existing.getFrom() && word.getTo() <= existing.getTo()) {
                LOGGER.debug("find an overlapped word for '" + word + "':" + existing);
                return existing;
            }
        }
        return null;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Igor A. Pyankov * @version $Revision: 1.2 $ */ package org.apache.harmony.x.print.ipp; /** * This class represents Job and Printer Set Operations * described in RFC 3380 (http://ietf.org/rfc/rfc3380.txt?number=3380) */ public class IppDefs { public static final Object[] JOBTEMPLATEATTRIBUTES = { "job-priority", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "job-priority-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "job-priority-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "job-hold-until", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "job-hold-until-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "job-hold-until-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "job-sheets", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "job-sheets-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "job-sheets-supported", new 
byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "multiple-document-handling", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "multiple-document-handling-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "multiple-document-handling-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "copies", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "copies-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "copies-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_RANGEOFINTEGER }, "finishings", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "finishings-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "finishings-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "page-ranges", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_RANGEOFINTEGER }, "page-ranges-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_BOOLEAN }, "sides", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "sides-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "sides-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "number-up", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "number-up-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "number-up-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "orientation-requested", new byte[] { 
IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "orientation-requested-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "orientation-requested-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "media", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "media-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "media-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "media-ready", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_KEYWORD }, "printer-resolution", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_RESOLUTION }, "printer-resolution-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_RESOLUTION }, "printer-resolution-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_RESOLUTION }, "print-quality", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "print-quality-default", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM }, "print-quality-supported", new byte[] { IppAttributeGroup.TAG_JOB_TEMPLATE_ATTRIBUTES, IppAttribute.TAG_ENUM } }; public static final Object[] JOBDESCRIPTIONATTRIBUTES = { "job-uri", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_URI }, "job-id", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER }, "job-printer-uri", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_URI }, "job-more-info", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_URI }, // "job-name", // new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, // IppAttribute.TAG_NAMEWITHOUTLANGUAGE }, "job-originating-user-name", new byte[] { 
// NOTE(review): the enclosing class, its earlier tables (e.g. JOBTEMPLATEATTRIBUTES)
// and the opening of JOBDESCRIPTIONATTRIBUTES begin above this chunk. Each table is a
// flat { attribute-name, new byte[] { group-tag, value-tag } } pair list — presumably
// the encoding rules of RFC 2911 (IPP/1.1); confirm against the protocol spec.
IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_NAMEWITHOUTLANGUAGE },
            // ---- job description attributes (continued) ----
            "job-state", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_ENUM },
            "job-state-reasons", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "job-state-message", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "job-detailed-status-messages", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "job-document-access-errors", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "number-of-documents", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "output-device-assigned", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_NAMEWITHOUTLANGUAGE },
            "time-at-creation", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "time-at-processing", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "time-at-completed", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-printer-up-time", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "date-time-at-creation", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_DATETIME },
            "date-time-at-processing", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_DATETIME },
            "date-time-at-completed", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_DATETIME },
            "number-of-intervening-jobs", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-message-from-operator", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "job-k-octets", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-impressions", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-media-sheets", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-k-octets-processed", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-impressions-completed", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "job-media-sheets-completed", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "attributes-charset", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_CHARSET },
            "attributes-natural-language", new byte[] { IppAttributeGroup.TAG_JOB_ATTRIBUTES, IppAttribute.TAG_NATURAL_LANGUAGE } };

    // Printer description attributes: name -> { group tag, value tag } pairs.
    public static final Object[] PRINTERDESCRIPTIONATTRIBUTES = {
            "printer-uri-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_URI },
            "uri-security-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "uri-authentication-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "printer-name", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_NAMEWITHOUTLANGUAGE },
            "printer-location", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "printer-info", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "printer-more-info", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_URI },
            "printer-driver-installer", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_URI },
            "printer-make-and-model", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "printer-more-info-manufacturer", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_URI },
            "printer-state", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_ENUM },
            "printer-state-reasons", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "printer-state-message", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "ipp-versions-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "operations-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_ENUM },
            "multiple-document-jobs-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_BOOLEAN },
            "charset-configured", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_CHARSET },
            "charset-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_CHARSET },
            "natural-language-configured", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_NATURAL_LANGUAGE },
            "generated-natural-language-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_NATURAL_LANGUAGE },
            "document-format-default", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_MIMEMEDIATYPE },
            "document-format-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_MIMEMEDIATYPE },
            "printer-is-accepting-jobs", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_BOOLEAN },
            "queued-job-count", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "printer-message-from-operator", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_TEXTWITHOUTLANGUAGE },
            "color-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_BOOLEAN },
            "reference-uri-schemes-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_URISCHEME },
            "pdl-override-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "printer-up-time", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "printer-current-time", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_DATETIME },
            "multiple-operation-time-out", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "compression-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_KEYWORD },
            "job-k-octets-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_RANGEOFINTEGER },
            "job-impressions-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_RANGEOFINTEGER },
            "job-media-sheets-supported", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_RANGEOFINTEGER },
            "pages-per-minute", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_INTEGER },
            "pages-per-minute-color", new byte[] { IppAttributeGroup.TAG_PRINTER_ATTRIBUTES, IppAttribute.TAG_INTEGER } };

    // Operation attributes sent along with printer requests.
    public static final Object[] PRINTEROPERATIONATTRIBUTES = {
            "requesting-user-name", new byte[] { IppAttributeGroup.TAG_OPERATION_ATTRIBUTES, IppAttribute.TAG_NAMEWITHOUTLANGUAGE },
            "job-name", new byte[] { IppAttributeGroup.TAG_OPERATION_ATTRIBUTES, IppAttribute.TAG_NAMEWITHOUTLANGUAGE },
            "document-name", new byte[] { IppAttributeGroup.TAG_OPERATION_ATTRIBUTES, IppAttribute.TAG_NAMEWITHOUTLANGUAGE } };

    // All tables, searched in order by the lookup methods below.
    public static final Object[] ATAGS = { JOBTEMPLATEATTRIBUTES, JOBDESCRIPTIONATTRIBUTES, PRINTERDESCRIPTIONATTRIBUTES, PRINTEROPERATIONATTRIBUTES };

    /**
     * Returns the group tag (first byte of the { group, value } pair) registered
     * for the given attribute name, or -1 when the name is unknown.
     * Linear scan over every table; entries alternate name/byte-pair, hence j += 2.
     */
    // TODO need to tune for performance
    public static byte getAttributeGtag(String aname) {
        for (int ii = ATAGS.length, i = 0; i < ii; i += 1) {
            Object[] atags = (Object[]) ATAGS[i];
            for (int j = 0, jj = atags.length; j < jj; j += 2) {
                if (aname.equals(atags[j])) {
                    return ((byte[]) atags[j + 1])[0];
                }
            }
        }
        return -1;
    }

    /**
     * Returns the value tag (second byte of the { group, value } pair) registered
     * for the given attribute name, or -1 when the name is unknown.
     */
    // TODO need to tune for performance
    public static byte getAttributeVtag(String aname) {
        for (int ii = ATAGS.length, i = 0; i < ii; i += 1) {
            Object[] atags = (Object[]) ATAGS[i];
            for (int j = 0, jj = atags.length; j < jj; j += 2) {
                if (aname.equals(atags[j])) {
                    return ((byte[]) atags[j + 1])[1];
                }
            }
        }
        return -1;
    }
}
package com.jme3.scene.plugins.blender.meshes; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.logging.Level; import java.util.logging.Logger; import com.jme3.math.FastMath; import com.jme3.math.Vector2f; import com.jme3.math.Vector3f; import com.jme3.scene.plugins.blender.BlenderContext; import com.jme3.scene.plugins.blender.file.BlenderFileException; import com.jme3.scene.plugins.blender.file.Pointer; import com.jme3.scene.plugins.blender.file.Structure; /** * A class that represents a single face in the mesh. The face is a polygon. Its minimum count of * vertices is = 3. * * @author Marcin Roguski (Kaelthas) */ public class Face implements Comparator<Integer> { private static final Logger LOGGER = Logger.getLogger(Face.class.getName()); /** The indexes loop of the face. */ private IndexesLoop indexes; private List<IndexesLoop> triangulatedFaces; /** Indicates if the face is smooth or solid. */ private boolean smooth; /** The material index of the face. */ private int materialNumber; /** UV coordinate sets attached to the face. The key is the set name and value are the UV coords. */ private Map<String, List<Vector2f>> faceUVCoords; /** The vertex colors of the face. */ private List<byte[]> vertexColors; /** The temporal mesh the face belongs to. */ private TemporalMesh temporalMesh; /** * Creates a complete face with all available data. 
* @param indexes * the indexes of the face (required) * @param smooth * indicates if the face is smooth or solid * @param materialNumber * the material index of the face * @param faceUVCoords * UV coordinate sets of the face (optional) * @param vertexColors * the vertex colors of the face (optional) * @param temporalMesh * the temporal mesh the face belongs to (required) */ public Face(Integer[] indexes, boolean smooth, int materialNumber, Map<String, List<Vector2f>> faceUVCoords, List<byte[]> vertexColors, TemporalMesh temporalMesh) { this.setTemporalMesh(temporalMesh); this.indexes = new IndexesLoop(indexes); this.smooth = smooth; this.materialNumber = materialNumber; this.faceUVCoords = faceUVCoords; this.temporalMesh = temporalMesh; this.vertexColors = vertexColors; } /** * Default constructor. Used by the clone method. */ private Face() { } @Override public Face clone() { Face result = new Face(); result.indexes = indexes.clone(); result.smooth = smooth; result.materialNumber = materialNumber; if (faceUVCoords != null) { result.faceUVCoords = new HashMap<String, List<Vector2f>>(faceUVCoords.size()); for (Entry<String, List<Vector2f>> entry : faceUVCoords.entrySet()) { List<Vector2f> uvs = new ArrayList<Vector2f>(entry.getValue().size()); for (Vector2f v : entry.getValue()) { uvs.add(v.clone()); } result.faceUVCoords.put(entry.getKey(), uvs); } } if (vertexColors != null) { result.vertexColors = new ArrayList<byte[]>(vertexColors.size()); for (byte[] colors : vertexColors) { result.vertexColors.add(colors.clone()); } } result.temporalMesh = temporalMesh; return result; } /** * Returns the index at the given position in the index loop. If the given position is negative or exceeds * the amount of vertices - it is being looped properly so that it always hits an index. * For example getIndex(-1) will return the index before the 0 - in this case it will be the last one. 
* @param indexPosition * the index position * @return index value at the given position */ private Integer getIndex(int indexPosition) { if (indexPosition >= indexes.size()) { indexPosition = indexPosition % indexes.size(); } else if (indexPosition < 0) { indexPosition = indexes.size() - -indexPosition % indexes.size(); } return indexes.get(indexPosition); } /** * @return the mesh this face belongs to */ public TemporalMesh getTemporalMesh() { return temporalMesh; } /** * @return the original indexes of the face */ public IndexesLoop getIndexes() { return indexes; } /** * @return the centroid of the face */ public Vector3f computeCentroid() { Vector3f result = new Vector3f(); List<Vector3f> vertices = temporalMesh.getVertices(); for (Integer index : indexes) { result.addLocal(vertices.get(index)); } return result.divideLocal(indexes.size()); } /** * @return current indexes of the face (if it is already triangulated then more than one index group will be in the result list) */ public List<List<Integer>> getCurrentIndexes() { if (triangulatedFaces == null) { return Arrays.asList(indexes.getAll()); } List<List<Integer>> result = new ArrayList<List<Integer>>(triangulatedFaces.size()); for (IndexesLoop loop : triangulatedFaces) { result.add(loop.getAll()); } return result; } /** * The method detaches the triangle from the face. This method keeps the indexes loop normalized - every index * has only two neighbours. So if detaching the triangle causes a vertex to have more than two neighbours - it is * also detached and returned as a result. * The result is an empty list if no such situation happens. 
* @param triangleIndexes * the indexes of a triangle to be detached * @return a list of faces that need to be detached as well in order to keep them normalized * @throws BlenderFileException * an exception is thrown when vertices of a face create more than one loop; this is found during path finding */ private List<Face> detachTriangle(Integer[] triangleIndexes) throws BlenderFileException { LOGGER.fine("Detaching triangle."); if (triangleIndexes.length != 3) { throw new IllegalArgumentException("Cannot detach triangle with that does not have 3 indexes!"); } MeshHelper meshHelper = temporalMesh.getBlenderContext().getHelper(MeshHelper.class); List<Face> detachedFaces = new ArrayList<Face>(); List<Integer> path = new ArrayList<Integer>(indexes.size()); boolean[] edgeRemoved = new boolean[] { indexes.removeEdge(triangleIndexes[0], triangleIndexes[1]), indexes.removeEdge(triangleIndexes[0], triangleIndexes[2]), indexes.removeEdge(triangleIndexes[1], triangleIndexes[2]) }; Integer[][] indexesPairs = new Integer[][] { new Integer[] { triangleIndexes[0], triangleIndexes[1] }, new Integer[] { triangleIndexes[0], triangleIndexes[2] }, new Integer[] { triangleIndexes[1], triangleIndexes[2] } }; for (int i = 0; i < 3; ++i) { if (!edgeRemoved[i]) { indexes.findPath(indexesPairs[i][0], indexesPairs[i][1], path); if (path.size() == 0) { indexes.findPath(indexesPairs[i][1], indexesPairs[i][0], path); } if (path.size() == 0) { throw new IllegalStateException("Triangulation failed. Cannot find path between two indexes. 
Please apply triangulation in Blender as a workaround."); } if (detachedFaces.size() == 0 && path.size() < indexes.size()) { Integer[] indexesSublist = path.toArray(new Integer[path.size()]); detachedFaces.add(new Face(indexesSublist, smooth, materialNumber, meshHelper.selectUVSubset(this, indexesSublist), meshHelper.selectVertexColorSubset(this, indexesSublist), temporalMesh)); for (int j = 0; j < path.size() - 1; ++j) { indexes.removeEdge(path.get(j), path.get(j + 1)); } indexes.removeEdge(path.get(path.size() - 1), path.get(0)); } else { indexes.addEdge(path.get(path.size() - 1), path.get(0)); } } } return detachedFaces; } /** * Sets the temporal mesh for the face. The given mesh cannot be null. * @param temporalMesh * the temporal mesh of the face * @throws IllegalArgumentException * thrown if given temporal mesh is null */ public void setTemporalMesh(TemporalMesh temporalMesh) { if (temporalMesh == null) { throw new IllegalArgumentException("No temporal mesh for the face given!"); } this.temporalMesh = temporalMesh; } /** * Flips the order of the indexes. */ public void flipIndexes() { indexes.reverse(); if (faceUVCoords != null) { for (Entry<String, List<Vector2f>> entry : faceUVCoords.entrySet()) { Collections.reverse(entry.getValue()); } } } /** * Flips UV coordinates. * @param u * indicates if U coords should be flipped * @param v * indicates if V coords should be flipped */ public void flipUV(boolean u, boolean v) { if (faceUVCoords != null) { for (Entry<String, List<Vector2f>> entry : faceUVCoords.entrySet()) { for (Vector2f uv : entry.getValue()) { uv.set(u ? 1 - uv.x : uv.x, v ? 1 - uv.y : uv.y); } } } } /** * @return the UV sets of the face */ public Map<String, List<Vector2f>> getUvSets() { return faceUVCoords; } /** * @return current vertex count of the face */ public int vertexCount() { return indexes.size(); } /** * The method triangulates the face. 
*/ public TriangulationWarning triangulate() { LOGGER.fine("Triangulating face."); assert indexes.size() >= 3 : "Invalid indexes amount for face. 3 is the required minimum!"; triangulatedFaces = new ArrayList<IndexesLoop>(indexes.size() - 2); Integer[] indexes = new Integer[3]; TriangulationWarning warning = TriangulationWarning.NONE; try { List<Face> facesToTriangulate = new ArrayList<Face>(Arrays.asList(this.clone())); while (facesToTriangulate.size() > 0 && warning == TriangulationWarning.NONE) { Face face = facesToTriangulate.remove(0); // two special cases will improve the computations speed if(face.getIndexes().size() == 3) { triangulatedFaces.add(face.getIndexes().clone()); } else { int previousIndex1 = -1, previousIndex2 = -1, previousIndex3 = -1; while (face.vertexCount() > 0) { indexes[0] = face.getIndex(0); indexes[1] = face.findClosestVertex(indexes[0], -1); indexes[2] = face.findClosestVertex(indexes[0], indexes[1]); LOGGER.finer("Veryfying improper triangulation of the temporal mesh."); if (indexes[0] < 0 || indexes[1] < 0 || indexes[2] < 0) { warning = TriangulationWarning.CLOSEST_VERTS; break; } if (previousIndex1 == indexes[0] && previousIndex2 == indexes[1] && previousIndex3 == indexes[2]) { warning = TriangulationWarning.INFINITE_LOOP; break; } previousIndex1 = indexes[0]; previousIndex2 = indexes[1]; previousIndex3 = indexes[2]; Arrays.sort(indexes, this); facesToTriangulate.addAll(face.detachTriangle(indexes)); triangulatedFaces.add(new IndexesLoop(indexes)); } } } } catch (BlenderFileException e) { LOGGER.log(Level.WARNING, "Errors occured during face triangulation: {0}. 
The face will be triangulated with the most direct algorithm, but the results might not be identical to blender.", e.getLocalizedMessage()); warning = TriangulationWarning.UNKNOWN; } if(warning != TriangulationWarning.NONE) { LOGGER.finest("Triangulation the face using the most direct algorithm."); indexes[0] = this.getIndex(0); for (int i = 1; i < this.vertexCount() - 1; ++i) { indexes[1] = this.getIndex(i); indexes[2] = this.getIndex(i + 1); triangulatedFaces.add(new IndexesLoop(indexes)); } } return warning; } /** * A warning that indicates a problem with face triangulation. The warnings are collected and displayed once for each type for a mesh to * avoid multiple warning loggings during triangulation. The amount of iterations can be really huge and logging every single failure would * really slow down the importing process and make logs unreadable. * * @author Marcin Roguski (Kaelthas) */ public static enum TriangulationWarning { NONE(null), CLOSEST_VERTS("Unable to find two closest vertices while triangulating face."), INFINITE_LOOP("Infinite loop detected during triangulation."), UNKNOWN("There was an unknown problem with face triangulation. Please see log for details."); private String description; private TriangulationWarning(String description) { this.description = description; } @Override public String toString() { return description; } } /** * @return <b>true</b> if the face is smooth and <b>false</b> otherwise */ public boolean isSmooth() { return smooth; } /** * @return the material index of the face */ public int getMaterialNumber() { return materialNumber; } /** * @return the vertices colord of the face */ public List<byte[]> getVertexColors() { return vertexColors; } @Override public String toString() { return "Face " + indexes; } /** * The method finds the closest vertex to the one specified by <b>index</b>. * If the vertexToIgnore is positive than it will be ignored in the result. 
* The closest vertex must be able to create an edge that is fully contained * within the face and does not cross any other edges. Also if the * vertexToIgnore is not negative then the condition that the edge between * the found index and the one to ignore is inside the face must also be * met. * * @param index * the index of the vertex that needs to have found the nearest * neighbour * @param indexToIgnore * the index to ignore in the result (pass -1 if none is to be * ignored) * @return the index of the closest vertex to the given one */ private int findClosestVertex(int index, int indexToIgnore) { int result = -1; List<Vector3f> vertices = temporalMesh.getVertices(); Vector3f v1 = vertices.get(index); float distance = Float.MAX_VALUE; for (int i : indexes) { if (i != index && i != indexToIgnore) { Vector3f v2 = vertices.get(i); float d = v2.distance(v1); if (d < distance && this.contains(new Edge(index, i, 0, true, temporalMesh)) && (indexToIgnore < 0 || this.contains(new Edge(indexToIgnore, i, 0, true, temporalMesh)))) { result = i; distance = d; } } } return result; } /** * The method verifies if the edge is contained within the face. * It means it cannot cross any other edge and it must be inside the face and not outside of it. 
* @param edge * the edge to be checked * @return <b>true</b> if the given edge is contained within the face and <b>false</b> otherwise */ private boolean contains(Edge edge) { int index1 = edge.getFirstIndex(); int index2 = edge.getSecondIndex(); // check if the line between the vertices is not a border edge of the face if (!indexes.areNeighbours(index1, index2)) { for (int i = 0; i < indexes.size(); ++i) { int i1 = this.getIndex(i - 1); int i2 = this.getIndex(i); // check if the edges have no common verts (because if they do, they cannot cross) if (i1 != index1 && i1 != index2 && i2 != index1 && i2 != index2) { if (edge.cross(new Edge(i1, i2, 0, false, temporalMesh))) { return false; } } } // computing the edge's middle point Vector3f edgeMiddlePoint = edge.computeCentroid(); // computing the edge that is perpendicular to the given edge and has a length of 1 (length actually does not matter) Vector3f edgeVector = edge.getSecondVertex().subtract(edge.getFirstVertex()); Vector3f edgeNormal = temporalMesh.getNormals().get(index1).cross(edgeVector).normalizeLocal(); Edge e = new Edge(edgeMiddlePoint, edgeNormal.add(edgeMiddlePoint)); // compute the vectors from the middle point to the crossing between the extended edge 'e' and other edges of the face List<Vector3f> crossingVectors = new ArrayList<Vector3f>(); for (int i = 0; i < indexes.size(); ++i) { int i1 = this.getIndex(i); int i2 = this.getIndex(i + 1); Vector3f crossPoint = e.getCrossPoint(new Edge(i1, i2, 0, false, temporalMesh), true, false); if(crossPoint != null) { crossingVectors.add(crossPoint.subtractLocal(edgeMiddlePoint)); } } if(crossingVectors.size() == 0) { return false;// edges do not cross } // use only distinct vertices (doubles may appear if the crossing point is a vertex) List<Vector3f> distinctCrossingVectors = new ArrayList<Vector3f>(); for(Vector3f cv : crossingVectors) { double minDistance = Double.MAX_VALUE; for(Vector3f dcv : distinctCrossingVectors) { minDistance = Math.min(minDistance, 
dcv.distance(cv)); } if(minDistance > FastMath.FLT_EPSILON) { distinctCrossingVectors.add(cv); } } if(distinctCrossingVectors.size() == 0) { throw new IllegalStateException("There MUST be at least 2 crossing vertices!"); } // checking if all crossing vectors point to the same direction (if yes then the edge is outside the face) float direction = Math.signum(distinctCrossingVectors.get(0).dot(edgeNormal));// if at least one vector has different direction that this - it means that the edge is inside the face for(int i=1;i<distinctCrossingVectors.size();++i) { if(direction != Math.signum(distinctCrossingVectors.get(i).dot(edgeNormal))) { return true; } } return false; } return true; } @Override public int hashCode() { final int prime = 31; int result = 1; result = prime * result + indexes.hashCode(); result = prime * result + temporalMesh.hashCode(); return result; } @Override public boolean equals(Object obj) { if (this == obj) { return true; } if (!(obj instanceof Face)) { return false; } Face other = (Face) obj; if (!indexes.equals(other.indexes)) { return false; } return temporalMesh.equals(other.temporalMesh); } /** * Loads all faces of a given mesh. 
* @param meshStructure * the mesh structure we read the faces from * @param userUVGroups * UV groups defined by the user * @param verticesColors * the vertices colors of the mesh * @param temporalMesh * the temporal mesh the faces will belong to * @param blenderContext * the blender context * @return list of faces read from the given mesh structure * @throws BlenderFileException * an exception is thrown when problems with file reading occur */ public static List<Face> loadAll(Structure meshStructure, Map<String, List<Vector2f>> userUVGroups, List<byte[]> verticesColors, TemporalMesh temporalMesh, BlenderContext blenderContext) throws BlenderFileException { LOGGER.log(Level.FINE, "Loading all faces from mesh: {0}", meshStructure.getName()); List<Face> result = new ArrayList<Face>(); MeshHelper meshHelper = blenderContext.getHelper(MeshHelper.class); if (meshHelper.isBMeshCompatible(meshStructure)) { LOGGER.fine("Reading BMesh."); Pointer pMLoop = (Pointer) meshStructure.getFieldValue("mloop"); Pointer pMPoly = (Pointer) meshStructure.getFieldValue("mpoly"); if (pMPoly.isNotNull() && pMLoop.isNotNull()) { List<Structure> polys = pMPoly.fetchData(); List<Structure> loops = pMLoop.fetchData(); for (Structure poly : polys) { int materialNumber = ((Number) poly.getFieldValue("mat_nr")).intValue(); int loopStart = ((Number) poly.getFieldValue("loopstart")).intValue(); int totLoop = ((Number) poly.getFieldValue("totloop")).intValue(); boolean smooth = (((Number) poly.getFieldValue("flag")).byteValue() & 0x01) != 0x00; Integer[] vertexIndexes = new Integer[totLoop]; for (int i = loopStart; i < loopStart + totLoop; ++i) { vertexIndexes[i - loopStart] = ((Number) loops.get(i).getFieldValue("v")).intValue(); } // uvs always must be added wheater we have texture or not Map<String, List<Vector2f>> uvCoords = new HashMap<String, List<Vector2f>>(); for (Entry<String, List<Vector2f>> entry : userUVGroups.entrySet()) { List<Vector2f> uvs = entry.getValue().subList(loopStart, 
loopStart + totLoop); uvCoords.put(entry.getKey(), new ArrayList<Vector2f>(uvs)); } List<byte[]> vertexColors = null; if (verticesColors != null && verticesColors.size() > 0) { vertexColors = new ArrayList<byte[]>(totLoop); for (int i = loopStart; i < loopStart + totLoop; ++i) { vertexColors.add(verticesColors.get(i)); } } result.add(new Face(vertexIndexes, smooth, materialNumber, uvCoords, vertexColors, temporalMesh)); } } } else { LOGGER.fine("Reading traditional faces."); Pointer pMFace = (Pointer) meshStructure.getFieldValue("mface"); List<Structure> mFaces = pMFace.isNotNull() ? pMFace.fetchData() : null; if (mFaces != null && mFaces.size() > 0) { // indicates if the material with the specified number should have a texture attached for (int i = 0; i < mFaces.size(); ++i) { Structure mFace = mFaces.get(i); int materialNumber = ((Number) mFace.getFieldValue("mat_nr")).intValue(); boolean smooth = (((Number) mFace.getFieldValue("flag")).byteValue() & 0x01) != 0x00; int v1 = ((Number) mFace.getFieldValue("v1")).intValue(); int v2 = ((Number) mFace.getFieldValue("v2")).intValue(); int v3 = ((Number) mFace.getFieldValue("v3")).intValue(); int v4 = ((Number) mFace.getFieldValue("v4")).intValue(); int vertCount = v4 == 0 ? 
3 : 4; // uvs always must be added wheater we have texture or not Map<String, List<Vector2f>> faceUVCoords = new HashMap<String, List<Vector2f>>(); for (Entry<String, List<Vector2f>> entry : userUVGroups.entrySet()) { List<Vector2f> uvCoordsForASingleFace = new ArrayList<Vector2f>(vertCount); for (int j = 0; j < vertCount; ++j) { uvCoordsForASingleFace.add(entry.getValue().get(i * 4 + j)); } faceUVCoords.put(entry.getKey(), uvCoordsForASingleFace); } List<byte[]> vertexColors = null; if (verticesColors != null && verticesColors.size() > 0) { vertexColors = new ArrayList<byte[]>(vertCount); vertexColors.add(verticesColors.get(v1)); vertexColors.add(verticesColors.get(v2)); vertexColors.add(verticesColors.get(v3)); if (vertCount == 4) { vertexColors.add(verticesColors.get(v4)); } } result.add(new Face(vertCount == 4 ? new Integer[] { v1, v2, v3, v4 } : new Integer[] { v1, v2, v3 }, smooth, materialNumber, faceUVCoords, vertexColors, temporalMesh)); } } } LOGGER.log(Level.FINE, "Loaded {0} faces.", result.size()); return result; } @Override public int compare(Integer index1, Integer index2) { return indexes.indexOf(index1) - indexes.indexOf(index2); } }
package com.platypii.baseline.bluetooth; import com.platypii.baseline.BaseService; import com.platypii.baseline.common.R; import com.platypii.baseline.events.BluetoothEvent; import com.platypii.baseline.util.Exceptions; import android.app.Activity; import android.bluetooth.BluetoothAdapter; import android.bluetooth.BluetoothDevice; import android.content.Context; import android.content.Intent; import android.location.GpsStatus; import android.os.AsyncTask; import android.util.Log; import androidx.annotation.NonNull; import androidx.annotation.Nullable; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Set; import org.greenrobot.eventbus.EventBus; import static com.platypii.baseline.bluetooth.BluetoothState.BT_CONNECTED; import static com.platypii.baseline.bluetooth.BluetoothState.BT_CONNECTING; import static com.platypii.baseline.bluetooth.BluetoothState.BT_STARTING; import static com.platypii.baseline.bluetooth.BluetoothState.BT_STATES; import static com.platypii.baseline.bluetooth.BluetoothState.BT_STOPPED; import static com.platypii.baseline.bluetooth.BluetoothState.BT_STOPPING; /** * Class to manage a bluetooth GPS receiver. * Note: instantiating this class will not automatically start bluetooth. Call startAsync to connect. 
*/ public class BluetoothService implements BaseService { private static final String TAG = "Bluetooth"; private static final int ENABLE_BLUETOOTH_CODE = 13; // Android shared preferences for bluetooth public final BluetoothPreferences preferences = new BluetoothPreferences(); // Bluetooth state private int bluetoothState = BT_STOPPED; @Nullable private BluetoothAdapter bluetoothAdapter; @Nullable private Stoppable bluetoothRunnable; @Nullable private Thread bluetoothThread; // Bluetooth device battery level public float powerLevel = Float.NaN; public boolean charging = false; final List<GpsStatus.NmeaListener> listeners = new ArrayList<>(); @Override public void start(@NonNull Context context) { if (BluetoothState.started(bluetoothState)) { Exceptions.report(new IllegalStateException("Bluetooth started twice " + BT_STATES[bluetoothState])); return; } if (!(context instanceof Activity)) { Exceptions.report(new ClassCastException("Bluetooth context must be an activity")); return; } final Activity activity = (Activity) context; if (bluetoothState == BT_STOPPED) { setState(BT_STARTING); // Start bluetooth thread if (bluetoothRunnable != null) { Log.e(TAG, "Bluetooth thread already started"); } startAsync(activity); } else { Exceptions.report(new IllegalStateException("Bluetooth already started: " + BT_STATES[bluetoothState])); } } /** * Starts bluetooth in an asynctask. * Even though we're mostly just starting the bluetooth thread, calling getAdapter can be slow. 
*/ private void startAsync(@NonNull final Activity activity) { AsyncTask.execute(() -> { bluetoothAdapter = getAdapter(activity); if (bluetoothAdapter != null) { bluetoothRunnable = new BluetoothRunnable(BluetoothService.this, bluetoothAdapter); bluetoothThread = new Thread(bluetoothRunnable); bluetoothThread.start(); } }); } /** * Get bluetooth adapter, request bluetooth if needed */ @Nullable private BluetoothAdapter getAdapter(@NonNull Activity activity) { // TODO: Make sure this doesn't take too long final BluetoothAdapter bluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); if (bluetoothAdapter == null) { // Device not supported Log.e(TAG, "Bluetooth not supported"); } else if (!bluetoothAdapter.isEnabled()) { // Turn on bluetooth // TODO: Handle result? final Intent enableBluetoothIntent = new Intent(BluetoothAdapter.ACTION_REQUEST_ENABLE); activity.startActivityForResult(enableBluetoothIntent, ENABLE_BLUETOOTH_CODE); } return bluetoothAdapter; } /** * Return list of bonded devices, with GPS devices first */ @NonNull public List<BluetoothDevice> getDevices() { final BluetoothAdapter bluetoothAdapter = BluetoothAdapter.getDefaultAdapter(); if (bluetoothAdapter != null) { final Set<BluetoothDevice> deviceSet = bluetoothAdapter.getBondedDevices(); final List<BluetoothDevice> devices = new ArrayList<>(deviceSet); Collections.sort(devices, new BluetoothDeviceComparator()); return devices; } else { Log.w(TAG, "Tried to get devices, but bluetooth is not enabled"); return new ArrayList<>(); } } public int getState() { return bluetoothState; } void setState(int state) { if (bluetoothState == BT_STOPPING && state == BT_CONNECTING) { Log.e(TAG, "Invalid bluetooth state transition: " + BT_STATES[bluetoothState] + " -> " + BT_STATES[state]); } if (bluetoothState == state && state != BT_CONNECTING) { // Only allowed self-transition is connecting -> connecting Log.e(TAG, "Null state transition: " + BT_STATES[bluetoothState] + " -> " + BT_STATES[state]); } Log.d(TAG, 
"Bluetooth state: " + BT_STATES[bluetoothState] + " -> " + BT_STATES[state]); bluetoothState = state; EventBus.getDefault().post(new BluetoothEvent()); } /** * Return a human-readable string for the bluetooth state */ @NonNull public String getStatusMessage(@NonNull Context context) { if (bluetoothAdapter != null && !bluetoothAdapter.isEnabled()) { // Hardware disabled return context.getString(R.string.bluetooth_status_disabled); } else if (!preferences.preferenceEnabled) { // Bluetooth preference disabled return context.getString(R.string.bluetooth_status_disabled); } else if (preferences.preferenceDeviceId == null) { // Bluetooth preference enabled, but device not selected return context.getString(R.string.bluetooth_status_not_selected); } else { switch (bluetoothState) { case BT_STOPPED: return context.getString(R.string.bluetooth_status_stopped); case BT_STARTING: return context.getString(R.string.bluetooth_status_starting); case BT_CONNECTING: return context.getString(R.string.bluetooth_status_connecting); case BT_CONNECTED: return context.getString(R.string.bluetooth_status_connected); case BT_STOPPING: return context.getString(R.string.bluetooth_status_stopping); default: return ""; } } } @Override public synchronized void stop() { if (bluetoothState != BT_STOPPED) { Log.i(TAG, "Stopping bluetooth service"); // Stop thread if (bluetoothRunnable != null && bluetoothThread != null) { bluetoothRunnable.stop(); try { bluetoothThread.join(1000); // Thread is dead, clean up bluetoothRunnable = null; bluetoothThread = null; if (bluetoothState != BT_STOPPED) { Log.e(TAG, "Unexpected bluetooth state: state should be STOPPED when thread has stopped"); } } catch (InterruptedException e) { Log.e(TAG, "Bluetooth thread interrupted while waiting for it to die", e); } Log.i(TAG, "Bluetooth service stopped"); } else { Log.e(TAG, "Cannot stop bluetooth: runnable is null: " + BT_STATES[bluetoothState]); // Set state to stopped since it prevents getting stuck in state STOPPING 
} setState(BT_STOPPED); } } /** * Restart bluetooth. * If bluetooth is stopped, just start it. */ public synchronized void restart(@NonNull Activity activity) { Log.i(TAG, "Restarting bluetooth service"); if (bluetoothState != BT_STOPPED) { // Stop first stop(); if (bluetoothState != BT_STOPPED) { Exceptions.report(new IllegalStateException("Error restarting bluetooth: not stopped: " + BT_STATES[bluetoothState])); } } start(activity); } public void addNmeaListener(GpsStatus.NmeaListener listener) { listeners.add(listener); } public void removeNmeaListener(GpsStatus.NmeaListener listener) { listeners.remove(listener); } }
/* * Copyright (c) 2002-2008 LWJGL Project * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are * met: * * * Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * * Neither the name of 'LWJGL' nor the names of * its contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF * LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING * NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package org.lwjgl.test;

import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;

/**
 * <br>
 * Test class for Display &amp; DisplayMode
 *
 * @author Brian Matzon <brian@matzon.dk>
 * @version $Revision$
 * $Id$
 */
public class DisplayTest {

    /**
     * Creates a new DisplayTest
     */
    public DisplayTest() {
    }

    /**
     * Runs the tests: current mode info, mode query, mode switching,
     * and display-configuration (gamma/brightness/contrast) changes.
     *
     * @throws LWJGLException if the display subsystem fails
     */
    public void executeTest() throws LWJGLException {
        currentTest();
        queryModesTest();
        setDisplayModeTest();
        setDisplayConfigurationTest();
    }

    /**
     * Prints some info about the current mode
     */
    private void currentTest() {
        System.out.println("==== Test Current ====");
        System.out.println("Info about current:");
        System.out.println("Graphics card: " + Display.getAdapter() + ", version: " + Display.getVersion());
        System.out.println("Resolution: " +
                Display.getDisplayMode().getWidth() + "x" +
                Display.getDisplayMode().getHeight() + "x" +
                Display.getDisplayMode().getBitsPerPixel() + "@" +
                Display.getDisplayMode().getFrequency() + "Hz");
        System.out.println("---- Test Current ----");
    }

    /**
     * Tests querying for modes
     *
     * @throws LWJGLException if the available modes cannot be queried
     */
    private void queryModesTest() throws LWJGLException {
        System.out.println("==== Test query ====");
        System.out.println("Retrieving available displaymodes");
        DisplayMode[] modes = Display.getAvailableDisplayModes();

        // no modes check
        if (modes == null) {
            System.out.println("FATAL: unable to find any modes!");
            System.exit(-1);
        }

        // write some info
        System.out.println("Found " + modes.length + " modes");
        System.out.println("The first 5 are:");
        // FIX: the old loop broke only AFTER printing index 5, listing six modes
        // under a "first 5" banner. Cap the bound at 5 (or fewer, if fewer exist).
        for (int i = 0; i < Math.min(5, modes.length); i++) {
            System.out.println(modes[i]);
        }
        System.out.println("---- Test query ----");
    }

    /**
     * Tests setting display modes: finds a 640x480 mode, switches to it
     * fullscreen for 5 seconds, then resets to windowed mode.
     *
     * @throws LWJGLException if the available modes cannot be queried
     */
    private void setDisplayModeTest() throws LWJGLException {
        DisplayMode mode = null;

        System.out.println("==== Test setDisplayMode ====");
        System.out.println("Retrieving available displaymodes");
        DisplayMode[] modes = Display.getAvailableDisplayModes();

        // no modes check
        if (modes == null) {
            System.out.println("FATAL: unable to find any modes!");
            System.exit(-1);
        }

        // find a mode
        System.out.print("Looking for 640x480...");
        for (DisplayMode candidate : modes) {
            if (candidate.getWidth() == 640 && candidate.getHeight() == 480) {
                mode = candidate;
                System.out.println("found!");
                break;
            }
        }

        // no mode check
        if (mode == null) {
            System.out.println("error\nFATAL: Unable to find basic mode.");
            System.exit(-1);
        }

        // change to mode, and wait a bit
        System.out.print("Changing to mode...");
        try {
            Display.setDisplayMode(mode);
            Display.setFullscreen(true);
            Display.create();
        } catch (Exception e) {
            // FIX: log the cause before bailing out instead of discarding it
            e.printStackTrace();
            System.out.println("error\nFATAL: Error setting mode");
            System.exit(-1);
        }
        System.out.println("done");

        System.out.println("Resolution: " +
                Display.getDisplayMode().getWidth() + "x" +
                Display.getDisplayMode().getHeight() + "x" +
                Display.getDisplayMode().getBitsPerPixel() + "@" +
                Display.getDisplayMode().getFrequency() + "Hz");

        pause(5000);

        // reset
        System.out.print("Resetting mode...");
        try {
            Display.setFullscreen(false);
        } catch (LWJGLException e) {
            e.printStackTrace();
        }
        System.out.println("done");
        System.out.println("---- Test setDisplayMode ----");
    }

    /**
     * Tests the DisplayConfiguration by sweeping gamma, brightness and
     * contrast through a range of values (each applied for ~3 seconds).
     */
    private void setDisplayConfigurationTest() {
        System.out.println("==== Test setDisplayConfigurationTest ====");

        System.out.println("Testing normal setting");
        changeConfig(1.0f, 0f, 1f);

        System.out.println("Testing gamma settings");
        changeConfig(5.0f, 0f, 1f);
        changeConfig(0.5f, 0f, 1f);

        System.out.println("Testing brightness settings");
        changeConfig(1.0f, -1.0f, 1f);
        changeConfig(1.0f, -0.5f, 1f);
        changeConfig(1.0f, 0.5f, 1f);
        changeConfig(1.0f, 1.0f, 1f);

        System.out.println("Testing contrast settings");
        changeConfig(1.0f, 0f, 0f);
        changeConfig(1.0f, 0f, 0.5f);
        changeConfig(1.0f, 0f, 10000.0f);

        System.out.print("resetting...");
        try {
            Display.setFullscreen(false);
        } catch (LWJGLException e) {
            e.printStackTrace();
        }
        System.out.println("done");
        System.out.println("---- Test setDisplayConfigurationTest ----");
    }

    /**
     * Changes the Display configuration
     *
     * @param gamma      gamma value to change to
     * @param brightness brightness value to change to
     * @param contrast   contrast value to change to
     */
    private void changeConfig(float gamma, float brightness, float contrast) {
        try {
            Display.setDisplayConfiguration(gamma, brightness, contrast);
            System.out.println("Configuration changed, gamma = " + gamma
                    + " brightness = " + brightness + " contrast = " + contrast);
        } catch (Exception e) {
            System.out.println("Failed on: gamma = " + gamma
                    + " brightness = " + brightness + " contrast = " + contrast);
        }
        pause(3000);
    }

    /**
     * Pause current thread for a specified time while pumping display messages
     *
     * @param time milliseconds to sleep
     */
    private void pause(long time) {
        int SLEEP_DELAY = 100;
        for (int i = 0; i < time; i += SLEEP_DELAY) {
            try {
                Display.processMessages();
                Thread.sleep(SLEEP_DELAY);
            } catch (InterruptedException inte) {
                // FIX: the interrupt was silently swallowed; restore the flag
                // and stop pausing so the caller can observe the interruption.
                Thread.currentThread().interrupt();
                return;
            }
        }
    }

    /**
     * Tests the Sys class, and serves as basic usage test
     *
     * @param args ignored
     */
    public static void main(String[] args) throws LWJGLException {
        new DisplayTest().executeTest();
        System.exit(0);
    }
}
package biz.dfch.j.graylog.plugin.output;

import biz.dfch.j.clickatell.ClickatellClient;
import biz.dfch.j.clickatell.rest.accountbalance.AccountBalanceResponse;
import biz.dfch.j.clickatell.rest.coverage.CoverageResponse;
import biz.dfch.j.clickatell.rest.message.MessageResponse;
import com.google.inject.assistedinject.Assisted;
import org.graylog2.plugin.Message;
import org.graylog2.plugin.configuration.Configuration;
import org.graylog2.plugin.configuration.ConfigurationRequest;
import org.graylog2.plugin.configuration.fields.*;
import org.graylog2.plugin.outputs.MessageOutput;
import org.graylog2.plugin.outputs.MessageOutputConfigurationException;
import org.graylog2.plugin.streams.Stream;
import org.msgpack.annotation.NotNullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.inject.Inject;
import java.util.*;
import java.util.concurrent.atomic.AtomicBoolean;

/**
 * Graylog {@link MessageOutput} plugin that forwards messages of a stream as
 * SMS via the Clickatell REST API. Message text is assembled from a
 * configurable list of fields (built-in placeholders such as
 * {@code <message>} plus arbitrary message fields), optionally prefixed with
 * a static text, and truncated to a configurable maximum length.
 */
public class dfchBizClickatellOutput implements MessageOutput {

    private static final String CONFIG_AUTH_TOKEN = "CONFIG_AUTH_TOKEN";
    private static final String CONFIG_RECIPIENTS = "CONFIG_RECIPIENTS";
    private static final String CONFIG_FIELDS = "CONFIG_FIELDS";
    private static final String CONFIG_INCLUDE_FIELD_NAMES = "CONFIG_INCLUDE_FIELD_NAMES";
    private static final String CONFIG_MAX_LENGTH = "CONFIG_MAX_LENGTH";
    private static final String CONFIG_MAX_CREDITS = "CONFIG_MAX_CREDITS";
    private static final String CONFIG_MAX_PARTS = "CONFIG_MAX_PARTS";
    private static final String CONFIG_STATIC_TEXT = "CONFIG_STATIC_TEXT";

    private static final Logger LOG = LoggerFactory.getLogger(dfchBizClickatellOutput.class);

    private final AtomicBoolean isRunning = new AtomicBoolean(false);

    private Configuration configuration;
    private String streamTitle;
    List<String> recipients;
    List<String> fields;
    ClickatellClient clickatellClient;

    /**
     * Validates the plugin configuration, connects to Clickatell, logs the
     * current account balance and checks coverage for every recipient.
     *
     * @param stream        the Graylog stream this output is attached to
     * @param configuration the user-supplied plugin configuration
     * @throws MessageOutputConfigurationException on invalid configuration or
     *                                             connection failure
     */
    @Inject
    public dfchBizClickatellOutput
            (
                    @NotNullable @Assisted Stream stream,
                    @NotNullable @Assisted Configuration configuration
            )
            throws MessageOutputConfigurationException {
        try {
            LOG.debug("Verifying configuration ...");

            this.configuration = configuration;

            streamTitle = stream.getTitle();
            if (null == streamTitle || streamTitle.isEmpty()) {
                throw new MessageOutputConfigurationException("streamTitle: Parameter validation FAILED. Value cannot be null or empty.");
            }

            // FIX: use the declared CONFIG_* constants consistently instead of
            // duplicating the key names as string literals.
            String configAuthToken = configuration.getString(CONFIG_AUTH_TOKEN);
            if (null == configAuthToken || configAuthToken.isEmpty()) {
                throw new MessageOutputConfigurationException("configAuthToken: Parameter validation FAILED. Value cannot be null or empty.");
            }

            // FIX: guard against a null value before split() (previously NPE'd).
            String configRecipients = configuration.getString(CONFIG_RECIPIENTS);
            if (null == configRecipients || configRecipients.isEmpty()) {
                throw new MessageOutputConfigurationException("CONFIG_RECIPIENTS: Parameter validation FAILED. You have to specify at least one recipient.");
            }
            recipients = Arrays.asList(configRecipients.split("\\s*,\\s*"));
            if (recipients.isEmpty()) {
                throw new MessageOutputConfigurationException("CONFIG_RECIPIENTS: Parameter validation FAILED. You have to specify at least one recipient.");
            }

            String configFields = configuration.getString(CONFIG_FIELDS);
            if (null == configFields || configFields.isEmpty()) {
                // FIX: the message previously omitted '<source>' although it is added below.
                LOG.warn("No fields were specified. Using the following default fields: '<timestamp>', '<stream>', '<source>', '<message>'.");
                fields = new ArrayList<String>();
                fields.add("<timestamp>");
                fields.add("<stream>");
                fields.add("<source>");
                fields.add("<message>");
            } else {
                fields = Arrays.asList(configFields.split("\\s*,\\s*"));
            }

            // Negative limits are tolerated (treated as "no limit" downstream) but flagged.
            if (0 > configuration.getInt(CONFIG_MAX_LENGTH)) {
                LOG.warn("CONFIG_MAX_LENGTH: Parameter validation FAILED. Field must be equal or greater than zero.");
            }
            if (0 > configuration.getInt(CONFIG_MAX_CREDITS)) {
                LOG.warn("CONFIG_MAX_CREDITS: Parameter validation FAILED. Field must be equal or greater than zero.");
            }
            if (0 > configuration.getInt(CONFIG_MAX_PARTS)) {
                LOG.warn("CONFIG_MAX_PARTS: Parameter validation FAILED. Field must be equal or greater than zero.");
            }
            LOG.info("Verifying configuration SUCCEEDED.");

            LOG.debug("Connecting to Clickatell ...");
            clickatellClient = new ClickatellClient(configAuthToken);
            AccountBalanceResponse accountBalanceResponse = clickatellClient.getBalance();
            LOG.info(String.format("%s: current balance for output on stream '%s' is '%s'.",
                    (new dfchBizClickatellOutputMetaData()).getName(), streamTitle,
                    accountBalanceResponse.getData().getBalance()));

            // Coverage problems are logged but do not prevent startup.
            for (String recipient : recipients) {
                CoverageResponse coverageResponse = clickatellClient.getCoverage(recipient);
                if (!coverageResponse.getData().isRoutable()) {
                    LOG.error(String.format("Sending message to '%s' is outside coverage.", recipient));
                }
            }
            LOG.info("Connecting to Clickatell SUCCEEDED.");

            isRunning.set(true);
        } catch (MessageOutputConfigurationException ex) {
            LOG.error("Connecting to Clickatell FAILED.", ex);
            throw ex;
        } catch (Exception ex) {
            // Preserve the cause chain in the wrapped configuration exception.
            LOG.error("Connecting to Clickatell FAILED.", ex);
            throw new MessageOutputConfigurationException(ex.getMessage());
        }
    }

    @Override
    public boolean isRunning() {
        return isRunning.get();
    }

    @Override
    public void stop() {
        try {
            LOG.info("stop()");
            isRunning.set(false);
        } catch (Throwable ex) {
            // FIX: include the throwable so the failure is diagnosable.
            LOG.error("stop() FAILED.", ex);
        }
    }

    /**
     * Assembles the SMS text for a single message from the configured fields
     * and sends it to all recipients. No-op when the output is stopped.
     *
     * @param message the Graylog message to forward
     * @throws Exception if sending via Clickatell fails
     */
    @Override
    public void write(Message message) throws Exception {
        if (!isRunning.get()) {
            return;
        }
        try {
            int maxLength = configuration.getInt(CONFIG_MAX_LENGTH);
            // FIX: a zero/negative CONFIG_MAX_LENGTH previously produced an
            // invalid StringBuilder capacity (NegativeArraySizeException).
            StringBuilder sb = new StringBuilder(Math.max(16, maxLength));

            String staticText = configuration.getString(CONFIG_STATIC_TEXT);
            if (null != staticText && !staticText.isEmpty()) {
                sb.append(staticText);
                sb.append(" ");
            }

            boolean includeFieldNames = configuration.getBoolean(CONFIG_INCLUDE_FIELD_NAMES);
            for (String fieldName : fields) {
                switch (fieldName) {
                    case "<id>":
                        if (includeFieldNames) {
                            sb.append("id: ");
                        }
                        sb.append(message.getId());
                        sb.append(";");
                        break;
                    case "<message>":
                        if (includeFieldNames) {
                            sb.append("message: ");
                        }
                        sb.append(message.getMessage());
                        sb.append(";");
                        break;
                    case "<source>":
                        if (includeFieldNames) {
                            sb.append("source: ");
                        }
                        sb.append(message.getSource());
                        sb.append(";");
                        break;
                    case "<timestamp>":
                        if (includeFieldNames) {
                            sb.append("timestamp: ");
                        }
                        sb.append(message.getTimestamp());
                        sb.append(";");
                        break;
                    case "<stream>":
                        if (includeFieldNames) {
                            sb.append("stream: ");
                        }
                        sb.append(streamTitle);
                        sb.append(";");
                        break;
                    default:
                        // Anything not a built-in placeholder is looked up as a message field.
                        if (!message.hasField(fieldName)) {
                            LOG.warn(String.format("%s: field name does not exist. Skipping ...", fieldName));
                            continue;
                        }
                        if (includeFieldNames) {
                            sb.append(fieldName);
                            sb.append(": ");
                        }
                        sb.append(message.getField(fieldName).toString());
                        sb.append(";");
                        break;
                }
                // Stop appending early once the limit is exceeded (0 means unlimited).
                if (0 < maxLength && maxLength < sb.length()) {
                    break;
                }
            }
            if (0 < maxLength && maxLength < sb.length()) {
                LOG.warn(String.format("CONFIG_MAX_LENGTH: Generated message contains '%d' characters and exceeds configured maximum of '%d' characters. Truncating message ...", sb.length(), maxLength));
                sb.setLength(maxLength);
            }
            LOG.debug(sb.toString());

            clickatellClient.sendMessage(
                    recipients,
                    sb.toString(),
                    configuration.getInt(CONFIG_MAX_CREDITS),
                    configuration.getInt(CONFIG_MAX_PARTS)
            );
        } catch (Exception ex) {
            // The logger already records the stack trace; rethrow for the framework.
            LOG.error("Exception occurred.", ex);
            throw ex;
        }
    }

    @Override
    public void write(List<Message> messages) throws Exception {
        if (!isRunning.get()) {
            return;
        }
        for (Message message : messages) {
            write(message);
        }
    }

    /**
     * Declares the configuration fields shown in the Graylog UI for this output.
     */
    public static class Config extends MessageOutput.Config {
        @Override
        public ConfigurationRequest getRequestedConfiguration() {
            final ConfigurationRequest configurationRequest = new ConfigurationRequest();
            configurationRequest.addField(new TextField(
                            CONFIG_AUTH_TOKEN,
                            "Clickatell AuthToken",
                            "",
                            "AuthenticationToken for the Clickatell REST API",
                            ConfigurationField.Optional.NOT_OPTIONAL,
                            TextField.Attribute.IS_PASSWORD)
            );
            configurationRequest.addField(new TextField(
                            CONFIG_RECIPIENTS,
                            "Recipients of short message",
                            "",
                            "Comma separated list of number in international format, eg 27999112345. No '00', ' ', '+' or '-', just numbers",
                            ConfigurationField.Optional.NOT_OPTIONAL)
            );
            configurationRequest.addField(new TextField(
                            CONFIG_FIELDS,
                            "Fields to send in short message",
                            "",
                            "Comma separated list of fields to send as message text, eg <message>, <id>, <timestamp>, <stream> or user defined fields. Built-in fields have to be surrounded by '<>'",
                            ConfigurationField.Optional.OPTIONAL)
            );
            configurationRequest.addField(new BooleanField(
                            CONFIG_INCLUDE_FIELD_NAMES,
                            "Include field names in short message",
                            true,
                            "Set to true to include field names in short message, or set to false to omit field names and only send field contents.")
            );
            configurationRequest.addField(new NumberField(
                            CONFIG_MAX_LENGTH,
                            "MaxLength",
                            0,
                            "Maximum length of short message",
                            ConfigurationField.Optional.OPTIONAL)
            );
            configurationRequest.addField(new NumberField(
                            CONFIG_MAX_CREDITS,
                            "MaxCredits",
                            0,
                            "Maximum credits to spend on a short message",
                            ConfigurationField.Optional.OPTIONAL)
            );
            configurationRequest.addField(new NumberField(
                            CONFIG_MAX_PARTS,
                            "MaxParts",
                            0,
                            "Maximum number of parts a short message can consist of",
                            ConfigurationField.Optional.OPTIONAL)
            );
            configurationRequest.addField(new TextField(
                            CONFIG_STATIC_TEXT,
                            "Static text that prepends the short message",
                            "",
                            "You can optionally define a phrase that will be sent with every short message.",
                            ConfigurationField.Optional.OPTIONAL)
            );
            return configurationRequest;
        }
    }

    public interface Factory extends MessageOutput.Factory<dfchBizClickatellOutput> {
        @Override
        dfchBizClickatellOutput create(Stream stream, Configuration configuration);

        @Override
        Config getConfig();

        @Override
        Descriptor getDescriptor();
    }

    public static class Descriptor extends MessageOutput.Descriptor {
        public Descriptor() {
            super((new dfchBizClickatellOutputMetaData()).getName(), false, "",
                    (new dfchBizClickatellOutputMetaData()).getDescription());
        }
    }
}
/**
 *
 *
 * Copyright 2015 Ronald Rink, d-fens GmbH
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.lsu.vizeq;

import java.io.InterruptedIOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Iterator;
import java.util.Map;
import java.util.Map.Entry;

import redis.clients.jedis.Jedis;
import redis.clients.jedis.exceptions.JedisConnectionException;
import android.app.ActionBar;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.ColorDrawable;
import android.os.AsyncTask;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.View;
import android.view.Window;
import android.view.WindowManager;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.TextView;

/**
 * Host-side main menu: keeps the party's Redis presence key alive, answers
 * guest discovery/track-request UDP packets, accepts join requests, and
 * offers navigation to player/search/visualization screens.
 */
public class HostMenuActivity extends BackableActivity {

    MyApplication myapp;
    ActionBar actionBar;
    // Tracks Redis connectivity so the "lost connection" dialog fires only
    // once per outage instead of on every failed heartbeat.
    boolean connectionGood = true;

    /**
     * Starts a background thread that refreshes this host's Redis key
     * ("zipcode:name" -> ip) every 3 seconds while hosting, re-creating the
     * key whenever the expire call reports it missing.
     */
    public void serverHeartbeat() {
        final MyApplication myapp = (MyApplication) this.getApplicationContext();
        new Thread(new Runnable() {
            @Override
            public void run() {
                while (myapp.hosting) {
                    long test = 0;
                    while (test != 1) {
                        Jedis jedis = null;
                        try {
                            jedis = myapp.jedisPool.getResource();
                            jedis.auth(Redis.auth);
                            // expire() returns 1 only when the key exists; otherwise
                            // (re)create it and try again.
                            test = jedis.expire(myapp.zipcode + ":" + myapp.myName, 5);
                            while (test != 1) {
                                jedis.set(myapp.zipcode + ":" + myapp.myName, myapp.myIp);
                                test = jedis.expire(myapp.zipcode + ":" + myapp.myName, 5);
                            }
                            connectionGood = true;
                        } catch (JedisConnectionException e) {
                            e.printStackTrace();
                            // Broken connections must go back to the pool via
                            // returnBrokenResource, not returnResource.
                            if (jedis != null) {
                                myapp.jedisPool.returnBrokenResource(jedis);
                                jedis = null;
                            }
                            if (connectionGood) {
                                connectionGood = false;
                                runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        LostConnectionNotification();
                                    }
                                });
                            }
                        } finally {
                            if (jedis != null)
                                myapp.jedisPool.returnResource(jedis);
                        }
                    }
                    try {
                        Thread.sleep(3000L);
                    } catch (InterruptedException e) {
                        // FIX: restore the interrupt flag and stop the heartbeat
                        // instead of swallowing the interruption.
                        Thread.currentThread().interrupt();
                        return;
                    }
                }
            }
        }).start();
    }

    /** Shows a modal dialog telling the host the server connection was lost. */
    public void LostConnectionNotification() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setMessage("Lost connection to server").setCancelable(false)
                .setPositiveButton("ok", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                    }
                });
        AlertDialog alert = builder.create();
        alert.show();
    }

    /**
     * Starts a background thread that pings each connected guest over UDP
     * (port 7772) every 3 seconds and drops guests that fail to ack within
     * 5 seconds. Currently not invoked (see commented call in onCreate).
     */
    public void userHeartbeat() {
        new Thread(new Runnable() {
            @Override
            public void run() {
                DatagramSocket listenSocket, sendSocket;
                try {
                    listenSocket = new DatagramSocket(7772);
                    sendSocket = new DatagramSocket();
                    while (myapp.hosting) {
                        Iterator<Entry<InetAddress, String>> it = myapp.connectedUsers.entrySet().iterator();
                        byte[] ping = "ping".getBytes();
                        byte[] ack = new byte[1024];
                        while (it.hasNext()) {
                            Entry<InetAddress, String> currEntry = it.next();
                            InetAddress currIp = currEntry.getKey();
                            final String guestName = currEntry.getValue();
                            DatagramPacket pingPacket = new DatagramPacket(ping, ping.length, currIp, 7772);
                            DatagramPacket ackPacket = new DatagramPacket(ack, ack.length);
                            try {
                                sendSocket.send(pingPacket);
                                listenSocket.setSoTimeout(5000);
                                listenSocket.receive(ackPacket);
                            } catch (InterruptedIOException e) {
                                // Timeout: remove the unresponsive guest from the party
                                // and refresh the UI on the main thread.
                                it.remove();
                                runOnUiThread(new Runnable() {
                                    @Override
                                    public void run() {
                                        refreshLists();
                                    }
                                });
                            } catch (Exception e) {
                                e.printStackTrace();
                            }
                        }
                        Thread.sleep(3000L);
                    }
                    sendSocket.close();
                    listenSocket.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start();
    }

    /**
     * Applies one theme color to the action bar and the four menu buttons.
     * Extracted to remove the five-way duplication in onStart.
     */
    private void applyThemeColor(int colorResId, ImageButton search, ImageButton scope,
                                 ImageButton playing, ImageButton visualizer) {
        int color = getResources().getColor(colorResId);
        actionBar.setBackgroundDrawable(new ColorDrawable(color));
        scope.setBackgroundColor(color);
        search.setBackgroundColor(color);
        playing.setBackgroundColor(color);
        visualizer.setBackgroundColor(color);
    }

    @Override
    protected void onStart() {
        super.onStart();
        actionBar = getActionBar();
        actionBar.setBackgroundDrawable(new ColorDrawable(getResources().getColor(R.color.LightGreen)));
        ImageButton search = (ImageButton) findViewById(R.id.Search);
        ImageButton scope = (ImageButton) findViewById(R.id.Scope);
        ImageButton playing = (ImageButton) findViewById(R.id.NowPlaying);
        ImageButton visualizer = (ImageButton) findViewById(R.id.SoundViz);
        scope.setAlpha(0.9f);
        search.setAlpha(0.9f);
        playing.setAlpha(0.9f);
        visualizer.setAlpha(0.9f);

        // Restore the persisted theme color choice, if any.
        SharedPreferences memory = getSharedPreferences("VizEQ", MODE_PRIVATE);
        int posi = memory.getInt("colorPos", -1);
        if (posi > 0)
            VizEQ.numRand = posi;
        switch (VizEQ.numRand) {
            case 1:
                applyThemeColor(R.color.Red, search, scope, playing, visualizer);
                break;
            case 2:
                applyThemeColor(R.color.Green, search, scope, playing, visualizer);
                break;
            case 3:
                applyThemeColor(R.color.Blue, search, scope, playing, visualizer);
                break;
            case 4:
                applyThemeColor(R.color.Purple, search, scope, playing, visualizer);
                break;
            case 5:
                applyThemeColor(R.color.Orange, search, scope, playing, visualizer);
                break;
        }
        // Toggle the title off/on to force the action bar to redraw it.
        actionBar.setDisplayShowTitleEnabled(false);
        actionBar.setDisplayShowTitleEnabled(true);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_host_menu);
        actionBar = getActionBar();
        actionBar.setBackgroundDrawable(new ColorDrawable(getResources().getColor(R.color.LightGreen)));
        ImageButton search = (ImageButton) findViewById(R.id.Search);
        ImageButton scope = (ImageButton) findViewById(R.id.Scope);
        ImageButton playing = (ImageButton) findViewById(R.id.NowPlaying);
        ImageButton visualizer = (ImageButton) findViewById(R.id.SoundViz);
        scope.setAlpha(0.7f);
        search.setAlpha(0.7f);
        playing.setAlpha(0.7f);
        visualizer.setAlpha(0.7f);
        refreshLists();

        // Pre-built (but not shown) visualization overlay dialog.
        final Dialog dialog = new Dialog(HostMenuActivity.this);
        dialog.requestWindowFeature(Window.FEATURE_NO_TITLE);
        dialog.setContentView(R.layout.activity_host_sound_visualization);
        final Window window = dialog.getWindow();
        window.setLayout(WindowManager.LayoutParams.MATCH_PARENT, WindowManager.LayoutParams.WRAP_CONTENT);
        window.clearFlags(WindowManager.LayoutParams.FLAG_DIM_BEHIND);
        window.setBackgroundDrawable(new ColorDrawable(Color.TRANSPARENT));

        myapp = (MyApplication) this.getApplicationContext();
        serverHeartbeat();
        //userHeartbeat();

        // Listener thread for guest discovery ("search") and track requests
        // ("request") on UDP port 7770.
        new Thread(new Runnable() {
            public void run() {
                try {
                    DatagramSocket listenSocket = new DatagramSocket(7770);
                    DatagramSocket sendSocket = new DatagramSocket();
                    while (true) {
                        byte[] receiveData = new byte[1024];
                        DatagramPacket receivedPacket = new DatagramPacket(receiveData, receiveData.length);
                        listenSocket.receive(receivedPacket);
                        InetAddress ip = receivedPacket.getAddress();
                        int port = receivedPacket.getPort();
                        String message = PacketParser.getHeader(receivedPacket);
                        if (message.equals("search")) {
                            // Reply with this host's name so guests can list the party.
                            String information = "found\n" + myapp.myName;
                            byte[] sendData = information.getBytes();
                            DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, ip, 7770);
                            sendSocket.send(sendPacket);
                        }
                        if (message.equals("request")) {
                            /* Protocol: album \n artist \n requester \n track \n uri */
                            // FIX: parse the packet once instead of five times.
                            String[] args = PacketParser.getArgs(receivedPacket);
                            String album = args[0];
                            String artist = args[1];
                            String requester = args[2];
                            String trackName = args[3];
                            String uri = args[4];
                            // If the track is already requested, just record the
                            // additional requester (once per guest).
                            boolean found = false;
                            for (int i = 0; i < myapp.requests.size(); i++) {
                                if (myapp.requests.get(i).mUri.equals(uri)) {
                                    if (!myapp.requests.get(i).requesters.contains(requester))
                                        myapp.requests.get(i).requesters.add(requester);
                                    found = true;
                                }
                            }
                            if (!found) {
                                Track request = new Track();
                                request.mAlbum = album;
                                request.mArtist = artist;
                                request.mTrack = trackName;
                                request.mUri = uri;
                                request.requesters.add(requester);
                                myapp.requests.add(request);
                            }
                        }
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }).start();

        new ListenForJoinRequestTask().execute();

        // Dim buttons while pressed; return false so click listeners still fire.
        View.OnTouchListener touchListener = new View.OnTouchListener() {
            @Override
            public boolean onTouch(View v, MotionEvent e) {
                if (e.getAction() == MotionEvent.ACTION_DOWN) {
                    v.setAlpha(.7f);
                } else if (e.getAction() == MotionEvent.ACTION_UP) {
                    v.setAlpha(.9f);
                }
                return false;
            }
        };
        findViewById(R.id.NowPlaying).setOnTouchListener(touchListener);
        findViewById(R.id.Scope).setOnTouchListener(touchListener);
        findViewById(R.id.Search).setOnTouchListener(touchListener);
        findViewById(R.id.SoundViz).setOnTouchListener(touchListener);
        findViewById(R.id.NowPlaying).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent nextIntent = new Intent(HostMenuActivity.this, PlayerActivity.class);
                startActivity(nextIntent);
            }
        });
        findViewById(R.id.Scope).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent nextIntent = new Intent(HostMenuActivity.this, PreferenceVisualizationActivity.class);
                startActivity(nextIntent);
            }
        });
        findViewById(R.id.Search).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent nextIntent = new Intent(HostMenuActivity.this, SearchActivity.class);
                startActivity(nextIntent);
            }
        });
        findViewById(R.id.SoundViz).setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                Intent nextIntent = new Intent(HostMenuActivity.this, HostSoundVisualizationActivity.class);
                startActivity(nextIntent);
            }
        });
    }

    /**
     * Legacy manual-add handler; the input fields are commented out, so this
     * currently only refreshes the list.
     */
    public void addUserToList(View view) {
        //EditText nameField = (EditText) this.findViewById(R.id.name_field);
        //EditText ipField = (EditText) this.findViewById(R.id.ip_field);
        MyApplication myapp = (MyApplication) this.getApplicationContext();
        try {
            //myapp.connectedUsers.put(InetAddress.getByName(ipField.getText().toString()), nameField.getText().toString());
        } catch (Exception e) {
            e.printStackTrace();
        }
        //nameField.setText("");
        //ipField.setText("");
        refreshLists();
    }

    /** Recomputes the connected-guest count and updates the party label. */
    private void refreshLists() {
        String nameString = "";
        String ipString = "";
        MyApplication myapp = (MyApplication) this.getApplicationContext();
        Iterator<Entry<InetAddress, String>> it = myapp.connectedUsers.entrySet().iterator();
        int numPartiers = 0;
        TextView partyText = (TextView) findViewById(R.id.numUsers);
        Typeface font = Typeface.createFromAsset(getAssets(), "Mission Gothic Regular.otf");
        partyText.setTypeface(font);
        partyText.setTextColor(Color.WHITE);
        while (it.hasNext()) {
            Map.Entry<InetAddress, String> pairs = it.next();
            String name = (String) pairs.getValue();
            String ip = ((InetAddress) pairs.getKey()).getHostAddress();
            nameString += (name + "\n");
            ipString += (ip + "\n");
            numPartiers++;
        }
        String peepString = (numPartiers == 1) ? " person" : " people";
        String verb = (numPartiers == 1) ? " is " : " are ";
        partyText.setText(numPartiers + peepString + verb + "connected to the party");
    }

    /**
     * Background task that accepts "join" packets on UDP port 7771, registers
     * the guest, replies with "accept" plus the currently playing track, and
     * refreshes the UI via onProgressUpdate.
     */
    private class ListenForJoinRequestTask extends AsyncTask<Void, Void, Void> {
        DatagramSocket listenSocket;
        DatagramSocket sendSocket;

        @Override
        protected Void doInBackground(Void... params) {
            try {
                listenSocket = new DatagramSocket(7771);
                sendSocket = new DatagramSocket();
                while (true) {
                    byte listenData[] = new byte[1024];
                    DatagramPacket listenPacket = new DatagramPacket(listenData, listenData.length);
                    listenSocket.receive(listenPacket);
                    String message = PacketParser.getHeader(listenPacket);
                    if (message.equals("join")) {
                        String clientName = PacketParser.getArgs(listenPacket)[0];
                        InetAddress clientIp = listenPacket.getAddress();
                        myapp.connectedUsers.put(clientIp, clientName);
                        String sendString = "accept\n" + VizEQ.nowPlaying + "\n";
                        byte sendData[] = sendString.getBytes();
                        DatagramPacket sendPacket = new DatagramPacket(sendData, sendData.length, clientIp, 7771);
                        sendSocket.send(sendPacket);
                        // Trigger onProgressUpdate -> refreshLists on the UI thread.
                        publishProgress();
                    }
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }

        @Override
        protected void onCancelled() {
            listenSocket.close();
            sendSocket.close();
        }

        @Override
        protected void onCancelled(Void result) {
            sendSocket.close();
            listenSocket.close();
        }

        @Override
        protected void onProgressUpdate(Void... values) {
            refreshLists();
        }
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.host_menu, menu);
        return true;
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Handle item selection
        switch (item.getItemId()) {
            case R.id.action_settings:
                Intent nextIntent = new Intent(HostMenuActivity.this, HostProfileActivity.class);
                startActivity(nextIntent);
                break;
            case R.id.about:
                Intent nextIntent2 = new Intent(HostMenuActivity.this, AboutActivity.class);
                startActivity(nextIntent2);
                break;
            default:
                return super.onOptionsItemSelected(item);
        }
        return true;
    }

    /** Confirms with the host before backing out (which stops the party). */
    public void BackWarning() {
        AlertDialog.Builder builder = new AlertDialog.Builder(this);
        builder.setMessage("Going back will stop your party.\nAre you sure you wish to continue?")
                .setCancelable(false)
                .setPositiveButton("Yes", new DialogInterface.OnClickListener() {
                    public void onClick(DialogInterface dialog, int id) {
                        goBack();
                    }
                })
                .setNegativeButton("Nevermind", new DialogInterface.OnClickListener() {
                    @Override
                    public void onClick(DialogInterface arg0, int arg1) {
                    }
                });
        AlertDialog alert = builder.create();
        alert.show();
    }

    /** Performs the actual back navigation once confirmed. */
    public void goBack() {
        super.onBackPressed();
    }

    @Override
    public void onBackPressed() {
        BackWarning();
    }
}
/* ====================================================================
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
==================================================================== */

package org.apache.poi.hssf.model;

import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import org.apache.poi.hssf.record.CRNCountRecord;
import org.apache.poi.hssf.record.CRNRecord;
import org.apache.poi.hssf.record.CountryRecord;
import org.apache.poi.hssf.record.ExternSheetRecord;
import org.apache.poi.hssf.record.ExternalNameRecord;
import org.apache.poi.hssf.record.NameCommentRecord;
import org.apache.poi.hssf.record.NameRecord;
import org.apache.poi.hssf.record.Record;
import org.apache.poi.hssf.record.SupBookRecord;
import org.apache.poi.ss.formula.SheetNameFormatter;
import org.apache.poi.ss.formula.ptg.Area3DPtg;
import org.apache.poi.ss.formula.ptg.ErrPtg;
import org.apache.poi.ss.formula.ptg.NameXPtg;
import org.apache.poi.ss.formula.ptg.Ptg;
import org.apache.poi.ss.formula.ptg.Ref3DPtg;
import org.apache.poi.ss.usermodel.Workbook;

/**
 * Link Table (OOO pdf reference: 4.10.3 ) <p/>
 *
 * The main data of all types of references is stored in the Link Table inside the Workbook Globals
 * Substream (4.2.5). The Link Table itself is optional and occurs only if there are any
 * references in the document.
 * <p/>
 *
 * In BIFF8 the Link Table consists of
 * <ul>
 * <li>zero or more EXTERNALBOOK Blocks<p/>
 * each consisting of
 * <ul>
 * <li>exactly one EXTERNALBOOK (0x01AE) record</li>
 * <li>zero or more EXTERNALNAME (0x0023) records</li>
 * <li>zero or more CRN Blocks<p/>
 * each consisting of
 * <ul>
 * <li>exactly one XCT (0x0059)record</li>
 * <li>zero or more CRN (0x005A) records (documentation says one or more)</li>
 * </ul>
 * </li>
 * </ul>
 * </li>
 * <li>zero or one EXTERNSHEET (0x0017) record</li>
 * <li>zero or more DEFINEDNAME (0x0018) records</li>
 * </ul>
 */
final class LinkTable {
	// TODO make this class into a record aggregate

	/**
	 * A CRN block: one XCT (CRN count) record followed by the CRN records it
	 * announces, read eagerly from the supplied {@link RecordStream}.
	 */
	private static final class CRNBlock {

		private final CRNCountRecord _countRecord;
		private final CRNRecord[] _crns;

		public CRNBlock(RecordStream rs) {
			_countRecord = (CRNCountRecord) rs.getNext();
			int nCRNs = _countRecord.getNumberOfCRNs();
			CRNRecord[] crns = new CRNRecord[nCRNs];
			for (int i = 0; i < crns.length; i++) {
				crns[i] = (CRNRecord) rs.getNext();
			}
			_crns = crns;
		}
		public CRNRecord[] getCrns() {
			// defensive copy: callers must not mutate the internal array
			return _crns.clone();
		}
	}

	/**
	 * One EXTERNALBOOK block: the SupBook record plus its EXTERNALNAME records
	 * and CRN blocks (see the class javadoc for the BIFF8 layout).
	 */
	private static final class ExternalBookBlock {
		private final SupBookRecord _externalBookRecord;
		private ExternalNameRecord[] _externalNameRecords;
		private final CRNBlock[] _crnBlocks;

		public ExternalBookBlock(RecordStream rs) {
			_externalBookRecord = (SupBookRecord) rs.getNext();
			List<Object> temp = new ArrayList<Object>();
			while(rs.peekNextClass() == ExternalNameRecord.class) {
				temp.add(rs.getNext());
			}
			_externalNameRecords = new ExternalNameRecord[temp.size()];
			temp.toArray(_externalNameRecords);
			temp.clear();

			while(rs.peekNextClass() == CRNCountRecord.class) {
				temp.add(new CRNBlock(rs));
			}
			_crnBlocks = new CRNBlock[temp.size()];
			temp.toArray(_crnBlocks);
		}

		/**
		 * Create a new block for external references.
		 */
		public ExternalBookBlock(String url, String[] sheetNames) {
			_externalBookRecord = SupBookRecord.createExternalReferences(url, sheetNames);
			_crnBlocks = new CRNBlock[0];
		}

		/**
		 * Create a new block for internal references. It is called when constructing a new LinkTable.
		 *
		 * @see org.apache.poi.hssf.model.LinkTable#LinkTable(int, WorkbookRecordList)
		 */
		public ExternalBookBlock(int numberOfSheets) {
			_externalBookRecord = SupBookRecord.createInternalReferences((short)numberOfSheets);
			_externalNameRecords = new ExternalNameRecord[0];
			_crnBlocks = new CRNBlock[0];
		}

		/**
		 * Create a new block for registering add-in functions
		 *
		 * @see org.apache.poi.hssf.model.LinkTable#addNameXPtg(String)
		 */
		public ExternalBookBlock() {
			_externalBookRecord = SupBookRecord.createAddInFunctions();
			_externalNameRecords = new ExternalNameRecord[0];
			_crnBlocks = new CRNBlock[0];
		}

		public SupBookRecord getExternalBookRecord() {
			return _externalBookRecord;
		}

		public String getNameText(int definedNameIndex) {
			return _externalNameRecords[definedNameIndex].getText();
		}

		public int getNameIx(int definedNameIndex) {
			return _externalNameRecords[definedNameIndex].getIx();
		}

		/**
		 * Performs case-insensitive search
		 * @return -1 if not found
		 */
		public int getIndexOfName(String name) {
			for (int i = 0; i < _externalNameRecords.length; i++) {
				if(_externalNameRecords[i].getText().equalsIgnoreCase(name)) {
					return i;
				}
			}
			return -1;
		}

		public int getNumberOfNames() {
			return _externalNameRecords.length;
		}

		/** Appends the record to the name array and returns its new index. */
		public int addExternalName(ExternalNameRecord rec){
			ExternalNameRecord[] tmp = new ExternalNameRecord[_externalNameRecords.length + 1];
			System.arraycopy(_externalNameRecords, 0, tmp, 0, _externalNameRecords.length);
			tmp[tmp.length - 1] = rec;
			_externalNameRecords = tmp;
			return _externalNameRecords.length - 1;
		}
	}

	private ExternalBookBlock[] _externalBookBlocks;
	private final ExternSheetRecord _externSheetRecord;
	private final List<NameRecord> _definedNames;
	private final int _recordCount;
	private final WorkbookRecordList _workbookRecordList; // TODO - would be nice to remove this

	public LinkTable(List<Record> inputList, int startIndex, WorkbookRecordList workbookRecordList, Map<String, NameCommentRecord> commentRecords) {

		_workbookRecordList = workbookRecordList;
		RecordStream rs = new RecordStream(inputList, startIndex);

		List<ExternalBookBlock> temp = new ArrayList<ExternalBookBlock>();
		while(rs.peekNextClass() == SupBookRecord.class) {
		   temp.add(new ExternalBookBlock(rs));
		}

		_externalBookBlocks = new ExternalBookBlock[temp.size()];
		temp.toArray(_externalBookBlocks);
		temp.clear();

		if (_externalBookBlocks.length > 0) {
			// If any ExternalBookBlock present, there is always 1 of ExternSheetRecord
			if (rs.peekNextClass() != ExternSheetRecord.class) {
				// not quite - if written by google docs
				_externSheetRecord = null;
			} else {
				_externSheetRecord = readExtSheetRecord(rs);
			}
		} else {
			_externSheetRecord = null;
		}

		_definedNames = new ArrayList<NameRecord>();
		// collect zero or more DEFINEDNAMEs id=0x18,
		//  with their comments if present
		while(true) {
		  Class<? extends Record> nextClass = rs.peekNextClass();
		  if (nextClass == NameRecord.class) {
		    NameRecord nr = (NameRecord)rs.getNext();
		    _definedNames.add(nr);
		  }
		  else if (nextClass == NameCommentRecord.class) {
		    NameCommentRecord ncr = (NameCommentRecord)rs.getNext();
		    commentRecords.put(ncr.getNameText(), ncr);
		  }
		  else {
		    break;
		  }
		}

		_recordCount = rs.getCountRead();
		_workbookRecordList.getRecords().addAll(inputList.subList(startIndex, startIndex + _recordCount));
	}

	private static ExternSheetRecord readExtSheetRecord(RecordStream rs) {
		List<ExternSheetRecord> temp = new ArrayList<ExternSheetRecord>(2);
		while(rs.peekNextClass() == ExternSheetRecord.class) {
			temp.add((ExternSheetRecord) rs.getNext());
		}

		int nItems = temp.size();
		if (nItems < 1) {
			throw new RuntimeException("Expected an EXTERNSHEET record but got ("
					+ rs.peekNextClass().getName() + ")");
		}
		if (nItems == 1) {
			// this is the normal case. There should be just one ExternSheetRecord
			return temp.get(0);
		}
		// Some apps generate multiple ExternSheetRecords (see bug 45698).
		// It seems like the best thing to do might be to combine these into one
		ExternSheetRecord[] esrs = new ExternSheetRecord[nItems];
		temp.toArray(esrs);
		return ExternSheetRecord.combine(esrs);
	}

	public LinkTable(int numberOfSheets, WorkbookRecordList workbookRecordList) {
		_workbookRecordList = workbookRecordList;
		_definedNames = new ArrayList<NameRecord>();
		_externalBookBlocks = new ExternalBookBlock[] {
				new ExternalBookBlock(numberOfSheets),
		};
		_externSheetRecord = new ExternSheetRecord();
		_recordCount = 2;

		// tell _workbookRecordList about the 2 new records
		SupBookRecord supbook = _externalBookBlocks[0].getExternalBookRecord();

		int idx = findFirstRecordLocBySid(CountryRecord.sid);
		if(idx < 0) {
			throw new RuntimeException("CountryRecord not found");
		}
		_workbookRecordList.add(idx+1, _externSheetRecord);
		_workbookRecordList.add(idx+1, supbook);
	}

	/**
	 * TODO - would not be required if calling code used RecordStream or similar
	 */
	public int getRecordCount() {
		return _recordCount;
	}

	/**
	 * @param builtInCode a BUILTIN_~ constant from {@link NameRecord}
	 * @param sheetNumber 1-based sheet number
	 */
	public NameRecord getSpecificBuiltinRecord(byte builtInCode, int sheetNumber) {

		Iterator<NameRecord> iterator = _definedNames.iterator();
		while (iterator.hasNext()) {
			NameRecord record = iterator.next();

			//print areas are one based
			if (record.getBuiltInName() == builtInCode && record.getSheetNumber() == sheetNumber) {
				return record;
			}
		}

		return null;
	}

	public void removeBuiltinRecord(byte name, int sheetIndex) {
		//the name array is smaller so searching through it should be faster than
		//using the findFirstXXXX methods
		NameRecord record = getSpecificBuiltinRecord(name, sheetIndex);
		if (record != null) {
			_definedNames.remove(record);
		}
		// TODO - do we need "Workbook.records.remove(...);" similar to that in Workbook.removeName(int namenum) {}?
	}

	public int getNumNames() {
		return _definedNames.size();
	}

	public NameRecord getNameRecord(int index) {
		return _definedNames.get(index);
	}

	public void addName(NameRecord name) {
		_definedNames.add(name);

		// TODO - this is messy
		// Not the most efficient way but the other way was causing too many bugs
		int idx = findFirstRecordLocBySid(ExternSheetRecord.sid);
		if (idx == -1) idx = findFirstRecordLocBySid(SupBookRecord.sid);
		if (idx == -1) idx = findFirstRecordLocBySid(CountryRecord.sid);
		int countNames = _definedNames.size();
		_workbookRecordList.add(idx+countNames, name);
	}

	public void removeName(int namenum) {
		_definedNames.remove(namenum);
	}

	/**
	 * checks if the given name is already included in the linkTable
	 */
	public boolean nameAlreadyExists(NameRecord name) {
		// Check to ensure no other names have the same case-insensitive name
		for ( int i = getNumNames()-1; i >=0; i-- ) {
			NameRecord rec = getNameRecord(i);
			if (rec != name) {
				if (isDuplicatedNames(name, rec))
					return true;
			}
		}
		return false;
	}

	private static boolean isDuplicatedNames(NameRecord firstName, NameRecord lastName) {
		return lastName.getNameText().equalsIgnoreCase(firstName.getNameText())
			&& isSameSheetNames(firstName, lastName);
	}
	private static boolean isSameSheetNames(NameRecord firstName, NameRecord lastName) {
		return lastName.getSheetNumber() == firstName.getSheetNumber();
	}

	/**
	 * @return null if the reference is internal (not to an external book);
	 *         otherwise { url, firstSheetName } or { url, firstSheetName, lastSheetName }.
	 */
	public String[] getExternalBookAndSheetName(int extRefIndex) {
		int ebIx = _externSheetRecord.getExtbookIndexFromRefIndex(extRefIndex);
		SupBookRecord ebr = _externalBookBlocks[ebIx].getExternalBookRecord();
		if (!ebr.isExternalReferences()) {
			return null;
		}
		// Sheet name only applies if not a global reference
		int shIx1 = _externSheetRecord.getFirstSheetIndexFromRefIndex(extRefIndex);
		int shIx2 = _externSheetRecord.getLastSheetIndexFromRefIndex(extRefIndex);
		String firstSheetName = null;
		String lastSheetName = null;
		if(shIx1 >= 0) {
			firstSheetName = ebr.getSheetNames()[shIx1];
		}
		if (shIx2 >= 0) {
			lastSheetName = ebr.getSheetNames()[shIx2];
		}
		if (shIx1 == shIx2) {
			return new String[] {
					ebr.getURL(),
					firstSheetName
			};
		} else {
			return new String[] {
					ebr.getURL(),
					firstSheetName,
					lastSheetName
			};
		}
	}

	/** @return index of the external-references block with the given URL, or -1. */
	private int getExternalWorkbookIndex(String workbookName) {
		for (int i=0; i<_externalBookBlocks.length; i++) {
			SupBookRecord ebr = _externalBookBlocks[i].getExternalBookRecord();
			if (!ebr.isExternalReferences()) {
				continue;
			}
			if (workbookName.equals(ebr.getURL())) { // not sure if 'equals()' works when url has a directory
				return i;
			}
		}
		return -1;
	}

	/** Links the given workbook under the given name, or returns the existing link index. */
	public int linkExternalWorkbook(String name, Workbook externalWorkbook) {
		int extBookIndex = getExternalWorkbookIndex(name);
		if (extBookIndex != -1) {
			// Already linked!
			return extBookIndex;
		}

		// Create a new SupBookRecord
		String[] sheetNames = new String[externalWorkbook.getNumberOfSheets()];
		for (int sn=0; sn<sheetNames.length; sn++) {
			sheetNames[sn] = externalWorkbook.getSheetName(sn);
		}
		// leading NUL marks an encoded (self-referencing style) URL -- see SupBookRecord
		String url = "\000" + name;
		ExternalBookBlock block = new ExternalBookBlock(url, sheetNames);

		// Add it into the list + records
		extBookIndex = extendExternalBookBlocks(block);

		// add the created SupBookRecord before ExternSheetRecord
		int idx = findFirstRecordLocBySid(ExternSheetRecord.sid);
		if (idx == -1) {
			idx = _workbookRecordList.size();
		}
		_workbookRecordList.add(idx, block.getExternalBookRecord());

		// Setup links for the sheets
		for (int sn=0; sn<sheetNames.length; sn++) {
			_externSheetRecord.addRef(extBookIndex, sn, sn);
		}

		// Report where it went
		return extBookIndex;
	}

	public int getExternalSheetIndex(String workbookName, String firstSheetName, String lastSheetName) {
		int externalBookIndex = getExternalWorkbookIndex(workbookName);
		if (externalBookIndex == -1) {
			throw new RuntimeException("No external workbook with name '" + workbookName + "'");
		}
		SupBookRecord ebrTarget = _externalBookBlocks[externalBookIndex].getExternalBookRecord();

		int firstSheetIndex = getSheetIndex(ebrTarget.getSheetNames(), firstSheetName);
		int lastSheetIndex = getSheetIndex(ebrTarget.getSheetNames(), lastSheetName);

		// Find or add the external sheet record definition for this
		int result = _externSheetRecord.getRefIxForSheet(externalBookIndex, firstSheetIndex, lastSheetIndex);
		if (result < 0) {
			result = _externSheetRecord.addRef(externalBookIndex, firstSheetIndex, lastSheetIndex);
		}
		return result;
	}

	private static int getSheetIndex(String[] sheetNames, String sheetName) {
		for (int i = 0; i < sheetNames.length; i++) {
			if (sheetNames[i].equals(sheetName)) {
				return i;
			}
		}
		throw new RuntimeException("External workbook does not contain sheet '" + sheetName + "'");
	}

	/**
	 * @param extRefIndex as from a {@link Ref3DPtg} or {@link Area3DPtg}
	 * @return -1 if the reference is to an external book
	 */
	public int getFirstInternalSheetIndexForExtIndex(int extRefIndex) {
		if (extRefIndex >= _externSheetRecord.getNumOfRefs() || extRefIndex < 0) {
			return -1;
		}
		return _externSheetRecord.getFirstSheetIndexFromRefIndex(extRefIndex);
	}
	/**
	 * @param extRefIndex as from a {@link Ref3DPtg} or {@link Area3DPtg}
	 * @return -1 if the reference is to an external book
	 */
	public int getLastInternalSheetIndexForExtIndex(int extRefIndex) {
		if (extRefIndex >= _externSheetRecord.getNumOfRefs() || extRefIndex < 0) {
			return -1;
		}
		return _externSheetRecord.getLastSheetIndexFromRefIndex(extRefIndex);
	}

	public void removeSheet(int sheetIdx) {
		_externSheetRecord.removeSheet(sheetIdx);
	}

	public int checkExternSheet(int sheetIndex) {
		return checkExternSheet(sheetIndex, sheetIndex);
	}

	/** Finds (or creates) the extern-sheet ref index for the given internal sheet range. */
	public int checkExternSheet(int firstSheetIndex, int lastSheetIndex) {
		int thisWbIndex = -1; // this is probably always zero
		for (int i=0; i<_externalBookBlocks.length; i++) {
			SupBookRecord ebr = _externalBookBlocks[i].getExternalBookRecord();
			if (ebr.isInternalReferences()) {
				thisWbIndex = i;
				break;
			}
		}
		if (thisWbIndex < 0) {
			throw new RuntimeException("Could not find 'internal references' EXTERNALBOOK");
		}

		//Trying to find reference to this sheet
		int i = _externSheetRecord.getRefIxForSheet(thisWbIndex, firstSheetIndex, lastSheetIndex);
		if (i>=0) {
			return i;
		}
		//We haven't found reference to this sheet
		return _externSheetRecord.addRef(thisWbIndex, firstSheetIndex, lastSheetIndex);
	}

	/**
	 * copied from Workbook
	 */
	private int findFirstRecordLocBySid(short sid) {
		int index = 0;
		for (Iterator<Record> iterator = _workbookRecordList.iterator(); iterator.hasNext(); ) {
			Record record = iterator.next();
			if (record.getSid() == sid) {
				return index;
			}
			index ++;
		}
		return -1;
	}

	/** Resolves the display text for a NameX reference (external or workbook-scoped). */
	public String resolveNameXText(int refIndex, int definedNameIndex, InternalWorkbook workbook) {
		int extBookIndex = _externSheetRecord.getExtbookIndexFromRefIndex(refIndex);
		int firstTabIndex = _externSheetRecord.getFirstSheetIndexFromRefIndex(refIndex);
		if (firstTabIndex == -1) {
			// The referenced sheet could not be found
			throw new RuntimeException("Referenced sheet could not be found");
		}

		// Does it exist via the external book block?
		ExternalBookBlock externalBook = _externalBookBlocks[extBookIndex];
		if (externalBook._externalNameRecords.length > definedNameIndex) {
			return _externalBookBlocks[extBookIndex].getNameText(definedNameIndex);
		} else if (firstTabIndex == -2) {
			// Workbook scoped name, not actually external after all
			NameRecord nr = getNameRecord(definedNameIndex);
			int sheetNumber = nr.getSheetNumber();
			StringBuffer text = new StringBuffer();
			if (sheetNumber > 0) {
				String sheetName = workbook.getSheetName(sheetNumber-1);
				SheetNameFormatter.appendFormat(text, sheetName);
				text.append("!");
			}
			text.append(nr.getNameText());
			return text.toString();
		} else {
			throw new ArrayIndexOutOfBoundsException(
					"Ext Book Index relative but beyond the supported length, was " +
					extBookIndex + " but maximum is " + _externalBookBlocks.length
			);
		}
	}
	public int resolveNameXIx(int refIndex, int definedNameIndex) {
	    int extBookIndex = _externSheetRecord.getExtbookIndexFromRefIndex(refIndex);
	    return _externalBookBlocks[extBookIndex].getNameIx(definedNameIndex);
	}

	/**
	 * Finds the external name definition for the given name,
	 *  optionally restricted by externsheet index, and returns
	 *  (if found) as a NameXPtg.
	 * @param sheetRefIndex The Extern Sheet Index to look for, or -1 if any
	 */
	public NameXPtg getNameXPtg(String name, int sheetRefIndex) {
		// first find any external book block that contains the name:
		for (int i = 0; i < _externalBookBlocks.length; i++) {
			int definedNameIndex = _externalBookBlocks[i].getIndexOfName(name);
			if (definedNameIndex < 0) {
				continue;
			}

			// Found one
			int thisSheetRefIndex = findRefIndexFromExtBookIndex(i);
			if (thisSheetRefIndex >= 0) {
				// Check for the sheet index match, if requested
				if (sheetRefIndex == -1 || thisSheetRefIndex == sheetRefIndex) {
					return new NameXPtg(thisSheetRefIndex, definedNameIndex);
				}
			}
		}
		return null;
	}

	/**
	 * Register an external name in this workbook
	 *
	 * @param name  the name to register
	 * @return a NameXPtg describing this name
	 */
	public NameXPtg addNameXPtg(String name) {
		int extBlockIndex = -1;
		ExternalBookBlock extBlock = null;

		// find ExternalBlock for Add-In functions and remember its index
		for (int i = 0; i < _externalBookBlocks.length; i++) {
			SupBookRecord ebr = _externalBookBlocks[i].getExternalBookRecord();
			if (ebr.isAddInFunctions()) {
				extBlock = _externalBookBlocks[i];
				extBlockIndex = i;
				break;
			}
		}
		// An ExternalBlock for Add-In functions was not found. Create a new one.
		if (extBlock == null) {
			extBlock = new ExternalBookBlock();
			extBlockIndex = extendExternalBookBlocks(extBlock);

			// add the created SupBookRecord before ExternSheetRecord
			int idx = findFirstRecordLocBySid(ExternSheetRecord.sid);
			_workbookRecordList.add(idx, extBlock.getExternalBookRecord());

			// register the SupBookRecord in the ExternSheetRecord
			// -2 means that the scope of this name is Workbook and the reference applies to the entire workbook.
			_externSheetRecord.addRef(_externalBookBlocks.length - 1, -2, -2);
		}

		// create a ExternalNameRecord that will describe this name
		ExternalNameRecord extNameRecord = new ExternalNameRecord();
		extNameRecord.setText(name);
		// The docs don't explain why Excel set the formula to #REF!
		extNameRecord.setParsedExpression(new Ptg[]{ErrPtg.REF_INVALID});

		int nameIndex = extBlock.addExternalName(extNameRecord);
		int supLinkIndex = 0;
		// find the posistion of the Add-In SupBookRecord in the workbook stream,
		// the created ExternalNameRecord will be appended to it
		for (Iterator<Record> iterator = _workbookRecordList.iterator(); iterator.hasNext(); supLinkIndex++) {
			Record record = iterator.next();
			if (record instanceof SupBookRecord) {
				if (((SupBookRecord) record).isAddInFunctions()) break;
			}
		}
		int numberOfNames = extBlock.getNumberOfNames();
		// a new name is inserted in the end of the SupBookRecord, after the last name
		_workbookRecordList.add(supLinkIndex + numberOfNames, extNameRecord);
		int fakeSheetIdx = -2; /* the scope is workbook*/
		int ix = _externSheetRecord.getRefIxForSheet(extBlockIndex, fakeSheetIdx, fakeSheetIdx);
		return new NameXPtg(ix, nameIndex);
	}

	/** Grows the block array by one and returns the new block's index. */
	private int extendExternalBookBlocks(ExternalBookBlock newBlock) {
		ExternalBookBlock[] tmp = new ExternalBookBlock[_externalBookBlocks.length + 1];
		System.arraycopy(_externalBookBlocks, 0, tmp, 0, _externalBookBlocks.length);
		tmp[tmp.length - 1] = newBlock;
		_externalBookBlocks = tmp;

		return (_externalBookBlocks.length - 1);
	}

	private int findRefIndexFromExtBookIndex(int extBookIndex) {
		return _externSheetRecord.findRefIndexFromExtBookIndex(extBookIndex);
	}

	/**
	 * Changes an external referenced file to another file.
	 * A formular in Excel which refers a cell in another file is saved in two parts:
	 * The referenced file is stored in an reference table. the row/cell information is saved separate.
	 * This method invokation will only change the reference in the lookup-table itself.
	 * @param oldUrl The old URL to search for and which is to be replaced
	 * @param newUrl The URL replacement
	 * @return true if the oldUrl was found and replaced with newUrl. Otherwise false
	 */
	public boolean changeExternalReference(String oldUrl, String newUrl) {
		for(ExternalBookBlock ex : _externalBookBlocks) {
			SupBookRecord externalRecord = ex.getExternalBookRecord();
			if (externalRecord.isExternalReferences()
				&& externalRecord.getURL().equals(oldUrl)) {

				externalRecord.setURL(newUrl);
				return true;
			}
		}
		return false;
	}
}
package org.jboss.resteasy.skeleton.key.as7;

import org.jboss.resteasy.skeleton.key.RSATokenVerifier;
import org.jboss.resteasy.skeleton.key.RealmConfiguration;
import org.jboss.resteasy.skeleton.key.VerificationException;
import org.jboss.resteasy.skeleton.key.as7.i18n.LogMessages;
import org.jboss.resteasy.skeleton.key.as7.i18n.Messages;
import org.jboss.resteasy.skeleton.key.representations.AccessTokenResponse;
import org.jboss.resteasy.skeleton.key.representations.SkeletonKeyToken;
import org.jboss.resteasy.util.BasicAuthHelper;

import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.client.Entity;
import javax.ws.rs.core.Form;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import java.io.IOException;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Implements the browser side of an OAuth authorization-code login for a servlet
 * request/response pair: redirects to the auth server, validates the state cookie,
 * and exchanges the returned code for a verified access token.
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class ServletOAuthLogin
{
   protected HttpServletRequest request;
   protected HttpServletResponse response;
   protected boolean codePresent;
   protected RealmConfiguration realmInfo;
   protected int redirectPort;
   // Results of a successful resolveCode(): the raw token string and its parsed form.
   protected String tokenString;
   protected SkeletonKeyToken token;

   public ServletOAuthLogin(RealmConfiguration realmInfo, HttpServletRequest request, HttpServletResponse response, int redirectPort)
   {
      this.request = request;
      this.response = response;
      this.realmInfo = realmInfo;
      this.redirectPort = redirectPort;
   }

   public String getTokenString()
   {
      return tokenString;
   }

   public SkeletonKeyToken getToken()
   {
      return token;
   }

   public RealmConfiguration getRealmInfo()
   {
      return realmInfo;
   }

   /** Cookie path for the state cookie: the context path, or "/" for the root context. */
   protected String getDefaultCookiePath()
   {
      String path = request.getContextPath();
      if ("".equals(path) || path == null) path = "/";
      return path;
   }

   protected String getRequestUrl()
   {
      return request.getRequestURL().toString();
   }

   protected boolean isRequestSecure()
   {
      return request.isSecure();
   }

   // IOExceptions from the servlet response are rethrown unchecked; callers cannot recover.
   protected void sendError(int code)
   {
      try
      {
         response.sendError(code);
      }
      catch (IOException e)
      {
         throw new RuntimeException(e);
      }
   }

   protected void sendRedirect(String url)
   {
      try
      {
         response.sendRedirect(url);
      }
      catch (IOException e)
      {
         throw new RuntimeException(e);
      }
   }

   /** @return the request cookie with the given name, or null if absent. */
   protected Cookie getCookie(String cookieName)
   {
      if (request.getCookies() == null) return null;
      for (Cookie cookie : request.getCookies())
      {
         if (cookie.getName().equals(cookieName))
         {
            return cookie;
         }
      }
      return null;
   }

   protected String getCookieValue(String cookieName)
   {
      Cookie cookie = getCookie(cookieName);
      if (cookie == null) return null;
      return cookie.getValue();
   }

   /**
    * Extracts a raw query-string parameter by hand (without request.getParameter(),
    * which would consume a form body on POST requests).
    * NOTE(review): the returned value is not URL-decoded — confirm callers expect that.
    */
   protected String getQueryParamValue(String paramName)
   {
      String query = request.getQueryString();
      if (query == null) return null;
      String[] params = query.split("&");
      for (String param : params)
      {
         int eq = param.indexOf('=');
         if (eq == -1) continue;
         String name = param.substring(0, eq);
         if (!name.equals(paramName)) continue;
         return param.substring(eq + 1);
      }
      return null;
   }

   public String getError()
   {
      return getQueryParamValue("error");
   }

   public String getCode()
   {
      return getQueryParamValue("code");
   }

   protected void setCookie(String name, String value, String domain, String path, boolean secure)
   {
      Cookie cookie = new Cookie(name, value);
      if (domain != null) cookie.setDomain(domain);
      if (path != null) cookie.setPath(path);
      if (secure) cookie.setSecure(true);
      response.addCookie(cookie);
   }

   /**
    * Builds the auth-server redirect URI. If SSL is required but the current request
    * is plain HTTP, the redirect_uri is rewritten to https on redirectPort.
    *
    * @return the full authorization URL, or null when SSL is required but disabled
    *         (redirectPort &lt; 0)
    */
   protected String getRedirectUri(String state)
   {
      String url = getRequestUrl();
      if (!isRequestSecure() && realmInfo.isSslRequired())
      {
         int port = redirectPort;
         if (port < 0)
         {
            // disabled?
            return null;
         }
         UriBuilder secureUrl = UriBuilder.fromUri(url).scheme("https").port(-1);
         if (port != 443) secureUrl.port(port);
         url = secureUrl.build().toString();
      }
      return realmInfo.getAuthUrl().clone()
              .queryParam("client_id", realmInfo.getClientId())
              .queryParam("redirect_uri", url)
              .queryParam("state", state)
              .queryParam("login", "true")
              .build().toString();
   }

   protected static final AtomicLong counter = new AtomicLong();

   // Counter prefix makes state codes unique per JVM even if UUIDs ever collide.
   protected String getStateCode()
   {
      return counter.getAndIncrement() + "/" + UUID.randomUUID().toString();
   }

   /** Starts the login: sets the CSRF state cookie and redirects to the auth server. */
   public void loginRedirect()
   {
      String state = getStateCode();
      String redirect = getRedirectUri(state);
      if (redirect == null)
      {
         sendError(Response.Status.FORBIDDEN.getStatusCode());
         return;
      }
      setCookie(realmInfo.getStateCookieName(), state, null, getDefaultCookiePath(), realmInfo.isSslRequired());
      sendRedirect(redirect);
   }

   /**
    * Validates the OAuth "state" query parameter against the state cookie (CSRF check)
    * and expires the cookie. Sends a 400 and returns false on any mismatch.
    */
   public boolean checkStateCookie()
   {
      Cookie stateCookie = getCookie(realmInfo.getStateCookieName());
      if (stateCookie == null)
      {
         sendError(400);
         LogMessages.LOGGER.warn(Messages.MESSAGES.noStateCookie());
         return false;
      }
      // reset the cookie
      Cookie reset = new Cookie(stateCookie.getName(), stateCookie.getValue());
      reset.setPath(stateCookie.getPath());
      reset.setMaxAge(0);
      response.addCookie(reset);

      String stateCookieValue = getCookieValue(realmInfo.getStateCookieName());
      // its ok to call request.getParameter() because this should be a redirect
      String state = request.getParameter("state");
      if (state == null)
      {
         sendError(400);
         LogMessages.LOGGER.warn(Messages.MESSAGES.stateParameterWasNull());
         return false;
      }
      if (!state.equals(stateCookieValue))
      {
         sendError(400);
         LogMessages.LOGGER.warn(Messages.MESSAGES.stateParameterInvalid());
         LogMessages.LOGGER.warn(Messages.MESSAGES.cookie(stateCookieValue));
         LogMessages.LOGGER.warn(Messages.MESSAGES.queryParam(state));
         return false;
      }
      return true;
   }

   /**
    * Start or continue the oauth login process.
    *
    * if code query parameter is not present, then browser is redirected to authUrl. The redirect URL will be
    * the URL of the current request.
    *
    * If code query parameter is present, then an access token is obtained by invoking a secure request to the codeUrl.
    * If the access token is obtained, the browser is again redirected to the current request URL, but any OAuth
    * protocol specific query parameters are removed.
    *
    * @return true if an access token was obtained
    */
   public boolean resolveCode(String code)
   {
      // abort if not HTTPS
      if (realmInfo.isSslRequired() && !isRequestSecure())
      {
         LogMessages.LOGGER.error(Messages.MESSAGES.sslIsRequired());
         sendError(Response.Status.FORBIDDEN.getStatusCode());
         return false;
      }

      if (!checkStateCookie()) return false;

      String client_id = realmInfo.getClientId();
      String password = realmInfo.getCredentials().asMap().getFirst("password");
      // client credentials go in the Basic auth header of the token request
      String authHeader = BasicAuthHelper.createHeader(client_id, password);
      String redirectUri = stripOauthParametersFromRedirect();
      Form form = new Form();
      form.param("grant_type", "authorization_code")
              .param("code", code)
              .param("redirect_uri", redirectUri);

      Response res = realmInfo.getCodeUrl().request().header(HttpHeaders.AUTHORIZATION, authHeader).post(Entity.form(form));
      AccessTokenResponse tokenResponse;
      try
      {
         if (res.getStatus() != 200)
         {
            LogMessages.LOGGER.error(Messages.MESSAGES.failedToTurnCodeIntoToken());
            sendError(Response.Status.FORBIDDEN.getStatusCode());
            return false;
         }
         LogMessages.LOGGER.debug(Messages.MESSAGES.mediaType(res.getMediaType()));
         LogMessages.LOGGER.debug(Messages.MESSAGES.contentTypeHeader(res.getHeaderString("Content-Type")));

         tokenResponse = res.readEntity(AccessTokenResponse.class);
      }
      finally
      {
         res.close();
      }

      tokenString = tokenResponse.getToken();
      try
      {
         // verify RSA signature and claims before trusting the token
         token = RSATokenVerifier.verifyToken(tokenString, realmInfo.getMetadata());
         LogMessages.LOGGER.debug(Messages.MESSAGES.verificationSucceeded());
      }
      catch (VerificationException e)
      {
         LogMessages.LOGGER.error(Messages.MESSAGES.failedVerificationOfToken());
         sendError(Response.Status.FORBIDDEN.getStatusCode());
         return false;
      }
      // redirect to URL without oauth query parameters
      sendRedirect(redirectUri);
      return true;
   }

   /**
    * strip out unwanted query parameters and redirect so bookmarks don't retain oauth protocol bits
    */
   protected String stripOauthParametersFromRedirect()
   {
      StringBuffer buf = request.getRequestURL().append("?").append(request.getQueryString());
      UriBuilder builder = UriBuilder.fromUri(buf.toString())
              .replaceQueryParam("code", null)
              .replaceQueryParam("state", null);
      return builder.build().toString();
   }
}
package com.giraone.samples.pmspoc1.boundary; import java.sql.Connection; import java.util.ArrayList; import java.util.Calendar; import java.util.GregorianCalendar; import java.util.List; import javax.ejb.Stateless; import javax.ejb.TransactionManagement; import javax.ejb.TransactionManagementType; import javax.persistence.EntityManager; import javax.persistence.Persistence; import javax.persistence.PersistenceContext; import javax.persistence.TypedQuery; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.criteria.CriteriaQuery; import javax.persistence.criteria.JoinType; import javax.persistence.criteria.Predicate; import javax.persistence.criteria.Root; import javax.persistence.metamodel.Attribute; import javax.persistence.metamodel.PluralAttribute; import javax.persistence.metamodel.SingularAttribute; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import javax.ws.rs.Consumes; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.Context; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.core.UriBuilder; import com.giraone.samples.common.StringUtil; import com.giraone.samples.common.boundary.BaseEndpoint; import com.giraone.samples.common.boundary.PagingBlock; import com.giraone.samples.common.boundary.model.ErrorInformation; import com.giraone.samples.common.boundary.odata.ODataToJpaQueryBuilder; import com.giraone.samples.common.entity.PersistenceUtil; import com.giraone.samples.common.entity.UserTransactionConstraintViolationException; import com.giraone.samples.common.entity.UserTransactionException; import com.giraone.samples.common.entity.UserTransactional; import com.giraone.samples.pmspoc1.boundary.blobs.BlobManager; import 
com.giraone.samples.pmspoc1.boundary.blobs.BlobModelConfig; import com.giraone.samples.pmspoc1.boundary.blobs.MimeTypeUtil; import com.giraone.samples.pmspoc1.boundary.dto.CostCenterDTO; import com.giraone.samples.pmspoc1.boundary.dto.EmployeeDTO; import com.giraone.samples.pmspoc1.boundary.dto.EmployeeDocumentDTO; import com.giraone.samples.pmspoc1.boundary.dto.EmployeePostalAddressDTO; import com.giraone.samples.pmspoc1.boundary.dto.EmployeeSummaryDTO; import com.giraone.samples.pmspoc1.boundary.dto.EmployeeWithPropertiesDTO; import com.giraone.samples.pmspoc1.entity.CostCenter; import com.giraone.samples.pmspoc1.entity.Employee; import com.giraone.samples.pmspoc1.entity.EmployeeDocument; import com.giraone.samples.pmspoc1.entity.EmployeeDocument_; import com.giraone.samples.pmspoc1.entity.EmployeePostalAddress; import com.giraone.samples.pmspoc1.entity.EmployeePostalAddress_; import com.giraone.samples.pmspoc1.entity.Employee_; /** * REST end point for CRUD operations on "employee" entities. 
*/ @Stateless @TransactionManagement(TransactionManagementType.BEAN) @Path("/employees") public class EmployeeEndpoint extends BaseEndpoint { final static BlobModelConfig BlobModelConfig = new BlobModelConfig( EmployeeDocument.class.getSimpleName(), EmployeeDocument_.SQL_NAME_oid, EmployeeDocument_.SQL_NAME_bytes, EmployeeDocument_.SQL_NAME_byteSize); @PersistenceContext(unitName = PmsCoreApi.PERSISTENCE_UNIT) private EntityManager em; //-- SUMMARY --------------------------------------------------------------------------------------------- @GET @Path("/summary") @Produces("application/json; charset=UTF-8") public Response employeeSummary() { CriteriaBuilder cb = em.getCriteriaBuilder(); CriteriaQuery<Long> countQuery = cb.createQuery(Long.class); Root<Employee> table = countQuery.from(Employee.class); countQuery.select(cb.count(table)); long count = em.createQuery(countQuery).getSingleResult().longValue(); Calendar lastUpdate = new GregorianCalendar(); // Currently a simple time stamp is returned EmployeeSummaryDTO dto = new EmployeeSummaryDTO(count, lastUpdate); return Response.ok(dto).build(); } //-- EMPLOYEE -------------------------------------------------------------------------------------------- /** * Find an employee by its object id. * @param id The entity object id. * @return A found {@link EmployeeWithPropertiesDTO} object (status 200) or status "not found (404). */ @GET @Path("/{employeeId:[0-9][0-9]*}") @Produces("application/json; charset=UTF-8") public Response findEmployeeById(@PathParam("employeeId") long employeeId, @QueryParam("expand") @DefaultValue("") String expand) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<Employee> c = cb.createQuery(Employee.class); final Root<Employee> table = c.from(Employee.class); // This is very import! 
We want the cost center object too (may be null) and use a left join table.fetch(Employee_.costCenter, JoinType.LEFT); final CriteriaQuery<Employee> select = c.select(table); final Predicate predicate = cb.equal(table.get(Employee_.oid), employeeId); select.where(predicate); if (StringUtil.isNotNullOrWhitespace(expand)) { ODataToJpaQueryBuilder<Employee> oDataBuilder = new ODataToJpaQueryBuilder<Employee>(); List<Attribute> expands = oDataBuilder.parseExpandExpression(cb, table, Employee_.class, expand); for (Attribute attribute : expands) { if (attribute instanceof PluralAttribute) table.fetch((PluralAttribute) attribute, JoinType.LEFT); else if (attribute instanceof SingularAttribute) table.fetch((SingularAttribute) attribute, JoinType.LEFT); } } final TypedQuery<Employee> tq = em.createQuery(select); final Employee entity = PersistenceUtil.sanityCheckForSingleResultList(tq.getResultList(), Employee_.SQL_NAME_oid); if (entity != null) { // Now we have to fetch the properties by accessing at least one fake key (this is a bit weird in JPA!) entity.getProperties().get(""); // Now we have to fetch the postal addresses entity.getPostalAddresses().size(); if (logger.isDebugEnabled()) { logger.debug("Employee.findById=isLoaded(addresses)" + Persistence.getPersistenceUtil().isLoaded(entity, "addresses")); } EmployeeWithPropertiesDTO dto = new EmployeeWithPropertiesDTO(entity); return Response.ok(dto).build(); } else { return Response.status(Status.NOT_FOUND).build(); } } /** * Alternate find method to find an employee by its unique "personnelNumber". * @param personnelNumber (this path parameter prefixed with "pnr-") * @return A found {@link EmployeeWithPropertiesDTO} object (status 200) or status "not found (404). 
*/ @GET @Path("/pnr-{personnelNumber:[0-9a-zA-Z][0-9a-zA-Z]*}") @Produces("application/json; charset=UTF-8") public Response findEmployeeByPersonnelNumber(@PathParam("personnelNumber") String personnelNumber, @QueryParam("expand") @DefaultValue("") String expand) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<Employee> c = cb.createQuery(Employee.class); final Root<Employee> table = c.from(Employee.class); // This is very import! We want the cost center object too (may be null) and use a left join // table.fetch(Employee_.costCenter, JoinType.LEFT); // No more needed since "expand" is used. final CriteriaQuery<Employee> select = c.select(table); final Predicate predicate = cb.equal(table.get(Employee_.personnelNumber), personnelNumber); select.where(predicate); if (StringUtil.isNotNullOrWhitespace(expand)) { ODataToJpaQueryBuilder<Employee> oDataBuilder = new ODataToJpaQueryBuilder<Employee>(); List<Attribute> expands = oDataBuilder.parseExpandExpression(cb, table, Employee_.class, expand); for (Attribute attribute : expands) { if (attribute instanceof PluralAttribute) table.fetch((PluralAttribute) attribute, JoinType.LEFT); else if (attribute instanceof SingularAttribute) table.fetch((SingularAttribute) attribute, JoinType.LEFT); } } final TypedQuery<Employee> tq = em.createQuery(select); final Employee entity = PersistenceUtil.sanityCheckForSingleResultList(tq.getResultList(), Employee_.SQL_NAME_personnelNumber); if (entity != null) { // Now we have to fetch the properties by accessing at least one fake key (this is a bit weird in JPA!) entity.getProperties().get(""); // Now we have to fetch the postal addresses entity.getPostalAddresses().size(); EmployeeWithPropertiesDTO dto = new EmployeeWithPropertiesDTO(entity); return Response.ok(dto).build(); } else { return Response.status(Status.NOT_FOUND).build(); } } /** * List all employees with support for OData filters. 
* @param filter OData filter expression * @param orderby OData sort expression * @param skip OData paging * @param top OData paging * @param expand OData expand expression * @return a {@link PagingBlock} object with {@link EmployeeDTO} object. */ @GET @Produces("application/json; charset=UTF-8") public Response listEmployeeBlockwise( @QueryParam("filter") @DefaultValue("") String filter, @QueryParam("orderby") @DefaultValue("") String orderby, @QueryParam("skip") @DefaultValue("0") int skip, @QueryParam("top") @DefaultValue(DEFAULT_PAGING_SIZE) int top, @QueryParam("expand") @DefaultValue("") String expand) { // TypedQuery<Employee> findAllQuery = em.createQuery( // "SELECT DISTINCT e FROM Employee e LEFT JOIN FETCH e.costCenter ORDER // BY e.oid", Employee.class); // Join<Employee, CostCenter> join = table.join(Employee_.costCenter, JoinType.LEFT); final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<Employee> c = cb.createQuery(Employee.class); final Root<Employee> table = c.from(Employee.class); ODataToJpaQueryBuilder<Employee> oDataBuilder = new ODataToJpaQueryBuilder<Employee>(); // This is very import! We want the cost center object too (may be null) and use a left join // table.fetch(Employee_.costCenter, JoinType.LEFT); // No more needed since "expand" is used. 
if (StringUtil.isNotNullOrWhitespace(expand)) { List<Attribute> expands = oDataBuilder.parseExpandExpression(cb, table, Employee_.class, expand); for (Attribute attribute : expands) { if (attribute instanceof PluralAttribute) table.fetch((PluralAttribute) attribute, JoinType.LEFT); else if (attribute instanceof SingularAttribute) table.fetch((SingularAttribute) attribute, JoinType.LEFT); } } final CriteriaQuery<Employee> select = c.select(table); Predicate predicate; try { predicate = oDataBuilder.parseFilterExpression(cb, table, filter); } catch (IllegalArgumentException iae) { // OData arguments are illegal if (logger.isDebugEnabled()) { logger.debug(LOG_TAG, "Illegal OData filter=" + StringUtil.serializeAsJavaString(filter), iae); } return Response.status(Status.BAD_REQUEST).build(); } if (predicate != null) { select.where(predicate); } oDataBuilder.parseOrderExpression(cb, table, select, orderby); // Calculate the total count value using the same "table" - START CriteriaQuery<Long> countQuery = cb.createQuery(Long.class); countQuery.select(cb.count(table)); if (predicate != null) { countQuery.where(predicate); } int totalCount = em.createQuery(countQuery).getSingleResult().intValue(); // Calculate the total count value using the same "table" - END final TypedQuery<Employee> tq = em.createQuery(select); tq.setFirstResult(skip); tq.setMaxResults(top); final List<Employee> searchResults = tq.getResultList(); final List<EmployeeDTO> results = new ArrayList<EmployeeDTO>(); for (Employee searchResult : searchResults) { EmployeeDTO dto = new EmployeeDTO(searchResult); results.add(dto); } PagingBlock<EmployeeDTO> pagingBlock = new PagingBlock<EmployeeDTO>(); pagingBlock.setBlockCounter(skip); pagingBlock.setBlockSize(top); pagingBlock.setTotalCount(totalCount); pagingBlock.setBlockItems(results); return Response.ok(pagingBlock).build(); } /** * Create a new entity * @param dto the new employee's data. 
* @return Status CREATED (success), BAD_REQUEST (argument fault), * CONFLICT (duplicate personnelNumber) or NOT_FOUND (created but not found in DB); */ @POST @Consumes("application/json") @UserTransactional public Response createEmployee(EmployeeWithPropertiesDTO dto) { final Employee entity; // Do not accept changes of member objects - in this case postal addresses if (dto.getPostalAddresses() != null) dto.getPostalAddresses().clear(); // Do not accept changes of member objects - in this case documents if (dto.getDocuments() != null) dto.getDocuments().clear(); try { entity = dto.entityFromDTO(); } catch (IllegalArgumentException iae) { logger.warn(LOG_TAG, "create: invalid data for employee with personnelNumber=" + dto.getPersonnelNumber(), iae); ErrorInformation errorInformation = new ErrorInformation(); errorInformation.setCode(ErrorInformation.FIELD_VALIDATION_FAILED); errorInformation.setMessage("invalid arguments"); return Response.status(Status.BAD_REQUEST).entity(errorInformation).build(); } // If cost center is given with an oid, it is not updated! It is fetched always fresh from the database. 
final CostCenterDTO costCenterDto = dto.getCostCenter(); if (costCenterDto != null && costCenterDto.getOid() > 0) { entity.setCostCenter(fetchCostCenterOfEmployeeByOid(costCenterDto.getOid(), "personnelNumber" + dto.getPersonnelNumber())); } em.persist(entity); return Response .created(UriBuilder.fromResource(EmployeeEndpoint.class).path(String.valueOf(entity.getOid())).build()) .build(); } @PUT @Path("/{employeeId:[0-9][0-9]*}") @Consumes("application/json") @UserTransactional public Response updateEmployee(@PathParam("employeeId") long employeeId, EmployeeWithPropertiesDTO dto) { if (dto == null || employeeId == 0) { return Response.status(Status.BAD_REQUEST).build(); } if (employeeId != dto.getOid()) { logger.warn(LOG_TAG, "update: CONFLICT for employee with put.oid=" + employeeId + " not matching dto.oid=" + dto.getOid()); ErrorInformation errorInformation = new ErrorInformation(); errorInformation.setMessage("CONFLICT for employee with put.oid=" + employeeId + " not matching dto.oid=" + dto.getOid()); return Response.status(Status.CONFLICT).entity(errorInformation).build(); } Employee entity = em.find(Employee.class, employeeId); if (entity == null) { return Response.status(Status.NOT_FOUND).build(); } // Do not accept changes of member objects - in this case postal addresses if (dto.getPostalAddresses() != null) dto.getPostalAddresses().clear(); // Do not accept changes of member objects - in this case documents if (dto.getDocuments() != null) dto.getDocuments().clear(); // Now we have to fetch the properties by accessing at least one fake key (this is a bit weird in JPA!) 
entity.getProperties().get(""); // Now we have to fetch the postal addresses entity.getPostalAddresses().size(); // Now we have to fetch the existing cost center CostCenter existingCostCenter = entity.getCostCenter(); // And now we merge changed data from the DTO (no cost center, no postal addresses) try { entity = dto.mergeFromDTO(entity, em); } catch (IllegalArgumentException iae) { logger.warn(LOG_TAG, "update: invalid data for employee with oid=" + employeeId, iae); ErrorInformation errorInformation = new ErrorInformation(); errorInformation.setCode(ErrorInformation.FIELD_VALIDATION_FAILED); errorInformation.setMessage("invalid arguments"); return Response.status(Status.BAD_REQUEST).entity(dto).build(); } // If cost center is given, it cannot be updated! It is always fetched fresh from the database. final CostCenterDTO costCenterDto = dto.getCostCenter(); if (costCenterDto != null) { if (costCenterDto.getOid() > 0) { // Was the cost center changed? if (existingCostCenter == null || costCenterDto.getOid() != existingCostCenter.getOid()) { entity.setCostCenter(fetchCostCenterOfEmployeeByOid(costCenterDto.getOid(), "employeeId=" + employeeId)); } } else { throw new IllegalArgumentException("update: employee costCenter oid is null for employeeId=" + employeeId); } } else { // Currently, we do not remove cost centers from the employee. //entity.setCostCenter(null); // Remove the cost center } // and persist everything entity = em.merge(entity); return Response.noContent().build(); } /** * Delete an employee using its object id. * @param id the employee object id. * @return NOT_FOUND (employee not found) or NO_CONTENT (success). 
*/ @DELETE @Path("/{employeeId:[0-9][0-9]*}") @UserTransactional public Response deleteEmployeeById(@PathParam("employeeId") long employeeId) { Employee entity = em.find(Employee.class, employeeId); if (entity == null) { return Response.status(Status.NOT_FOUND).build(); } em.remove(entity); return Response.noContent().build(); } //-- ADDRESS --------------------------------------------------------------------------------------------- /** * Find an employee address by its object id. * @param employeeId The employee's object id. * @param addressId The postal addresses object id. * @return A found {@link EmployeePostalAddressDTO} object (status 200) or status "not found (404). */ @GET @Path("/{employeeId:[0-9][0-9]*}/addresses/{addressId:[0-9][0-9]*}") @Produces("application/json; charset=UTF-8") public Response findPostalAddressById(@PathParam("employeeId") long employeeId, @PathParam("addressId") long addressId) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<EmployeePostalAddress> c = cb.createQuery(EmployeePostalAddress.class); final Root<EmployeePostalAddress> table = c.from(EmployeePostalAddress.class); final CriteriaQuery<EmployeePostalAddress> select = c.select(table); final Predicate predicate = cb.and( cb.equal(table.get(EmployeePostalAddress_.employee).get(Employee_.oid), employeeId), cb.equal(table.get(EmployeePostalAddress_.oid), addressId)); select.where(predicate); final TypedQuery<EmployeePostalAddress> tq = em.createQuery(select); final EmployeePostalAddress entity = PersistenceUtil.sanityCheckForSingleResultList(tq.getResultList(), EmployeePostalAddress_.SQL_NAME_oid); if (entity != null) { EmployeePostalAddressDTO dto = new EmployeePostalAddressDTO(entity); return Response.ok(dto).build(); } else { return Response.status(Status.NOT_FOUND).build(); } } /** * List all postal addresses of an employee ordered by their ranking. * @param employeeId The employee's object id. 
* @return list of {@link EmployeePostalAddressDTO} object. */ @GET @Path("/{employeeId:[0-9][0-9]*}/addresses") @Produces("application/json; charset=UTF-8") public List<EmployeePostalAddressDTO> listAllPostalAddresses(@PathParam("employeeId") long employeeId) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<EmployeePostalAddress> c = cb.createQuery(EmployeePostalAddress.class); final Root<EmployeePostalAddress> table = c.from(EmployeePostalAddress.class); final CriteriaQuery<EmployeePostalAddress> select = c.select(table); select.where(cb.equal(table.get(EmployeePostalAddress_.employee).get(Employee_.oid), employeeId)); select.orderBy(cb.asc(table.get(EmployeePostalAddress_.ranking))); final TypedQuery<EmployeePostalAddress> tq = em.createQuery(select); final List<EmployeePostalAddress> searchResults = tq.getResultList(); final List<EmployeePostalAddressDTO> results = new ArrayList<EmployeePostalAddressDTO>(); for (EmployeePostalAddress searchResult : searchResults) { EmployeePostalAddressDTO dto = new EmployeePostalAddressDTO(searchResult); results.add(dto); } return results; } /** * Create a new employee address * @param employeeId The employee's object id. * @param dto The new employee postal address data. 
* @return Status CREATED (success), BAD_REQUEST (argument fault), NOT_FOUND (created but not found in DB); */ @POST @Path("/{employeeId:[0-9][0-9]*}/addresses") @Consumes("application/json") @UserTransactional public Response createPostalAddress(@PathParam("employeeId") long employeeId, EmployeePostalAddressDTO dto) { Employee employee = em.find(Employee.class, employeeId); if (employee == null) { return Response.status(Status.NOT_FOUND).build(); } final EmployeePostalAddress entity = dto.entityFromDTO(); // TODO: Concept for setting defaults is needed if (entity.getCountryCode() == null) { entity.setCountryCode("DE"); } entity.setEmployee(employee); em.persist(entity); return Response .created(UriBuilder.fromResource(EmployeeEndpoint.class) .path(String.valueOf(employeeId)) .path("addresses") .path(String.valueOf(entity.getOid())) .build()) .build(); } /** * Update an employee's postal address using its object id. * @param employeeId The employee's object id. * @param addressId The postal addresses object id. * @return NOT_FOUND (address not found), CONFLICT (id mismatch) or NO_CONTENT (success). 
*/ @PUT @Path("/{employeeId:[0-9][0-9]*}/addresses/{addressId:[0-9][0-9]*}") @Consumes("application/json") @UserTransactional public Response updatePostalAddress(@PathParam("employeeId") long employeeId, @PathParam("addressId") long addressId, EmployeePostalAddressDTO dto) { if (dto == null || employeeId == 0L || addressId == 0L) { return Response.status(Status.BAD_REQUEST).build(); } if (addressId != dto.getOid()) { logger.warn(LOG_TAG, "update: CONFLICT PostalAddress put.addressId=" + addressId + " does not match dto.addressId=" + dto.getOid()); return Response.status(Status.CONFLICT).entity(dto).build(); } /* if (!employeeId.equals(dto.getEmployeeId())) { logger.warn(LOG_TAG, "update CONFLICT employeeId=" + employeeId + ", employeeId2=" + dto.getEmployeeId()); return Response.status(Status.CONFLICT).entity(dto).build(); } */ EmployeePostalAddress entity = em.find(EmployeePostalAddress.class, addressId); if (entity == null) { return Response.status(Status.NOT_FOUND).build(); } entity = dto.mergeFromDTO(entity, em); entity = em.merge(entity); return Response.noContent().build(); } /** * Delete an employee's postal address using its object id. * @param employeeId The employee's object id. * @param addressId The postal addresses object id. * @return NOT_FOUND (address not found) or NO_CONTENT (success). */ @DELETE @Path("/{employeeId:[0-9][0-9]*}/addresses/{addressId:[0-9][0-9]*}") @UserTransactional public Response deletePostalAddressById(@PathParam("employeeId") long employeeId, @PathParam("addressId") long addressId) { EmployeePostalAddress entity = em.find(EmployeePostalAddress.class, addressId); if (entity == null) { return Response.status(Status.NOT_FOUND).build(); } em.remove(entity); return Response.noContent().build(); } //-- DOCUMENT -------------------------------------------------------------------------------------------- /** * Find an employee's document by its object id. * @param employeeId The employee's object id. 
* @param documentId The document's object id. * @return A found {@link EmployeeDocumentDTO} object (status 200) or status "not found (404). */ @GET @Path("/{employeeId:[0-9][0-9]*}/documents/{documentId:[0-9][0-9]*}") @Produces("application/json; charset=UTF-8") public Response findDocumentById(@PathParam("employeeId") long employeeId, @PathParam("documentId") long documentId) { final EmployeeDocument entity = this.fetchEmployeeDocument(employeeId, documentId); if (entity != null) { EmployeeDocumentDTO dto = new EmployeeDocumentDTO(entity); return Response.ok(dto).build(); } else { return Response.status(Status.NOT_FOUND).build(); } } /** * List all documents of an employee ordered by their publishing date. * @param employeeId The employee's object id. * @return list of {@link EmployeePostalAddressDTO} object. */ @GET @Path("/{employeeId:[0-9][0-9]*}/documents") @Produces("application/json; charset=UTF-8") public List<EmployeeDocumentDTO> listAllDocuments(@PathParam("employeeId") long employeeId) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<EmployeeDocument> c = cb.createQuery(EmployeeDocument.class); final Root<EmployeeDocument> table = c.from(EmployeeDocument.class); final CriteriaQuery<EmployeeDocument> select = c.select(table); select.where(cb.equal(table.get(EmployeeDocument_.employee).get(Employee_.oid), employeeId)); select.orderBy(cb.asc(table.get(EmployeeDocument_.publishingDate))); final TypedQuery<EmployeeDocument> tq = em.createQuery(select); final List<EmployeeDocument> searchResults = tq.getResultList(); final List<EmployeeDocumentDTO> results = new ArrayList<EmployeeDocumentDTO>(); for (EmployeeDocument searchResult : searchResults) { EmployeeDocumentDTO dto = new EmployeeDocumentDTO(searchResult); results.add(dto); } return results; } /** * Create a new employee document (without content. * @param employeeId The employee's object id. * @param dto The document's meta data. 
* @return Status CREATED (success), NOT_FOUND (created but not found in DB); */ @POST @Path("/{employeeId:[0-9][0-9]*}/documents") @Consumes("application/json") @UserTransactional public Response createDocument(@PathParam("employeeId") long employeeId, EmployeeDocumentDTO dto) { Employee employee = em.find(Employee.class, employeeId); if (employee == null) { return Response.status(Status.NOT_FOUND).build(); } final EmployeeDocument entity = dto.entityFromDTO(); entity.setEmployee(employee); entity.setByteSize(-1L); em.persist(entity); return Response .created(UriBuilder.fromResource(EmployeeEndpoint.class) .path(String.valueOf(employeeId)) .path("documents") .path(String.valueOf(entity.getOid())) .build()) .build(); } /** * Delete an employee document using its object id. * @param employeeId The employee's object id. * @param documentId The document's object id. * @return NOT_FOUND (document not found) or NO_CONTENT (success). */ @DELETE @Path("/{employeeId:[0-9][0-9]*}/documents/{documentId:[0-9][0-9]*}") @UserTransactional public Response deleteDocumentById(@PathParam("employeeId") long employeeId, @PathParam("documentId") long documentId) { EmployeeDocument entity = em.find(EmployeeDocument.class, documentId); if (entity == null) { return Response.status(Status.NOT_FOUND).build(); } em.remove(entity); return Response.noContent().build(); } /** * Add or update the BLOB content of an employee's document * @param employeeId The employee's object id. * @param documentId The document's object id. * @param request The servlet request object, from with the binary content is read. * @return NOT_FOUND (document not found) or CREATED (success). 
*/ @PUT @Path("/{employeeId:[0-9][0-9]*}/documents/{documentId:[0-9][0-9]*}/content") @UserTransactional public Response updateDocumentContent( @PathParam("employeeId") long employeeId, @PathParam("documentId") long documentId, @Context HttpServletRequest request) { int contentLength = request.getContentLength(); String contentType = request.getContentType(); if (logger != null && logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.updateDocumentContent contentLength=" + contentLength + ", contentType=" + contentType + ", employeeId=" + employeeId + ", documentId=" + documentId); if (contentLength >= 0 && contentLength < 10) // plausibility check { return Response.status(Status.BAD_REQUEST).entity("ContentLength " + contentLength + " given, but lower than 10 bytes!").build(); } BlobManager blobManager = new BlobManager(); boolean ret; try { ret = blobManager.streamBlobToDatabase(this.em.unwrap(Connection.class), request.getInputStream(), contentLength, BlobModelConfig, documentId); if (logger != null && logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.updateDocumentContent ret=" + ret); } catch (Exception e) { return Response.status(Status.INTERNAL_SERVER_ERROR).entity("Cannot write BLOB of EmployeeDocument " + documentId + ": " + e.getClass().getName() + " " + e.getMessage()).build(); } if (!ret) { return Response.status(Status.NOT_FOUND).build(); } return Response .created(UriBuilder.fromResource(EmployeeEndpoint.class) .path(String.valueOf(employeeId)) .path("documents") .path(String.valueOf(documentId)) .build()) .build(); } /** * Stream/read the BLOB content of an employee's document. * @param employeeId The employee's object id. * @param documentId The document's object id. * @param response The servlet response object, to with the binary content is streamed/written. * @return NOT_FOUND (document not found), NO_CONTENT (BLOB content is empty) or OK (success). 
*/ @GET @Path("/{employeeId:[0-9][0-9]*}/documents/{documentId:[0-9][0-9]*}/content") @UserTransactional // We need this because otherwise unwrap within BlobManager will not work! public void fetchDocumentContent( @PathParam("employeeId") final long employeeId, @PathParam("documentId") final long documentId, @Context final HttpServletResponse response) { if (logger != null && logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.fetchDocumentContent employeeId=" + employeeId + ", documentId=" + documentId); final BlobManager blobManager = new BlobManager(); final EmployeeDocument employeeDocument = this.fetchEmployeeDocument(employeeId, documentId); if (employeeDocument == null) { response.setStatus(Status.NOT_FOUND.getStatusCode()); return; } if (logger != null && logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.fetchDocumentContent document.byteSize=" + employeeDocument.getByteSize()); if (employeeDocument.getByteSize() == -1) { response.setStatus(Status.NO_CONTENT.getStatusCode()); return; } String extension = MimeTypeUtil.getExtension(employeeDocument.getMimeType()); String fileName = "EmployeeDocument_" + documentId + "." + extension; response.setContentType(employeeDocument.getMimeType()); response.setContentLength((int) employeeDocument.getByteSize()); response.setHeader("Content-Disposition", "filename=" + fileName); response.setStatus(Status.OK.getStatusCode()); /* This does not work, because the stream is processed asynchronously and the em/connection is closed then! 
StreamingOutput stream = new StreamingOutput() { @Override public void write(OutputStream os) throws IOException, WebApplicationException { long size; try { size = blobManager.streamBlobFromDatabase(response.getOutputStream(), BlobModelConfig, documentId); } catch (SQLException e) { e.printStackTrace(); if (logger != null) logger.warn("Cannot read BLOB data for EmployeeDocument " + documentId, e); return; } if (logger != null && logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.fetchDocumentContent " + size + " of " + employeeDocument.getDocumentBytesSize() + " Bytes sent."); } }; return Response.ok(stream).build(); */ // This is not yet perfect, because setStatus is called from the JAX/RS framework again, which leads to warnings: // "WARNING: Cannot set status. Response already committed." long size = -1L; try { size = blobManager.streamBlobFromDatabase(em.unwrap(Connection.class), response.getOutputStream(), BlobModelConfig, documentId); } catch (Exception e) { e.printStackTrace(); if (logger != null) logger.warn("Cannot read BLOB data for EmployeeDocument " + documentId, e); return; } if (logger != null && logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.fetchDocumentContent: " + size + " of " + employeeDocument.getByteSize() + " Bytes sent."); } //-------------------------------------------------------------------------------------------------------- public Object handleUserTransactionException(UserTransactionException userTransactionException) { if (logger.isDebugEnabled()) logger.debug(LOG_TAG, "EmployeeEndpoint.handleTransactionException " + userTransactionException); if (userTransactionException instanceof UserTransactionConstraintViolationException) return Response.status(HTTP_UNPROCESSABLE).build(); else return Response.status(Status.CONFLICT).build(); } private CostCenter fetchCostCenterOfEmployeeByOid(long costCenterOid, String employeeContext) { final CostCenter costCenter = em.find(CostCenter.class, costCenterOid); if 
(costCenter == null) { logger.warn(LOG_TAG, "INVALID costCenterOid=" + costCenterOid + " for " + employeeContext); throw new IllegalArgumentException("No costcenter with oid=" + costCenterOid + " found!"); } return costCenter; } private EmployeeDocument fetchEmployeeDocument(long employeeId, long documentId) { final CriteriaBuilder cb = em.getCriteriaBuilder(); final CriteriaQuery<EmployeeDocument> c = cb.createQuery(EmployeeDocument.class); final Root<EmployeeDocument> table = c.from(EmployeeDocument.class); final CriteriaQuery<EmployeeDocument> select = c.select(table); final Predicate predicate = cb.and( cb.equal(table.get(EmployeeDocument_.employee).get(Employee_.oid), employeeId), cb.equal(table.get(EmployeeDocument_.oid), documentId)); select.where(predicate); final TypedQuery<EmployeeDocument> tq = em.createQuery(select); return PersistenceUtil.sanityCheckForSingleResultList(tq.getResultList(), EmployeeDocument_.SQL_NAME_oid); } }
/*
 * Copyright 2019 The Bazel Authors. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.idea.common.settings;

import static com.google.common.base.Preconditions.checkState;

import com.google.auto.value.AutoValue;
import com.google.idea.common.settings.Property.Getter;
import com.google.idea.common.settings.Property.Setter;
import java.util.Optional;
import java.util.function.Supplier;
import javax.annotation.Nullable;

/**
 * A setting descriptor for an {@link AutoConfigurable}.
 *
 * <p>Describes how to represent the setting in a UI, and retrieve and update its value. This is an
 * immutable value class. It is safe to create static instances; this will not create any Swing
 * components or access/modify setting state.
 */
@AutoValue
public abstract class ConfigurableSetting<ValueT, ComponentT extends SettingComponent<ValueT>> {

  // NOTE: the order of these abstract property methods must match the argument order used in
  // create() below — AutoValue generates the constructor from the declaration order.

  /** The label and search tags used to surface this setting in a searchable settings UI. */
  abstract SearchableText searchableText();

  /** Returns the UI label of this setting. */
  public final String label() {
    return searchableText().label();
  }

  /** Returns a {@link Property} for accessing and modifying the setting. */
  abstract Property<ValueT> setting();

  /** Optional condition; when present and true at component-creation time, the UI is hidden. */
  abstract Optional<Supplier<Boolean>> hideCondition();

  /** Factory that builds the Swing-side component for this setting. */
  abstract ComponentFactory<ComponentT> componentFactory();

  /** Creates a {@link SettingComponent} for representing this setting in a UI. */
  final ComponentT createComponent() {
    ComponentT component = componentFactory().createComponent(label());
    // The hide condition is evaluated once, here; it is not re-checked on later UI updates.
    hideCondition().ifPresent(hide -> component.setEnabledAndVisible(!hide.get()));
    return component;
  }

  /** A factory for creating {@link SettingComponent SettingComponents}. */
  @FunctionalInterface
  public interface ComponentFactory<ComponentT extends SettingComponent<?>> {
    ComponentT createComponent(String label);
  }

  /**
   * Creates a {@link ConfigurableSetting} with the given text, setting property, condition for
   * hiding, and {@link ComponentFactory}.
   */
  public static <ValueT, ComponentT extends SettingComponent<ValueT>>
      ConfigurableSetting<ValueT, ComponentT> create(
          SearchableText searchableText,
          Property<ValueT> settingProperty,
          @Nullable Supplier<Boolean> hideCondition,
          ComponentFactory<ComponentT> componentFactory) {
    return new AutoValue_ConfigurableSetting<>(
        searchableText, settingProperty, Optional.ofNullable(hideCondition), componentFactory);
  }

  /**
   * Returns a builder for creating a {@link ConfigurableSetting}.
   *
   * @param settingsProvider provides the object containing the setting to represent
   */
  public static <SettingsT> Builder<SettingsT> builder(Supplier<SettingsT> settingsProvider) {
    return new Builder<>(settingsProvider);
  }

  // Split builder to support generic type inference.
  // This allows fluent chained calls to determine ValueT and ComponentT,
  // so the caller doesn't have to declare the type up-front on #builder.
  private abstract static class AbstractBuilder<SettingsT, BuilderT> {

    final SearchableText.Builder searchableTextBuilder;
    final Supplier<SettingsT> settingsProvider;
    // Null means "never hidden"; at most one condition may be set (see hideIf).
    @Nullable Supplier<Boolean> hideCondition;

    AbstractBuilder(Supplier<SettingsT> settingsProvider) {
      this.searchableTextBuilder = SearchableText.builder();
      this.settingsProvider = settingsProvider;
      this.hideCondition = null;
    }

    // Copy constructor used when the chain transitions from Builder to TypedBuilder.
    AbstractBuilder(AbstractBuilder<SettingsT, ?> other) {
      this.searchableTextBuilder = other.searchableTextBuilder.build().toBuilder();
      this.settingsProvider = other.settingsProvider;
      this.hideCondition = other.hideCondition;
    }

    // Returns `this` with the concrete builder type, enabling fluent chaining in subclasses.
    abstract BuilderT self();

    /** Sets the UI label for this setting. */
    public BuilderT label(String label) {
      searchableTextBuilder.setLabel(label);
      return self();
    }

    /**
     * Adds search terms, allowing this setting to be a result for the given strings, even if they
     * don't appear in the user-visible label.
     */
    public BuilderT addTags(String... tags) {
      searchableTextBuilder.addTags(tags);
      return self();
    }

    /**
     * Sets a condition for hiding and disabling this setting in the UI.
     *
     * <p>The condition will only be checked when the UI is created, not on subsequent updates.
     *
     * @throws IllegalStateException if {@code hideIf} has already been called. Only one condition
     *     is supported.
     */
    public BuilderT hideIf(Supplier<Boolean> hideCondition) {
      checkState(this.hideCondition == null, "hideIf can only be called once");
      this.hideCondition = hideCondition;
      return self();
    }
  }

  /** A builder for {@link ConfigurableSetting}; calling {@code getter}/{@code setter} fixes ValueT. */
  public static final class Builder<SettingsT>
      extends AbstractBuilder<SettingsT, Builder<SettingsT>> {

    private Builder(Supplier<SettingsT> settingsProvider) {
      super(settingsProvider);
    }

    @Override
    Builder<SettingsT> self() {
      return this;
    }

    /** Sets the {@link Getter} used to retrieve the setting value. */
    public <ValueT> TypedBuilder<SettingsT, ValueT> getter(Getter<SettingsT, ValueT> getter) {
      return new TypedBuilder<SettingsT, ValueT>(this).getter(getter);
    }

    /** Sets the {@link Setter} used to update the setting value. */
    public <ValueT> TypedBuilder<SettingsT, ValueT> setter(Setter<SettingsT, ValueT> setter) {
      return new TypedBuilder<SettingsT, ValueT>(this).setter(setter);
    }
  }

  /** A builder for {@link ConfigurableSetting} whose value type is already fixed. */
  public static final class TypedBuilder<SettingsT, ValueT>
      extends AbstractBuilder<SettingsT, TypedBuilder<SettingsT, ValueT>> {

    // NOTE(review): both fields are presumably expected to be non-null by the time
    // componentFactory() is called; this is not checked here — confirm against Property.create.
    private Getter<SettingsT, ValueT> getter;
    private Setter<SettingsT, ValueT> setter;

    private TypedBuilder(AbstractBuilder<SettingsT, ?> other) {
      super(other);
    }

    @Override
    TypedBuilder<SettingsT, ValueT> self() {
      return this;
    }

    /** Sets the {@link Getter} used to retrieve the setting value. */
    public TypedBuilder<SettingsT, ValueT> getter(Getter<SettingsT, ValueT> getter) {
      this.getter = getter;
      return self();
    }

    /** Sets the {@link Setter} used to update the setting value. */
    public TypedBuilder<SettingsT, ValueT> setter(Setter<SettingsT, ValueT> setter) {
      this.setter = setter;
      return self();
    }

    /**
     * Sets the {@link ComponentFactory} used to create the UI component, and returns the built
     * {@link ConfigurableSetting}.
     */
    public <ComponentT extends SettingComponent<ValueT>>
        ConfigurableSetting<ValueT, ComponentT> componentFactory(
            ComponentFactory<ComponentT> componentFactory) {
      return create(
          searchableTextBuilder.build(),
          Property.create(settingsProvider, getter, setter),
          hideCondition,
          componentFactory);
    }
  }
}
package io.quarkus.it.kafka.streams; import static org.hamcrest.Matchers.containsString; import java.io.File; import java.time.Duration; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Properties; import org.apache.http.HttpStatus; import org.apache.kafka.clients.CommonClientConfigs; import org.apache.kafka.clients.consumer.Consumer; import org.apache.kafka.clients.consumer.ConsumerConfig; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.apache.kafka.clients.consumer.ConsumerRecords; import org.apache.kafka.clients.consumer.KafkaConsumer; import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.ProducerConfig; import org.apache.kafka.clients.producer.ProducerRecord; import org.apache.kafka.common.config.SslConfigs; import org.apache.kafka.common.serialization.IntegerDeserializer; import org.apache.kafka.common.serialization.IntegerSerializer; import org.hamcrest.CoreMatchers; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import io.quarkus.kafka.client.serialization.ObjectMapperDeserializer; import io.quarkus.kafka.client.serialization.ObjectMapperSerializer; import io.quarkus.test.common.QuarkusTestResource; import io.quarkus.test.junit.QuarkusTest; import io.restassured.RestAssured; @QuarkusTestResource(KafkaSSLTestResource.class) @QuarkusTest public class KafkaStreamsTest { private static void addSSL(Properties props) { File sslDir = new File("src/main/resources"); File tsFile = new File(sslDir, "ks-truststore.p12"); props.setProperty(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, "SSL"); props.setProperty(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG, tsFile.getPath()); props.setProperty(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG, "Z_pkTh9xgZovK4t34cGB2o6afT4zZg0L"); props.setProperty(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG, "PKCS12"); 
props.setProperty(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG, ""); } private static Producer<Integer, Customer> createCustomerProducer() { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaSSLTestResource.getBootstrapServers()); props.put(ProducerConfig.CLIENT_ID_CONFIG, "streams-test-producer"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ObjectMapperSerializer.class.getName()); addSSL(props); return new KafkaProducer<>(props); } private static Producer<Integer, Category> createCategoryProducer() { Properties props = new Properties(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaSSLTestResource.getBootstrapServers()); props.put(ProducerConfig.CLIENT_ID_CONFIG, "streams-test-category-producer"); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, IntegerSerializer.class.getName()); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ObjectMapperSerializer.class.getName()); addSSL(props); return new KafkaProducer<>(props); } private static KafkaConsumer<Integer, EnrichedCustomer> createConsumer() { Properties props = new Properties(); props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, KafkaSSLTestResource.getBootstrapServers()); props.put(ConsumerConfig.GROUP_ID_CONFIG, "streams-test-consumer"); props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, IntegerDeserializer.class.getName()); props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, EnrichedCustomerDeserializer.class.getName()); props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true"); props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest"); addSSL(props); KafkaConsumer<Integer, EnrichedCustomer> consumer = new KafkaConsumer<>(props); consumer.subscribe(Collections.singletonList("streams-test-customers-processed")); return consumer; } @Test public void testKafkaStreams() throws Exception { 
testKafkaStreamsNotAliveAndNotReady(); produceCustomers(); Consumer<Integer, EnrichedCustomer> consumer = createConsumer(); List<ConsumerRecord<Integer, EnrichedCustomer>> records = poll(consumer, 4); ConsumerRecord<Integer, EnrichedCustomer> record = records.get(0); Assertions.assertEquals(101, record.key()); EnrichedCustomer customer = record.value(); Assertions.assertEquals(101, customer.id); Assertions.assertEquals("Bob", customer.name); Assertions.assertEquals("B2B", customer.category.name); Assertions.assertEquals("business-to-business", customer.category.value); record = records.get(1); Assertions.assertEquals(102, record.key()); customer = record.value(); Assertions.assertEquals(102, customer.id); Assertions.assertEquals("Becky", customer.name); Assertions.assertEquals("B2C", customer.category.name); Assertions.assertEquals("business-to-customer", customer.category.value); record = records.get(2); Assertions.assertEquals(103, record.key()); customer = record.value(); Assertions.assertEquals(103, customer.id); Assertions.assertEquals("Bruce", customer.name); Assertions.assertEquals("B2B", customer.category.name); Assertions.assertEquals("business-to-business", customer.category.value); record = records.get(3); Assertions.assertEquals(104, record.key()); customer = record.value(); Assertions.assertEquals(104, customer.id); Assertions.assertEquals("Bert", customer.name); Assertions.assertEquals("B2B", customer.category.name); Assertions.assertEquals("business-to-business", customer.category.value); // test interactive query (getting latest result from state store) assertCategoryCount(1, 3); assertCategoryCount(2, 1); testKafkaStreamsAliveAndReady(); RestAssured.when().get("/kafkastreams/state").then().body(CoreMatchers.is("RUNNING")); testMetricsPresent(); // explicitly stopping the pipeline *before* the broker is shut down, as it // otherwise will time out RestAssured.post("/kafkastreams/stop"); } private void testMetricsPresent() { // Look for kafka consumer 
metrics (add .log().all() to examine what they are RestAssured.when().get("/q/metrics").then() .statusCode(200) .body(containsString("kafka_stream_")); } public void testKafkaStreamsNotAliveAndNotReady() throws Exception { RestAssured.get("/q/health/ready").then() .statusCode(HttpStatus.SC_SERVICE_UNAVAILABLE) .body("checks[0].name", CoreMatchers.is("Kafka Streams topics health check")) .body("checks[0].status", CoreMatchers.is("DOWN")) .body("checks[0].data.missing_topics", CoreMatchers.is("streams-test-customers,streams-test-categories")); RestAssured.when().get("/q/health/live").then() .statusCode(HttpStatus.SC_SERVICE_UNAVAILABLE) .body("checks[0].name", CoreMatchers.is("Kafka Streams state health check")) .body("checks[0].status", CoreMatchers.is("DOWN")) .body("checks[0].data.state", CoreMatchers.is("CREATED")); RestAssured.when().get("/q/health").then() .statusCode(HttpStatus.SC_SERVICE_UNAVAILABLE); } public void testKafkaStreamsAliveAndReady() throws Exception { RestAssured.get("/q/health/ready").then() .statusCode(HttpStatus.SC_OK) .body("checks[0].name", CoreMatchers.is("Kafka Streams topics health check")) .body("checks[0].status", CoreMatchers.is("UP")) .body("checks[0].data.available_topics", CoreMatchers.is("streams-test-categories,streams-test-customers")); RestAssured.when().get("/q/health/live").then() .statusCode(HttpStatus.SC_OK) .body("checks[0].name", CoreMatchers.is("Kafka Streams state health check")) .body("checks[0].status", CoreMatchers.is("UP")) .body("checks[0].data.state", CoreMatchers.is("RUNNING")); RestAssured.when().get("/q/health").then() .statusCode(HttpStatus.SC_OK); } private void produceCustomers() { Producer<Integer, Customer> producer = createCustomerProducer(); Producer<Integer, Category> categoryProducer = createCategoryProducer(); categoryProducer.send(new ProducerRecord<>("streams-test-categories", 1, new Category("B2B", "business-to-business"))); categoryProducer.send(new ProducerRecord<>("streams-test-categories", 2, 
new Category("B2C", "business-to-customer"))); producer.send(new ProducerRecord<>("streams-test-customers", 101, new Customer(101, "Bob", 1))); producer.send(new ProducerRecord<>("streams-test-customers", 102, new Customer(102, "Becky", 2))); producer.send(new ProducerRecord<>("streams-test-customers", 103, new Customer(103, "Bruce", 1))); producer.send(new ProducerRecord<>("streams-test-customers", 104, new Customer(104, "Bert", 1))); } private void assertCategoryCount(int categoryId, int expectedCount) throws Exception { int i = 0; Integer actual = null; // retrying for some time as the aggregation might not have finished yet while (i < 50 && !Integer.valueOf(expectedCount).equals(actual)) { actual = getCategoryCount(categoryId); Thread.sleep(100); } Assertions.assertEquals(expectedCount, actual); } private Integer getCategoryCount(int categoryId) { String result = RestAssured.when().get("/kafkastreams/category/" + categoryId).asString(); if (result != null && !result.trim().isEmpty()) { return Integer.valueOf(result); } return null; } private List<ConsumerRecord<Integer, EnrichedCustomer>> poll(Consumer<Integer, EnrichedCustomer> consumer, int expectedRecordCount) { int fetched = 0; List<ConsumerRecord<Integer, EnrichedCustomer>> result = new ArrayList<>(); while (fetched < expectedRecordCount) { ConsumerRecords<Integer, EnrichedCustomer> records = consumer.poll(Duration.ofMillis(20000)); records.forEach(result::add); fetched = result.size(); } return result; } public static class EnrichedCustomerDeserializer extends ObjectMapperDeserializer<EnrichedCustomer> { public EnrichedCustomerDeserializer() { super(EnrichedCustomer.class); } } }
package org.wikipedia.views;

import android.content.Context;
import android.graphics.Canvas;
import android.os.SystemClock;
import android.util.AttributeSet;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.ViewConfiguration;
import android.webkit.WebView;

import org.wikipedia.WikipediaApp;
import org.wikipedia.events.WebViewInvalidateEvent;
import org.wikipedia.util.DimenUtil;

import java.util.ArrayList;
import java.util.List;

/**
 * A {@link WebView} that exposes observable hooks for scroll changes, taps, down/up touch
 * events, content-height changes, and "fast scroll" detection, and that broadcasts an
 * invalidate event on every draw.
 */
public class ObservableWebView extends WebView {
    private static final WebViewInvalidateEvent INVALIDATE_EVENT = new WebViewInvalidateEvent();

    // Listener collections are created in init(); multiple listeners per event are supported.
    private List<OnClickListener> onClickListeners;
    private List<OnScrollChangeListener> onScrollChangeListeners;
    private List<OnDownMotionEventListener> onDownMotionEventListeners;
    private List<OnUpOrCancelMotionEventListener> onUpOrCancelMotionEventListeners;
    private List<OnContentHeightChangedListener> onContentHeightChangedListeners;
    private OnFastScrollListener onFastScrollListener;

    private int contentHeight = 0;        // last content height reported to listeners
    private float touchStartX;            // ACTION_DOWN position, used for tap detection
    private float touchStartY;
    private int touchSlop;                // system touch slop; movement below this counts as a tap
    private long lastScrollTime;          // wall-clock time of the last human scroll
    private int totalAmountScrolled;      // accumulated human scroll distance for fast-scroll detection

    /**
     * Threshold (in pixels) of continuous scrolling, to be considered "fast" scrolling.
     */
    private static final int FAST_SCROLL_THRESHOLD = (int) (1000 * DimenUtil.getDensityScalar());

    /**
     * Maximum single scroll amount (in pixels) to be considered a "human" scroll.
     * Otherwise it's probably a programmatic scroll, which we won't count.
     */
    private static final int MAX_HUMAN_SCROLL = (int) (500 * DimenUtil.getDensityScalar());

    /**
     * Maximum amount of time that needs to elapse before the previous scroll amount
     * is "forgotten." That is, if the user scrolls once, then scrolls again within this
     * time, then the two scroll actions will be added together as one, and counted towards
     * a possible "fast" scroll.
     */
    private static final int MAX_MILLIS_BETWEEN_SCROLLS = 500;

    public void addOnClickListener(OnClickListener onClickListener) {
        onClickListeners.add(onClickListener);
    }

    public void addOnScrollChangeListener(OnScrollChangeListener onScrollChangeListener) {
        onScrollChangeListeners.add(onScrollChangeListener);
    }

    public void addOnDownMotionEventListener(OnDownMotionEventListener onDownMotionEventListener) {
        onDownMotionEventListeners.add(onDownMotionEventListener);
    }

    public void addOnUpOrCancelMotionEventListener(OnUpOrCancelMotionEventListener onUpOrCancelMotionEventListener) {
        onUpOrCancelMotionEventListeners.add(onUpOrCancelMotionEventListener);
    }

    public void addOnContentHeightChangedListener(OnContentHeightChangedListener onContentHeightChangedListener) {
        onContentHeightChangedListeners.add(onContentHeightChangedListener);
    }

    public void setOnFastScrollListener(OnFastScrollListener onFastScrollListener) {
        this.onFastScrollListener = onFastScrollListener;
    }

    /** Detaches every registered listener (e.g. when the host fragment is destroyed). */
    public void clearAllListeners() {
        onClickListeners.clear();
        onScrollChangeListeners.clear();
        onDownMotionEventListeners.clear();
        onUpOrCancelMotionEventListeners.clear();
        onContentHeightChangedListeners.clear();
        onFastScrollListener = null;
    }

    public interface OnClickListener {
        /** @return true if the click was consumed (stops dispatch to remaining listeners). */
        boolean onClick(float x, float y);
    }

    public interface OnScrollChangeListener {
        void onScrollChanged(int oldScrollY, int scrollY, boolean isHumanScroll);
    }

    public interface OnDownMotionEventListener {
        void onDownMotionEvent();
    }

    public interface OnUpOrCancelMotionEventListener {
        void onUpOrCancelMotionEvent();
    }

    public interface OnContentHeightChangedListener {
        void onContentHeightChanged(int contentHeight);
    }

    public interface OnFastScrollListener {
        void onFastScroll();
    }

    public void copyToClipboard() {
        // Simulate a Ctrl-C key press, which copies the current selection to the clipboard.
        // Seems to work across all APIs.
        dispatchKeyEvent(new KeyEvent(SystemClock.uptimeMillis(), SystemClock.uptimeMillis(),
                KeyEvent.ACTION_DOWN, KeyEvent.KEYCODE_C, 0, KeyEvent.META_CTRL_ON));
        dispatchKeyEvent(new KeyEvent(SystemClock.uptimeMillis(), SystemClock.uptimeMillis(),
                KeyEvent.ACTION_UP, KeyEvent.KEYCODE_C, 0, KeyEvent.META_CTRL_ON));
    }

    public ObservableWebView(Context context) {
        super(context);
        init();
    }

    public ObservableWebView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public ObservableWebView(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
        init();
    }

    private void init() {
        onClickListeners = new ArrayList<>();
        onScrollChangeListeners = new ArrayList<>();
        onDownMotionEventListeners = new ArrayList<>();
        onUpOrCancelMotionEventListeners = new ArrayList<>();
        onContentHeightChangedListeners = new ArrayList<>();
        touchSlop = ViewConfiguration.get(getContext()).getScaledTouchSlop();
    }

    @Override
    protected void onScrollChanged(int left, int top, int oldLeft, int oldTop) {
        super.onScrollChanged(left, top, oldLeft, oldTop);
        // A very large jump is assumed to be programmatic; listeners are still notified,
        // but it does not count toward fast-scroll accumulation.
        boolean isHumanScroll = Math.abs(top - oldTop) < MAX_HUMAN_SCROLL;
        for (OnScrollChangeListener listener : onScrollChangeListeners) {
            listener.onScrollChanged(oldTop, top, isHumanScroll);
        }
        if (!isHumanScroll) {
            return;
        }
        totalAmountScrolled += (top - oldTop);
        if (Math.abs(totalAmountScrolled) > FAST_SCROLL_THRESHOLD && onFastScrollListener != null) {
            onFastScrollListener.onFastScroll();
            totalAmountScrolled = 0;
        }
        lastScrollTime = System.currentTimeMillis();
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        switch (event.getActionMasked()) {
            case MotionEvent.ACTION_DOWN:
                for (OnDownMotionEventListener listener : onDownMotionEventListeners) {
                    listener.onDownMotionEvent();
                }
                // Forget accumulated scrolling if the user paused longer than the window.
                if (System.currentTimeMillis() - lastScrollTime > MAX_MILLIS_BETWEEN_SCROLLS) {
                    totalAmountScrolled = 0;
                }
                touchStartX = event.getX();
                touchStartY = event.getY();
                break;
            case MotionEvent.ACTION_UP:
                // Movement within touch slop is treated as a tap; first listener to consume wins.
                if (Math.abs(event.getX() - touchStartX) <= touchSlop
                        && Math.abs(event.getY() - touchStartY) <= touchSlop) {
                    for (OnClickListener listener : onClickListeners) {
                        if (listener.onClick(event.getX(), event.getY())) {
                            return true;
                        }
                    }
                }
                // intentional fall through: an UP event also counts as "up or cancel"
            case MotionEvent.ACTION_CANCEL:
                for (OnUpOrCancelMotionEventListener listener : onUpOrCancelMotionEventListeners) {
                    listener.onUpOrCancelMotionEvent();
                }
                break;
            default:
                // Do nothing for all the other things
                break;
        }
        return super.onTouchEvent(event);
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (isInEditMode()) {
            return;
        }
        // Notify listeners only when the reported content height actually changed.
        if (contentHeight != getContentHeight()) {
            contentHeight = getContentHeight();
            for (OnContentHeightChangedListener listener : onContentHeightChangedListeners) {
                listener.onContentHeightChanged(contentHeight);
            }
        }
        // Broadcast an invalidate event on every draw (outside edit mode).
        WikipediaApp.getInstance().getBus().post(INVALIDATE_EVENT);
    }
}
/* * The MIT License * Copyright (c) 2012 Microsoft Corporation * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package microsoft.exchange.webservices.data.core.request; import microsoft.exchange.webservices.data.core.EwsServiceXmlWriter; import microsoft.exchange.webservices.data.core.EwsUtilities; import microsoft.exchange.webservices.data.core.ExchangeService; import microsoft.exchange.webservices.data.core.XmlAttributeNames; import microsoft.exchange.webservices.data.core.XmlElementNames; import microsoft.exchange.webservices.data.core.response.ServiceResponse; import microsoft.exchange.webservices.data.enumeration.AffectedTaskOccurrence; import microsoft.exchange.webservices.data.enumeration.ExchangeVersion; import microsoft.exchange.webservices.data.enumeration.SendCancellationsMode; import microsoft.exchange.webservices.data.enumeration.ServiceErrorHandling; import microsoft.exchange.webservices.data.enumeration.XmlNamespace; import microsoft.exchange.webservices.data.exception.ServiceXmlSerializationException; import microsoft.exchange.webservices.data.misc.ItemIdWrapperList; /** * Represents a DeleteItem request. */ public final class DeleteItemRequest extends DeleteRequest<ServiceResponse> { /** * The item ids. */ private ItemIdWrapperList itemIds = new ItemIdWrapperList(); /** * The affected task occurrences. */ private AffectedTaskOccurrence affectedTaskOccurrences; /** * The send cancellations mode. */ private SendCancellationsMode sendCancellationsMode; /** * Initializes a new instance of the class. * * @param service the service * @param errorHandlingMode the error handling mode * @throws Exception */ public DeleteItemRequest(ExchangeService service, ServiceErrorHandling errorHandlingMode) throws Exception { super(service, errorHandlingMode); } /** * Validate request. * * @throws Exception the exception */ @Override protected void validate() throws Exception { super.validate(); EwsUtilities.validateParam(this.itemIds, "ItemIds"); } /** * Gets the expected response message count. 
* * @return Number of expected response messages */ @Override protected int getExpectedResponseMessageCount() { return this.itemIds.getCount(); } /** * Creates the service response. * * @param service the service * @param responseIndex the response index * @return Service response. */ @Override protected ServiceResponse createServiceResponse(ExchangeService service, int responseIndex) { return new ServiceResponse(); } /** * Gets the name of the XML element. * * @return XML element name */ @Override public String getXmlElementName() { return XmlElementNames.DeleteItem; } /** * Gets the name of the response XML element. * * @return XML element name */ @Override protected String getResponseXmlElementName() { return XmlElementNames.DeleteItemResponse; } /** * Gets the name of the response message XML element. * * @return XML element name */ @Override protected String getResponseMessageXmlElementName() { return XmlElementNames.DeleteItemResponseMessage; } /** * Writes XML attribute. * * @param writer the writer * @throws microsoft.exchange.webservices.data.exception.ServiceXmlSerializationException the service xml serialization exception */ @Override protected void writeAttributesToXml(EwsServiceXmlWriter writer) throws ServiceXmlSerializationException { super.writeAttributesToXml(writer); if (this.affectedTaskOccurrences != null) { writer.writeAttributeValue( XmlAttributeNames.AffectedTaskOccurrences, this .getAffectedTaskOccurrences()); } if (this.sendCancellationsMode != null) { writer.writeAttributeValue( XmlAttributeNames.SendMeetingCancellations, this .getSendCancellationsMode()); } } /** * Writes XML elements. * * @param writer the writer * @throws Exception the exception */ @Override protected void writeElementsToXml(EwsServiceXmlWriter writer) throws Exception { this.itemIds.writeToXml(writer, XmlNamespace.Messages, XmlElementNames.ItemIds); } /** * Gets the request version. * * @return Earliest Exchange version in which this request is supported. 
*/ @Override protected ExchangeVersion getMinimumRequiredServerVersion() { return ExchangeVersion.Exchange2007_SP1; } /** * Gets the item ids. * * @return the item ids */ public ItemIdWrapperList getItemIds() { return this.itemIds; } /** * Gets the affected task occurrences. * * @return the affected task occurrences */ AffectedTaskOccurrence getAffectedTaskOccurrences() { return this.affectedTaskOccurrences; } /** * Sets the affected task occurrences. * * @param affectedTaskOccurrences the new affected task occurrences */ public void setAffectedTaskOccurrences(AffectedTaskOccurrence affectedTaskOccurrences) { this.affectedTaskOccurrences = affectedTaskOccurrences; } /** * Gets the send cancellations. * * @return the send cancellations mode */ SendCancellationsMode getSendCancellationsMode() { return this.sendCancellationsMode; } /** * Sets the send cancellations mode. * * @param sendCancellationsMode the new send cancellations mode */ public void setSendCancellationsMode(SendCancellationsMode sendCancellationsMode) { this.sendCancellationsMode = sendCancellationsMode; } }
/* * Copyright (c) 2015, Alachisoft. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.alachisoft.tayzgrid.caching.topologies.clustered; import com.alachisoft.tayzgrid.caching.CacheRuntimeContext; import com.alachisoft.tayzgrid.caching.IGRShutDown; import com.alachisoft.tayzgrid.caching.ShutDownStatus; import com.alachisoft.tayzgrid.common.monitoring.ServerMonitor; import com.alachisoft.tayzgrid.common.datastructures.IOptimizedQueueOperation; import com.alachisoft.tayzgrid.common.ServicePropValues; import com.alachisoft.tayzgrid.common.threading.Latch; import com.alachisoft.tayzgrid.runtime.util.TimeSpan; import java.util.*; public class AsyncItemReplicator implements Runnable, IGRShutDown { private CacheRuntimeContext _context = null; private TimeSpan _interval = new TimeSpan(0, 0, 2); private Thread runner = null; private OptimizedQueue _queue = new OptimizedQueue(); private java.util.HashMap _updateIndexKeys = new java.util.HashMap(); private long _uniqueKeyNumber; private int _updateIndexMoveThreshhold = 200; private int _moveCount; private boolean stopped = true; private int _bulkKeysToReplicate = 300; private Latch _shutdownStatusLatch = new Latch(ShutDownStatus.NONE); public AsyncItemReplicator(CacheRuntimeContext context, TimeSpan interval) { if (ServicePropValues.CacheServer_BulkItemsToReplicate != null) { _bulkKeysToReplicate = Integer.decode(ServicePropValues.CacheServer_BulkItemsToReplicate); } this._context = context; this._interval = 
interval; } /** * Creates a new Thread and Starts it. */ public final void Start() { if (stopped) { stopped = false; runner = new Thread(this); runner.setDaemon(true); runner.setName("AsyncItemReplicationThread"); runner.start(); } } /** * An operation to update an index on the replica node is queued to be * replicate. These operations are send in bulk to the replica node. * * @param key */ public final void AddUpdateIndexKey(Object key) { synchronized (_updateIndexKeys) { _updateIndexKeys.put(key, null); _context.PerfStatsColl.incrementSlidingIndexQueueSizeStats(_updateIndexKeys.size()); } } public final void RemoveUpdateIndexKey(Object key) { synchronized (_updateIndexKeys) { _updateIndexKeys.remove(key); _context.PerfStatsColl.incrementSlidingIndexQueueSizeStats(_updateIndexKeys.size()); } } public final void WindUpTask() { if (!stopped) { _context.getCacheLog().CriticalInfo("AsyncItemReplicator", "WindUp Task Started."); if (_queue != null) { _context.getCacheLog().CriticalInfo("AsyncItemReplicator", "Async Replicator Queue Count: " + _queue.getCount()); } _interval = new TimeSpan(0, 0, 0); _shutdownStatusLatch.SetStatusBit(ShutDownStatus.SHUTDOWN_INPROGRESS, ShutDownStatus.NONE); _context.getCacheLog().CriticalInfo("AsyncItemReplicator", "WindUp Task Ended."); } } public final void WaitForShutDown(long interval) { if (!stopped) { _context.getCacheLog().CriticalInfo("AsyncItemReplicator", "Waiting for shutdown task completion."); if (_queue.getCount() > 0) { _shutdownStatusLatch.WaitForAny(ShutDownStatus.SHUTDOWN_COMPLETED, interval * 1000); } if (_queue != null && _queue.getCount() > 0) { _context.getCacheLog().CriticalInfo("AsyncItemReplicator", "Remaining Async Replicator operations: " + _queue.getCount()); } _context.getCacheLog().CriticalInfo("AsyncItemReplicator", "Shutdown task completed."); } } /** * Add the key and entry in teh HashMap for Invalidation by preodic thread. * * @param key The key of the item to invalidate. 
* @param entry CacheEntry to Invalidate. */ public final void EnqueueOperation(Object key, ReplicationOperation operation) { try { if (key == null) { tangible.RefObject<Long> tempRef__uniqueKeyNumber = new tangible.RefObject<Long>(_uniqueKeyNumber); key = UUID.randomUUID().toString() + tempRef__uniqueKeyNumber.argvalue++; _uniqueKeyNumber = tempRef__uniqueKeyNumber.argvalue; } _queue.Enqueue(key, operation); if (ServerMonitor.getMonitorActivity()) { ServerMonitor.LogClientActivity("AsyncReplicator.Enque", "queue_size :" + _queue.getCount()); } _context.PerfStatsColl.incrementMirrorQueueSizeStats(_queue.getCount()); } catch (Exception e) { if (_context.getCacheLog().getIsErrorEnabled()) { _context.getCacheLog().Error("AsyncItemReplicator", String.format("Exception: %1$s", e.toString())); } } } /** * Clears the Queue of any keys for replication. */ public final void Clear() { //keysQue.Clear(); _queue.Clear(); _context.PerfStatsColl.incrementMirrorQueueSizeStats(_queue.getCount()); } /** * Clears the Queue of any keys for replication. */ public final void EnqueueClear(ReplicationOperation operation) { //keysQue.Clear(); _queue.Clear(); this.EnqueueOperation("NcAcHe$Cl@Ea%R", operation); } private Object[] GetIndexOperations() { Object[] keys = null; synchronized (_updateIndexKeys) { _moveCount++; if (_updateIndexKeys.size() >= _updateIndexMoveThreshhold || _moveCount > 2) { if (_updateIndexKeys.size() > 0) { keys = new Object[_updateIndexKeys.size()]; Iterator ide = _updateIndexKeys.entrySet().iterator(); int index = 0; while (ide.hasNext()) { keys[index] = ((Map.Entry) ide.next()).getKey(); index++; } } _moveCount = 0; _updateIndexKeys.clear(); } } return keys; } /** * replication thread function. note: While replicating operations, a dummy * '0' sequence id is passed. this sequence id is totally ignored by * asynchronous por, but we are keeping it to maintain the symmetry in API. 
*/ public final void run() { java.util.ArrayList opCodesToBeReplicated = new java.util.ArrayList(_bulkKeysToReplicate); java.util.ArrayList infoToBeReplicated = new java.util.ArrayList(_bulkKeysToReplicate); java.util.ArrayList compilationInfo = new java.util.ArrayList(_bulkKeysToReplicate); java.util.ArrayList userPayLoad = new java.util.ArrayList(); try { while (!stopped || _queue.getCount() > 0) { java.util.Date startedAt = new java.util.Date(); java.util.Date finishedAt = new java.util.Date(); try { for (int i = 0; _queue.getCount() > 0 && i < _bulkKeysToReplicate; i++) { IOptimizedQueueOperation operation = null; operation = _queue.Dequeue(); Map.Entry entry = (Map.Entry) operation.getData(); opCodesToBeReplicated.add(entry.getKey()); infoToBeReplicated.add(entry.getValue()); if (operation.getUserPayLoad() != null) { for (int j = 0; j < operation.getUserPayLoad().length; j++) { userPayLoad.add(operation.getUserPayLoad()[j]); } } compilationInfo.add(operation.getPayLoadSize()); } Object[] updateIndexKeys = GetIndexOperations(); if (!stopped) { if (opCodesToBeReplicated.size() > 0 || updateIndexKeys != null) { if (updateIndexKeys != null) { opCodesToBeReplicated.add(new Byte(ClusterCacheBase.OpCodes.UpdateIndice.getValue()).intValue()); infoToBeReplicated.add(updateIndexKeys); } _context.getCacheImpl().ReplicateOperations(opCodesToBeReplicated.toArray(new Object[0]), infoToBeReplicated.toArray(new Object[0]), userPayLoad.toArray(new Object[0]), compilationInfo, _context.getCacheImpl().getOperationSequenceId(), _context.getCacheImpl().getCurrentViewId()); } } if (!stopped && _context.PerfStatsColl != null) { _context.PerfStatsColl.incrementMirrorQueueSizeStats(_queue.getCount()); } } catch (Exception e) { if (e.getMessage().toLowerCase().indexOf("operation timeout") >= 0 && !_shutdownStatusLatch.IsAnyBitsSet(ShutDownStatus.SHUTDOWN_INPROGRESS)) { _context.getCacheLog().CriticalInfo("AsyncReplicator.Run", "Bulk operation timedout. 
Retrying the operation."); try { if (!stopped) { _context.getCacheImpl().ReplicateOperations(opCodesToBeReplicated.toArray(new Object[0]), infoToBeReplicated.toArray(new Object[0]), userPayLoad.toArray(new Object[0]), compilationInfo, 0, 0); _context.getCacheLog().CriticalInfo("AsyncReplicator.Run", "RETRY is successfull."); } } catch (Exception ex) { if (_context.getCacheLog().getIsErrorEnabled()) { _context.getCacheLog().Error("AsyncReplicator.RUN", "Error occured while retrying operation. " + ex.toString()); } } } else { if (_context.getCacheLog().getIsErrorEnabled()) { _context.getCacheLog().Error("AsyncReplicator.RUN", e.toString()); } } } finally { opCodesToBeReplicated.clear(); infoToBeReplicated.clear(); compilationInfo.clear(); userPayLoad.clear(); finishedAt = new java.util.Date(); } if (_queue.getCount() > 0) { continue; } else if (_queue.getCount() == 0 && _shutdownStatusLatch.IsAnyBitsSet(ShutDownStatus.SHUTDOWN_INPROGRESS)) { _shutdownStatusLatch.SetStatusBit(ShutDownStatus.SHUTDOWN_COMPLETED, ShutDownStatus.SHUTDOWN_INPROGRESS); return; } if (TimeSpan.Subtract(finishedAt, startedAt).getTotalTicks() < _interval.getTotalTicks()) { Thread.sleep(_interval.getTotalMiliSeconds() - TimeSpan.Subtract(finishedAt, startedAt).getTotalMiliSeconds()); } else { Thread.sleep(_interval.getTotalMiliSeconds()); } } } // Threads are not Aborted in Java catch (InterruptedException ti) { } catch (NullPointerException e) { } catch (Exception e) { if (!stopped) { _context.getCacheLog().Error("AsyncReplicator.RUN", "Async replicator stopped. " + e.toString()); } } } /** * Stops and disposes the Repliaction thread. The thread can be started * using Start method. 
 *
 * @param gracefulStop If true then operations pending in the queue are
 *                     performed on the passive node, otherwise stopped instantly
 */
public final void Stop(boolean gracefulStop) throws InterruptedException {
    stopped = true;
    if (runner != null && runner.isAlive()) {
        if (gracefulStop) {
            // Graceful: wait for run() to drain the queue and exit on its own.
            runner.join();
        } else {
            try {
                if (runner.isAlive()) {
                    _context.getCacheLog().Flush();
                    // NOTE(review): Thread.stop() is deprecated and unsafe;
                    // kept because this is the explicit "stop instantly" path.
                    runner.stop();
                }
            } catch (Exception e) {
            }
        }
        // Best-effort cleanup of any remaining queued operations.
        try {
            Clear();
        } catch (java.lang.Exception e2) {
        }
    }
}

/**
 * Returns the number of operations in the queue.
 */
public final long getQueueCount() {
    return _queue.getCount();
}

/**
 * Terminates the replication thread and disposes the instance.
 */
public final void dispose() {
    try {
        // Non-graceful stop; pending operations are discarded.
        Stop(false);
        runner = null;
    } catch (InterruptedException interruptedException) {
    }
}
}
import java.util.*;
import java.io.*;

/**
 * Refinery simulation: produces FUEL from ORE through a set of chained
 * chemical reactions. Excess chemicals created by over-producing reactions
 * are kept in an inventory (_storage) and consumed before new reactions run.
 */
public class NanoRefinery {

    /**
     * @param reactions all known reactions, each producing one chemical
     * @param debug     when true, prints verbose tracing to stdout
     */
    public NanoRefinery (Vector<Reaction> reactions, boolean debug) {
        _debug = debug;
        _reactions = reactions;
        _storage = new Vector<ChemicalQuantity>(); // where we will store excess ChemicalQuantities
        _amountOfOre = 0;
        if (_debug) {
            Enumeration<Reaction> iter = reactions.elements();
            while (iter.hasMoreElements())
                System.out.println(iter.nextElement());
        }
    }

    /*
     * Create the most amount of fuel from the given amount of ore.
     * Synthesises one fuel batch per loop iteration until the ore budget is
     * exceeded, then reports the fuel count from the previous iteration.
     */
    public final long createMaxFuelFromOre (long oreAvailable) {
        Reaction fuel = findReaction(Chemical.FUEL);
        long fuelCreated = 0L;
        /*
         * Look at the reactions needed to create the amount
         * of fuel. Work backwards from there.
         */
        if (fuel != null) {
            long lastRoundFuel = 0L; // remember the fuel we created last iteration
            _amountOfOre = 0L;
            while (_amountOfOre < oreAvailable) {
                lastRoundFuel = fuelCreated;
                if (_debug) {
                    System.out.println("Fuel created so far: "+lastRoundFuel);
                    System.out.println("Ore used so far: "+_amountOfOre);
                }
                Vector<ChemicalQuantity> fuelChemicalQuantities = fuel.getChemicalQuantities(); // maybe not all reactions loaded are needed
                Enumeration<ChemicalQuantity> iter = fuelChemicalQuantities.elements();
                while (iter.hasMoreElements()) {
                    ChemicalQuantity reaction = iter.nextElement();
                    synthesiseChemical(reaction);
                }
                lastRoundFuel = fuelCreated;
                fuelCreated += fuel.chemicalCreated().getAmount();
            }
            // If we used more ore than available, report the previous fuel amount.
            // NOTE(review): when _amountOfOre lands exactly on oreAvailable the
            // last batch is kept — confirm that boundary is intended.
            if (_amountOfOre > oreAvailable)
                fuelCreated = lastRoundFuel;
        }
        else
            System.out.println("Error! No fuel required?!");
        return fuelCreated;
    }

    /**
     * Computes the total amount of ORE consumed to create one batch of FUEL.
     */
    public final long oreNeeded () {
        Reaction fuel = findReaction(Chemical.FUEL);
        /*
         * Look at the reactions needed to create the amount
         * of fuel. Work backwards from there.
         */
        if (fuel != null) {
            if (_debug)
                System.out.println("\nFuel equation: "+fuel);
            long fuelNeeded = fuel.chemicalCreated().getAmount();
            long fuelCreated = 0;
            _amountOfOre = 0;
            while (fuelCreated < fuelNeeded) {
                Vector<ChemicalQuantity> fuelChemicalQuantities = fuel.getChemicalQuantities(); // maybe not all reactions loaded are needed
                Enumeration<ChemicalQuantity> iter = fuelChemicalQuantities.elements();
                while (iter.hasMoreElements()) {
                    ChemicalQuantity reaction = iter.nextElement();
                    if (_debug)
                        System.out.println("\nWorking on: "+reaction);
                    synthesiseChemical(reaction);
                    if (_debug) {
                        System.out.println("**---------------------------------------");
                        System.out.println("**COMPLETED ORE used for "+reaction+" is "+_amountOfOre);
                        System.out.println("**---------------------------------------");
                        printStorage();
                    }
                }
                fuelCreated += fuel.chemicalCreated().getAmount();
            }
        }
        else
            System.out.println("Error! No fuel required?!");
        return _amountOfOre;
    }

    /*
     * Go through each ChemicalQuantity and try to create the
     * required amount, storing excess in the inventory.
     * So check the inventory first, of course. Recurses into the inputs of
     * non-ORE reactions.
     */
    private void synthesiseChemical (ChemicalQuantity theReaction) {
        Reaction r = findReaction(theReaction.getChemical().getName()); // the reaction for the chemical needed
        int needed = theReaction.getAmount(); // the amount of chemical needed
        if (_debug) {
            System.out.println("\nSynthesising: "+theReaction.getChemical());
            System.out.println("Needed reaction: "+r);
            System.out.println("Quantity of "+theReaction.getChemical()+" needed: "+needed);
            System.out.println("Quantity which would be created from reaction: "+r.chemicalCreated().getAmount());
        }
        int amountCreated = checkInventory(theReaction);
        // Inventory already holds enough: consume it and skip the loop below.
        if (amountCreated >= needed)
            consumeFromInventory(theReaction);
        while (amountCreated < needed) {
            if (r.isOre()) {
                /*
                 * If the chemical needed is ORE then we don't need
                 * to do anything special as it's always available at
                 * whatever quantity.
                 */
                if (_debug)
                    System.out.println("Reaction "+r+" uses ORE");
                int storage = checkInventory(theReaction);
                if (storage >= theReaction.getAmount()) {
                    consumeFromInventory(theReaction);
                    amountCreated += theReaction.getAmount();
                }
                else {
                    // Run the ore-based reaction once: spend its ore cost and
                    // bank the output in the inventory.
                    _amountOfOre += r.getChemicalQuantities().elementAt(0).getAmount();
                    amountCreated += r.chemicalCreated().getAmount();
                    storeChemical(theReaction.getChemical(), r.chemicalCreated().getAmount());
                    if (amountCreated >= needed) {
                        if (_debug)
                            System.out.println("**Created "+(amountCreated - needed)+" more "+theReaction.getChemical()+" than needed");
                        consumeFromInventory(theReaction);
                        amountCreated = needed;
                    }
                }
            }
            else {
                /*
                 * Not ORE so we need to create the chemical using
                 * the reaction. Check the inventory first and update
                 * if we have excess.
                 */
                if (_debug)
                    System.out.println("**Reaction "+r+" does NOT use ORE.");
                int amountStored = checkInventory(theReaction);
                if (amountStored >= needed) {
                    /*
                     * There's enough of the chemical in storage so just
                     * use that and we're done!
                     */
                    consumeFromInventory(theReaction);
                    amountCreated = needed;
                }
                else {
                    /*
                     * Nothing in the store so create some: recursively
                     * synthesise each input of the reaction, then bank the
                     * reaction's output.
                     */
                    Vector<ChemicalQuantity> chemicalQuantities = r.getChemicalQuantities();
                    Enumeration<ChemicalQuantity> iter = chemicalQuantities.elements();
                    while (iter.hasMoreElements()) {
                        ChemicalQuantity reaction = iter.nextElement();
                        synthesiseChemical(reaction);
                    }
                    if (_debug)
                        System.out.println("Created from combining other chemicals!");
                    storeChemical(r.chemicalCreated().getChemical(), r.chemicalCreated().getAmount());
                }
            }
        }
    }

    // Adds `amount` of `chem` to the inventory, merging with an existing
    // entry for the same chemical when one is present.
    // NOTE(review): indexOf relies on ChemicalQuantity.equals matching by
    // chemical (ignoring amount) — confirm in ChemicalQuantity.
    private void storeChemical (Chemical chem, int amount) {
        if (_debug)
            System.out.println("Storing "+amount+" of "+chem+" in the inventory.");
        ChemicalQuantity toStore = new ChemicalQuantity(chem, amount);
        int index = _storage.indexOf(toStore);
        if (index != -1) {
            ChemicalQuantity chemQ = _storage.elementAt(index);
            int currentQuantityInInventory = chemQ.getAmount();
            chemQ.setAmount(currentQuantityInInventory + amount);
            if (_debug)
                System.out.println("Inventory now storing: "+chemQ);
        }
        else {
            if (_debug)
                System.out.println("Adding to storage: "+toStore);
            _storage.add(toStore);
        }
    }

    /*
     * Check to see if the chemical is present in the inventory with at least the amount
     * needed. Returns the stored amount, or 0 when the chemical is absent.
     */
    private int checkInventory (ChemicalQuantity needed) {
        int amountPresent = 0;
        int index = _storage.indexOf(needed);
        if (index != -1)
            return _storage.elementAt(index).getAmount();
        return amountPresent;
    }

    /*
     * Consume the chemical needed and the amount from the inventory.
     * We've already determined presence of the chemical and the amount
     * but double check!
     */
    private boolean consumeFromInventory (ChemicalQuantity needed) {
        if (_debug)
            System.out.println("Consuming "+needed.getAmount()+" of chemical "+needed.getChemical()+" from storage.");
        boolean quantityPresent = false;
        int index = _storage.indexOf(needed);
        if (index != -1) {
            ChemicalQuantity stored = _storage.elementAt(index);
            int amountStored = stored.getAmount();
            if (amountStored >= needed.getAmount()) {
                stored.setAmount(amountStored - needed.getAmount());
                // Drop fully-consumed entries so indexOf stays meaningful.
                if (stored.getAmount() == 0)
                    _storage.remove(stored);
                quantityPresent = true;
            }
            else
                System.out.println("ERROR - Chemical suddenly no longer present at the required amount in the inventory!");
        }
        else
            System.out.println("ERROR - Chemical suddenly no longer present in the inventory!");
        return quantityPresent;
    }

    // Finds the reaction whose output chemical has the given name; null if none.
    private Reaction findReaction (String name) {
        Enumeration<Reaction> iter = _reactions.elements();
        while (iter.hasMoreElements()) {
            Reaction r = iter.nextElement();
            if (r.chemicalCreated().getChemical().getName().equals(name))
                return r;
        }
        return null;
    }

    // Debug helper: dumps the current inventory to stdout.
    private void printStorage () {
        Enumeration<ChemicalQuantity> iter = _storage.elements();
        System.out.println("\nStorage contents.");
        System.out.println("----------");
        while (iter.hasMoreElements())
            System.out.println(iter.nextElement());
        System.out.println("----------");
    }

    private boolean _debug;                    // verbose tracing flag
    private Vector<Reaction> _reactions;       // all known reactions
    private Vector<ChemicalQuantity> _storage; // excess-chemical inventory
    private long _amountOfOre;                 // ore consumed so far
}
package de.dynamobeuth.multiscreen; import de.dynamobeuth.multiscreen.animation.ScreenTransition; import javafx.beans.property.ReadOnlyBooleanProperty; import javafx.beans.property.SimpleBooleanProperty; import javafx.fxml.FXMLLoader; import javafx.scene.Parent; import javafx.scene.effect.GaussianBlur; import javafx.scene.layout.Pane; import javax.naming.InvalidNameException; import java.io.File; import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; import java.util.*; import java.util.jar.JarEntry; import java.util.jar.JarFile; public class ScreenManager extends Pane { private MultiScreenApplication application; private String skin = "default"; private LinkedHashMap<String, Parent> screens = new LinkedHashMap<>(); private LinkedHashMap<String, ScreenController> controllers = new LinkedHashMap<>(); private String previousScreenName = null; private String currentScreenName = null; private SimpleBooleanProperty closeRequestActive = new SimpleBooleanProperty(false); private Set<String> screensShown = new HashSet<>(); private Pane loadingIndicatorOverlay; private Pane shadeScreenOverlay; public ScreenManager(MultiScreenApplication application) { super(); this.application = application; } public void initScreens() { for (Map.Entry<String, URL> view : getViewsByConvention().entrySet()) { try { addScreen(view.getKey(), view.getValue()); } catch (IOException e) { e.printStackTrace(); } } } public void initStylesheets() { initStylesheets(getSkin()); } public void initStylesheets(String skin) { for (String screenName : screens.keySet()) { URL screenCss = application.getClass().getResource("skin/" + skin + "/css/" + screenName + ".css"); if (screenCss != null) { addStylesheetToScreen(screenName, screenCss.toString()); } } } public void addScreen(String name, URL fxmlLocation) throws IOException { FXMLLoader loader = new FXMLLoader(fxmlLocation); // load 
the FXML Parent screen = loader.load(); // add screen to list of screens screens.put(name, screen); // load the Controller ScreenController controller = loader.getController(); // inject application controller.setApplication(application); // inject the current screenManager instance controller.setScreenManager(this); // call prepare() method controller.prepare(); // add controller to list of controllers controllers.put(name, controller); } public boolean addStylesheetToScreen(String name, String path) { try { Parent screen = getScreenByName(name); screen.getStylesheets().add(path); } catch (InvalidNameException e) { e.printStackTrace(); return false; } System.out.println("Added stylesheet for screen '" + name + "': " + path); return true; } public boolean removeScreen(String name) { return screens.remove(name) == null; } public Parent getScreenByName(String name) throws InvalidNameException { Parent screen = screens.get(name); if (screen == null) { throw new InvalidNameException("There is no Screen with the name '" + name + "'."); } return screen; } public boolean showScreen(String name) { try { Parent screen = getScreenByName(name); previousScreenName = currentScreenName; currentScreenName = name; resetScreen(screen); if (!getChildren().isEmpty()) { getChildren().remove(0); } ScreenController screenController = getControllerByName(name); handleControllerBeforeShowMethods(name, screenController); getChildren().add(screen); handleControllerShowMethods(name, screenController); } catch (InvalidNameException e) { e.printStackTrace(); return false; } return true; } public boolean showScreen(String name, ScreenTransition animation) { try { Parent screen = getScreenByName(name); previousScreenName = currentScreenName; currentScreenName = name; resetScreen(screen); ScreenController screenController = getControllerByName(name); handleControllerBeforeShowMethods(name, screenController); if (!getChildren().isEmpty()) { animation.animate(this, getChildren().get(0), screen, e 
-> handleControllerShowMethods(name, screenController)); } else { animation.animate(this, null, screen, e -> handleControllerShowMethods(name, screenController)); } } catch (InvalidNameException e) { e.printStackTrace(); return false; } return true; } private void handleControllerBeforeShowMethods(String name, ScreenController screenController) { if (!screensShown.contains(name)) { screenController.onBeforeFirstShow(); } screenController.onBeforeShow(); } private void handleControllerShowMethods(String name, ScreenController screenController) { if (!screensShown.contains(name)) { screenController.onFirstShow(); screensShown.add(name); } if (currentScreenNameMatches(name)) { screenController.onShow(); } } public LinkedHashMap<String, Parent> getScreens() { return screens; } public LinkedHashMap<String, ScreenController> getControllers() { return controllers; } public ScreenController getControllerByName(String name) throws InvalidNameException { ScreenController controller = controllers.get(name); if (controller == null) { throw new InvalidNameException("There is no Controller for the Screen with the name '" + name + "'."); } return controller; } public void showLoadingIndicatorOverlay() { if (loadingIndicatorOverlay == null) { initLoadingIndicatorOverlay(); } this.getChildren().add(loadingIndicatorOverlay); } public void hideLoadingIndicatorOverlay() { if (loadingIndicatorOverlay != null) { this.getChildren().remove(loadingIndicatorOverlay); } } protected void initLoadingIndicatorOverlay() { loadingIndicatorOverlay = new Pane(); loadingIndicatorOverlay.getStyleClass().add("loading-indicator-overlay"); loadingIndicatorOverlay.getStylesheets().add(getClass().getResource("css/styles.css").toExternalForm()); loadingIndicatorOverlay.minWidthProperty().bind(this.widthProperty()); loadingIndicatorOverlay.minHeightProperty().bind(this.heightProperty()); } public void shadeScreen() { if (shadeScreenOverlay == null) { shadeScreenOverlay = new Pane(); 
shadeScreenOverlay.setOpacity(0.2); shadeScreenOverlay.setStyle("-fx-background-color: black"); } setEffect(new GaussianBlur(5.0D)); getChildren().add(shadeScreenOverlay); } public void unshadeScreen() { setEffect(null); getChildren().remove(shadeScreenOverlay); } public String getSkin() { return skin; } public void setSkin(String skin) { this.skin = skin; } public String getPreviousScreenName() { return previousScreenName; } public boolean previousScreenNameMatches(String name) { return previousScreenName != null && previousScreenName.equals(name); } public String getCurrentScreenName() { return currentScreenName; } public boolean currentScreenNameMatches(String name) { return currentScreenName != null && currentScreenName.equals(name); } private void resetScreen(Parent screen) { screen.setOpacity(1); screen.setScaleX(1); screen.setScaleY(1); screen.setScaleZ(1); screen.setTranslateX(0); screen.setTranslateY(0); screen.setTranslateZ(0); screen.setRotate(0); } /** * Finds all views which are stored with the convention view/*View.fxml and returns them as map. 
* Inspired by https://stackoverflow.com/a/20073154 * * @return A map with viewName -> viewPath */ private LinkedHashMap<String, URL> getViewsByConvention() { String appendedNamingConvention = "View.fxml"; LinkedHashMap<String, URL> availableViewsList = new LinkedHashMap<>(); final File jarFile = new File(application.getClass().getProtectionDomain().getCodeSource().getLocation().getPath()); if (jarFile.isFile()) { // Run with JAR file final JarFile jar; try { jar = new JarFile(jarFile); final Enumeration<JarEntry> entries = jar.entries(); //gives ALL entries in jar while (entries.hasMoreElements()) { final String name = entries.nextElement().getName(); if (name.contains("/view/")) { //filter according to the path Path path = Paths.get(name); String viewFileName = path.getFileName().toString(); if (!viewFileName.endsWith(appendedNamingConvention)) { continue; } // remove 'View.fxml' from 'ExampleView.fxml' and lowercase it String viewName = viewFileName.substring(0, viewFileName.length() - appendedNamingConvention.length()).toLowerCase(); availableViewsList.put(viewName, application.getClass().getResource("view/" + viewFileName)); } } jar.close(); } catch (IOException e) { e.printStackTrace(); } } else { // Run with IDE final URL url = application.getClass().getResource("view/"); if (url != null) { try { final File apps = new File(url.toURI()); for (File app : apps.listFiles()) { String viewName = app.getName(); if (!viewName.endsWith(appendedNamingConvention)) { continue; } // remove 'View.fxml' from 'ExampleView.fxml' and lowercase it viewName = viewName.substring(0, viewName.length() - appendedNamingConvention.length()).toLowerCase(); availableViewsList.put(viewName, app.toURI().toURL()); } } catch (URISyntaxException e) { // never happens } catch (MalformedURLException e) { e.printStackTrace(); } } } return availableViewsList; } public boolean isCloseRequestActive() { return closeRequestActive.get(); } public ReadOnlyBooleanProperty closeRequestActiveProperty() { 
return closeRequestActive; } protected void setCloseRequestActive(boolean closeRequestActive) { this.closeRequestActive.set(closeRequestActive); } }
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.NavigableMap; import junit.framework.AssertionFailedError; import junit.framework.TestCase; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.regionserver.Region; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.FSTableDescriptors; import org.apache.hadoop.hbase.util.FSUtils; import org.apache.hadoop.hdfs.MiniDFSCluster; /** * Abstract 
HBase test class. Initializes a few things that can come in handy
 * like an HBaseConfiguration and filesystem.
 * @deprecated Write junit4 unit tests using {@link HBaseTestingUtility}
 */
@Deprecated
public abstract class HBaseTestCase extends TestCase {
  private static final Log LOG = LogFactory.getLog(HBaseTestCase.class);

  // Column families shared by the content-loading helpers below.
  protected final static byte [] fam1 = Bytes.toBytes("colfamily11");
  protected final static byte [] fam2 = Bytes.toBytes("colfamily21");
  protected final static byte [] fam3 = Bytes.toBytes("colfamily31");

  protected static final byte [][] COLUMNS = {fam1, fam2, fam3};

  // True when fs.defaultFS points at the local filesystem (set in setUp()).
  private boolean localfs = false;
  protected static Path testDir = null;
  protected FileSystem fs = null;
  protected HRegion meta = null;
  // Row keys generated by addContent() run over ['a'..'z'] per character.
  protected static final char FIRST_CHAR = 'a';
  protected static final char LAST_CHAR = 'z';
  protected static final String PUNCTUATION = "~`@#$%^&*()-_+=:;',.<>/?[]{}|";
  protected static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
  protected String START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
  protected static final int MAXVERSIONS = 3;

  protected final HBaseTestingUtility testUtil = new HBaseTestingUtility();

  public volatile Configuration conf = HBaseConfiguration.create();
  public final FSTableDescriptors fsTableDescriptors;
  // Instance initializer: descriptor setup can throw IOException, which a
  // field initializer cannot, so it is wrapped here.
  {
    try {
      fsTableDescriptors = new FSTableDescriptors(conf);
    } catch (IOException e) {
      throw new RuntimeException("Failed to init descriptors", e);
    }
  }

  /** constructor */
  public HBaseTestCase() {
    super();
  }

  /**
   * @param name test name, forwarded to the JUnit3 TestCase constructor
   */
  public HBaseTestCase(String name) {
    super(name);
  }

  /**
   * Note that this method must be called after the mini hdfs cluster has
   * started or we end up with a local file system.
   */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Local mode when fs.defaultFS is (or defaults to) "file:///".
    localfs = (conf.get("fs.defaultFS", "file:///").compareTo("file:///") == 0);

    if (fs == null) {
      this.fs = FileSystem.get(conf);
    }
    try {
      if (localfs) {
        // Local fs: use a per-test directory, wiped if it already exists.
        testDir = getUnitTestdir(getName());
        if (fs.exists(testDir)) {
          fs.delete(testDir, true);
        }
      } else {
        testDir = FSUtils.getRootDir(conf);
      }
    } catch (Exception e) {
      LOG.fatal("error during setup", e);
      throw e;
    }
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      if (localfs) {
        if (this.fs.exists(testDir)) {
          this.fs.delete(testDir, true);
        }
      }
    } catch (Exception e) {
      // Cleanup is best-effort; log but do not fail the test run here.
      LOG.fatal("error during tear down", e);
    }
    super.tearDown();
  }

  /**
   * @see HBaseTestingUtility#getBaseTestDir
   * @param testName
   * @return directory to use for this test
   */
  protected Path getUnitTestdir(String testName) {
    return testUtil.getDataTestDir(testName);
  }

  /**
   * You must call close on the returned region and then close on the log file it created. Do
   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to close both the region and the WAL.
   * @param desc table descriptor for the new region
   * @param startKey start key (inclusive)
   * @param endKey end key (exclusive)
   * @return An {@link HRegion}
   * @throws IOException
   */
  public HRegion createNewHRegion(HTableDescriptor desc, byte [] startKey,
      byte [] endKey)
  throws IOException {
    return createNewHRegion(desc, startKey, endKey, this.conf);
  }

  // Same as above, but against an explicit Configuration.
  public HRegion createNewHRegion(HTableDescriptor desc, byte [] startKey,
      byte [] endKey, Configuration conf)
  throws IOException {
    HRegionInfo hri = new HRegionInfo(desc.getTableName(), startKey, endKey);
    return HBaseTestingUtility.createRegionAndWAL(hri, testDir, conf, desc);
  }

  // Reopens the given closed region.
  protected HRegion openClosedRegion(final HRegion closedRegion)
  throws IOException {
    return HRegion.openHRegion(closedRegion, null);
  }

  /**
   * Create a table of name <code>name</code> with {@link COLUMNS} for
   * families.
   * @param name Name to give table.
   * @return Column descriptor.
   */
  protected HTableDescriptor createTableDescriptor(final String name) {
    return createTableDescriptor(name, MAXVERSIONS);
  }

  /**
   * Create a table of name <code>name</code> with {@link COLUMNS} for
   * families.
   * @param name Name to give table.
   * @param versions How many versions to allow per column.
   * @return Column descriptor.
   */
  protected HTableDescriptor createTableDescriptor(final String name,
      final int versions) {
    return createTableDescriptor(name, HColumnDescriptor.DEFAULT_MIN_VERSIONS,
        versions, HConstants.FOREVER, HColumnDescriptor.DEFAULT_KEEP_DELETED);
  }

  /**
   * Create a table of name <code>name</code> with {@link COLUMNS} for
   * families. All three families get the same min/max versions, TTL and
   * keep-deleted-cells settings; block cache is disabled.
   * @param name Name to give table.
   * @param minVersions Minimum versions to keep per column.
   * @param versions How many versions to allow per column.
   * @param ttl Time-to-live for cells.
   * @param keepDeleted Keep-deleted-cells policy.
   * @return Column descriptor.
   */
  protected HTableDescriptor createTableDescriptor(final String name,
      final int minVersions, final int versions, final int ttl, KeepDeletedCells keepDeleted) {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
    for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
      htd.addFamily(new HColumnDescriptor(cfName)
          .setMinVersions(minVersions)
          .setMaxVersions(versions)
          .setKeepDeletedCells(keepDeleted)
          .setBlockCacheEnabled(false)
          .setTimeToLive(ttl)
      );
    }
    return htd;
  }

  /**
   * Add content to region <code>r</code> on the passed column
   * <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @param r
   * @param columnFamily
   * @param column
   * @throws IOException
   * @return count of what we added.
   */
  public static long addContent(final Region r, final byte [] columnFamily, final byte[] column)
  throws IOException {
    byte [] startKey = r.getRegionInfo().getStartKey();
    byte [] endKey = r.getRegionInfo().getEndKey();
    byte [] startKeyBytes = startKey;
    if (startKeyBytes == null || startKeyBytes.length == 0) {
      // Region with an open start: fall back to the conventional 'aaa' start.
      startKeyBytes = START_KEY_BYTES;
    }
    return addContent(new HRegionIncommon(r), Bytes.toString(columnFamily), Bytes.toString(column),
      startKeyBytes, endKey, -1);
  }

  // Convenience overload: no explicit qualifier.
  public static long addContent(final Region r, final byte [] columnFamily) throws IOException {
    return addContent(r, columnFamily, null);
  }

  /**
   * Add content to region <code>r</code> on the passed column
   * <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @param updater An instance of {@link Incommon}.
   * @param columnFamily
   * @throws IOException
   * @return count of what we added.
   */
  public static long addContent(final Incommon updater,
      final String columnFamily) throws IOException {
    return addContent(updater, columnFamily, START_KEY_BYTES, null);
  }

  // Convenience overload: family plus qualifier, full default key range.
  public static long addContent(final Incommon updater, final String family,
      final String column) throws IOException {
    return addContent(updater, family, column, START_KEY_BYTES, null);
  }

  /**
   * Add content to region <code>r</code> on the passed column
   * <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @param updater An instance of {@link Incommon}.
   * @param columnFamily
   * @param startKeyBytes Where to start the rows inserted
   * @param endKey Where to stop inserting rows.
   * @return count of what we added.
* @throws IOException */ public static long addContent(final Incommon updater, final String columnFamily, final byte [] startKeyBytes, final byte [] endKey) throws IOException { return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1); } public static long addContent(final Incommon updater, final String family, String column, final byte [] startKeyBytes, final byte [] endKey) throws IOException { return addContent(updater, family, column, startKeyBytes, endKey, -1); } /** * Add content to region <code>r</code> on the passed column * <code>column</code>. * Adds data of the from 'aaa', 'aab', etc where key and value are the same. * @param updater An instance of {@link Incommon}. * @param column * @param startKeyBytes Where to start the rows inserted * @param endKey Where to stop inserting rows. * @param ts Timestamp to write the content with. * @param writeToWAL * @return count of what we added. * @throws IOException */ public static long addContent(final Incommon updater, final String columnFamily, final String column, final byte [] startKeyBytes, final byte [] endKey, final long ts) throws IOException { long count = 0; // Add rows of three characters. The first character starts with the // 'a' character and runs up to 'z'. Per first character, we run the // second character over same range. And same for the third so rows // (and values) look like this: 'aaa', 'aab', 'aac', etc. 
char secondCharStart = (char)startKeyBytes[1]; char thirdCharStart = (char)startKeyBytes[2]; EXIT: for (char c = (char)startKeyBytes[0]; c <= LAST_CHAR; c++) { for (char d = secondCharStart; d <= LAST_CHAR; d++) { for (char e = thirdCharStart; e <= LAST_CHAR; e++) { byte [] t = new byte [] {(byte)c, (byte)d, (byte)e}; if (endKey != null && endKey.length > 0 && Bytes.compareTo(endKey, t) <= 0) { break EXIT; } try { Put put; if(ts != -1) { put = new Put(t, ts); } else { put = new Put(t); } try { StringBuilder sb = new StringBuilder(); if (column != null && column.contains(":")) { sb.append(column); } else { if (columnFamily != null) { sb.append(columnFamily); if (!columnFamily.endsWith(":")) { sb.append(":"); } if (column != null) { sb.append(column); } } } byte[][] split = KeyValue.parseColumn(Bytes.toBytes(sb.toString())); if(split.length == 1) { put.add(split[0], new byte[0], t); } else { put.add(split[0], split[1], t); } put.setDurability(Durability.SKIP_WAL); updater.put(put); count++; } catch (RuntimeException ex) { ex.printStackTrace(); throw ex; } catch (IOException ex) { ex.printStackTrace(); throw ex; } } catch (RuntimeException ex) { ex.printStackTrace(); throw ex; } catch (IOException ex) { ex.printStackTrace(); throw ex; } } // Set start character back to FIRST_CHAR after we've done first loop. thirdCharStart = FIRST_CHAR; } secondCharStart = FIRST_CHAR; } return count; } /** * Implementors can flushcache. */ public interface FlushCache { /** * @throws IOException */ void flushcache() throws IOException; } /** * Interface used by tests so can do common operations against an HTable * or an HRegion. * * TOOD: Come up w/ a better name for this interface. 
*/ public interface Incommon { /** * * @param delete * @param writeToWAL * @throws IOException */ void delete(Delete delete, boolean writeToWAL) throws IOException; /** * @param put * @throws IOException */ void put(Put put) throws IOException; Result get(Get get) throws IOException; /** * @param family * @param qualifiers * @param firstRow * @param ts * @return scanner for specified columns, first row and timestamp * @throws IOException */ ScannerIncommon getScanner( byte[] family, byte[][] qualifiers, byte[] firstRow, long ts ) throws IOException; } /** * A class that makes a {@link Incommon} out of a {@link HRegion} */ public static class HRegionIncommon implements Incommon, FlushCache { final HRegion region; /** * @param HRegion */ public HRegionIncommon(final HRegion HRegion) { this.region = HRegion; } public HRegionIncommon(final Region region) { this.region = (HRegion)region; } public void put(Put put) throws IOException { region.put(put); } public void delete(Delete delete, boolean writeToWAL) throws IOException { this.region.delete(delete); } public Result get(Get get) throws IOException { return region.get(get); } public ScannerIncommon getScanner(byte [] family, byte [][] qualifiers, byte [] firstRow, long ts) throws IOException { Scan scan = new Scan(firstRow); if(qualifiers == null || qualifiers.length == 0) { scan.addFamily(family); } else { for(int i=0; i<qualifiers.length; i++){ scan.addColumn(HConstants.CATALOG_FAMILY, qualifiers[i]); } } scan.setTimeRange(0, ts); return new InternalScannerIncommon(region.getScanner(scan)); } public void flushcache() throws IOException { this.region.flush(true); } } /** * A class that makes a {@link Incommon} out of a {@link Table} */ public static class HTableIncommon implements Incommon { final Table table; /** * @param table */ public HTableIncommon(final Table table) { super(); this.table = table; } public void put(Put put) throws IOException { table.put(put); } public void delete(Delete delete, boolean 
writeToWAL) throws IOException { this.table.delete(delete); } public Result get(Get get) throws IOException { return table.get(get); } public ScannerIncommon getScanner(byte [] family, byte [][] qualifiers, byte [] firstRow, long ts) throws IOException { Scan scan = new Scan(firstRow); if(qualifiers == null || qualifiers.length == 0) { scan.addFamily(family); } else { for(int i=0; i<qualifiers.length; i++){ scan.addColumn(HConstants.CATALOG_FAMILY, qualifiers[i]); } } scan.setTimeRange(0, ts); return new ClientScannerIncommon(table.getScanner(scan)); } } public interface ScannerIncommon extends Iterable<Result> { boolean next(List<Cell> values) throws IOException; void close() throws IOException; } public static class ClientScannerIncommon implements ScannerIncommon { ResultScanner scanner; public ClientScannerIncommon(ResultScanner scanner) { this.scanner = scanner; } @Override public boolean next(List<Cell> values) throws IOException { Result results = scanner.next(); if (results == null) { return false; } values.clear(); values.addAll(results.listCells()); return true; } public void close() throws IOException { scanner.close(); } public Iterator<Result> iterator() { return scanner.iterator(); } } public static class InternalScannerIncommon implements ScannerIncommon { InternalScanner scanner; public InternalScannerIncommon(InternalScanner scanner) { this.scanner = scanner; } @Override public boolean next(List<Cell> results) throws IOException { return scanner.next(results); } @Override public void close() throws IOException { scanner.close(); } @Override public Iterator<Result> iterator() { throw new UnsupportedOperationException(); } } protected void assertResultEquals(final HRegion region, final byte [] row, final byte [] family, final byte [] qualifier, final long timestamp, final byte [] value) throws IOException { Get get = new Get(row); get.setTimeStamp(timestamp); Result res = region.get(get); NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, 
byte[]>>> map = res.getMap();
  byte [] res_value = map.get(family).get(qualifier).get(timestamp);
  if (value == null) {
    assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
      " at timestamp " + timestamp, null, res_value);
  } else {
    if (res_value == null) {
      fail(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
        " at timestamp " + timestamp + "\" was expected to be \"" +
        Bytes.toStringBinary(value) + " but was null");
    }
    if (res_value != null) {
      // BUGFIX: the original called assertEquals(msg, value, new String(res_value)),
      // comparing a byte[] against a String via assertEquals(String,Object,Object) —
      // which can never be equal. Compare both sides as Strings; Bytes.toString also
      // avoids the platform-default charset used by new String(byte[]).
      assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
        " at timestamp " + timestamp, Bytes.toString(value), Bytes.toString(res_value));
    }
  }
}

/**
 * Common method to close down a MiniDFSCluster and the associated file system
 *
 * @param cluster
 */
public static void shutdownDfs(MiniDFSCluster cluster) {
  if (cluster != null) {
    LOG.info("Shutting down Mini DFS ");
    try {
      cluster.shutdown();
    } catch (Exception e) {
      /// Can get a java.lang.reflect.UndeclaredThrowableException thrown
      // here because of an InterruptedException. Don't let exceptions in
      // here be cause of test failure.
    }
    try {
      FileSystem fs = cluster.getFileSystem();
      if (fs != null) {
        LOG.info("Shutting down FileSystem");
        fs.close();
      }
      FileSystem.closeAll();
    } catch (IOException e) {
      LOG.error("error closing file system", e);
    }
  }
}

/**
 * You must call {@link #closeRootAndMeta()} when done after calling this
 * method. It does cleanup.
 * @throws IOException
 */
protected void createMetaRegion() throws IOException {
  FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(conf);
  meta = HBaseTestingUtility.createRegionAndWAL(HRegionInfo.FIRST_META_REGIONINFO, testDir,
    conf, fsTableDescriptors.get(TableName.META_TABLE_NAME));
}

protected void closeRootAndMeta() throws IOException {
  HBaseTestingUtility.closeRegionAndWAL(meta);
}

/** Asserts byte-array equality, reporting contents as plain strings on failure. */
public static void assertByteEquals(byte[] expected, byte[] actual) {
  if (Bytes.compareTo(expected, actual) != 0) {
    throw new AssertionFailedError("expected:<" +
      Bytes.toString(expected) + "> but was:<" +
      Bytes.toString(actual) + ">");
  }
}

/** Asserts byte-array equality, reporting contents in binary-escaped form on failure. */
public static void assertEquals(byte[] expected, byte[] actual) {
  if (Bytes.compareTo(expected, actual) != 0) {
    throw new AssertionFailedError("expected:<" +
      Bytes.toStringBinary(expected) + "> but was:<" +
      Bytes.toStringBinary(actual) + ">");
  }
}
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.tez.test; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.OutputStreamWriter; import java.nio.ByteBuffer; import java.util.BitSet; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.locks.Condition; import java.util.concurrent.locks.ReentrantLock; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathFilter; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.yarn.api.records.ApplicationReport; import 
org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.tez.client.TezClient;
import org.apache.tez.dag.api.DAG;
import org.apache.tez.dag.api.DataSourceDescriptor;
import org.apache.tez.dag.api.InputDescriptor;
import org.apache.tez.dag.api.InputInitializerDescriptor;
import org.apache.tez.dag.api.ProcessorDescriptor;
import org.apache.tez.dag.api.TezConfiguration;
import org.apache.tez.dag.api.TezException;
import org.apache.tez.dag.api.Vertex;
import org.apache.tez.dag.api.client.DAGClient;
import org.apache.tez.dag.api.client.DAGStatus;
import org.apache.tez.dag.api.event.VertexState;
import org.apache.tez.dag.api.event.VertexStateUpdate;
import org.apache.tez.examples.OrderedWordCount;
import org.apache.tez.examples.SimpleSessionExample;
import org.apache.tez.examples.JoinDataGen;
import org.apache.tez.examples.HashJoinExample;
import org.apache.tez.examples.JoinValidate;
import org.apache.tez.examples.SortMergeJoinExample;
import org.apache.tez.runtime.api.Event;
import org.apache.tez.runtime.api.InputInitializer;
import org.apache.tez.runtime.api.InputInitializerContext;
import org.apache.tez.runtime.api.ProcessorContext;
import org.apache.tez.runtime.api.events.InputInitializerEvent;
import org.apache.tez.runtime.library.processor.SimpleProcessor;
import org.apache.tez.runtime.library.processor.SleepProcessor;
import org.apache.tez.test.dag.MultiAttemptDAG;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Tests for Tez example jobs
 *
 */
public class TestTezJobs {

  private static final Log LOG = LogFactory.getLog(TestTezJobs.class);

  // Shared mini clusters, created once per test class.
  protected static MiniTezCluster mrrTezCluster;
  protected static MiniDFSCluster dfsCluster;

  private static Configuration conf = new Configuration();
  private static FileSystem remoteFs;
  private static String TEST_ROOT_DIR = "target" + Path.SEPARATOR
      + TestTezJobs.class.getName() + "-tmpDir";

  // Spins up a 2-datanode mini HDFS cluster and a 1-node mini Tez cluster
  // backed by it; both are shared by every test in this class.
  @BeforeClass
  public static void setup() throws IOException {
    try {
      conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, TEST_ROOT_DIR);
      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).format(true).racks(null)
          .build();
      remoteFs = dfsCluster.getFileSystem();
    } catch (IOException io) {
      throw new RuntimeException("problem starting mini dfs cluster", io);
    }
    if (mrrTezCluster == null) {
      mrrTezCluster = new MiniTezCluster(TestTezJobs.class.getName(), 1, 1, 1);
      // Local conf deliberately shadows the static one: only the Tez cluster
      // needs fs.defaultFS pointed at the mini HDFS.
      Configuration conf = new Configuration();
      conf.set("fs.defaultFS", remoteFs.getUri().toString()); // use HDFS
      mrrTezCluster.init(conf);
      mrrTezCluster.start();
    }
  }

  @AfterClass
  public static void tearDown() {
    if (mrrTezCluster != null) {
      mrrTezCluster.stop();
      mrrTezCluster = null;
    }
    if (dfsCluster != null) {
      dfsCluster.shutdown();
      dfsCluster = null;
    }
    // TODO Add cleanup code.
  }

  // Writes 20 terms to input1 and the 10 even-indexed ones to input2, runs the
  // hash-join example, and checks the join output equals the intersection.
  @Test(timeout = 60000)
  public void testHashJoinExample() throws Exception {
    HashJoinExample hashJoinExample = new HashJoinExample();
    hashJoinExample.setConf(new Configuration(mrrTezCluster.getConfig()));
    Path stagingDirPath = new Path("/tmp/tez-staging-dir");
    Path inPath1 = new Path("/tmp/hashJoin/inPath1");
    Path inPath2 = new Path("/tmp/hashJoin/inPath2");
    Path outPath = new Path("/tmp/hashJoin/outPath");
    remoteFs.mkdirs(inPath1);
    remoteFs.mkdirs(inPath2);
    remoteFs.mkdirs(stagingDirPath);
    Set<String> expectedResult = new HashSet<String>();
    FSDataOutputStream out1 = remoteFs.create(new Path(inPath1, "file"));
    FSDataOutputStream out2 = remoteFs.create(new Path(inPath2, "file"));
    BufferedWriter writer1 = new BufferedWriter(new OutputStreamWriter(out1));
    BufferedWriter writer2 = new BufferedWriter(new OutputStreamWriter(out2));
    for (int i = 0; i < 20; i++) {
      String term = "term" + i;
      writer1.write(term);
      writer1.newLine();
      if (i % 2 == 0) {
        writer2.write(term);
        writer2.newLine();
        expectedResult.add(term);
      }
    }
    writer1.close();
    writer2.close();
    out1.close();
    out2.close();
    String[] args = new String[] {
        "-D" + TezConfiguration.TEZ_AM_STAGING_DIR + "=" + stagingDirPath.toString(),
        inPath1.toString(),
inPath2.toString(), "1", outPath.toString() };
    assertEquals(0, hashJoinExample.run(args));

    // Expect exactly one visible output file (hidden and _SUCCESS files ignored).
    FileStatus[] statuses = remoteFs.listStatus(outPath, new PathFilter() {
      public boolean accept(Path p) {
        String name = p.getName();
        return !name.startsWith("_") && !name.startsWith(".");
      }
    });
    assertEquals(1, statuses.length);
    FSDataInputStream inStream = remoteFs.open(statuses[0].getPath());
    BufferedReader reader = new BufferedReader(new InputStreamReader(inStream));
    String line;
    while ((line = reader.readLine()) != null) {
      assertTrue(expectedResult.remove(line));
    }
    reader.close();
    inStream.close();
    // Every expected term must have appeared exactly once.
    assertEquals(0, expectedResult.size());
  }

  // Same data and expectations as testHashJoinExample, exercising the
  // sort-merge join implementation instead.
  @Test(timeout = 60000)
  public void testSortMergeJoinExample() throws Exception {
    SortMergeJoinExample sortMergeJoinExample = new SortMergeJoinExample();
    sortMergeJoinExample.setConf(new Configuration(mrrTezCluster.getConfig()));
    Path stagingDirPath = new Path("/tmp/tez-staging-dir");
    Path inPath1 = new Path("/tmp/sortMerge/inPath1");
    Path inPath2 = new Path("/tmp/sortMerge/inPath2");
    Path outPath = new Path("/tmp/sortMerge/outPath");
    remoteFs.mkdirs(inPath1);
    remoteFs.mkdirs(inPath2);
    remoteFs.mkdirs(stagingDirPath);
    Set<String> expectedResult = new HashSet<String>();
    FSDataOutputStream out1 = remoteFs.create(new Path(inPath1, "file"));
    FSDataOutputStream out2 = remoteFs.create(new Path(inPath2, "file"));
    BufferedWriter writer1 = new BufferedWriter(new OutputStreamWriter(out1));
    BufferedWriter writer2 = new BufferedWriter(new OutputStreamWriter(out2));
    for (int i = 0; i < 20; i++) {
      String term = "term" + i;
      writer1.write(term);
      writer1.newLine();
      if (i % 2 == 0) {
        writer2.write(term);
        writer2.newLine();
        expectedResult.add(term);
      }
    }
    writer1.close();
    writer2.close();
    out1.close();
    out2.close();
    String[] args = new String[] {
        "-D" + TezConfiguration.TEZ_AM_STAGING_DIR + "=" + stagingDirPath.toString(),
        inPath1.toString(), inPath2.toString(), "1", outPath.toString() };
    assertEquals(0, sortMergeJoinExample.run(args));

    FileStatus[] statuses = remoteFs.listStatus(outPath, new PathFilter() {
      public boolean accept(Path p) {
        String name = p.getName();
        return !name.startsWith("_") && !name.startsWith(".");
      }
    });
    assertEquals(1, statuses.length);
    FSDataInputStream inStream = remoteFs.open(statuses[0].getPath());
    BufferedReader reader = new BufferedReader(new InputStreamReader(inStream));
    String line;
    while ((line = reader.readLine()) != null) {
      assertTrue(expectedResult.remove(line));
    }
    reader.close();
    inStream.close();
    assertEquals(0, expectedResult.size());
  }

  /**
   * test whole {@link HashJoinExample} pipeline as following: <br>
   * {@link JoinDataGen} -> {@link HashJoinExample} -> {@link JoinValidate}
   * @throws Exception
   */
  @Test(timeout = 120000)
  public void testHashJoinExamplePipeline() throws Exception {
    Path testDir = new Path("/tmp/testHashJoinExample");
    Path stagingDirPath = new Path("/tmp/tez-staging-dir");
    remoteFs.mkdirs(stagingDirPath);
    remoteFs.mkdirs(testDir);
    Path dataPath1 = new Path(testDir, "inPath1");
    Path dataPath2 = new Path(testDir, "inPath2");
    Path expectedOutputPath = new Path(testDir, "expectedOutputPath");
    Path outPath = new Path(testDir, "outPath");
    TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig());
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDirPath.toString());
    TezClient tezSession = null;
    try {
      // One shared session runs all three stages: generate, join, validate.
      tezSession = TezClient.create("HashJoinExampleSession", tezConf, true);
      tezSession.start();
      JoinDataGen dataGen = new JoinDataGen();
      String[] dataGenArgs = new String[] { dataPath1.toString(), "1048576",
          dataPath2.toString(), "524288", expectedOutputPath.toString(), "2" };
      assertEquals(0, dataGen.run(tezConf, dataGenArgs, tezSession));
      HashJoinExample joinExample = new HashJoinExample();
      String[] args = new String[] { dataPath1.toString(), dataPath2.toString(), "2",
          outPath.toString() };
      assertEquals(0, joinExample.run(tezConf, args, tezSession));
      JoinValidate joinValidate = new JoinValidate();
      String[] validateArgs = new String[] {
expectedOutputPath.toString(), outPath.toString(), "3" };
      assertEquals(0, joinValidate.run(tezConf, validateArgs, tezSession));
    } finally {
      if (tezSession != null) {
        tezSession.stop();
      }
    }
  }

  /**
   * test whole {@link SortMergeJoinExample} pipeline as following: <br>
   * {@link JoinDataGen} -> {@link SortMergeJoinExample} -> {@link JoinValidate}
   * @throws Exception
   */
  @Test(timeout = 120000)
  public void testSortMergeJoinExamplePipeline() throws Exception {
    Path testDir = new Path("/tmp/testSortMergeExample");
    Path stagingDirPath = new Path("/tmp/tez-staging-dir");
    remoteFs.mkdirs(stagingDirPath);
    remoteFs.mkdirs(testDir);
    Path dataPath1 = new Path(testDir, "inPath1");
    Path dataPath2 = new Path(testDir, "inPath2");
    Path expectedOutputPath = new Path(testDir, "expectedOutputPath");
    Path outPath = new Path(testDir, "outPath");
    TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig());
    tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDirPath.toString());
    TezClient tezSession = null;
    try {
      // One shared session runs all three stages: generate, join, validate.
      tezSession = TezClient.create("SortMergeExampleSession", tezConf, true);
      tezSession.start();
      JoinDataGen dataGen = new JoinDataGen();
      String[] dataGenArgs = new String[] { dataPath1.toString(), "1048576",
          dataPath2.toString(), "524288", expectedOutputPath.toString(), "2" };
      assertEquals(0, dataGen.run(tezConf, dataGenArgs, tezSession));
      SortMergeJoinExample joinExample = new SortMergeJoinExample();
      String[] args = new String[] { dataPath1.toString(), dataPath2.toString(), "2",
          outPath.toString() };
      assertEquals(0, joinExample.run(tezConf, args, tezSession));
      JoinValidate joinValidate = new JoinValidate();
      String[] validateArgs = new String[] { expectedOutputPath.toString(),
          outPath.toString(), "3" };
      assertEquals(0, joinValidate.run(tezConf, validateArgs, tezSession));
    } finally {
      if (tezSession != null) {
        tezSession.stop();
      }
    }
  }

  // Writes word a_i repeated (11 - i) times into both input files, tab-separated.
  // NOTE(review): writeChars("\t") emits UTF-16 (two bytes per char), so each
  // separator is 0x00 0x09 on disk; verifyOrderedWordCountOutput compensates
  // for the leading NUL byte — confirm this pairing before changing either side.
  private void generateOrderedWordCountInput(Path inputDir) throws IOException {
    Path dataPath1 = new Path(inputDir, "inPath1");
    Path dataPath2 = new Path(inputDir, "inPath2");
    FSDataOutputStream f1 = null;
    FSDataOutputStream f2 = null;
    try {
      f1 = remoteFs.create(dataPath1);
      f2 = remoteFs.create(dataPath2);
      final String prefix = "a";
      for (int i = 1; i <= 10; ++i) {
        final String word = prefix + "_" + i;
        for (int j = 10; j >= i; --j) {
          LOG.info("Writing " + word + " to input files");
          f1.write(word.getBytes());
          f1.writeChars("\t");
          f2.write(word.getBytes());
          f2.writeChars("\t");
        }
      }
      f1.hsync();
      f2.hsync();
    } finally {
      if (f1 != null) {
        f1.close();
      }
      if (f2 != null) {
        f2.close();
      }
    }
  }

  // Checks the word-count output is sorted by descending count: a_10 (count 2)
  // down to a_1 (count 20), i.e. count of a_i is (11 - i) * 2 across both files.
  private void verifyOrderedWordCountOutput(Path resultFile) throws IOException {
    FSDataInputStream inputStream = remoteFs.open(resultFile);
    final String prefix = "a";
    int currentCounter = 10;
    // NOTE(review): only the first 4096 bytes of the result are examined —
    // sufficient for this fixture, but would silently truncate larger outputs.
    byte[] buffer = new byte[4096];
    int bytesRead = inputStream.read(buffer, 0, 4096);
    BufferedReader reader = new BufferedReader(new InputStreamReader(
        new ByteArrayInputStream(buffer, 0, bytesRead)));
    String line;
    while ((line = reader.readLine()) != null) {
      LOG.info("Line: " + line + ", counter=" + currentCounter);
      int pos = line.indexOf("\t");
      // pos-1 drops the NUL byte preceding the tab (see the generator's writeChars note).
      String word = line.substring(0, pos-1);
      Assert.assertEquals(prefix + "_" + currentCounter, word);
      String val = line.substring(pos+1, line.length());
      Assert.assertEquals((long)(11 - currentCounter) * 2, (long)Long.valueOf(val));
      currentCounter--;
    }
    Assert.assertEquals(0, currentCounter);
  }

  // Scans outputDir for a _SUCCESS marker and exactly one part- file, then
  // validates that file's contents.
  private void verifyOutput(Path outputDir) throws IOException {
    FileStatus[] fileStatuses = remoteFs.listStatus(outputDir);
    Path resultFile = null;
    boolean foundResult = false;
    boolean foundSuccessFile = false;
    for (FileStatus fileStatus : fileStatuses) {
      if (!fileStatus.isFile()) {
        continue;
      }
      if (fileStatus.getPath().getName().equals("_SUCCESS")) {
        foundSuccessFile = true;
        continue;
      }
      if (fileStatus.getPath().getName().startsWith("part-")) {
        if (foundResult) {
          fail("Found 2 part files instead of 1"
              + ", paths=" + resultFile + "," + fileStatus.getPath());
        }
        foundResult = true;
        resultFile =
fileStatus.getPath(); LOG.info("Found output at " + resultFile); } } assertTrue(foundResult); assertTrue(resultFile != null); assertTrue(foundSuccessFile); verifyOrderedWordCountOutput(resultFile); } @Test(timeout = 60000) public void testOrderedWordCount() throws Exception { String inputDirStr = "/tmp/owc-input/"; Path inputDir = new Path(inputDirStr); Path stagingDirPath = new Path("/tmp/owc-staging-dir"); remoteFs.mkdirs(inputDir); remoteFs.mkdirs(stagingDirPath); generateOrderedWordCountInput(inputDir); String outputDirStr = "/tmp/owc-output/"; Path outputDir = new Path(outputDirStr); TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig()); tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDirPath.toString()); TezClient tezSession = null; try { OrderedWordCount job = new OrderedWordCount(); Assert.assertTrue("OrderedWordCount failed", job.run(inputDirStr, outputDirStr, tezConf, 2)); verifyOutput(outputDir); } finally { remoteFs.delete(stagingDirPath, true); if (tezSession != null) { tezSession.stop(); } } } @Test(timeout = 60000) public void testSimpleSessionExample() throws Exception { Path stagingDirPath = new Path("/tmp/owc-staging-dir"); remoteFs.mkdirs(stagingDirPath); int numIterations = 2; String[] inputPaths = new String[numIterations]; String[] outputPaths = new String[numIterations]; Path[] outputDirs = new Path[numIterations]; for (int i=0; i<numIterations; ++i) { String inputDirStr = "/tmp/owc-input-" + i + "/"; inputPaths[i] = inputDirStr; Path inputDir = new Path(inputDirStr); remoteFs.mkdirs(inputDir); generateOrderedWordCountInput(inputDir); String outputDirStr = "/tmp/owc-output-" + i + "/"; outputPaths[i] = outputDirStr; Path outputDir = new Path(outputDirStr); outputDirs[i] = outputDir; } TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig()); tezConf.set(TezConfiguration.TEZ_AM_STAGING_DIR, stagingDirPath.toString()); YarnClient yarnClient = YarnClient.createYarnClient(); try { 
yarnClient.init(mrrTezCluster.getConfig()); yarnClient.start(); List<ApplicationReport> apps = yarnClient.getApplications(); int appsBeforeCount = apps != null ? apps.size() : 0; SimpleSessionExample job = new SimpleSessionExample(); Assert.assertTrue("SimpleSessionExample failed", job.run(inputPaths, outputPaths, tezConf, 2)); for (int i=0; i<numIterations; ++i) { verifyOutput(outputDirs[i]); } apps = yarnClient.getApplications(); int appsAfterCount = apps != null ? apps.size() : 0; // Running in session mode. So should only create 1 more app. Assert.assertEquals(appsBeforeCount + 1, appsAfterCount); } finally { remoteFs.delete(stagingDirPath, true); if (yarnClient != null) { yarnClient.stop(); } } } @Test (timeout=60000) public void testVertexOrder() throws Exception { TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig()); TezClient tezClient = TezClient.create("TestVertexOrder", tezConf); tezClient.start(); try { DAG dag = SimpleTestDAG.createDAGForVertexOrder("dag1", conf); DAGClient dagClient = tezClient.submitDAG(dag); DAGStatus dagStatus = dagClient.getDAGStatus(null); while (!dagStatus.isCompleted()) { LOG.info("Waiting for dag to complete. Sleeping for 500ms." 
+ " DAG name: " + dag.getName() + " DAG context: " + dagClient.getExecutionContext() + " Current state: " + dagStatus.getState()); Thread.sleep(100); dagStatus = dagClient.getDAGStatus(null); } Assert.assertEquals(DAGStatus.State.SUCCEEDED, dagStatus.getState()); // verify vertex order Set<String> resultVertices = dagStatus.getVertexProgress().keySet(); Assert.assertEquals(6, resultVertices.size()); int i = 0; for (String vertexName : resultVertices){ if (i <= 1){ Assert.assertTrue( vertexName.equals("v1") || vertexName.equals("v2")); } else if (i == 2){ Assert.assertTrue( vertexName.equals("v3")); } else if (i <= 4){ Assert.assertTrue( vertexName.equals("v4") || vertexName.equals("v5")); } else { Assert.assertTrue( vertexName.equals("v6")); } i++; } } finally { if (tezClient != null) { tezClient.stop(); } } } @Test(timeout = 60000) public void testInputInitializerEvents() throws TezException, InterruptedException, IOException { TezConfiguration tezConf = new TezConfiguration(mrrTezCluster.getConfig()); TezClient tezClient = TezClient.create("TestInputInitializerEvents", tezConf); tezClient.start(); try { DAG dag = DAG.create("TestInputInitializerEvents"); Vertex vertex1 = Vertex.create(VERTEX_WITH_INITIALIZER_NAME, ProcessorDescriptor.create( SleepProcessor.class.getName()) .setUserPayload(new SleepProcessor.SleepProcessorConfig(1).toUserPayload()), 1) .addDataSource(INPUT1_NAME, DataSourceDescriptor .create(InputDescriptor.create(MultiAttemptDAG.NoOpInput.class.getName()), InputInitializerDescriptor.create(InputInitializerForTest.class.getName()), null)); Vertex vertex2 = Vertex.create(EVENT_GENERATING_VERTEX_NAME, ProcessorDescriptor.create(InputInitializerEventGeneratingProcessor.class.getName()), 5); dag.addVertex(vertex1).addVertex(vertex2); DAGClient dagClient = tezClient.submitDAG(dag); dagClient.waitForCompletion(); Assert.assertEquals(DAGStatus.State.SUCCEEDED, dagClient.getDAGStatus(null).getState()); } finally { tezClient.stop(); } } private static 
final String VERTEX_WITH_INITIALIZER_NAME = "VertexWithInitializer";
  private static final String EVENT_GENERATING_VERTEX_NAME = "EventGeneratingVertex";
  private static final String INPUT1_NAME = "Input1";

  // Processor whose tasks each send one InputInitializerEvent (payload = the
  // task's own index) to VERTEX_WITH_INITIALIZER_NAME / INPUT1_NAME. Task index
  // 1 deliberately fails its first attempt so retry behavior is exercised.
  public static class InputInitializerEventGeneratingProcessor extends SimpleProcessor {

    public InputInitializerEventGeneratingProcessor(
        ProcessorContext context) {
      super(context);
    }

    @Override
    public void run() throws Exception {
      if (getContext().getTaskIndex() == 1 && getContext().getTaskAttemptNumber() == 0) {
        throw new IOException("Failing task 2, attempt 0");
      }
      InputInitializerEvent initializerEvent = InputInitializerEvent.create(
          VERTEX_WITH_INITIALIZER_NAME, INPUT1_NAME,
          ByteBuffer.allocate(4).putInt(0, getContext().getTaskIndex()));
      List<Event> events = Lists.newArrayList();
      events.add(initializerEvent);
      getContext().sendEvents(events);
    }
  }

  // Initializer that blocks in initialize() until the event-generating vertex
  // reports SUCCEEDED, by which time an event from each of its tasks must have
  // been received (otherwise onVertexStateUpdated throws).
  public static class InputInitializerForTest extends InputInitializer {

    private final ReentrantLock lock = new ReentrantLock();
    private final Condition condition = lock.newCondition();
    // Bit i is set once an event from task i has been seen; guarded by lock.
    private final BitSet eventsSeen = new BitSet();

    public InputInitializerForTest(
        InputInitializerContext initializerContext) {
      super(initializerContext);
      getContext().registerForVertexStateUpdates(EVENT_GENERATING_VERTEX_NAME, EnumSet.of(
          VertexState.SUCCEEDED));
    }

    @Override
    public List<Event> initialize() throws Exception {
      lock.lock();
      try {
        // Parked here until onVertexStateUpdated signals completion.
        condition.await();
      } finally {
        lock.unlock();
      }
      return null;
    }

    @Override
    public void handleInputInitializerEvent(List<InputInitializerEvent> events) throws Exception {
      lock.lock();
      try {
        for (InputInitializerEvent event : events) {
          // Events must originate from the expected vertex, one per task.
          Preconditions.checkArgument(
              event.getSourceVertexName().equals(EVENT_GENERATING_VERTEX_NAME));
          int index = event.getUserPayload().getInt(0);
          Preconditions.checkState(!eventsSeen.get(index));
          eventsSeen.set(index);
        }
      } finally {
        lock.unlock();
      }
    }

    @Override
    public void onVertexStateUpdated(VertexStateUpdate stateUpdate) {
      lock.lock();
      try {
        Preconditions.checkArgument(stateUpdate.getVertexState() == VertexState.SUCCEEDED);
        // Only release initialize() once every task's event has arrived.
        if (eventsSeen.cardinality() ==
            getContext().getVertexNumTasks(EVENT_GENERATING_VERTEX_NAME)) {
          condition.signal();
        } else {
          throw new IllegalStateException(
              "Received VertexState SUCCEEDED before receiving all InputInitializerEvents");
        }
      } finally {
        lock.unlock();
      }
    }
  }
}
/******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. ******************************************************************************/ package org.apache.olingo.odata2.fit.client; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import java.net.URI; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.olingo.odata2.api.ODataCallback; import org.apache.olingo.odata2.api.ODataService; import org.apache.olingo.odata2.api.edm.Edm; import org.apache.olingo.odata2.api.edm.EdmEntitySetInfo; import org.apache.olingo.odata2.api.edm.provider.EdmProvider; import org.apache.olingo.odata2.api.ep.EntityProvider; import org.apache.olingo.odata2.api.ep.EntityProviderWriteProperties; import org.apache.olingo.odata2.api.ep.callback.TombstoneCallback; import org.apache.olingo.odata2.api.ep.callback.TombstoneCallbackResult; import org.apache.olingo.odata2.api.ep.entry.DeletedEntryMetadata; import org.apache.olingo.odata2.api.ep.feed.ODataDeltaFeed; import org.apache.olingo.odata2.api.ep.feed.ODataFeed; import 
org.apache.olingo.odata2.api.exception.ODataException;
import org.apache.olingo.odata2.api.processor.ODataResponse;
import org.apache.olingo.odata2.api.processor.ODataSingleProcessor;
import org.apache.olingo.odata2.api.uri.info.GetEntitySetUriInfo;
import org.apache.olingo.odata2.core.exception.ODataRuntimeException;
import org.apache.olingo.odata2.core.processor.ODataSingleProcessorService;
import org.apache.olingo.odata2.fit.client.util.Client;
import org.apache.olingo.odata2.ref.edm.ScenarioEdmProvider;
import org.apache.olingo.odata2.testutil.fit.AbstractFitTest;
import org.apache.olingo.odata2.testutil.server.ServletType;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

@Ignore
public class ClientDeltaResponseTest extends AbstractFitTest {

  public ClientDeltaResponseTest(final ServletType servletType) {
    super(servletType);
  }

  private static final String DELTATOKEN_1234 = "!deltatoken=1234";

  // Mutable on purpose: individual tests tune how many rooms / deleted rooms
  // the stub processor serves before issuing requests.
  private static int roomDataCount = 2;
  private static int deletedRoomDataCount = 2;

  private Client client;
  StubProcessor processor;

  @Before
  @Override
  public void before() {
    super.before();
    try {
      client = new Client(getEndpoint().toASCIIString());
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  @Override
  protected ODataService createService() throws ODataException {
    EdmProvider provider = new ScenarioEdmProvider();
    processor = new StubProcessor();
    return new ODataSingleProcessorService(provider, processor);
  }

  // Serves a fixed set of rooms; when the request carries the delta token it
  // additionally reports deleted rooms (tombstones) through TombstoneCallback,
  // and always advertises a delta link ending in the token.
  private class StubProcessor extends ODataSingleProcessor {

    @Override
    public ODataResponse readEntitySet(final GetEntitySetUriInfo uriInfo, final String contentType)
        throws ODataException {
      try {
        ArrayList<Map<String, Object>> deletedRoomData = null;
        ODataResponse response = null;
        EntityProviderWriteProperties properties = null;
        URI requestUri = getContext().getPathInfo().getRequestUri();
        // Only delta requests (query contains the token) include tombstones.
        if (requestUri.getQuery() != null && requestUri.getQuery().contains(DELTATOKEN_1234)) {
          deletedRoomData = createDeletedRoomData();
        }
        URI deltaLink;
        // Rebuild the request URI with the delta token as its query part.
        deltaLink = new URI(requestUri.getScheme(), requestUri.getUserInfo(),
            requestUri.getHost(), requestUri.getPort(), requestUri.getPath(),
            DELTATOKEN_1234, requestUri.getFragment());
        TombstoneCallback tombstoneCallback =
            new TombstoneCallbackImpl(deletedRoomData, deltaLink.toASCIIString());
        HashMap<String, ODataCallback> callbacks = new HashMap<String, ODataCallback>();
        callbacks.put(TombstoneCallback.CALLBACK_KEY_TOMBSTONE, tombstoneCallback);
        properties = EntityProviderWriteProperties
            .serviceRoot(getContext().getPathInfo().getServiceRoot()).callbacks(callbacks)
            .build();
        response = EntityProvider.writeFeed(contentType, uriInfo.getTargetEntitySet(),
            createRoomData(), properties);
        return response;
      } catch (Exception e) {
        throw new ODataRuntimeException(e);
      }
    }

    // Rooms with ids 1..roomDataCount.
    private ArrayList<Map<String, Object>> createRoomData() {
      ArrayList<Map<String, Object>> roomsData = new ArrayList<Map<String, Object>>();
      for (int i = 1; i <= roomDataCount; i++) {
        Map<String, Object> roomData = new HashMap<String, Object>();
        roomData.put("Id", String.valueOf(i));
        roomData.put("Seats", i);
        roomData.put("Version", i);
        roomsData.add(roomData);
      }
      return roomsData;
    }

    // Deleted rooms occupy the id range directly after the live ones.
    private ArrayList<Map<String, Object>> createDeletedRoomData() {
      ArrayList<Map<String, Object>> deletedRoomData = new ArrayList<Map<String, Object>>();
      for (int i = roomDataCount + 1; i < roomDataCount + 1 + deletedRoomDataCount; i++) {
        Map<String, Object> roomData = new HashMap<String, Object>();
        roomData.put("Id", String.valueOf(i));
        roomData.put("Seats", i);
        roomData.put("Version", i);
        deletedRoomData.add(roomData);
      }
      return deletedRoomData;
    }
  }

  @Test
  public void dummy() throws Exception {}

  @Test
  public void testEdm() throws Exception {
    Edm edm = client.getEdm();
    assertNotNull(edm);
    assertNotNull(edm.getDefaultEntityContainer());
  }

  @Test
  public void testEntitySets() throws Exception {
    List<EdmEntitySetInfo> sets = client.getEntitySets();
    assertNotNull(sets);
    assertEquals(6, sets.size());
  }

  private void
testDeltaFeedWithDeltaLink(final String contentType) throws Exception { roomDataCount = 3; deletedRoomDataCount = 4; ODataFeed feed = client.readFeed("Container1", "Rooms", contentType); String deltaLink = feed.getFeedMetadata().getDeltaLink(); assertNotNull(feed); assertEquals(roomDataCount, feed.getEntries().size()); assertEquals(getEndpoint().toASCIIString() + "Rooms?" + DELTATOKEN_1234, feed.getFeedMetadata().getDeltaLink()); ODataDeltaFeed deltaFeed = client.readDeltaFeed("Container1", "Rooms", contentType, deltaLink); assertNotNull(deltaFeed); assertEquals(roomDataCount, deltaFeed.getEntries().size()); assertEquals(deltaLink, deltaFeed.getFeedMetadata().getDeltaLink()); List<DeletedEntryMetadata> deletedEntries = deltaFeed.getDeletedEntries(); assertNotNull(deletedEntries); assertEquals(deletedRoomDataCount, deletedEntries.size()); for (int i = 0; i < deletedRoomDataCount; i++) { assertEquals("http://localhost:19000/abc/ClientDeltaResponseTest/Rooms('" + (roomDataCount + i + 1) + "')", deletedEntries.get(i).getUri()); if ("application/json".equals(contentType)) { assertNull(deletedEntries.get(i).getWhen()); } else { assertNotNull(deletedEntries.get(i).getWhen()); } } } private void testDeltaFeedWithZeroEntries(final String contentType) throws Exception { roomDataCount = 0; deletedRoomDataCount = 0; ODataFeed feed = client.readFeed("Container1", "Rooms", contentType); String deltaLink = feed.getFeedMetadata().getDeltaLink(); assertNotNull(feed); assertEquals(roomDataCount, feed.getEntries().size()); assertEquals(getEndpoint().toASCIIString() + "Rooms?" 
+ DELTATOKEN_1234, feed.getFeedMetadata().getDeltaLink()); ODataDeltaFeed deltaFeed = client.readDeltaFeed("Container1", "Rooms", contentType, deltaLink); assertNotNull(deltaFeed); assertEquals(roomDataCount, deltaFeed.getEntries().size()); assertEquals(deltaLink, deltaFeed.getFeedMetadata().getDeltaLink()); List<DeletedEntryMetadata> deletedEntries = deltaFeed.getDeletedEntries(); assertNotNull(deletedEntries); assertEquals(deletedRoomDataCount, deletedEntries.size()); } @Test public void testDeltaFeedWithDeltaLinkXml() throws Exception { testDeltaFeedWithDeltaLink("application/atom+xml"); } @Test public void testFeedWithDeltaLinkJson() throws Exception { testDeltaFeedWithDeltaLink("application/json"); } @Test public void testDeltaFeedWithZeroEntriesXml() throws Exception { testDeltaFeedWithZeroEntries("application/atom+xml"); } @Test public void testFeedWithZeroEntriesJson() throws Exception { testDeltaFeedWithZeroEntries("application/json"); } static public class TombstoneCallbackImpl implements TombstoneCallback { private ArrayList<Map<String, Object>> deletedEntriesData; private String deltaLink = null; public TombstoneCallbackImpl(final ArrayList<Map<String, Object>> deletedEntriesData, final String deltaLink) { this.deletedEntriesData = deletedEntriesData; this.deltaLink = deltaLink; } @Override public TombstoneCallbackResult getTombstoneCallbackResult() { TombstoneCallbackResult result = new TombstoneCallbackResult(); result.setDeletedEntriesData(deletedEntriesData); result.setDeltaLink(deltaLink); return result; } } }
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2002-2017 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/

package org.pentaho.di.trans.steps.multimerge;

import java.util.ArrayList;
import java.util.List;
import java.util.PriorityQueue;

import org.pentaho.di.core.RowSet;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.core.row.RowMeta;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransHopMeta;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepIOMetaInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.di.trans.step.errorhandling.StreamInterface;

/**
 * Merge rows from 2 sorted streams and output joined rows with matched key fields. Use this instead of hash join if
 * both your input streams are too big to fit in memory. Note that both the inputs must be sorted on the join key.
 *
 * This is a first prototype implementation that only handles two streams and inner join. It also always outputs all
 * values from both streams. Ideally, we should: 1) Support any number of incoming streams 2) Allow user to choose the
 * join type (inner, outer) for each stream 3) Allow user to choose which fields to push to next step 4) Have multiple
 * output ports as follows: a) Containing matched records b) Unmatched records for each input port 5) Support incoming
 * rows to be sorted either on ascending or descending order. The currently implementation only supports ascending
 *
 * @author Biswapesh
 * @since 24-nov-2006
 */
public class MultiMergeJoin extends BaseStep implements StepInterface {
  private static Class<?> PKG = MultiMergeJoinMeta.class; // for i18n purposes, needed by Translator2!!

  private MultiMergeJoinMeta meta;
  private MultiMergeJoinData data;

  public MultiMergeJoin( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  /**
   * One-time setup executed before the first row is processed: resolves the (enabled) input
   * streams, primes one row from each input, resolves key-field indexes per stream, builds the
   * merged output row meta, and seeds the priority queue that drives the merge.
   *
   * @return false when no enabled input stream remains (caller should finish the step)
   * @throws KettleException when a referenced stream/hop/step/field cannot be found
   */
  private boolean processFirstRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (MultiMergeJoinMeta) smi;
    data = (MultiMergeJoinData) sdi;

    TransMeta transMeta = getTransMeta();
    TransHopMeta transHopMeta;
    StepIOMetaInterface stepIOMeta = meta.getStepIOMeta();
    List<StreamInterface> infoStreams = stepIOMeta.getInfoStreams();
    StreamInterface stream;
    StepMeta toStepMeta = meta.getParentStepMeta();
    StepMeta fromStepMeta;
    ArrayList<String> inputStepNameList = new ArrayList<String>();
    String[] inputStepNames = meta.getInputSteps();
    String inputStepName;
    // Keep only the input steps whose hop into this step is enabled.
    for ( int i = 0; i < infoStreams.size(); i++ ) {
      inputStepName = inputStepNames[i];
      stream = infoStreams.get( i );
      fromStepMeta = stream.getStepMeta();
      if ( fromStepMeta == null ) {
        // should not arrive here, should typically have been caught by init.
        throw new KettleException( BaseMessages.getString( PKG, "MultiMergeJoin.Log.UnableToFindReferenceStream",
          inputStepName ) );
      }
      // check the hop
      transHopMeta = transMeta.findTransHop( fromStepMeta, toStepMeta, true );
      // there is no hop: this is unexpected.
      if ( transHopMeta == null ) {
        // should not arrive here, should typically have been caught by init.
        throw new KettleException( BaseMessages.getString( PKG, "MultiMergeJoin.Log.UnableToFindReferenceStream",
          inputStepName ) );
      } else if ( transHopMeta.isEnabled() ) {
        inputStepNameList.add( inputStepName );
      } else {
        logDetailed( BaseMessages.getString( PKG, "MultiMergeJoin.Log.IgnoringStep", inputStepName ) );
      }
    }
    int streamSize = inputStepNameList.size();
    if ( streamSize == 0 ) {
      // Nothing to merge.
      return false;
    }

    String keyField;
    String[] keyFields; // NOTE(review): declared but never used below.

    data.rowSets = new RowSet[streamSize];
    RowSet rowSet;
    Object[] row;
    data.rows = new Object[streamSize][];
    data.metas = new RowMetaInterface[streamSize];
    data.rowLengths = new int[streamSize];
    MultiMergeJoinData.QueueComparator comparator = new MultiMergeJoinData.QueueComparator( data );
    data.queue = new PriorityQueue<MultiMergeJoinData.QueueEntry>( streamSize, comparator );
    data.results = new ArrayList<List<Object[]>>( streamSize );
    MultiMergeJoinData.QueueEntry queueEntry;
    data.queueEntries = new MultiMergeJoinData.QueueEntry[streamSize];
    data.drainIndices = new int[streamSize];
    data.keyNrs = new int[streamSize][];
    data.dummy = new Object[streamSize][];
    RowMetaInterface rowMeta;
    data.outputRowMeta = new RowMeta();
    // i walks the configured steps; j walks only the enabled ones (the compacted arrays).
    for ( int i = 0, j = 0; i < inputStepNames.length; i++ ) {
      inputStepName = inputStepNames[i];
      if ( !inputStepNameList.contains( inputStepName ) ) {
        // ignore step with disabled hop.
        continue;
      }
      queueEntry = new MultiMergeJoinData.QueueEntry();
      queueEntry.index = j;
      data.queueEntries[j] = queueEntry;

      data.results.add( new ArrayList<Object[]>() );

      rowSet = findInputRowSet( inputStepName );
      if ( rowSet == null ) {
        throw new KettleException( BaseMessages.getString( PKG, "MultiMergeJoin.Exception.UnableToFindSpecifiedStep",
          inputStepName ) );
      }
      data.rowSets[j] = rowSet;

      row = getRowFrom( rowSet );
      data.rows[j] = row;
      if ( row == null ) {
        // Empty input stream: take the row layout from the step metadata instead.
        rowMeta = getTransMeta().getStepFields( inputStepName );
        data.metas[j] = rowMeta;
      } else {
        // Resolve the comma-separated key fields to column indexes for this stream.
        queueEntry.row = row;
        rowMeta = rowSet.getRowMeta();

        keyField = meta.getKeyFields()[i];
        String[] keyFieldParts = keyField.split( "," );
        String keyFieldPart;
        data.keyNrs[j] = new int[keyFieldParts.length];
        for ( int k = 0; k < keyFieldParts.length; k++ ) {
          keyFieldPart = keyFieldParts[k];
          data.keyNrs[j][k] = rowMeta.indexOfValue( keyFieldPart );
          if ( data.keyNrs[j][k] < 0 ) {
            String message =
              BaseMessages.getString( PKG, "MultiMergeJoin.Exception.UnableToFindFieldInReferenceStream",
                keyFieldPart, inputStepName );
            logError( message );
            throw new KettleStepException( message );
          }
        }
        data.metas[j] = rowMeta;
        data.queue.add( data.queueEntries[j] );
      }
      // Output layout is the concatenation of all input layouts, in stream order.
      data.outputRowMeta.mergeRowMeta( rowMeta.clone() );
      data.rowLengths[j] = rowMeta.size();
      // Per-stream all-null row used to pad non-matching streams in an (outer) optional join.
      data.dummy[j] = RowDataUtil.allocateRowData( rowMeta.size() );
      j++;
    }
    return true;
  }

  /**
   * Main processing loop: pulls the minimal key group off the priority queue, drains all rows
   * with an equal key from every participating stream, and emits the cartesian product of the
   * per-stream result lists. Behavior depends on {@code data.optional}: when true (outer-style
   * join) missing streams are padded with dummy (null) rows; when false (inner join) a group is
   * only emitted if ALL streams matched.
   *
   * @return true to be called again; false when the step is done
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (MultiMergeJoinMeta) smi;
    data = (MultiMergeJoinData) sdi;
    if ( first ) {
      if ( !processFirstRow( smi, sdi ) ) {
        setOutputDone();
        return false;
      }
      first = false;
    }

    if ( log.isRowLevel() ) {
      String metaString =
        BaseMessages.getString( PKG, "MultiMergeJoin.Log.DataInfo", data.metas[0].getString( data.rows[0] ) + "" );
      for ( int i = 1; i < data.metas.length; i++ ) {
        metaString += data.metas[i].getString( data.rows[i] );
      }
      logRowlevel( metaString );
    }

    /*
     * We can stop processing if any of the following is true: a) All streams are empty b) Any stream is empty and join
     * type is INNER
     */
    int streamSize = data.metas.length;
    if ( data.optional ) {
      // Outer-style join: emit every key group, padding streams that have no match.
      if ( data.queue.isEmpty() ) {
        setOutputDone();
        return false;
      }
      // Pop the minimum key group: the queue head plus every entry comparing equal to it.
      MultiMergeJoinData.QueueEntry minEntry = data.queue.poll();
      int drainSize = 1;
      data.rows[minEntry.index] = minEntry.row;
      data.drainIndices[0] = minEntry.index;
      MultiMergeJoinData.QueueComparator comparator = (MultiMergeJoinData.QueueComparator) data.queue.comparator();
      while ( !data.queue.isEmpty() && comparator.compare( data.queue.peek(), minEntry ) == 0 ) {
        MultiMergeJoinData.QueueEntry entry = data.queue.poll();
        data.rows[entry.index] = entry.row;
        data.drainIndices[drainSize++] = entry.index;
      }
      int index;
      Object[] row = null;
      // rows from nonempty input streams match: get all equal rows and create result set
      for ( int i = 0; i < drainSize; i++ ) {
        index = data.drainIndices[i];
        data.results.get( index ).add( data.rows[index] );
        // Consume every following row with the same key from this stream.
        while ( !isStopped()
          && ( ( row = getRowFrom( data.rowSets[index] ) ) != null && data.metas[index].compare(
            data.rows[index], row, data.keyNrs[index] ) == 0 ) ) {
          data.results.get( index ).add( row );
        }
        if ( isStopped() ) {
          return false;
        }
        if ( row != null ) {
          // First row of the NEXT key group: requeue it for the next iteration.
          data.queueEntries[index].row = row;
          data.queue.add( data.queueEntries[index] );
        }
      }
      // Streams without a match contribute a single dummy (all-null) row.
      for ( int i = 0; i < streamSize; i++ ) {
        data.drainIndices[i] = 0;
        if ( data.results.get( i ).isEmpty() ) {
          data.results.get( i ).add( data.dummy[i] );
        }
      }
      // Emit the cartesian product of the per-stream result lists; drainIndices acts as a
      // mixed-radix counter over the lists.
      int current = 0;
      while ( true ) {
        for ( int i = 0; i < streamSize; i++ ) {
          data.rows[i] = data.results.get( i ).get( data.drainIndices[i] );
        }
        row = RowDataUtil.createResizedCopy( data.rows, data.rowLengths );
        putRow( data.outputRowMeta, row );
        while ( ++data.drainIndices[current] >= data.results.get( current ).size() ) {
          data.drainIndices[current] = 0;
          if ( ++current >= streamSize ) {
            break;
          }
        }
        if ( current >= streamSize ) {
          break;
        }
        current = 0;
      }
      for ( int i = 0; i < streamSize; i++ ) {
        data.results.get( i ).clear();
      }
    } else {
      // Inner join: every stream must still have rows, otherwise no further match is possible.
      if ( data.queue.size() < streamSize ) {
        data.queue.clear();
        // Drain the remaining inputs so upstream steps are not blocked.
        for ( int i = 0; i < streamSize; i++ ) {
          while ( data.rows[i] != null && !isStopped() ) {
            data.rows[i] = getRowFrom( data.rowSets[i] );
          }
        }
        setOutputDone();
        return false;
      }
      MultiMergeJoinData.QueueEntry minEntry = data.queue.poll();
      int drainSize = 1;
      data.rows[minEntry.index] = minEntry.row;
      data.drainIndices[0] = minEntry.index;
      MultiMergeJoinData.QueueComparator comparator = (MultiMergeJoinData.QueueComparator) data.queue.comparator();
      while ( !data.queue.isEmpty() && comparator.compare( data.queue.peek(), minEntry ) == 0 ) {
        MultiMergeJoinData.QueueEntry entry = data.queue.poll();
        data.rows[entry.index] = entry.row;
        data.drainIndices[drainSize++] = entry.index;
      }
      Object[] row = null;
      if ( data.queue.isEmpty() ) {
        // Queue drained completely => ALL streams share the minimum key: a real match.
        // rows from all input streams match: get all equal rows and create result set
        for ( int i = 0; i < streamSize; i++ ) {
          data.results.get( i ).add( data.rows[i] );
          while ( !isStopped()
            && ( ( row = getRowFrom( data.rowSets[i] ) ) != null && data.metas[i].compare(
              data.rows[i], row, data.keyNrs[i] ) == 0 ) ) {
            data.results.get( i ).add( row );
          }
          if ( isStopped() ) {
            return false;
          }
          if ( row != null ) {
            data.queueEntries[i].row = row;
            data.queue.add( data.queueEntries[i] );
          }
        }
        for ( int i = 0; i < streamSize; i++ ) {
          data.drainIndices[i] = 0;
        }
        // Emit the cartesian product of the matched rows (same counter scheme as above).
        int current = 0;
        while ( true ) {
          for ( int i = 0; i < streamSize; i++ ) {
            data.rows[i] = data.results.get( i ).get( data.drainIndices[i] );
          }
          row = RowDataUtil.createResizedCopy( data.rows, data.rowLengths );
          putRow( data.outputRowMeta, row );
          while ( ++data.drainIndices[current] >= data.results.get( current ).size() ) {
            data.drainIndices[current] = 0;
            if ( ++current >= streamSize ) {
              break;
            }
          }
          if ( current >= streamSize ) {
            break;
          }
          current = 0;
        }
        for ( int i = 0; i < streamSize; i++ ) {
          data.results.get( i ).clear();
        }
      } else {
        // mismatch found and no results can be generated
        // Skip past the non-matching key group on each drained stream and requeue the
        // first row of its next group.
        for ( int i = 0; i < drainSize; i++ ) {
          int index = data.drainIndices[i];
          while ( ( row = getRowFrom( data.rowSets[index] ) ) != null
            && data.metas[index].compare( data.rows[index], row, data.keyNrs[index] ) == 0 ) {
            if ( isStopped() ) {
              break;
            }
          }
          if ( isStopped() || row == null ) {
            break;
          }
          data.queueEntries[index].row = row;
          data.queue.add( data.queueEntries[index] );
        }
        if ( isStopped() ) {
          return false;
        }
      }
    }

    if ( checkFeedback( getLinesRead() ) ) {
      logBasic( BaseMessages.getString( PKG, "MultiMergeJoin.LineNumber" ) + getLinesRead() );
    }
    return true;
  }

  /**
   * Validates that every configured input stream resolves to a step and maps the configured
   * join type to {@code data.optional} (outer-style vs. inner behavior in processRow).
   *
   * @see StepInterface#init(org.pentaho.di.trans.step.StepMetaInterface , org.pentaho.di.trans.step.StepDataInterface)
   */
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (MultiMergeJoinMeta) smi;
    data = (MultiMergeJoinData) sdi;
    if ( super.init( smi, sdi ) ) {
      StepIOMetaInterface stepIOMeta = meta.getStepIOMeta();
      String[] inputStepNames = meta.getInputSteps();
      String inputStepName;
      List<StreamInterface> infoStreams = stepIOMeta.getInfoStreams();
      StreamInterface stream;
      for ( int i = 0; i < infoStreams.size(); i++ ) {
        inputStepName = inputStepNames[i];
        stream = infoStreams.get( i );
        if ( stream.getStepMeta() == null ) {
          logError( BaseMessages.getString( PKG, "MultiMergeJoin.Log.UnableToFindReferenceStream", inputStepName ) );
          return false;
        }
      }
      // Translate the join-type name into the parallel optionals[] flag.
      String joinType = meta.getJoinType();
      for ( int i = 0; i < MultiMergeJoinMeta.join_types.length; ++i ) {
        if ( joinType.equalsIgnoreCase( MultiMergeJoinMeta.join_types[i] ) ) {
          data.optional = MultiMergeJoinMeta.optionals[i];
          return true;
        }
      }
      logError( BaseMessages.getString( PKG, "MultiMergeJoin.Log.InvalidJoinType", meta.getJoinType() ) );
      return false;
    }
    // NOTE(review): reached when super.init() fails, yet it returns true — looks suspicious;
    // confirm whether this should be `return false`.
    return true;
  }

  /**
   * Checks whether incoming rows are join compatible. This essentially means that the keys being compared should be of
   * the same datatype and both rows should have the same number of keys specified
   *
   * @param rows
   *          one row layout per input stream, in stream order
   *
   * @return true when templates are compatible (also when {@code rows} is null — nothing to check).
   */
  protected boolean isInputLayoutValid( RowMetaInterface[] rows ) {
    if ( rows != null ) {
      // Compare the key types
      String[] keyFields = meta.getKeyFields();
      /*
       * int nrKeyFields = keyFields.length;
       *
       * for (int i=0;i<nrKeyFields;i++) { ValueMetaInterface v1 = rows[0].searchValueMeta(keyFields[i]); if (v1 ==
       * null) { return false; } for (int j = 1; j < rows.length; j++) { ValueMetaInterface v2 =
       * rows[j].searchValueMeta(keyFields[i]); if (v2 == null) { return false; } if ( v1.getType()!=v2.getType() ) {
       * return false; } } }
       */
      // check 1 : keys are configured for each stream
      if ( rows.length != keyFields.length ) {
        logError( "keys are not configured for all the streams " );
        return false;
      }
      // check:2 No of keys are same for each stream
      int prevCount = 0;
      List<String[]> keyList = new ArrayList<String[]>();
      for ( int i = 0; i < keyFields.length; i++ ) {
        String[] keys = keyFields[i].split( "," );
        keyList.add( keys );
        int count = keys.length;
        if ( i != 0 && prevCount != count ) {
          logError( "Number of keys do not match " );
          return false;
        } else {
          prevCount = count;
        }
      }
      // check:3 compare the key types (position i of every stream's key list must agree)
      for ( int i = 0; i < prevCount; i++ ) {
        ValueMetaInterface preValue = null;
        for ( int j = 0; j < rows.length; j++ ) {
          ValueMetaInterface v = rows[j].searchValueMeta( keyList.get( j )[i] );
          if ( v == null ) {
            return false;
          }
          if ( j != 0 && v.getType() != preValue.getType() ) {
            logError( "key data type do not match " );
            return false;
          } else {
            preValue = v;
          }
        }
      }
    }
    // we got here, all seems to be ok.
    return true;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.compaction; import java.util.*; import java.util.concurrent.Callable; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.stream.Collectors; import com.google.common.annotations.VisibleForTesting; import com.google.common.collect.Iterables; import org.apache.cassandra.index.Index; import com.google.common.primitives.Ints; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.config.CFMetaData; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.db.Directories; import org.apache.cassandra.db.Memtable; import org.apache.cassandra.db.SerializationHeader; import org.apache.cassandra.db.PartitionPosition; import org.apache.cassandra.db.lifecycle.LifecycleTransaction; import org.apache.cassandra.db.lifecycle.SSTableSet; import org.apache.cassandra.dht.Range; import org.apache.cassandra.dht.Token; import org.apache.cassandra.io.sstable.Descriptor; import org.apache.cassandra.io.sstable.SSTableMultiWriter; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.io.sstable.ISSTableScanner; import org.apache.cassandra.io.sstable.metadata.MetadataCollector; import 
org.apache.cassandra.notifications.*; import org.apache.cassandra.schema.CompactionParams; import org.apache.cassandra.service.ActiveRepairService; import org.apache.cassandra.service.StorageService; /** * Manages the compaction strategies. * * Currently has two instances of actual compaction strategies per data directory - one for repaired data and one for * unrepaired data. This is done to be able to totally separate the different sets of sstables. */ public class CompactionStrategyManager implements INotificationConsumer { private static final Logger logger = LoggerFactory.getLogger(CompactionStrategyManager.class); private final ColumnFamilyStore cfs; private final List<AbstractCompactionStrategy> repaired = new ArrayList<>(); private final List<AbstractCompactionStrategy> unrepaired = new ArrayList<>(); private volatile boolean enabled = true; public volatile boolean isActive = true; private volatile CompactionParams params; private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock(); private final ReentrantReadWriteLock.ReadLock readLock = lock.readLock(); private final ReentrantReadWriteLock.WriteLock writeLock = lock.writeLock(); /* We keep a copy of the schema compaction parameters here to be able to decide if we should update the compaction strategy in maybeReloadCompactionStrategy() due to an ALTER. If a user changes the local compaction strategy and then later ALTERs a compaction parameter, we will use the new compaction parameters. 
*/ private volatile CompactionParams schemaCompactionParams; private Directories.DataDirectory[] locations; public CompactionStrategyManager(ColumnFamilyStore cfs) { cfs.getTracker().subscribe(this); logger.trace("{} subscribed to the data tracker.", this); this.cfs = cfs; reload(cfs.metadata); params = cfs.metadata.params.compaction; locations = getDirectories().getWriteableLocations(); enabled = params.isEnabled(); } /** * Return the next background task * * Returns a task for the compaction strategy that needs it the most (most estimated remaining tasks) * */ public AbstractCompactionTask getNextBackgroundTask(int gcBefore) { if (!isEnabled()) return null; maybeReload(cfs.metadata); List<AbstractCompactionStrategy> strategies = new ArrayList<>(); readLock.lock(); try { strategies.addAll(repaired); strategies.addAll(unrepaired); Collections.sort(strategies, (o1, o2) -> Ints.compare(o2.getEstimatedRemainingTasks(), o1.getEstimatedRemainingTasks())); for (AbstractCompactionStrategy strategy : strategies) { AbstractCompactionTask task = strategy.getNextBackgroundTask(gcBefore); if (task != null) return task; } } finally { readLock.unlock(); } return null; } public boolean isEnabled() { return enabled && isActive; } public void resume() { isActive = true; } /** * pause compaction while we cancel all ongoing compactions * * Separate call from enable/disable to not have to save the enabled-state externally */ public void pause() { isActive = false; } private void startup() { writeLock.lock(); try { for (SSTableReader sstable : cfs.getSSTables(SSTableSet.CANONICAL)) { if (sstable.openReason != SSTableReader.OpenReason.EARLY) getCompactionStrategyFor(sstable).addSSTable(sstable); } repaired.forEach(AbstractCompactionStrategy::startup); unrepaired.forEach(AbstractCompactionStrategy::startup); } finally { writeLock.unlock(); } } /** * return the compaction strategy for the given sstable * * returns differently based on the repaired status and which vnode the compaction 
strategy belongs to * @param sstable * @return */ private AbstractCompactionStrategy getCompactionStrategyFor(SSTableReader sstable) { int index = getCompactionStrategyIndex(cfs, getDirectories(), sstable); readLock.lock(); try { if (sstable.isRepaired()) return repaired.get(index); else return unrepaired.get(index); } finally { readLock.unlock(); } } /** * Get the correct compaction strategy for the given sstable. If the first token starts within a disk boundary, we * will add it to that compaction strategy. * * In the case we are upgrading, the first compaction strategy will get most files - we do not care about which disk * the sstable is on currently (unless we don't know the local tokens yet). Once we start compacting we will write out * sstables in the correct locations and give them to the correct compaction strategy instance. * * @param cfs * @param locations * @param sstable * @return */ public static int getCompactionStrategyIndex(ColumnFamilyStore cfs, Directories locations, SSTableReader sstable) { if (!cfs.getPartitioner().splitter().isPresent()) return 0; List<PartitionPosition> boundaries = StorageService.getDiskBoundaries(cfs, locations.getWriteableLocations()); if (boundaries == null) { Directories.DataDirectory[] directories = locations.getWriteableLocations(); // try to figure out location based on sstable directory: for (int i = 0; i < directories.length; i++) { Directories.DataDirectory directory = directories[i]; if (sstable.descriptor.directory.getAbsolutePath().startsWith(directory.location.getAbsolutePath())) return i; } return 0; } int pos = Collections.binarySearch(boundaries, sstable.first); assert pos < 0; // boundaries are .minkeybound and .maxkeybound so they should never be equal return -pos - 1; } public void shutdown() { writeLock.lock(); try { isActive = false; repaired.forEach(AbstractCompactionStrategy::shutdown); unrepaired.forEach(AbstractCompactionStrategy::shutdown); } finally { writeLock.unlock(); } } public void 
maybeReload(CFMetaData metadata) { // compare the old schema configuration to the new one, ignore any locally set changes. if (metadata.params.compaction.equals(schemaCompactionParams) && Arrays.equals(locations, cfs.getDirectories().getWriteableLocations())) // any drives broken? return; writeLock.lock(); try { reload(metadata); } finally { writeLock.unlock(); } } /** * Reload the compaction strategies * * Called after changing configuration and at startup. * @param metadata */ private void reload(CFMetaData metadata) { boolean disabledWithJMX = !enabled && shouldBeEnabled(); if (!metadata.params.compaction.equals(schemaCompactionParams)) logger.trace("Recreating compaction strategy - compaction parameters changed for {}.{}", cfs.keyspace.getName(), cfs.getTableName()); else if (!Arrays.equals(locations, cfs.getDirectories().getWriteableLocations())) logger.trace("Recreating compaction strategy - writeable locations changed for {}.{}", cfs.keyspace.getName(), cfs.getTableName()); setStrategy(metadata.params.compaction); schemaCompactionParams = metadata.params.compaction; if (disabledWithJMX || !shouldBeEnabled()) disable(); else enable(); startup(); } public void replaceFlushed(Memtable memtable, Collection<SSTableReader> sstables) { cfs.getTracker().replaceFlushed(memtable, sstables); if (sstables != null && !sstables.isEmpty()) CompactionManager.instance.submitBackground(cfs); } public int getUnleveledSSTables() { readLock.lock(); try { if (repaired.get(0) instanceof LeveledCompactionStrategy && unrepaired.get(0) instanceof LeveledCompactionStrategy) { int count = 0; for (AbstractCompactionStrategy strategy : repaired) count += ((LeveledCompactionStrategy) strategy).getLevelSize(0); for (AbstractCompactionStrategy strategy : unrepaired) count += ((LeveledCompactionStrategy) strategy).getLevelSize(0); return count; } } finally { readLock.unlock(); } return 0; } public int[] getSSTableCountPerLevel() { readLock.lock(); try { if (repaired.get(0) instanceof 
LeveledCompactionStrategy && unrepaired.get(0) instanceof LeveledCompactionStrategy) { int[] res = new int[LeveledManifest.MAX_LEVEL_COUNT]; for (AbstractCompactionStrategy strategy : repaired) { int[] repairedCountPerLevel = ((LeveledCompactionStrategy) strategy).getAllLevelSize(); res = sumArrays(res, repairedCountPerLevel); } for (AbstractCompactionStrategy strategy : unrepaired) { int[] unrepairedCountPerLevel = ((LeveledCompactionStrategy) strategy).getAllLevelSize(); res = sumArrays(res, unrepairedCountPerLevel); } return res; } } finally { readLock.unlock(); } return null; } private static int[] sumArrays(int[] a, int[] b) { int[] res = new int[Math.max(a.length, b.length)]; for (int i = 0; i < res.length; i++) { if (i < a.length && i < b.length) res[i] = a[i] + b[i]; else if (i < a.length) res[i] = a[i]; else res[i] = b[i]; } return res; } public boolean shouldDefragment() { readLock.lock(); try { assert repaired.get(0).getClass().equals(unrepaired.get(0).getClass()); return repaired.get(0).shouldDefragment(); } finally { readLock.unlock(); } } public Directories getDirectories() { readLock.lock(); try { assert repaired.get(0).getClass().equals(unrepaired.get(0).getClass()); return repaired.get(0).getDirectories(); } finally { readLock.unlock(); } } private void handleFlushNotification(Iterable<SSTableReader> added) { readLock.lock(); try { for (SSTableReader sstable : added) getCompactionStrategyFor(sstable).addSSTable(sstable); } finally { readLock.unlock(); } } private void handleListChangedNotification(Iterable<SSTableReader> added, Iterable<SSTableReader> removed) { // a bit of gymnastics to be able to replace sstables in compaction strategies // we use this to know that a compaction finished and where to start the next compaction in LCS Directories.DataDirectory [] locations = cfs.getDirectories().getWriteableLocations(); int locationSize = cfs.getPartitioner().splitter().isPresent() ? 
locations.length : 1; List<Set<SSTableReader>> repairedRemoved = new ArrayList<>(locationSize); List<Set<SSTableReader>> repairedAdded = new ArrayList<>(locationSize); List<Set<SSTableReader>> unrepairedRemoved = new ArrayList<>(locationSize); List<Set<SSTableReader>> unrepairedAdded = new ArrayList<>(locationSize); for (int i = 0; i < locationSize; i++) { repairedRemoved.add(new HashSet<>()); repairedAdded.add(new HashSet<>()); unrepairedRemoved.add(new HashSet<>()); unrepairedAdded.add(new HashSet<>()); } for (SSTableReader sstable : removed) { int i = getCompactionStrategyIndex(cfs, getDirectories(), sstable); if (sstable.isRepaired()) repairedRemoved.get(i).add(sstable); else unrepairedRemoved.get(i).add(sstable); } for (SSTableReader sstable : added) { int i = getCompactionStrategyIndex(cfs, getDirectories(), sstable); if (sstable.isRepaired()) repairedAdded.get(i).add(sstable); else unrepairedAdded.get(i).add(sstable); } // we need write lock here since we might be moving sstables between strategies writeLock.lock(); try { for (int i = 0; i < locationSize; i++) { if (!repairedRemoved.get(i).isEmpty()) repaired.get(i).replaceSSTables(repairedRemoved.get(i), repairedAdded.get(i)); else repaired.get(i).addSSTables(repairedAdded.get(i)); if (!unrepairedRemoved.get(i).isEmpty()) unrepaired.get(i).replaceSSTables(unrepairedRemoved.get(i), unrepairedAdded.get(i)); else unrepaired.get(i).addSSTables(unrepairedAdded.get(i)); } } finally { writeLock.unlock(); } } private void handleRepairStatusChangedNotification(Iterable<SSTableReader> sstables) { // we need a write lock here since we move sstables from one strategy instance to another writeLock.lock(); try { for (SSTableReader sstable : sstables) { int index = getCompactionStrategyIndex(cfs, getDirectories(), sstable); if (sstable.isRepaired()) { unrepaired.get(index).removeSSTable(sstable); repaired.get(index).addSSTable(sstable); } else { repaired.get(index).removeSSTable(sstable); 
unrepaired.get(index).addSSTable(sstable); } } } finally { writeLock.unlock(); } } private void handleDeletingNotification(SSTableReader deleted) { readLock.lock(); try { getCompactionStrategyFor(deleted).removeSSTable(deleted); } finally { readLock.unlock(); } } public void handleNotification(INotification notification, Object sender) { maybeReload(cfs.metadata); if (notification instanceof SSTableAddedNotification) { handleFlushNotification(((SSTableAddedNotification) notification).added); } else if (notification instanceof SSTableListChangedNotification) { SSTableListChangedNotification listChangedNotification = (SSTableListChangedNotification) notification; handleListChangedNotification(listChangedNotification.added, listChangedNotification.removed); } else if (notification instanceof SSTableRepairStatusChanged) { handleRepairStatusChangedNotification(((SSTableRepairStatusChanged) notification).sstables); } else if (notification instanceof SSTableDeletingNotification) { handleDeletingNotification(((SSTableDeletingNotification) notification).deleting); } } public void enable() { writeLock.lock(); try { if (repaired != null) repaired.forEach(AbstractCompactionStrategy::enable); if (unrepaired != null) unrepaired.forEach(AbstractCompactionStrategy::enable); // enable this last to make sure the strategies are ready to get calls. 
enabled = true; } finally { writeLock.unlock(); } } public void disable() { writeLock.lock(); try { // disable this first avoid asking disabled strategies for compaction tasks enabled = false; if (repaired != null) repaired.forEach(AbstractCompactionStrategy::disable); if (unrepaired != null) unrepaired.forEach(AbstractCompactionStrategy::disable); } finally { writeLock.unlock(); } } /** * Create ISSTableScanners from the given sstables * * Delegates the call to the compaction strategies to allow LCS to create a scanner * @param sstables * @param ranges * @return */ @SuppressWarnings("resource") public AbstractCompactionStrategy.ScannerList getScanners(Collection<SSTableReader> sstables, Collection<Range<Token>> ranges) { assert repaired.size() == unrepaired.size(); List<Set<SSTableReader>> repairedSSTables = new ArrayList<>(); List<Set<SSTableReader>> unrepairedSSTables = new ArrayList<>(); for (int i = 0; i < repaired.size(); i++) { repairedSSTables.add(new HashSet<>()); unrepairedSSTables.add(new HashSet<>()); } for (SSTableReader sstable : sstables) { if (sstable.isRepaired()) repairedSSTables.get(getCompactionStrategyIndex(cfs, getDirectories(), sstable)).add(sstable); else unrepairedSSTables.get(getCompactionStrategyIndex(cfs, getDirectories(), sstable)).add(sstable); } List<ISSTableScanner> scanners = new ArrayList<>(sstables.size()); readLock.lock(); try { for (int i = 0; i < repairedSSTables.size(); i++) { if (!repairedSSTables.get(i).isEmpty()) scanners.addAll(repaired.get(i).getScanners(repairedSSTables.get(i), ranges).scanners); } for (int i = 0; i < unrepairedSSTables.size(); i++) { if (!unrepairedSSTables.get(i).isEmpty()) scanners.addAll(unrepaired.get(i).getScanners(unrepairedSSTables.get(i), ranges).scanners); } return new AbstractCompactionStrategy.ScannerList(scanners); } finally { readLock.unlock(); } } public AbstractCompactionStrategy.ScannerList getScanners(Collection<SSTableReader> sstables) { return getScanners(sstables, null); } public 
Collection<Collection<SSTableReader>> groupSSTablesForAntiCompaction(Collection<SSTableReader> sstablesToGroup) { readLock.lock(); try { Map<Integer, List<SSTableReader>> groups = sstablesToGroup.stream().collect(Collectors.groupingBy((s) -> getCompactionStrategyIndex(cfs, getDirectories(), s))); Collection<Collection<SSTableReader>> anticompactionGroups = new ArrayList<>(); for (Map.Entry<Integer, List<SSTableReader>> group : groups.entrySet()) anticompactionGroups.addAll(unrepaired.get(group.getKey()).groupSSTablesForAntiCompaction(group.getValue())); return anticompactionGroups; } finally { readLock.unlock(); } } public long getMaxSSTableBytes() { readLock.lock(); try { return unrepaired.get(0).getMaxSSTableBytes(); } finally { readLock.unlock(); } } public AbstractCompactionTask getCompactionTask(LifecycleTransaction txn, int gcBefore, long maxSSTableBytes) { maybeReload(cfs.metadata); validateForCompaction(txn.originals(), cfs, getDirectories()); return getCompactionStrategyFor(txn.originals().iterator().next()).getCompactionTask(txn, gcBefore, maxSSTableBytes); } private static void validateForCompaction(Iterable<SSTableReader> input, ColumnFamilyStore cfs, Directories directories) { SSTableReader firstSSTable = Iterables.getFirst(input, null); assert firstSSTable != null; boolean repaired = firstSSTable.isRepaired(); int firstIndex = getCompactionStrategyIndex(cfs, directories, firstSSTable); for (SSTableReader sstable : input) { if (sstable.isRepaired() != repaired) throw new UnsupportedOperationException("You can't mix repaired and unrepaired data in a compaction"); if (firstIndex != getCompactionStrategyIndex(cfs, directories, sstable)) throw new UnsupportedOperationException("You can't mix sstables from different directories in a compaction"); } } public Collection<AbstractCompactionTask> getMaximalTasks(final int gcBefore, final boolean splitOutput) { maybeReload(cfs.metadata); // runWithCompactionsDisabled cancels active compactions and disables them, 
then we are able // to make the repaired/unrepaired strategies mark their own sstables as compacting. Once the // sstables are marked the compactions are re-enabled return cfs.runWithCompactionsDisabled(new Callable<Collection<AbstractCompactionTask>>() { @Override public Collection<AbstractCompactionTask> call() throws Exception { List<AbstractCompactionTask> tasks = new ArrayList<>(); readLock.lock(); try { for (AbstractCompactionStrategy strategy : repaired) { Collection<AbstractCompactionTask> task = strategy.getMaximalTask(gcBefore, splitOutput); if (task != null) tasks.addAll(task); } for (AbstractCompactionStrategy strategy : unrepaired) { Collection<AbstractCompactionTask> task = strategy.getMaximalTask(gcBefore, splitOutput); if (task != null) tasks.addAll(task); } } finally { readLock.unlock(); } if (tasks.isEmpty()) return null; return tasks; } }, false, false); } public AbstractCompactionTask getUserDefinedTask(Collection<SSTableReader> sstables, int gcBefore) { maybeReload(cfs.metadata); validateForCompaction(sstables, cfs, getDirectories()); readLock.lock(); try { return getCompactionStrategyFor(sstables.iterator().next()).getUserDefinedTask(sstables, gcBefore); } finally { readLock.unlock(); } } public int getEstimatedRemainingTasks() { int tasks = 0; readLock.lock(); try { for (AbstractCompactionStrategy strategy : repaired) tasks += strategy.getEstimatedRemainingTasks(); for (AbstractCompactionStrategy strategy : unrepaired) tasks += strategy.getEstimatedRemainingTasks(); } finally { readLock.unlock(); } return tasks; } public boolean shouldBeEnabled() { return params.isEnabled(); } public String getName() { readLock.lock(); try { return unrepaired.get(0).getName(); } finally { readLock.unlock(); } } public List<List<AbstractCompactionStrategy>> getStrategies() { readLock.lock(); try { return Arrays.asList(repaired, unrepaired); } finally { readLock.unlock(); } } public void setNewLocalCompactionStrategy(CompactionParams params) { 
logger.info("Switching local compaction strategy from {} to {}}", this.params, params); writeLock.lock(); try { setStrategy(params); if (shouldBeEnabled()) enable(); else disable(); startup(); } finally { writeLock.unlock(); } } private void setStrategy(CompactionParams params) { repaired.forEach(AbstractCompactionStrategy::shutdown); unrepaired.forEach(AbstractCompactionStrategy::shutdown); repaired.clear(); unrepaired.clear(); if (cfs.getPartitioner().splitter().isPresent()) { locations = cfs.getDirectories().getWriteableLocations(); for (int i = 0; i < locations.length; i++) { repaired.add(CFMetaData.createCompactionStrategyInstance(cfs, params)); unrepaired.add(CFMetaData.createCompactionStrategyInstance(cfs, params)); } } else { repaired.add(CFMetaData.createCompactionStrategyInstance(cfs, params)); unrepaired.add(CFMetaData.createCompactionStrategyInstance(cfs, params)); } this.params = params; } public CompactionParams getCompactionParams() { return params; } public boolean onlyPurgeRepairedTombstones() { return Boolean.parseBoolean(params.options().get(AbstractCompactionStrategy.ONLY_PURGE_REPAIRED_TOMBSTONES)); } public SSTableMultiWriter createSSTableMultiWriter(Descriptor descriptor, long keyCount, long repairedAt, MetadataCollector collector, SerializationHeader header, Collection<Index> indexes, LifecycleTransaction txn) { readLock.lock(); try { if (repairedAt == ActiveRepairService.UNREPAIRED_SSTABLE) { return unrepaired.get(0).createSSTableMultiWriter(descriptor, keyCount, repairedAt, collector, header, indexes, txn); } else { return repaired.get(0).createSSTableMultiWriter(descriptor, keyCount, repairedAt, collector, header, indexes, txn); } } finally { readLock.unlock(); } } }
/* * Copyright 2015-present Open Networking Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.onosproject.net.behaviour; import com.google.common.base.Strings; import com.google.common.collect.Lists; import org.onosproject.net.DeviceId; import org.onosproject.net.SparseAnnotations; import java.util.List; import java.util.Optional; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; /** * The default implementation of bridge. 
*/
public final class DefaultBridgeDescription implements BridgeDescription {

    // Mandatory bridge name; validated non-null in the constructor.
    private final String name;

    /* Optional OpenFlow configurations */
    private final List<ControllerInfo> controllers;
    private final boolean enableLocalController;
    private final Optional<FailMode> failMode;
    private final Optional<String> datapathId;
    private final Optional<String> datapathType;
    private final Optional<List<ControlProtocolVersion>> controlProtocols;
    private final Optional<Boolean> disableInBand;
    private final Optional<Boolean> mcastSnoopingEnable;

    /* Adds more configurations */

    /**
     * Constructs an immutable bridge description.
     * Instantiation goes through {@link Builder}; this constructor is private.
     *
     * @param name bridge name; must not be null
     */
    private DefaultBridgeDescription(String name,
                                     List<ControllerInfo> controllers,
                                     boolean enableLocalController,
                                     Optional<FailMode> failMode,
                                     Optional<String> datapathId,
                                     Optional<String> datapathType,
                                     Optional<Boolean> disableInBand,
                                     Optional<Boolean> mcastSnoopingEnable,
                                     Optional<List<ControlProtocolVersion>> controlProtocols) {
        this.name = checkNotNull(name);
        this.controllers = controllers;
        this.enableLocalController = enableLocalController;
        this.failMode = failMode;
        this.datapathId = datapathId;
        this.datapathType = datapathType;
        this.disableInBand = disableInBand;
        this.mcastSnoopingEnable = mcastSnoopingEnable;
        this.controlProtocols = controlProtocols;
    }

    // This description carries no annotations; always returns null.
    @Override
    public SparseAnnotations annotations() {
        return null;
    }

    @Override
    public String name() {
        return name;
    }

    @Override
    public List<ControllerInfo> controllers() {
        return controllers;
    }

    @Override
    public boolean enableLocalController() {
        return enableLocalController;
    }

    @Override
    public Optional<FailMode> failMode() {
        return failMode;
    }

    @Override
    public Optional<String> datapathId() {
        return datapathId;
    }

    @Override
    public Optional<String> datapathType() {
        return datapathType;
    }

    @Override
    public Optional<List<ControlProtocolVersion>> controlProtocols() {
        return controlProtocols;
    }

    // Device id is derived from the datapath id ("of:" prefix), when one was given.
    @Override
    public Optional<DeviceId> deviceId() {
        if (datapathId.isPresent()) {
            return Optional.of(DeviceId.deviceId("of:" + datapathId.get()));
        } else {
            return Optional.empty();
        }
    }

    @Override
    public Optional<Boolean> disableInBand() {
        return disableInBand;
    }

    @Override
    public Optional<Boolean> mcastSnoopingEnable() {
        return mcastSnoopingEnable;
    }

    /**
     * Creates and returns a new builder instance.
     *
     * @return new builder
     */
    public static BridgeDescription.Builder builder() {
        return new Builder();
    }

    // Mutable builder; all optional settings default to Optional.empty()
    // and boolean flags default to false until the corresponding setter is called.
    public static final class Builder implements BridgeDescription.Builder {

        private String name;
        private List<ControllerInfo> controllers = Lists.newArrayList();
        private boolean enableLocalController = false;
        private Optional<FailMode> failMode = Optional.empty();
        private Optional<String> datapathId = Optional.empty();
        private Optional<String> datapathType = Optional.empty();
        private Optional<List<ControlProtocolVersion>> controlProtocols = Optional.empty();
        private Optional<Boolean> disableInBand = Optional.empty();
        private Optional<Boolean> mcastSnoopingEnable = Optional.empty();

        private Builder() {
        }

        @Override
        public BridgeDescription build() {
            return new DefaultBridgeDescription(name, controllers, enableLocalController,
                                                failMode, datapathId, datapathType,
                                                disableInBand, mcastSnoopingEnable,
                                                controlProtocols);
        }

        @Override
        public Builder name(String name) {
            // name is mandatory and must be non-empty
            checkArgument(!Strings.isNullOrEmpty(name));
            this.name = name;
            return this;
        }

        @Override
        public Builder controllers(List<ControllerInfo> controllers) {
            // defensive copy; a null argument leaves the current (empty) list untouched
            if (controllers != null) {
                this.controllers = Lists.newArrayList(controllers);
            }
            return this;
        }

        @Override
        public Builder enableLocalController() {
            this.enableLocalController = true;
            return this;
        }

        @Override
        public Builder failMode(FailMode failMode) {
            this.failMode = Optional.ofNullable(failMode);
            return this;
        }

        @Override
        public Builder datapathId(String datapathId) {
            this.datapathId = Optional.ofNullable(datapathId);
            return this;
        }

        @Override
        public Builder datapathType(String datapathType) {
            this.datapathType = Optional.ofNullable(datapathType);
            return this;
        }

        @Override
        public Builder controlProtocols(List<ControlProtocolVersion> controlProtocols) {
            this.controlProtocols = Optional.ofNullable(controlProtocols);
            return this;
        }

        @Override
        public Builder disableInBand() {
            this.disableInBand = Optional.of(Boolean.TRUE);
            return this;
        }

        @Override
        public BridgeDescription.Builder mcastSnoopingEnable() {
            this.mcastSnoopingEnable = Optional.of(Boolean.TRUE);
            return this;
        }
    }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.rcfile.binary;

import io.airlift.slice.Slice;
import io.airlift.slice.SliceOutput;
import io.prestosql.rcfile.ColumnData;
import io.prestosql.rcfile.EncodeOutput;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.type.Type;

import static io.airlift.slice.SizeOf.SIZE_OF_INT;
import static io.prestosql.rcfile.RcFileDecoderUtils.decodeVIntSize;
import static io.prestosql.rcfile.RcFileDecoderUtils.isNegativeVInt;
import static io.prestosql.rcfile.RcFileDecoderUtils.readVInt;
import static io.prestosql.rcfile.RcFileDecoderUtils.writeVInt;
import static java.lang.Math.floorDiv;
import static java.lang.Math.floorMod;
import static java.lang.Math.toIntExact;

/**
 * Binary column encoding for Hive/RCFile timestamps.
 *
 * Wire layout per value: <seconds-low-32><nanos-vint>[<seconds-high-32-vint>]
 * where the low 32 bits of seconds are stored big-endian with the top bit used
 * as a "nanos/high-seconds follow" flag, nanos are stored with their decimal
 * digits reversed, and the sign of the nanos vint flags a trailing vint holding
 * the high bits of the seconds field. Values are surfaced as epoch millis.
 */
public class TimestampEncoding
        implements BinaryColumnEncoding
{
    private final Type type;

    public TimestampEncoding(Type type)
    {
        this.type = type;
    }

    // Writes every non-null position; EncodeOutput marks the entry boundary either way.
    @Override
    public void encodeColumn(Block block, SliceOutput output, EncodeOutput encodeOutput)
    {
        for (int position = 0; position < block.getPositionCount(); position++) {
            if (!block.isNull(position)) {
                writeTimestamp(output, type.getLong(block, position));
            }
            encodeOutput.closeEntry();
        }
    }

    @Override
    public void encodeValueInto(Block block, int position, SliceOutput output)
    {
        writeTimestamp(output, type.getLong(block, position));
    }

    // A zero-length entry denotes SQL NULL; everything else decodes to epoch millis.
    @Override
    public Block decodeColumn(ColumnData columnData)
    {
        int size = columnData.rowCount();
        BlockBuilder builder = type.createBlockBuilder(null, size);

        Slice slice = columnData.getSlice();
        for (int i = 0; i < size; i++) {
            int length = columnData.getLength(i);
            if (length != 0) {
                int offset = columnData.getOffset(i);
                long millis = getTimestamp(slice, offset);
                type.writeLong(builder, millis);
            }
            else {
                builder.appendNull();
            }
        }
        return builder.build();
    }

    // Timestamp values start at the entry offset; there is no header to skip.
    @Override
    public int getValueOffset(Slice slice, int offset)
    {
        return 0;
    }

    /**
     * Computes the encoded byte length: 4 fixed bytes, plus the nanos vint when the
     * flag bit is set, plus a seconds-high vint when the nanos vint is negative.
     */
    @Override
    public int getValueLength(Slice slice, int offset)
    {
        int length = 4;
        if (hasNanosVInt(slice.getByte(offset))) {
            int nanosVintLength = decodeVIntSize(slice, offset + 4);
            length += nanosVintLength;

            // is there extra data for "seconds"
            if (isNegativeVInt(slice, offset + 4)) {
                length += decodeVIntSize(slice, offset + 4 + nanosVintLength);
            }
        }
        return length;
    }

    @Override
    public void decodeValueInto(BlockBuilder builder, Slice slice, int offset, int length)
    {
        long millis = getTimestamp(slice, offset);
        type.writeLong(builder, millis);
    }

    // The first byte is the big-endian MSB of seconds-low-32; its top bit is the flag.
    private static boolean hasNanosVInt(byte b)
    {
        return (b >> 7) != 0;
    }

    private static long getTimestamp(Slice slice, int offset)
    {
        // read seconds (low 32 bits)
        int lowest31BitsOfSecondsAndFlag = Integer.reverseBytes(slice.getInt(offset));
        long seconds = lowest31BitsOfSecondsAndFlag & 0x7FFF_FFFF;
        offset += SIZE_OF_INT;

        int nanos = 0;
        // negative here means the flag (top) bit was set, so a nanos vint follows
        if (lowest31BitsOfSecondsAndFlag < 0) {
            // read nanos
            // this is an inline version of readVint so it can be stitched together
            // with the code to read the seconds high bits below
            byte nanosFirstByte = slice.getByte(offset);
            int nanosLength = decodeVIntSize(nanosFirstByte);
            nanos = (int) readVInt(slice, offset, nanosLength);
            nanos = decodeNanos(nanos);

            // read seconds (high 32 bits)
            if (isNegativeVInt(nanosFirstByte)) {
                // We compose the seconds field from two parts. The lowest 31 bits come from the first four
                // bytes. The higher-order bits come from the second VInt that follows the nanos field.
                long highBits = readVInt(slice, offset + nanosLength);
                seconds |= (highBits << 31);
            }
        }

        long millis = (seconds * 1000) + (nanos / 1_000_000);
        return millis;
    }

    @SuppressWarnings("NonReproducibleMathCall")
    private static int decodeNanos(int nanos)
    {
        if (nanos < 0) {
            // This means there is a second VInt present that specifies additional bits of the timestamp.
            // The reversed nanoseconds value is still encoded in this VInt.
            nanos = -nanos - 1;
        }
        // nanos were written with their base-10 digits reversed; undo that here
        int nanosDigits = (int) Math.floor(Math.log10(nanos)) + 1;

        // Reverse the nanos digits (base 10)
        int temp = 0;
        while (nanos != 0) {
            temp *= 10;
            temp += nanos % 10;
            nanos /= 10;
        }
        nanos = temp;

        // restore trailing zeros dropped by the digit reversal
        if (nanosDigits < 9) {
            nanos *= Math.pow(10, 9 - nanosDigits);
        }
        return nanos;
    }

    // Splits epoch millis into (seconds, nanos); floorDiv/floorMod keep nanos non-negative
    // for pre-epoch (negative millis) values.
    private static void writeTimestamp(SliceOutput output, long millis)
    {
        long seconds = floorDiv(millis, 1000);
        int nanos = toIntExact(floorMod(millis, 1000) * 1_000_000);
        writeTimestamp(seconds, nanos, output);
    }

    private static void writeTimestamp(long seconds, int nanos, SliceOutput output)
    {
        // <seconds-low-32><nanos>[<seconds-high-32>]
        //   seconds-low-32 is vint encoded
        //   nanos is reversed
        //   seconds-high-32 is vint encoded
        //   seconds-low-32 and nanos have the top bit set when second-high is present
        boolean hasSecondsHigh32 = seconds < 0 || seconds > Integer.MAX_VALUE;
        int nanosReversed = reverseDecimal(nanos);

        int secondsLow32 = (int) seconds;
        if (nanosReversed == 0 && !hasSecondsHigh32) {
            secondsLow32 &= 0X7FFF_FFFF;
        }
        else {
            secondsLow32 |= 0x8000_0000;
        }
        output.writeInt(Integer.reverseBytes(secondsLow32));

        if (hasSecondsHigh32 || nanosReversed != 0) {
            // The sign of the reversed-nanoseconds field indicates that there is a second VInt present
            int value = hasSecondsHigh32 ? ~nanosReversed : nanosReversed;
            writeVInt(output, value);
        }

        if (hasSecondsHigh32) {
            int secondsHigh32 = (int) (seconds >> 31);
            writeVInt(output, secondsHigh32);
        }
    }

    // Reverses the 9 base-10 digits of a nanos value (e.g. 123 -> 321000000).
    private static int reverseDecimal(int nanos)
    {
        int decimal = 0;
        if (nanos != 0) {
            int counter = 0;
            while (counter < 9) {
                decimal *= 10;
                decimal += nanos % 10;
                nanos /= 10;
                counter++;
            }
        }
        return decimal;
    }
}
/* * Copyright 2015 herd contributors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.finra.herd.service.helper; import java.util.ArrayList; import java.util.List; import com.amazonaws.services.elasticmapreduce.model.ActionOnFailure; import com.amazonaws.services.elasticmapreduce.model.StepConfig; import com.amazonaws.services.elasticmapreduce.util.StepFactory; import org.springframework.stereotype.Component; import org.springframework.util.CollectionUtils; import org.finra.herd.model.api.xml.EmrPigStep; import org.finra.herd.model.api.xml.EmrPigStepAddRequest; /** * The Pig step helper. 
*/
@Component
public class EmrPigStepHelper extends EmrStepHelper
{
    // Builds an EmrPigStep response mirroring the add request, trimming user-supplied
    // strings and resolving the S3-managed placeholder in the script location.
    @Override
    public Object buildResponseFromRequest(Object stepRequest)
    {
        EmrPigStepAddRequest emrPigStepAddRequest = (EmrPigStepAddRequest) stepRequest;

        EmrPigStep step = new EmrPigStep();
        step.setNamespace(emrPigStepAddRequest.getNamespace());
        step.setEmrClusterDefinitionName(emrPigStepAddRequest.getEmrClusterDefinitionName());
        step.setEmrClusterName(emrPigStepAddRequest.getEmrClusterName());
        step.setStepName(emrPigStepAddRequest.getStepName().trim());
        step.setScriptLocation(emrPigStepAddRequest.getScriptLocation().trim().replaceAll(getS3ManagedReplaceString(), emrHelper.getS3StagingLocation()));
        // Add the script arguments
        if (!CollectionUtils.isEmpty(emrPigStepAddRequest.getScriptArguments()))
        {
            List<String> scriptArguments = new ArrayList<>();
            step.setScriptArguments(scriptArguments);
            for (String argument : emrPigStepAddRequest.getScriptArguments())
            {
                scriptArguments.add(argument.trim());
            }
        }
        step.setContinueOnError(emrPigStepAddRequest.isContinueOnError());

        return step;
    }

    // Translates an EmrPigStep into the EMR StepConfig used to submit the pig script.
    @Override
    public StepConfig getEmrStepConfig(Object step)
    {
        EmrPigStep pigStep = (EmrPigStep) step;

        // Default ActionOnFailure is to cancel the execution and wait
        ActionOnFailure actionOnFailure = ActionOnFailure.CANCEL_AND_WAIT;

        if (pigStep.isContinueOnError() != null && pigStep.isContinueOnError())
        {
            // Override based on user input
            actionOnFailure = ActionOnFailure.CONTINUE;
        }

        // If there are no arguments to the pig script
        if (CollectionUtils.isEmpty(pigStep.getScriptArguments()))
        {
            // Just build the StepConfig object and return
            return new StepConfig().withName(pigStep.getStepName().trim()).withActionOnFailure(actionOnFailure)
                .withHadoopJarStep(new StepFactory().newRunPigScriptStep(pigStep.getScriptLocation().trim()));
        }
        // If there are arguments specified
        else
        {
            return new StepConfig().withName(pigStep.getStepName().trim()).withActionOnFailure(actionOnFailure).withHadoopJarStep(new StepFactory()
                .newRunPigScriptStep(pigStep.getScriptLocation().trim(), pigStep.getScriptArguments().toArray(new String[pigStep.getScriptArguments().size()])));
        }
    }

    // The following accessors simply cast the opaque request/step objects used by the
    // generic EmrStepHelper framework to their pig-specific types and delegate.

    @Override
    public String getRequestEmrClusterDefinitionName(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).getEmrClusterDefinitionName();
    }

    @Override
    public String getRequestEmrClusterId(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).getEmrClusterId();
    }

    @Override
    public String getRequestEmrClusterName(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).getEmrClusterName();
    }

    @Override
    public String getRequestNamespace(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).getNamespace();
    }

    @Override
    public String getRequestStepName(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).getStepName();
    }

    @Override
    public String getStepId(Object step)
    {
        return ((EmrPigStep) step).getId();
    }

    // These two identify which request/step classes this helper handles.
    @Override
    public String getStepRequestType()
    {
        return EmrPigStepAddRequest.class.getName();
    }

    @Override
    public String getStepType()
    {
        return EmrPigStep.class.getName();
    }

    @Override
    public Boolean isRequestContinueOnError(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).isContinueOnError();
    }

    @Override
    public void setRequestContinueOnError(Object stepRequest, Boolean continueOnError)
    {
        ((EmrPigStepAddRequest) stepRequest).setContinueOnError(continueOnError);
    }

    @Override
    public void setRequestEmrClusterDefinitionName(Object stepRequest, String clusterDefinitionName)
    {
        ((EmrPigStepAddRequest) stepRequest).setEmrClusterDefinitionName(clusterDefinitionName);
    }

    @Override
    public void setRequestEmrClusterId(Object stepRequest, String emrClusterId)
    {
        ((EmrPigStepAddRequest) stepRequest).setEmrClusterId(emrClusterId);
    }

    @Override
    public void setRequestEmrClusterName(Object stepRequest, String clusterName)
    {
        ((EmrPigStepAddRequest) stepRequest).setEmrClusterName(clusterName);
    }

    @Override
    public void setRequestNamespace(Object stepRequest, String namespace)
    {
        ((EmrPigStepAddRequest) stepRequest).setNamespace(namespace);
    }

    @Override
    public void setRequestStepName(Object stepRequest, String stepName)
    {
        ((EmrPigStepAddRequest) stepRequest).setStepName(stepName);
    }

    @Override
    public void setStepId(Object step, String stepId)
    {
        ((EmrPigStep) step).setId(stepId);
    }

    // Validates the mandatory fields of an add-step request (name and script location).
    @Override
    public void validateAddStepRequest(Object step)
    {
        EmrPigStepAddRequest pigStepRequest = (EmrPigStepAddRequest) step;

        validateStepName(pigStepRequest.getStepName());
        validateScriptLocation(pigStepRequest.getScriptLocation());
    }

    @Override
    public String getRequestAccountId(Object stepRequest)
    {
        return ((EmrPigStepAddRequest) stepRequest).getAccountId();
    }

    @Override
    public void setRequestAccountId(Object stepRequest, String accountId)
    {
        ((EmrPigStepAddRequest) stepRequest).setAccountId(accountId);
    }
}
/* * Hibernate, Relational Persistence for Idiomatic Java * * Copyright (c) 2009 by Red Hat Inc and/or its affiliates or by * third-party contributors as indicated by either @author tags or express * copyright attribution statements applied by the authors. All * third-party contributions are distributed under license by Red Hat Inc. * * This copyrighted material is made available to anyone wishing to use, modify, * copy, or redistribute it subject to the terms and conditions of the GNU * Lesser General Public License, as published by the Free Software Foundation. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License * for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this distribution; if not, write to: * Free Software Foundation, Inc. * 51 Franklin Street, Fifth Floor * Boston, MA 02110-1301 USA */ package org.hibernate.ejb.metamodel; import java.io.Serializable; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Set; import javax.persistence.metamodel.Attribute; import javax.persistence.metamodel.Bindable; import javax.persistence.metamodel.CollectionAttribute; import javax.persistence.metamodel.ListAttribute; import javax.persistence.metamodel.ManagedType; import javax.persistence.metamodel.MapAttribute; import javax.persistence.metamodel.PluralAttribute; import javax.persistence.metamodel.SetAttribute; import javax.persistence.metamodel.SingularAttribute; import org.hibernate.annotations.common.AssertionFailure; /** * Defines commonality for the JPA {@link ManagedType} hierarchy of interfaces. * * @author Steve Ebersole */ public abstract class AbstractManagedType<X> extends AbstractType<X> implements ManagedType<X>, Serializable { private final AbstractManagedType<? 
super X> superType;

	// Attributes declared directly on this type (not inherited), keyed by attribute name.
	// declaredSingularAttributes / declaredPluralAttributes are bindable-type-specific views
	// kept in sync with declaredAttributes by the Builder below.
	private final Map<String,Attribute<X, ?>> declaredAttributes
			= new HashMap<String, Attribute<X,?>>();
	private final Map<String, SingularAttribute<X, ?>> declaredSingularAttributes
			= new HashMap<String, SingularAttribute<X,?>>();
	private final Map<String, PluralAttribute<X, ?, ?>> declaredPluralAttributes
			= new HashMap<String, PluralAttribute<X,?,?>>();

	protected AbstractManagedType(Class<X> javaType, AbstractManagedType<? super X> superType) {
		super( javaType );
		this.superType = superType;
	}

	// Supertype in the managed-type hierarchy, or null when this is a root type.
	protected AbstractManagedType<? super X> getSupertype() {
		return superType;
	}

	// Once locked, no further attributes may be registered (getBuilder throws).
	private boolean locked = false;

	/**
	 * Obtain a builder for registering attributes on this type.
	 *
	 * @throws IllegalStateException if {@link #lock()} has already been called
	 */
	public Builder<X> getBuilder() {
		if ( locked ) {
			throw new IllegalStateException( "Type has been locked" );
		}
		return new Builder<X>() {
			public void addAttribute(Attribute<X,?> attribute) {
				declaredAttributes.put( attribute.getName(), attribute );
				// also index the attribute in the bindable-type-specific map
				final Bindable.BindableType bindableType = ( ( Bindable ) attribute ).getBindableType();
				switch ( bindableType ) {
					case SINGULAR_ATTRIBUTE : {
						declaredSingularAttributes.put( attribute.getName(), (SingularAttribute<X,?>) attribute );
						break;
					}
					case PLURAL_ATTRIBUTE : {
						declaredPluralAttributes.put(attribute.getName(), (PluralAttribute<X,?,?>) attribute );
						break;
					}
					default : {
						throw new AssertionFailure( "unknown bindable type: " + bindableType );
					}
				}
			}
		};
	}

	// Freezes this type; after this call getBuilder() refuses to hand out a builder.
	public void lock() {
		locked = true;
	}

	// Callback interface used during metamodel construction to register attributes.
	public static interface Builder<X> {
		public void addAttribute(Attribute<X,?> attribute);
	}

	/**
	 * {@inheritDoc}
	 */
	@SuppressWarnings({ "unchecked" })
	public Set<Attribute<? super X, ?>> getAttributes() {
		// declared attributes plus everything inherited from the supertype chain
		HashSet attributes = new HashSet<Attribute<X, ?>>( declaredAttributes.values() );
		if ( getSupertype() != null ) {
			attributes.addAll( getSupertype().getAttributes() );
		}
		return attributes;
	}

	/**
	 * {@inheritDoc}
	 */
	public Set<Attribute<X, ?>> getDeclaredAttributes() {
		return new HashSet<Attribute<X, ?>>( declaredAttributes.values() );
	}

	/**
	 * {@inheritDoc}
	 */
	@SuppressWarnings({ "unchecked" })
	public Attribute<? super X, ?> getAttribute(String name) {
		// check this type first, then walk up the supertype chain
		Attribute<? super X, ?> attribute = declaredAttributes.get( name );
		if ( attribute == null && getSupertype() != null ) {
			attribute = getSupertype().getAttribute( name );
		}
		return attribute;
	}

	/**
	 * {@inheritDoc}
	 */
	public Attribute<X, ?> getDeclaredAttribute(String name) {
		Attribute<X, ?> attr = declaredAttributes.get( name );
		checkNotNull( "Attribute ", attr, name );
		return attr;
	}

	// JPA spec requires IllegalArgumentException (not null) for unknown attribute names.
	private void checkNotNull(String attributeType, Attribute<?,?> attribute, String name) {
		if ( attribute == null ) {
			throw new IllegalArgumentException( attributeType + " named " + name + " is not present" );
		}
	}

	/**
	 * {@inheritDoc}
	 */
	@SuppressWarnings({ "unchecked" })
	public Set<SingularAttribute<? super X, ?>> getSingularAttributes() {
		HashSet attributes = new HashSet<SingularAttribute<X, ?>>( declaredSingularAttributes.values() );
		if ( getSupertype() != null ) {
			attributes.addAll( getSupertype().getSingularAttributes() );
		}
		return attributes;
	}

	/**
	 * {@inheritDoc}
	 */
	public Set<SingularAttribute<X, ?>> getDeclaredSingularAttributes() {
		return new HashSet<SingularAttribute<X, ?>>( declaredSingularAttributes.values() );
	}

	/**
	 * {@inheritDoc}
	 */
	@SuppressWarnings({ "unchecked" })
	public SingularAttribute<? super X, ?> getSingularAttribute(String name) {
		SingularAttribute<? super X, ?> attribute = declaredSingularAttributes.get( name );
		if ( attribute == null && getSupertype() != null ) {
			attribute = getSupertype().getSingularAttribute( name );
		}
		return attribute;
	}

	/**
	 * {@inheritDoc}
	 */
	public SingularAttribute<X, ?> getDeclaredSingularAttribute(String name) {
		final SingularAttribute<X, ?> attr = declaredSingularAttributes.get( name );
		checkNotNull( "SingularAttribute ", attr, name );
		return attr;
	}

	/**
	 * {@inheritDoc}
	 */
	@SuppressWarnings({ "unchecked" })
	public <Y> SingularAttribute<? super X, Y> getSingularAttribute(String name, Class<Y> type) {
		SingularAttribute<? super X, ?> attribute = declaredSingularAttributes.get( name );
		if ( attribute == null && getSupertype() != null ) {
			attribute = getSupertype().getSingularAttribute( name );
		}
		// verifies both presence and bindable-java-type match (primitive variants allowed)
		checkTypeForSingleAttribute( "SingularAttribute ", attribute, name, type );
		return ( SingularAttribute<? super X, Y> ) attribute;
	}

	/**
	 * {@inheritDoc}
	 */
	@SuppressWarnings( "unchecked")
	public <Y> SingularAttribute<X, Y> getDeclaredSingularAttribute(String name, Class<Y> javaType) {
		final SingularAttribute<X, ?> attr = declaredSingularAttributes.get( name );
		checkTypeForSingleAttribute( "SingularAttribute ", attr, name, javaType );
		return ( SingularAttribute<X, Y> ) attr;
	}

	// Throws IllegalArgumentException when the attribute is missing or its bindable java
	// type does not match the requested one; boxed/primitive pairs are treated as a match.
	private <Y> void checkTypeForSingleAttribute(
			String attributeType,
			SingularAttribute<?,?> attribute,
			String name,
			Class<Y> javaType) {
		if ( attribute == null || ( javaType != null && !attribute.getBindableJavaType().equals( javaType ) ) ) {
			if ( isPrimitiveVariant( attribute, javaType ) ) {
				return;
			}
			throw new IllegalArgumentException(
					attributeType + " named " + name
							+ ( javaType != null ?
" and of type " + javaType.getName() : "" ) + " is not present" ); } } @SuppressWarnings({ "SimplifiableIfStatement" }) protected <Y> boolean isPrimitiveVariant(SingularAttribute<?,?> attribute, Class<Y> javaType) { if ( attribute == null ) { return false; } Class declaredType = attribute.getBindableJavaType(); if ( declaredType.isPrimitive() ) { return ( Boolean.class.equals( javaType ) && Boolean.TYPE.equals( declaredType ) ) || ( Character.class.equals( javaType ) && Character.TYPE.equals( declaredType ) ) || ( Byte.class.equals( javaType ) && Byte.TYPE.equals( declaredType ) ) || ( Short.class.equals( javaType ) && Short.TYPE.equals( declaredType ) ) || ( Integer.class.equals( javaType ) && Integer.TYPE.equals( declaredType ) ) || ( Long.class.equals( javaType ) && Long.TYPE.equals( declaredType ) ) || ( Float.class.equals( javaType ) && Float.TYPE.equals( declaredType ) ) || ( Double.class.equals( javaType ) && Double.TYPE.equals( declaredType ) ); } if ( javaType.isPrimitive() ) { return ( Boolean.class.equals( declaredType ) && Boolean.TYPE.equals( javaType ) ) || ( Character.class.equals( declaredType ) && Character.TYPE.equals( javaType ) ) || ( Byte.class.equals( declaredType ) && Byte.TYPE.equals( javaType ) ) || ( Short.class.equals( declaredType ) && Short.TYPE.equals( javaType ) ) || ( Integer.class.equals( declaredType ) && Integer.TYPE.equals( javaType ) ) || ( Long.class.equals( declaredType ) && Long.TYPE.equals( javaType ) ) || ( Float.class.equals( declaredType ) && Float.TYPE.equals( javaType ) ) || ( Double.class.equals( declaredType ) && Double.TYPE.equals( javaType ) ); } return false; } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public Set<PluralAttribute<? super X, ?, ?>> getPluralAttributes() { HashSet attributes = new HashSet<PluralAttribute<? 
super X, ?, ?>>( declaredPluralAttributes.values() ); if ( getSupertype() != null ) { attributes.addAll( getSupertype().getPluralAttributes() ); } return attributes; } /** * {@inheritDoc} */ public Set<PluralAttribute<X, ?, ?>> getDeclaredPluralAttributes() { return new HashSet<PluralAttribute<X,?,?>>( declaredPluralAttributes.values() ); } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public CollectionAttribute<? super X, ?> getCollection(String name) { PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } basicCollectionCheck( attribute, name ); return ( CollectionAttribute<X, ?> ) attribute; } private PluralAttribute<? super X, ?, ?> getPluralAttribute(String name) { return declaredPluralAttributes.get( name ); } private void basicCollectionCheck(PluralAttribute<? super X, ?, ?> attribute, String name) { checkNotNull( "CollectionAttribute", attribute, name ); if ( ! CollectionAttribute.class.isAssignableFrom( attribute.getClass() ) ) { throw new IllegalArgumentException( name + " is not a CollectionAttribute: " + attribute.getClass() ); } } /** * {@inheritDoc} */ @SuppressWarnings( "unchecked") public CollectionAttribute<X, ?> getDeclaredCollection(String name) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); basicCollectionCheck( attribute, name ); return ( CollectionAttribute<X, ?> ) attribute; } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public SetAttribute<? super X, ?> getSet(String name) { PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } basicSetCheck( attribute, name ); return (SetAttribute<? super X, ?>) attribute; } private void basicSetCheck(PluralAttribute<? 
super X, ?, ?> attribute, String name) { checkNotNull( "SetAttribute", attribute, name ); if ( ! SetAttribute.class.isAssignableFrom( attribute.getClass() ) ) { throw new IllegalArgumentException( name + " is not a SetAttribute: " + attribute.getClass() ); } } /** * {@inheritDoc} */ @SuppressWarnings( "unchecked") public SetAttribute<X, ?> getDeclaredSet(String name) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); basicSetCheck( attribute, name ); return ( SetAttribute<X, ?> ) attribute; } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public ListAttribute<? super X, ?> getList(String name) { PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } basicListCheck( attribute, name ); return (ListAttribute<? super X, ?>) attribute; } private void basicListCheck(PluralAttribute<? super X, ?, ?> attribute, String name) { checkNotNull( "ListAttribute", attribute, name ); if ( ! ListAttribute.class.isAssignableFrom( attribute.getClass() ) ) { throw new IllegalArgumentException( name + " is not a ListAttribute: " + attribute.getClass() ); } } /** * {@inheritDoc} */ public ListAttribute<X, ?> getDeclaredList(String name) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); basicListCheck( attribute, name ); return ( ListAttribute<X, ?> ) attribute; } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public MapAttribute<? super X, ?, ?> getMap(String name) { PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } basicMapCheck( attribute, name ); return (MapAttribute<? super X, ?, ?>) attribute; } private void basicMapCheck(PluralAttribute<? super X, ?, ?> attribute, String name) { checkNotNull( "MapAttribute", attribute, name ); if ( ! 
MapAttribute.class.isAssignableFrom( attribute.getClass() ) ) { throw new IllegalArgumentException( name + " is not a MapAttribute: " + attribute.getClass() ); } } /** * {@inheritDoc} */ public MapAttribute<X, ?, ?> getDeclaredMap(String name) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); basicMapCheck( attribute, name ); return ( MapAttribute<X,?,?> ) attribute; } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public <E> CollectionAttribute<? super X, E> getCollection(String name, Class<E> elementType) { PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } checkCollectionElementType( attribute, name, elementType ); return ( CollectionAttribute<? super X, E> ) attribute; } /** * {@inheritDoc} */ public <E> CollectionAttribute<X, E> getDeclaredCollection(String name, Class<E> elementType) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); checkCollectionElementType( attribute, name, elementType ); return ( CollectionAttribute<X, E> ) attribute; } private <E> void checkCollectionElementType(PluralAttribute<?,?,?> attribute, String name, Class<E> elementType) { checkTypeForPluralAttributes( "CollectionAttribute", attribute, name, elementType, PluralAttribute.CollectionType.COLLECTION ); } private <E> void checkTypeForPluralAttributes( String attributeType, PluralAttribute<?,?,?> attribute, String name, Class<E> elementType, PluralAttribute.CollectionType collectionType) { if ( attribute == null || ( elementType != null && !attribute.getBindableJavaType().equals( elementType ) ) || attribute.getCollectionType() != collectionType ) { throw new IllegalArgumentException( attributeType + " named " + name + ( elementType != null ? 
" and of element type " + elementType : "" ) + " is not present" ); } } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public <E> SetAttribute<? super X, E> getSet(String name, Class<E> elementType) { PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } checkSetElementType( attribute, name, elementType ); return ( SetAttribute<? super X, E> ) attribute; } private <E> void checkSetElementType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<E> elementType) { checkTypeForPluralAttributes( "SetAttribute", attribute, name, elementType, PluralAttribute.CollectionType.SET ); } /** * {@inheritDoc} */ public <E> SetAttribute<X, E> getDeclaredSet(String name, Class<E> elementType) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); checkSetElementType( attribute, name, elementType ); return ( SetAttribute<X, E> ) attribute; } /** * {@inheritDoc} */ @SuppressWarnings({ "unchecked" }) public <E> ListAttribute<? super X, E> getList(String name, Class<E> elementType) { PluralAttribute<? super X, ?, ?> attribute = declaredPluralAttributes.get( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } checkListElementType( attribute, name, elementType ); return ( ListAttribute<? super X, E> ) attribute; } private <E> void checkListElementType(PluralAttribute<? 
super X, ?, ?> attribute, String name, Class<E> elementType) { checkTypeForPluralAttributes( "ListAttribute", attribute, name, elementType, PluralAttribute.CollectionType.LIST ); } /** * {@inheritDoc} */ public <E> ListAttribute<X, E> getDeclaredList(String name, Class<E> elementType) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); checkListElementType( attribute, name, elementType ); return ( ListAttribute<X, E> ) attribute; } @SuppressWarnings({ "unchecked" }) public <K, V> MapAttribute<? super X, K, V> getMap(String name, Class<K> keyType, Class<V> valueType) { PluralAttribute<? super X, ?, ?> attribute = getPluralAttribute( name ); if ( attribute == null && getSupertype() != null ) { attribute = getSupertype().getPluralAttribute( name ); } checkMapValueType( attribute, name, valueType ); final MapAttribute<? super X, K, V> mapAttribute = ( MapAttribute<? super X, K, V> ) attribute; checkMapKeyType( mapAttribute, name, keyType ); return mapAttribute; } private <V> void checkMapValueType(PluralAttribute<? super X, ?, ?> attribute, String name, Class<V> valueType) { checkTypeForPluralAttributes( "MapAttribute", attribute, name, valueType, PluralAttribute.CollectionType.MAP); } private <K,V> void checkMapKeyType(MapAttribute<? super X, K, V> mapAttribute, String name, Class<K> keyType) { if ( mapAttribute.getKeyJavaType() != keyType ) { throw new IllegalArgumentException( "MapAttribute named " + name + " does not support a key of type " + keyType ); } } public <K, V> MapAttribute<X, K, V> getDeclaredMap(String name, Class<K> keyType, Class<V> valueType) { final PluralAttribute<X,?,?> attribute = declaredPluralAttributes.get( name ); checkMapValueType( attribute, name, valueType ); final MapAttribute<X, K, V> mapAttribute = ( MapAttribute<X, K, V> ) attribute; checkMapKeyType( mapAttribute, name, keyType ); return mapAttribute; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.dbcp2; import java.io.PrintWriter; import java.sql.Connection; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.NoSuchElementException; import java.util.logging.Logger; import javax.sql.DataSource; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.commons.pool2.ObjectPool; import org.apache.commons.pool2.impl.GenericObjectPool; /** * A simple {@link DataSource} implementation that obtains * {@link Connection}s from the specified {@link ObjectPool}. * * @param <C> The connection type * * @author Rodney Waldhoff * @author Glenn L. 
Nielsen
 * @author James House
 * @author Dirk Verbeeck
 * @version $Id: PoolingDataSource.java 1677110 2015-05-01 11:15:22Z sebb $
 * @since 2.0
 */
public class PoolingDataSource<C extends Connection> implements DataSource, AutoCloseable {

    private static final Log log = LogFactory.getLog(PoolingDataSource.class);

    /** Controls access to the underlying connection */
    private boolean accessToUnderlyingConnectionAllowed = false;

    /**
     * Creates a new data source backed by the given pool.
     * <p>
     * If the pool is a {@link GenericObjectPool}, its factory is expected to be a
     * {@link PoolableConnectionFactory}; when that factory does not point back at
     * this pool, a warning is logged and the factory is re-wired to this pool.
     *
     * @param pool the pool that connections are borrowed from; must not be null
     * @throws NullPointerException if {@code pool} is null, or if its factory is null
     */
    public PoolingDataSource(ObjectPool<C> pool) {
        if (null == pool) {
            throw new NullPointerException("Pool must not be null.");
        }
        _pool = pool;
        // Verify that _pool's factory refers back to it. If not, log a warning and try to fix.
        if (_pool instanceof GenericObjectPool<?>) {
            // NOTE(review): unconditional cast — a GenericObjectPool configured with a
            // different factory type would throw ClassCastException here.
            PoolableConnectionFactory pcf = (PoolableConnectionFactory) ((GenericObjectPool<?>) _pool).getFactory();
            if (pcf == null) {
                throw new NullPointerException("PoolableConnectionFactory must not be null.");
            }
            if (pcf.getPool() != _pool) {
                log.warn(Utils.getMessage("poolingDataSource.factoryConfig"));
                @SuppressWarnings("unchecked") // PCF must have a pool of PCs
                ObjectPool<PoolableConnection> p = (ObjectPool<PoolableConnection>) _pool;
                pcf.setPool(p);
            }
        }
    }

    /**
     * Close and free all {@link Connection}s from the pool.
     * Runtime failures are rethrown as RuntimeException, checked failures as SQLException,
     * both carrying the original as the cause.
     * @since 2.1
     */
    @Override
    public void close() throws Exception {
        try {
            _pool.close();
        } catch(RuntimeException rte) {
            throw new RuntimeException(Utils.getMessage("pool.close.fail"), rte);
        } catch(Exception e) {
            throw new SQLException(Utils.getMessage("pool.close.fail"), e);
        }
    }

    /**
     * Returns the value of the accessToUnderlyingConnectionAllowed property.
     *
     * @return true if access to the underlying {@link Connection} is allowed, false otherwise.
     */
    public boolean isAccessToUnderlyingConnectionAllowed() {
        return this.accessToUnderlyingConnectionAllowed;
    }

    /**
     * Sets the value of the accessToUnderlyingConnectionAllowed property.
     * It controls if the PoolGuard allows access to the underlying connection.
     * (Default: false)
     *
     * @param allow Access to the underlying connection is granted when true.
     */
    public void setAccessToUnderlyingConnectionAllowed(boolean allow) {
        this.accessToUnderlyingConnectionAllowed = allow;
    }

    /* JDBC_4_ANT_KEY_BEGIN */
    /** This data source does not support JDBC Wrapper unwrapping: always false. */
    @Override
    public boolean isWrapperFor(Class<?> iface) throws SQLException {
        return false;
    }

    /** Always throws: this data source does not support JDBC Wrapper unwrapping. */
    @Override
    public <T> T unwrap(Class<T> iface) throws SQLException {
        throw new SQLException("PoolingDataSource is not a wrapper.");
    }
    /* JDBC_4_ANT_KEY_END */

    /** JDBC 4.1 parent-logger support is not provided. */
    @Override
    public Logger getParentLogger() throws SQLFeatureNotSupportedException {
        throw new SQLFeatureNotSupportedException();
    }

    //--- DataSource methods -----------------------------------------

    /**
     * Return a {@link java.sql.Connection} from my pool,
     * according to the contract specified by {@link ObjectPool#borrowObject}.
     * The returned connection is wrapped in a guard that blocks use after close.
     */
    @Override
    public Connection getConnection() throws SQLException {
        try {
            C conn = _pool.borrowObject();
            if (conn == null) {
                return null;
            }
            return new PoolGuardConnectionWrapper<>(conn);
        } catch(SQLException e) {
            // already the right type — propagate untouched
            throw e;
        } catch(NoSuchElementException e) {
            // pool exhausted / unable to supply an object
            throw new SQLException("Cannot get a connection, pool error " + e.getMessage(), e);
        } catch(RuntimeException e) {
            throw e;
        } catch(Exception e) {
            throw new SQLException("Cannot get a connection, general error", e);
        }
    }

    /**
     * Throws {@link UnsupportedOperationException} — per-call credentials are not supported.
     * @throws UnsupportedOperationException always
     */
    @Override
    public Connection getConnection(String uname, String passwd) throws SQLException {
        throw new UnsupportedOperationException();
    }

    /**
     * Returns my log writer.
     * @return my log writer
     * @see DataSource#getLogWriter
     */
    @Override
    public PrintWriter getLogWriter() {
        return _logWriter;
    }

    /**
     * Throws {@link UnsupportedOperationException}.
     * @throws UnsupportedOperationException As this
     *   implementation does not support this feature.
     */
    @Override
    public int getLoginTimeout() {
        throw new UnsupportedOperationException("Login timeout is not supported.");
    }

    /**
     * Throws {@link UnsupportedOperationException}.
     * @throws UnsupportedOperationException As this
     *   implementation does not support this feature.
     */
    @Override
    public void setLoginTimeout(int seconds) {
        throw new UnsupportedOperationException("Login timeout is not supported.");
    }

    /**
     * Sets my log writer.
     * @see DataSource#setLogWriter
     */
    @Override
    public void setLogWriter(PrintWriter out) {
        _logWriter = out;
    }

    /** My log writer. */
    private PrintWriter _logWriter = null;

    /** The pool all connections are borrowed from; fixed at construction. */
    private final ObjectPool<C> _pool;

    /** @return the underlying pool (for subclasses). */
    protected ObjectPool<C> getPool() {
        return _pool;
    }

    /**
     * PoolGuardConnectionWrapper is a Connection wrapper that makes sure a
     * closed connection cannot be used anymore.
     * Deliberately a non-static inner class: it consults the enclosing data
     * source's {@link #isAccessToUnderlyingConnectionAllowed()} flag.
     * @since 2.0
     */
    private class PoolGuardConnectionWrapper<D extends Connection> extends DelegatingConnection<D> {

        PoolGuardConnectionWrapper(D delegate) {
            super(delegate);
        }

        /**
         * Exposes the delegate only when the data source allows it; null otherwise.
         * @see org.apache.commons.dbcp2.DelegatingConnection#getDelegate()
         */
        @Override
        public D getDelegate() {
            if (isAccessToUnderlyingConnectionAllowed()) {
                return super.getDelegate();
            }
            return null;
        }

        /**
         * Exposes the innermost delegate only when the data source allows it; null otherwise.
         * @see org.apache.commons.dbcp2.DelegatingConnection#getInnermostDelegate()
         */
        @Override
        public Connection getInnermostDelegate() {
            if (isAccessToUnderlyingConnectionAllowed()) {
                return super.getInnermostDelegate();
            }
            return null;
        }

        // Idempotent: the delegate is nulled after the first close, so a second
        // close is a no-op and isClosed() reports true from then on.
        @Override
        public void close() throws SQLException {
            if (getDelegateInternal() != null) {
                super.close();
                super.setDelegate(null);
            }
        }

        @Override
        public boolean isClosed() throws SQLException {
            if (getDelegateInternal() == null) {
                return true;
            }
            return super.isClosed();
        }
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package rov.rasputin.Commander;

import com.github.sarxos.webcam.Webcam;
import com.github.sarxos.webcam.WebcamImageTransformer;
import com.github.sarxos.webcam.WebcamPanel;
import com.github.sarxos.webcam.ds.ipcam.*;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.Font;
import java.awt.Graphics2D;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.*;
import org.jdesktop.layout.GroupLayout;
import org.netbeans.lib.awtextra.AbsoluteConstraints;
import org.netbeans.lib.awtextra.AbsoluteLayout;

/**
 * Operator console for the ROV "Rasputin": displays three IP-camera feeds
 * (Olga, Alexei, Maria), telemetry/state labels, and control buttons for
 * reconnecting cameras/controller, zeroing depth, and quitting.
 *
 * @author 15998
 */
public class Display extends javax.swing.JFrame {

    /** Background thread that exchanges data with the ROV (depth, states). */
    private Worker worker;
    // NOTE(review): "Properties" carries no explicit import, so it resolves to a
    // same-package rov.rasputin.Commander.Properties class — or the file is missing
    // an import of java.util.Properties. Confirm against the project sources.
    private Properties settings;
    private Webcam olga, alexei, maria;
    public PS3Controller controller;

    /**
     * Installs the IP-camera driver and loads settings.properties from the classpath.
     * <p>
     * BUG FIX: the original printed "ERROR" when the resource was missing but then
     * still called {@code settings.load(null)}, which throws NullPointerException.
     * Now it reports the problem and returns, leaving {@code settings} empty.
     * The stream is also closed via try-with-resources even if load() throws.
     */
    private void loadSettings() {
        Webcam.setDriver(new IpCamDriver());
        settings = new Properties();
        try (InputStream in = getClass().getResourceAsStream("settings.properties")) {
            if (in == null) {
                System.out.println("ERROR");
                return; // resource missing; previously fell through to load(null) -> NPE
            }
            settings.load(in);
        } catch (IOException ex) {
            Logger.getLogger(Display.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /** Creates and starts the background worker with the loaded settings. */
    private void createAndStartWorker() {
        worker = new Worker(this, settings);
        worker.start();
    }

    /** Registers the three IP cameras from the configured addresses/credentials. */
    private void createCam() {
        String user = settings.getProperty("camUser");
        String pass = settings.getProperty("camPass");
        IpCamAuth auth = new IpCamAuth(user, pass);
        IpCamDeviceRegistry.unregisterAll();
        String format = "http://%s/videostream.cgi?loginuse=%s&loginpas=%s";
        Dimension d = new Dimension(585, 390);
        try {
            IpCamDeviceRegistry.register("Olga", String.format(format, settings.getProperty("olgaAddr"), user, pass), IpCamMode.PUSH, auth).setResolution(d);
            IpCamDeviceRegistry.register("Alexei", String.format(format, settings.getProperty("alexeiAddr"), user, pass), IpCamMode.PUSH, auth).setResolution(d);
            IpCamDeviceRegistry.register("Maria", String.format(format, settings.getProperty("mariaAddr"), user, pass), IpCamMode.PUSH, auth).setResolution(d);
        } catch (MalformedURLException ex) {
            Logger.getLogger(Display.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Attaches a live WebcamPanel for each registered camera to its display panel.
     * Assumes exactly three cameras were registered by {@link #createCam()}.
     */
    private void connectCam() {
        olga = Webcam.getWebcams().get(0);
        // Olga's frames are drawn with negative width/height, i.e. flipped on both
        // axes (a 180-degree rotation) — presumably that camera is mounted upside down.
        olga.setImageTransformer(new WebcamImageTransformer() {
            @Override
            public BufferedImage transform(BufferedImage image) {
                int w = image.getWidth();
                int h = image.getHeight();
                BufferedImage modified = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
                Graphics2D g2 = modified.createGraphics();
                g2.drawImage(image, w, h, -w, -h, null);
                g2.dispose();
                modified.flush();
                return modified;
            }
        });
        WebcamPanel subOlga = new WebcamPanel(olga);
        subOlga.setFPSDisplayed(true);
        subOlga.setDisplayDebugInfo(true);
        subOlga.setFPSLimit(60);
        olgaPanel.removeAll();
        olgaPanel.add(subOlga);
        olgaPanel.revalidate();
        olgaPanel.repaint();

        alexei = Webcam.getWebcams().get(1);
        WebcamPanel subAlexei = new WebcamPanel(alexei);
        subAlexei.setFPSDisplayed(true);
        subAlexei.setDisplayDebugInfo(true);
        subAlexei.setFPSLimit(60);
        alexeiPanel.removeAll();
        alexeiPanel.add(subAlexei);
        alexeiPanel.revalidate();
        alexeiPanel.repaint();

        maria = Webcam.getWebcams().get(2);
        WebcamPanel subMaria = new WebcamPanel(maria);
        subMaria.setFPSDisplayed(true);
        subMaria.setDisplayDebugInfo(true);
        subMaria.setFPSLimit(60);
        mariaPanel.removeAll();
        mariaPanel.add(subMaria);
        mariaPanel.revalidate();
        mariaPanel.repaint();

        pack();
    }

    /** Closes all three camera streams (used before reconnecting or exiting). */
    private void disconnectCam() {
        olga.close();
        alexei.close();
        maria.close();
    }

    /**
     * Creates new form Interface
     */
    public Display() {
        loadSettings();
        createCam();
        initComponents();
        //setExtendedState(JFrame.MAXIMIZED_BOTH);
        getContentPane().setBackground(Color.black);
        createAndStartWorker();
        connectCam();
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        mariaPanel = new JPanel();
        alexeiPanel = new JPanel();
        olgaPanel = new JPanel();
        meterset = new JPanel();
        stabilizationStateLabel = new JLabel();
        clawStateLabel = new JLabel();
        rasputinStateLabel = new JLabel();
        reconnectCamBtn = new JButton();
        depthLabel = new JLabel();
        reconnectControllerBtn = new JButton();
        quitBtn = new JButton();
        zeroButton = new JButton();

        setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        setTitle("ROV Rasputin Commander");
        setBackground(new Color(0, 0, 0));
        setCursor(new Cursor(Cursor.DEFAULT_CURSOR));
        setMinimumSize(new Dimension(1200, 780));
        setName("rovCommander"); // NOI18N
        setUndecorated(true);
        getContentPane().setLayout(new AbsoluteLayout());

        mariaPanel.setBackground(new Color(0, 0, 255));
        mariaPanel.setAlignmentX(0.0F);
        mariaPanel.setAlignmentY(0.0F);
        mariaPanel.setMaximumSize(new Dimension(585, 390));
        mariaPanel.setMinimumSize(new Dimension(585, 390));
        mariaPanel.setPreferredSize(new Dimension(585, 390));
        mariaPanel.setLayout(new BorderLayout());
        getContentPane().add(mariaPanel, new AbsoluteConstraints(0, 390, -1, -1));

        alexeiPanel.setBackground(new Color(0, 0, 255));
        alexeiPanel.setMaximumSize(new Dimension(585, 390));
        alexeiPanel.setMinimumSize(new Dimension(585, 390));
        alexeiPanel.setName(""); // NOI18N
        alexeiPanel.setPreferredSize(new Dimension(585, 390));
        alexeiPanel.setLayout(new BorderLayout());
        getContentPane().add(alexeiPanel, new AbsoluteConstraints(0, 0, -1, -1));

        olgaPanel.setBackground(new Color(0, 0, 255));
        olgaPanel.setMaximumSize(new Dimension(585, 390));
        olgaPanel.setMinimumSize(new Dimension(585, 390));
        olgaPanel.setPreferredSize(new Dimension(585, 390));
        olgaPanel.setLayout(new BorderLayout());
        getContentPane().add(olgaPanel, new AbsoluteConstraints(585, 0, -1, -1));

        meterset.setBackground(new Color(0, 0, 0));
        meterset.setMaximumSize(new Dimension(380, 380));
        meterset.setMinimumSize(new Dimension(380, 380));

        GroupLayout metersetLayout = new GroupLayout(meterset);
        meterset.setLayout(metersetLayout);
        metersetLayout.setHorizontalGroup(metersetLayout.createParallelGroup(GroupLayout.LEADING)
            .add(0, 380, Short.MAX_VALUE)
        );
        metersetLayout.setVerticalGroup(metersetLayout.createParallelGroup(GroupLayout.LEADING)
            .add(0, 380, Short.MAX_VALUE)
        );

        getContentPane().add(meterset, new AbsoluteConstraints(590, 395, 380, 380));

        stabilizationStateLabel.setFont(stabilizationStateLabel.getFont().deriveFont(stabilizationStateLabel.getFont().getStyle() | Font.BOLD, stabilizationStateLabel.getFont().getSize()+4));
        stabilizationStateLabel.setForeground(new Color(255, 255, 255));
        stabilizationStateLabel.setText("Stabilization State");
        getContentPane().add(stabilizationStateLabel, new AbsoluteConstraints(1000, 410, -1, -1));

        clawStateLabel.setFont(clawStateLabel.getFont().deriveFont(clawStateLabel.getFont().getStyle() | Font.BOLD, clawStateLabel.getFont().getSize()+4));
        clawStateLabel.setForeground(new Color(255, 255, 255));
        clawStateLabel.setText("Claw State");
        getContentPane().add(clawStateLabel, new AbsoluteConstraints(1000, 440, -1, -1));

        rasputinStateLabel.setFont(rasputinStateLabel.getFont().deriveFont(rasputinStateLabel.getFont().getStyle() | Font.BOLD, rasputinStateLabel.getFont().getSize()+4));
        rasputinStateLabel.setForeground(new Color(255, 255, 255));
        rasputinStateLabel.setText("Rasputin State");
        getContentPane().add(rasputinStateLabel, new AbsoluteConstraints(1000, 470, -1, -1));

        reconnectCamBtn.setText("Force Camera Reconnect");
        reconnectCamBtn.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                reconnectCamBtnActionPerformed(evt);
            }
        });
        getContentPane().add(reconnectCamBtn, new AbsoluteConstraints(990, 730, 190, 40));

        depthLabel.setFont(depthLabel.getFont().deriveFont(depthLabel.getFont().getStyle() | Font.BOLD, depthLabel.getFont().getSize()+4));
        depthLabel.setForeground(new Color(0, 255, 0));
        depthLabel.setText("Depth");
        getContentPane().add(depthLabel, new AbsoluteConstraints(1000, 500, -1, -1));

        reconnectControllerBtn.setText("Force Controller Reconnect");
        reconnectControllerBtn.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                reconnectControllerBtnActionPerformed(evt);
            }
        });
        getContentPane().add(reconnectControllerBtn, new AbsoluteConstraints(990, 680, 190, 40));

        quitBtn.setText("Exit Program");
        quitBtn.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                quitBtnActionPerformed(evt);
            }
        });
        getContentPane().add(quitBtn, new AbsoluteConstraints(990, 630, 190, 40));

        zeroButton.setText("Zero Depth");
        zeroButton.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent evt) {
                zeroButtonActionPerformed(evt);
            }
        });
        getContentPane().add(zeroButton, new AbsoluteConstraints(990, 580, 190, 40));

        pack();
    }// </editor-fold>//GEN-END:initComponents

    /** Tears down and re-attaches all camera feeds. */
    private void reconnectCamBtnActionPerformed(ActionEvent evt)//GEN-FIRST:event_reconnectCamBtnActionPerformed
    {//GEN-HEADEREND:event_reconnectCamBtnActionPerformed
        disconnectCam();
        connectCam();
    }//GEN-LAST:event_reconnectCamBtnActionPerformed

    /** Re-creates the PS3 controller binding and polls it once. */
    private void reconnectControllerBtnActionPerformed(ActionEvent evt)//GEN-FIRST:event_reconnectControllerBtnActionPerformed
    {//GEN-HEADEREND:event_reconnectControllerBtnActionPerformed
        controller = new PS3Controller();
        controller.poll();
    }//GEN-LAST:event_reconnectControllerBtnActionPerformed

    /** Closes the cameras, then terminates the JVM. */
    private void quitBtnActionPerformed(ActionEvent evt)//GEN-FIRST:event_quitBtnActionPerformed
    {//GEN-HEADEREND:event_quitBtnActionPerformed
        disconnectCam();
        System.exit(0);
    }//GEN-LAST:event_quitBtnActionPerformed

    private void zeroButtonActionPerformed(ActionEvent evt) {//GEN-FIRST:event_zeroButtonActionPerformed
        // Record the current raw depth reading as the zero reference.
        worker.depthOffset = worker.depth;
    }//GEN-LAST:event_zeroButtonActionPerformed

    /**
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the system look and feel */
        //<editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /* If the system look and feel is not available, stay with the default.
         * For details see http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html */
        try {
            UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName());
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | javax.swing.UnsupportedLookAndFeelException ex) {
            java.util.logging.Logger.getLogger(Display.class.getName()).log(java.util.logging.Level.SEVERE, null, ex);
        }
        //</editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(() -> {
            new Display().setVisible(true);
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private JPanel alexeiPanel;
    public JLabel clawStateLabel;
    public JLabel depthLabel;
    private JPanel mariaPanel;
    public JPanel meterset;
    private JPanel olgaPanel;
    private JButton quitBtn;
    public JLabel rasputinStateLabel;
    private JButton reconnectCamBtn;
    private JButton reconnectControllerBtn;
    public JLabel stabilizationStateLabel;
    private JButton zeroButton;
    // End of variables declaration//GEN-END:variables
}
/* * Copyright 2015-2017 Red Hat, Inc. and/or its affiliates * and other contributors as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.hawkular.agent.javaagent.config; import java.io.File; // Below is a copy of org.jboss.security.util.StringPropertyReplacer with some minor alterations // and the addditional feature of supporting environment variables via ${env.name:default} /* * JBoss, Home of Professional Open Source * Copyright 2005, JBoss Inc., and individual contributors as indicated * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ import java.util.Properties; /** * A utility class for replacing properties in strings. * * @author <a href="mailto:jason@planet57.com">Jason Dillon</a> * @author <a href="Scott.Stark@jboss.org">Scott Stark</a> * @author <a href="claudio.vesco@previnet.it">Claudio Vesco</a> * @author <a href="mailto:adrian@jboss.com">Adrian Brock</a> * @author <a href="mailto:dimitris@jboss.org">Dimitris Andreadis</a> * @version <tt>$Revision$</tt> */ public final class StringPropertyReplacer { /** New line string constant */ public static final String NEWLINE = System.lineSeparator(); /** File separator value */ private static final String FILE_SEPARATOR = File.separator; /** Path separator value */ private static final String PATH_SEPARATOR = File.pathSeparator; /** File separator alias */ private static final String FILE_SEPARATOR_ALIAS = "/"; /** Path separator alias */ private static final String PATH_SEPARATOR_ALIAS = ":"; // States used in property parsing private static final int NORMAL = 0; private static final int SEEN_DOLLAR = 1; private static final int IN_BRACKET = 2; /** * Go through the input string and replace any occurance of ${p} with * the System.getProperty(p) value. If there is no such property p defined, * then the ${p} reference will remain unchanged. * * If the property reference is of the form ${p:v} and there is no such property p, * then the default value v will be returned. * * If the property reference is of the form ${p1,p2} or ${p1,p2:v} then * the primary and the secondary properties will be tried in turn, before * returning either the unchanged input, or the default value. * * The property ${/} is replaced with System.getProperty("file.separator") * value and the property ${:} is replaced with System.getProperty("path.separator"). * * @param string - the string with possible ${} references * @return the input string with all property references replaced if any. * If there are no valid references the input string will be returned. 
*/ public static String replaceProperties(final String string) { return replaceProperties(string, null); } /** * Go through the input string and replace any occurance of ${p} with * the props.getProperty(p) value. If there is no such property p defined, * then the ${p} reference will remain unchanged. * * If the property reference is of the form ${p:v} and there is no such property p, * then the default value v will be returned. * * If the property reference is of the form ${p1,p2} or ${p1,p2:v} then * the primary and the secondary properties will be tried in turn, before * returning either the unchanged input, or the default value. * * The property ${/} is replaced with System.getProperty("file.separator") * value and the property ${:} is replaced with System.getProperty("path.separator"). * * @param string - the string with possible ${} references * @param props - the source for ${x} property ref values, null means use System.getProperty() * @return the input string with all property references replaced if any. * If there are no valid references the input string will be returned. */ public static String replaceProperties(final String string, final Properties props) { if (string == null || string.isEmpty()) { return string; } final char[] chars = string.toCharArray(); StringBuffer buffer = new StringBuffer(); boolean properties = false; int state = NORMAL; int start = 0; for (int i = 0; i < chars.length; ++i) { char c = chars[i]; // Dollar sign outside brackets if (c == '$' && state != IN_BRACKET) { state = SEEN_DOLLAR; } // Open bracket immediatley after dollar else if (c == '{' && state == SEEN_DOLLAR) { buffer.append(string.substring(start, i - 1)); state = IN_BRACKET; start = i - 1; } // No open bracket after dollar else if (state == SEEN_DOLLAR) { state = NORMAL; } // Closed bracket after open bracket else if (c == '}' && state == IN_BRACKET) { // No content if (start + 2 == i) { buffer.append("${}"); // REVIEW: Correct? 
} else // Collect the system property { String value = null; String key = string.substring(start + 2, i); // check for alias if (FILE_SEPARATOR_ALIAS.equals(key)) { value = FILE_SEPARATOR; } else if (PATH_SEPARATOR_ALIAS.equals(key)) { value = PATH_SEPARATOR; } else { // check from the properties value = getReplacementString(key, props); if (value == null) { // Check for a default value ${key:default} int colon = key.indexOf(':'); if (colon > 0) { String realKey = key.substring(0, colon); value = getReplacementString(realKey, props); if (value == null) { // Check for a composite key, "key1,key2" value = resolveCompositeKey(realKey, props); // Not a composite key either, use the specified default if (value == null) { value = key.substring(colon + 1); } } } else { // No default, check for a composite key, "key1,key2" value = resolveCompositeKey(key, props); } } } if (value != null) { properties = true; buffer.append(value); } else { buffer.append("${"); buffer.append(key); buffer.append('}'); } } start = i + 1; state = NORMAL; } } // No properties if (properties == false) { return string; } // Collect the trailing characters if (start != chars.length) { buffer.append(string.substring(start, chars.length)); } // Done return buffer.toString(); } /** * Try to resolve a "key" from the provided properties by * checking if it is actually a "key1,key2", in which case * try first "key1", then "key2". If all fails, return null. * * It also accepts "key1," and ",key2". 
* * @param key the key to resolve * @param props the properties to use * @return the resolved key or null */ private static String resolveCompositeKey(String key, Properties props) { String value = null; // Look for the comma int comma = key.indexOf(','); if (comma > -1) { // If we have a first part, try resolve it if (comma > 0) { // Check the first part String key1 = key.substring(0, comma); value = getReplacementString(key1, props); } // Check the second part, if there is one and first lookup failed if (value == null && comma < key.length() - 1) { String key2 = key.substring(comma + 1); value = getReplacementString(key2, props); } } // Return whatever we've found or null return value; } private static String getReplacementString(String key, Properties props) { String value; // If the key starts with "env." the value is obtained from an environment variable or null if not defined. // If the key starts with "<set>" the value is "true" if the named system property exists; "false" otherwise. // If the key starts with "<set>env." the value is "true" if the named env var exists; "false" otherwise. // If the key starts with "<notset>" the value is "false" if the named system property exists; "true" otherwise. // If the key starts with "<notset>env." the value is "false" if the named env var exists; "true" otherwise. // Otherwise, the value is obtained from a system property, or null if not defined. final String envPrefix = "env."; final String setPrefix = "<set>"; final String notsetPrefix = "<notset>"; if (key.startsWith(envPrefix)) { key = key.substring(envPrefix.length()); value = System.getenv(key); } else if (key.startsWith(setPrefix)) { if (key.contains(",") || key.contains(":")) { throw new IllegalArgumentException( "'<set>' expressions always resolve to a value. 
" + "Specifying a composite key or a default value is invalid: " + key); } key = key.substring(setPrefix.length()); if (key.startsWith(envPrefix)) { key = key.substring(envPrefix.length()); value = Boolean.valueOf(System.getenv(key) != null).toString(); } else { value = Boolean.valueOf(System.getProperty(key) != null).toString(); } } else if (key.startsWith(notsetPrefix)) { if (key.contains(",") || key.contains(":")) { throw new IllegalArgumentException( "<notset>' expressions always resolve to a value. " + "Specifying a composite key or a default value is invalid: " + key); } key = key.substring(notsetPrefix.length()); if (key.startsWith(envPrefix)) { key = key.substring(envPrefix.length()); value = Boolean.valueOf(System.getenv(key) == null).toString(); } else { value = Boolean.valueOf(System.getProperty(key) == null).toString(); } } else { if (props != null) { value = props.getProperty(key); } else { value = System.getProperty(key); } } return value; } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.drill.exec.server.rest; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import org.apache.drill.shaded.guava.com.google.common.base.Preconditions; import org.apache.drill.shaded.guava.com.google.common.collect.Sets; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.work.WorkManager; import org.glassfish.jersey.server.mvc.Viewable; import org.joda.time.DateTime; import org.joda.time.format.DateTimeFormat; import org.joda.time.format.DateTimeFormatter; import javax.annotation.security.RolesAllowed; import javax.inject.Inject; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.SecurityContext; import javax.xml.bind.annotation.XmlRootElement; import java.io.BufferedReader; import java.io.File; import java.io.FileFilter; import java.io.FileReader; import java.io.IOException; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; 
import java.util.Set; import static org.apache.drill.exec.server.rest.auth.DrillUserPrincipal.ADMIN_ROLE; @Path("/") @RolesAllowed(ADMIN_ROLE) public class LogsResources { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LogsResources.class); @Inject DrillRestServer.UserAuthEnabled authEnabled; @Inject SecurityContext sc; @Inject WorkManager work; private static final FileFilter file_filter = new FileFilter() { @Override public boolean accept(File file) { return file.isFile(); } }; private static final DateTimeFormatter format = DateTimeFormat.forPattern("MM/dd/yyyy HH:mm:ss"); @GET @Path("/logs") @Produces(MediaType.TEXT_HTML) public Viewable getLogs() { Set<Log> logs = getLogsJSON(); return ViewableWithPermissions.create(authEnabled.get(), "/rest/logs/list.ftl", sc, logs); } @GET @Path("/logs.json") @Produces(MediaType.APPLICATION_JSON) public Set<Log> getLogsJSON() { Set<Log> logs = Sets.newTreeSet(); File[] files = getLogFolder().listFiles(file_filter); for (File file : files) { logs.add(new Log(file.getName(), file.length(), file.lastModified())); } return logs; } @GET @Path("/log/{name}/content") @Produces(MediaType.TEXT_HTML) public Viewable getLog(@PathParam("name") String name) throws IOException { try { LogContent content = getLogJSON(name); return ViewableWithPermissions.create(authEnabled.get(), "/rest/logs/log.ftl", sc, content); } catch (Exception | Error e) { logger.error("Exception was thrown when fetching log {} :\n{}", name, e); return ViewableWithPermissions.create(authEnabled.get(), "/rest/errorMessage.ftl", sc, e); } } @GET @Path("/log/{name}/content.json") @Produces(MediaType.APPLICATION_JSON) public LogContent getLogJSON(@PathParam("name") final String name) throws IOException { File file = getFileByName(getLogFolder(), name); final int maxLines = work.getContext().getOptionManager().getOption(ExecConstants.WEB_LOGS_MAX_LINES).num_val.intValue(); try (BufferedReader br = new BufferedReader(new 
FileReader(file))) { @SuppressWarnings("serial") Map<Integer, String> cache = new LinkedHashMap<Integer, String>(maxLines, .75f, true) { @Override protected boolean removeEldestEntry(Map.Entry<Integer, String> eldest) { return size() > maxLines; } }; String line; int i = 0; while ((line = br.readLine()) != null) { cache.put(i++, line); } return new LogContent(file.getName(), cache.values(), maxLines); } } @GET @Path("/log/{name}/download") @Produces(MediaType.TEXT_PLAIN) public Response getFullLog(@PathParam("name") final String name) { File file = getFileByName(getLogFolder(), name); return Response.ok(file) .header(HttpHeaders.CONTENT_DISPOSITION, String.format("attachment;filename=\"%s\"", name)) .build(); } private File getLogFolder() { return new File(Preconditions.checkNotNull(System.getenv("DRILL_LOG_DIR"), "DRILL_LOG_DIR variable is not set")); } private File getFileByName(File folder, final String name) { File[] files = folder.listFiles((dir, fileName) -> fileName.equals(name)); if (files.length == 0) { throw new DrillRuntimeException (name + " doesn't exist"); } return files[0]; } @XmlRootElement public class Log implements Comparable<Log> { private String name; private long size; private DateTime lastModified; @JsonCreator public Log (@JsonProperty("name") String name, @JsonProperty("size") long size, @JsonProperty("lastModified") long lastModified) { this.name = name; this.size = size; this.lastModified = new DateTime(lastModified); } public String getName() { return name; } public String getSize() { return Math.ceil(size / 1024d) + " KB"; } public String getLastModified() { return lastModified.toString(format); } @Override public int compareTo(Log log) { return this.getName().compareTo(log.getName()); } } @XmlRootElement public class LogContent { private String name; private Collection<String> lines; private int maxLines; @JsonCreator public LogContent (@JsonProperty("name") String name, @JsonProperty("lines") Collection<String> lines, 
@JsonProperty("maxLines") int maxLines) { this.name = name; this.lines = lines; this.maxLines = maxLines; } public String getName() { return name; } public Collection<String> getLines() { return lines; } public int getMaxLines() { return maxLines; } } }
package ri.app.rtalk; import java.awt.AWTEvent; import java.awt.BorderLayout; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.GridLayout; import java.awt.Point; import java.awt.Toolkit; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.WindowEvent; import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.InputStreamReader; import java.net.URL; import javax.swing.JCheckBoxMenuItem; import javax.swing.JDialog; import javax.swing.JFrame; import javax.swing.JMenu; import javax.swing.JMenuBar; import javax.swing.JMenuItem; import javax.swing.JPanel; import javax.swing.JScrollPane; import org.eclipse.jetty.server.Handler; import org.eclipse.jetty.server.Server; import org.eclipse.jetty.server.handler.ContextHandlerCollection; import org.eclipse.jetty.servlet.ServletContextHandler; import ri.app.web.StatusPanel; import ri.core.guru.GuruAccess; import ri.core.guru.GuruHelper; import ri.core.guru.GuruMgr; import ri.core.guru.sm.GuruSystemMessage; import ri.core.guru.smrouter.SmRouter; import ri.core.panel.AboutBox; import ri.core.panel.Emit; import ri.core.panel.MenuPromptDialog; import ri.core.riri.RiriApp; import ri.core.rtpanel.SmLauncher; import ri.core.rtpanel.WhiteboardApp; import ri.core.util.Csv; import ri.core.util.RiLookAndFeel; import ri.core.web.servlet.GuruConnection; import ri.core.web.servlet.ServletGuruSm; import ri.core.web.servlet.ServletRawSm; import ri.core.web.servlet.ServletSm7; import ri.tool.dbbridge.DatabaseSmBridge; import ri.tool.guru.QuickQueryDialog; import ri.tool.guru.oleo.Oleo; import ri.tool.guru.stdfclean.StdfCleanup; import ri.tool.misc.SimTestExec; import ri.tool.smbridge.BridgePanel; import ri.tool.smbridge.SmEmit; /**RtServer * * Command line for TestSm (mac os version): * TestSm /Library/Java/JavaVirtualMachines/1.7.0u-dev.jdk/Contents/Home/bin/java -agentpath:javaDebug.dylib -Xbootclasspath/a:TestSM.jar 
ri/experiment2/TestSm rtDebug guruIp=192.168.56.15 rtBoot=GAKRE8CABJADXV00 rtLaunch=RtTestCases:benchmark1 * * Notes on building the Guru application: * -If using RtalkBase.jar: since that jar file ends up with most of the classes needed from RiApps and RiApps7 * -If, instead, just including all the ri.core.rtalk files in RtServer, then just load all * in the bootClasspath (for now): * java -Xbootclasspath/a:RtServer.jar;jetty-all-7.5.4.v20111024.jar;servlet-api-2.5-20081211.jar;json_simple-1.1.jar -agentlib:JvmtiDebug ri.app.web.RtServer -rtBoot GAKRE8CABJADXV00 * * This provides the following functions: * SM Router, SM Bridge, Textmode Bridge, Whiteboard Launcher, * Database-SM bridge, Local Guru Cache, Console Log, Rtalk, Web Server * * RiBridge * |_SM Router * |_Oleo (Remote Guru) * |_SM Bridge * |_TextMode Bridge * |_Whiteboard Launcher * |_Database-SM Bridge * |_STDF Cleanup tool * |_Local Guru Cache * |_Console Log * |_Rtalk (requires java 1.7+) * |_Web server * |_SM servlet (websocket or http) * |_rawSM servlet (websocket only) * |_GuruQuery servlet * |_GuruFetch servlet * |_GuruRiri servlet * |_FileFetch servlet * |_Echo servlet (websocket only) * * ------------------------------------------------------------------------------------------ * Documentation for the webserver portion: * Powered by Jetty. This provides servlets for interacting with Guru over http: * This version (as opposed to GuruWebServer) allows use of HTML 5 including web sockets. * Requires Java 1.5+ and Jetty 7.5+. * * 1. /guru/query?... * 2. /guru/riri?... * 3. /guru/sm?... * 4. Guru Fetch * 5. File Fetch * In addition, provides a GUI interface for controlling and monitoring the web server. * * Sample usage: * ------------------------------------------------------------------------ * * SM: * http://localhost:7501/guru/sm?%1cSM%1dSMDST=00000000%1dSMSRC=%1dSRC=%1cSomeChannel%1cIt is%1dJust%1dSo%1dHappenin * http://localhost:7501/guru/sm? 
* * Query: * http://localhost:7501/guru/query?exec&ri.sys.Title=Digital%20Translate * http://localhost:7501/guru/query?KA&ri.sys.Name=SampleOer2 * http://localhost:7501/guru/query?KV&ri.sys.Name=SampleOer&ri.sys.ObjClass,ri.sys.Title,ri.sys.Name * http://localhost:7501/guru/query?KR&ri.sys.Name=SampleOer&ri.sys.ObjClass,ri.sys.Title,ri.sys.Name * http://localhost:7501/guru/query?quit //terminate web server * Options: OR=Object Retrieve, KQ=Keys Query, KV=KeyValues, KR=KeyRange, KA=Object Attributes * * RIRI: * http://localhost:7501/guru/riri?GR%1dSCOPE=LOCAL%1c%1dri.sys.ObjClass=WebPage%1dlast * http://localhost:7501/guru/riri?KA%1dSCOPE=LOCAL%1c%1dri.sys.ObjClass=WebPage%1dri.sys.Name=SampleOer%1dlast * * Guru Fetch: * http://localhost:7501/product/review/SampleFileToFetch.html * ri.sys.FilePath^ ri.sys.Name^ ri.sys.FileExt^ * * Javascript: * http://localhost:7501/product/GservTest1.html * http://localhost:7501/GservTest2.html * * File Fetch: * http://localhost:7501/riapps/web/oer/sample.html //note: any file in /RiApps. * * ------------------------------------------------------------------------ * The following external jar files are required for this web server: * o jetty-all-7.3.1.v20110307.jar: Jetty server * o servlet-api-2.5-20081211 javax.servlet 2.5 * o commons-logging-1.1.1.jar: Apache Commons logging * Also: the servlets must be specifically included in the Jar file (since they go in by Reflection). * * ------------------------------------------------------------------------ * JarMaker build settings: * Dest Dir: C:\Pgms\workspace\GuruApplication\GM3EKE2A_RtServer * Output Jar: RtServer.jar * Input classNames: ri.app.web.RtServer, ri.core.web.servlet.ServletGuruQuery, ri.core.web.servlet.ServletGuruRiri, ri.core.web.servlet.ServletGuruFetch, ri.core.web.servlet.ServletFileFetch, ri.core.web.servlet.ServletGuruSm7, ri.core.web.servlet.ServletGuruRawSm, ri.core.web.servlet.ServletEcho * Excludes: java.,javax.,sun. 
* Classpath: C:\pgms\workspace\RiApps;C:\pgms\workspace\Jars\commons-logging-1.1.1.jar;jetty-all-7.5.4.v20111024.jar;C:\pgms\workspace\Jars\servlet-api-2.5-20081211.jar;C:\pgms\workspace\Jars\json_simple-1.1.jar * Addtl app.zip Files: jetty-all-7.5.4.v20111024.jar, servlet-api-2.5-20081211.jar * * ------------------------------------------------------------------------ * History: * -ver 9 OER 1/4/13: Updated SmLauncher for 'database' * -ver 8 OER 8/24/12: Fixed issues with PipeCLient auto reconnect * -ver 7 OER 8/20/12:Added auto-reconnect to pipeclient * -ver 6 7/8/12 OER: * o Disabled the web server functionality until such time as it may be needed. * o updated the way Rtalk is instantiated and command line parameters are passed downto it. * -ver 5 7/6/12 OER: * o Modified formatting of command line parameters. Was: "-param1 -param2 value...", Now: "param1 param2=value..." * o No longer parsing Rtalk cmd line params at main(), now these are passed down to rtalk itself * -ver 4 6/22/12 OER: * o Removed GuruLook (standalone application now) * -ver 3 11/30/11 OER: * o Synchronized with GuruWebServer and ensured they share the following (with websocket functionality * turned off for GuruWebServer): SM Router, Oleo, SM Bridge, TextMode Bridge, Whiteboard Launcher, * Database-SM Bridge, Local Guru Cache, Console Log, Web server * o added 'Status' view to show state of all above items * -ver 2 11/22/11 oer: * o Added MenuTools: SmEmit, SmLauncher, WbApp, GuruLook, SimTestExec * -Created 9/3/2011, OER. Based on ri.app.web.GuruWebServer version 5. 
* ------------------------------------------------------------------------ * */ public class RtServer extends JFrame implements RiriApp { private static final long serialVersionUID=1L; //eliminates compiler warning private static final String _appVersionNo="9"; private static boolean _webserverEnabled=true; //if this is false then the web server functionality is disabled (can be enabled/disabled at the command line) //private static final String DEFAULT_CMDLINE_ARGS = "rtDebug guruIp=192.168.56.15 rtBoot=GAKRE8CABJADXV00 rtLoad=GAKRE8CABJADXV00 rtLaunch=RtTestCases:benchmark1"; private static final String DEFAULT_CMDLINE_ARGS = "rtDebug rtBoot=GAKRE8CABJADXV00 rtLoad=GAKRE8CABJADXV00 rtLaunch=RtTestCases:benchmark1"; private static String _title = "RT Server"; //description for user, title, about box etc. private boolean _guiEnabled=true; //default is to provide a web server gui display private String _guruIp = "localhost"; //ip address where Guru Web Server makes its guru queries public int _guruPort = 50010; //port used to communicate to guru private int _httpPort=7501; //port used to communicate over http private Server _server = null; private JPanel _pnlBridges = new JPanel(new GridLayout(0,1,0,0)); //This is the panel containing all the current BridgePanels private SmRouter _smRouter = null; //view panel for SM Router activity //Settings related to Rtalk (non-null values override default settings): private RtalkForRtServer _rtalk = null; private boolean _disableLoadingOfRtalk=false; //default is to load rtalk (in any case only loads if java runtime supports it) private Boolean _doRtalk = null; //true if Rtalk is to be initialized (this gets set to false if the jvm does not provide the facilities for running rtalk) private String _noRtalkReason = null; //if non-null then this is a description of the reason for rtalk not being initialized private boolean _initialBrowserLaunch=false; //set to false to disable initial browser launch private SmLauncher _smLauncher 
= null; private static boolean _verboseServlets=false; private JCheckBoxMenuItem _menuViewVerbose=new JCheckBoxMenuItem("Verbose", _verboseServlets); //verbose servlets private QuickQueryDialog _queryDialog = null; private StdfCleanup _stdfCleanupTool = null; //Non webserver related fields: private SmEmit _smEmit = null; private RtServer _me = this; //convenience thing private String[] _cmdLineArgs=null; //this holds the command line args the program was started with (so can restart rtalk) /*Constr*/ public RtServer(String[] args, String title) throws Exception { super(); _cmdLineArgs = args; //save for Rtalk restarts decodeCmdlineArgs(args); //System.out.println("Guruip="+_guruIp+", guruPort="+_guruPort+", httpPort="+_httpPort); if(_guiEnabled) RiLookAndFeel.init(args); _title = title; setTitle(_title); GuruMgr guru; guru = GuruAccess.hookupAndRegister(this, _title, false); //initial hook up to Guru String connectString = null; if(_guruIp!=null && !_guruIp.toLowerCase().startsWith("local")) { connectString = _guruIp; if(_guruPort!=50000 && _guruPort!=50010) connectString += ":"+_guruPort; guru=GuruAccess.instance(connectString); if(guru!=null) Emit.out("Connected to guru at: "+connectString); else Emit.out("Unable to connect to guru at: "+connectString); } RtalkForRtServer.setConnectString(connectString); if(guru!=null) guru.setSuperUserMode(true); //bypasses normal guru permissions limitations okToRunRtalk(); //check environment to determine suitability for running Rtalk BridgePanel smBridge=null; //initial SM bridge if(_guiEnabled) { enableEvents(AWTEvent.WINDOW_EVENT_MASK); guiInit(); if(connectString==null) //i.e. 
if just using local guru smBridge = doAddBridge("localhost", null, false); else smBridge = doAddBridge(connectString, null, false); Dimension scrnSz = Toolkit.getDefaultToolkit().getScreenSize(); int wd = 800; int ht = 400; setSize(wd, ht); setLocation(scrnSz.width-wd-10, scrnSz.height-ht-100); //near the bottom right validate(); setVisible(true); setExtendedState(JFrame.ICONIFIED); //start out iconified. Note for OS2: do this AFTER setVisible to be able to see iconified, do it BEFORE to see nothing } setupWebserver(); //Start things up: _smRouter = new SmRouter(this, _title); //handles local routing of SM messages if(smBridge!=null) smBridge.doButn1Connect(); //connect up the bridge to localhost. This is at the end because it helps to delay this a bit, or else the Guru-bar button name can get messed up (you see the same name twice) _smLauncher = new SmLauncher(this,true); //just always have this running. It responds to channels "whiteboard", "database" (case insensitive) doServerStart(true); //get the server going doRestartRtalk(); //if conditions are ok for running rtalk then this initializes it if(_initialBrowserLaunch) doLaunch(); //initial web browser launch } /**Initialize gui related components*/ private void guiInit() { setupMenuBar(); getContentPane().setLayout(new BorderLayout()); //TODO: Change from Emit text area to JEditorPane and have it respond to hyperlinks (see example below) Otherwise just put a button or something to do it. 
Component emitComponent = Emit.checkoutTextArea(true); if(emitComponent!=null) { //if something else didn't already take it, we have it getContentPane().add(emitComponent, BorderLayout.CENTER); Emit.captureConsoleOut(true); //let the Emit Window capture console output } getContentPane().add(new JScrollPane(_pnlBridges), BorderLayout.SOUTH); } /**Example: This provides hyperlink functionality to entries in a JEditorPane*/ //class Hyperactive implements HyperlinkListener { // public void hyperlinkUpdate(HyperlinkEvent e) { // if(e.getEventType() != HyperlinkEvent.EventType.ACTIVATED) return; // JEditorPane pane=(JEditorPane)e.getSource(); // if(e instanceof HTMLFrameHyperlinkEvent) { // HTMLFrameHyperlinkEvent evt=(HTMLFrameHyperlinkEvent)e; // HTMLDocument doc=(HTMLDocument)pane.getDocument(); // doc.processHTMLFrameHyperlinkEvent(evt); // return; // } // //i.e. e not instanceof HTMLFrameHyperlinkEvent: // try { pane.setPage(e.getURL()); } // catch(Throwable t) { t.printStackTrace(); } // } //} /**Sets up the main window pull down menus*/ private void setupMenuBar() { JMenuBar menuBar=new JMenuBar(); JMenu menuFile=new JMenu("File"); JMenu menuView=new JMenu("View"); JMenu menuAction=new JMenu("Action"); JMenu menuTools=new JMenu("Tools"); JMenu menuHelp=new JMenu("Help"); JMenuItem menuFileExit=new JMenuItem("Exit"); JMenuItem menuViewStatus=new JMenuItem("Status"); JMenuItem menuViewSmRouter=new JMenuItem("SM Router"); JMenuItem menuViewConsole=new JMenuItem("Console"); JMenuItem menuActionLaunch=new JMenuItem("Launch"); JMenuItem menuActionStartWebserver=new JMenuItem("Start (Restart) Webserver"); JMenuItem menuActionStopWebserver=new JMenuItem("Stop Webserver"); JMenuItem menuActionRestartRtalk=new JMenuItem("Restart Rtalk"); JMenuItem menuActionAddSmBridge=new JMenuItem("Add an SM Bridge"); JMenuItem menuActionRemoveSmBridge=new JMenuItem("Remove an SM Bridge"); JMenuItem menuToolsOleo=new JMenuItem("Oleo (Remote Guru)"); JMenuItem menuToolsDbBridge=new 
JMenuItem("dbase-sm bridge"); JMenuItem menuToolsSmLauncher=new JMenuItem("SM Launcher"); JMenuItem menuToolsWhitebdApp=new JMenuItem("Whiteboard App"); JMenuItem menuToolsSmEmit=new JMenuItem("SM Emit"); JMenuItem menuToolsSimTestExec=new JMenuItem("Simulated TestExec"); JMenuItem menuToolsStdfCleanup=new JMenuItem("STDF Cleanup Tool"); JMenuItem menuToolsQuickQuery=new JMenuItem("Quick Query"); JMenuItem menuHelpAbout=new JMenuItem("About"); setJMenuBar(menuBar); menuBar.add(menuFile); menuBar.add(menuView); menuBar.add(menuAction); menuBar.add(menuTools); menuBar.add(menuHelp); menuFile.add(menuFileExit); menuView.add(menuViewStatus); menuView.add(menuViewSmRouter); menuView.add(menuViewConsole); menuView.add(_menuViewVerbose); if(_webserverEnabled) { //if the web server functionality is enabled menuAction.add(menuActionLaunch); menuAction.add(menuActionStartWebserver); menuAction.add(menuActionStopWebserver); } menuAction.add(menuActionRestartRtalk); menuAction.add(menuActionAddSmBridge); menuAction.add(menuActionRemoveSmBridge); menuTools.add(menuToolsOleo); menuTools.add(menuToolsDbBridge); menuTools.add(menuToolsSmLauncher); menuTools.addSeparator(); menuTools.add(menuToolsWhitebdApp); menuTools.add(menuToolsSmEmit); menuTools.add(menuToolsSimTestExec); menuTools.add(menuToolsStdfCleanup); menuTools.add(menuToolsQuickQuery); menuHelp.add(menuHelpAbout); if(!okToRunRtalk()) menuActionRestartRtalk.setEnabled(false); menuFileExit.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doExit(); } }); menuViewStatus.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doShowStatusDialog(); } }); menuViewSmRouter.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { _smRouter.setVisible(true); } }); menuViewConsole.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { RtServerConsole.instance().setVisible(true); } }); 
_menuViewVerbose.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { _verboseServlets=_menuViewVerbose.isSelected(); } }); menuActionLaunch.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doLaunch(); } }); menuActionStartWebserver.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doServerStart(true); } }); menuActionStopWebserver.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doServerStop(true); } }); menuActionRestartRtalk.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doRestartRtalk(); } }); menuActionAddSmBridge.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doAddBridge("localhost", null, true); } }); menuActionRemoveSmBridge.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doRemoveBridge(); } }); menuToolsSmEmit.addActionListener(new ActionListener() { //SM Emit tool public void actionPerformed(ActionEvent e) { if(_smEmit==null) _smEmit = new SmEmit(true); _smEmit.setVisible(true); } }); menuToolsSmLauncher.addActionListener(new ActionListener() { //RT Launcher (SM based RT application launcher) public void actionPerformed(ActionEvent e) { if(_smLauncher==null) _smLauncher = new SmLauncher(this,true); _smLauncher.setVisible(true); } }); menuToolsOleo.addActionListener(new ActionListener() { //Oleo (remote guru) public void actionPerformed(ActionEvent e) { new Oleo(this).setVisible(true); } }); menuToolsWhitebdApp.addActionListener(new ActionListener() { //Rtalk Application public void actionPerformed(ActionEvent e) { new WhiteboardApp(false, null, null, false); } }); menuToolsDbBridge.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { new DatabaseSmBridge(this); } }); menuToolsSimTestExec.addActionListener(new ActionListener() { //Sim. 
Testexec (for debug) public void actionPerformed(ActionEvent e) { new SimTestExec(this); } }); menuToolsStdfCleanup.addActionListener(new ActionListener() { //STDF Cleanup Tool public void actionPerformed(ActionEvent e) { if(_stdfCleanupTool==null) _stdfCleanupTool = new StdfCleanup(this); else _stdfCleanupTool.setVisible(true); } }); menuToolsQuickQuery.addActionListener(new ActionListener() { //Quick Query for Guru public void actionPerformed(ActionEvent e) { if(_queryDialog==null) _queryDialog = new QuickQueryDialog(_me); _queryDialog.setVisible(true); } }); menuHelpAbout.addActionListener(new ActionListener() { public void actionPerformed(ActionEvent e) { doHelpAbout(); } }); } private void doShowStatusDialog() { JDialog d = new JDialog(this, "Status", false); Container cp = d.getContentPane(); cp.setLayout(new BorderLayout()); cp.add(new StatusPanel(true, okToRunRtalk(), _webserverEnabled), BorderLayout.CENTER); d.pack(); d.setLocationRelativeTo(this); d.setVisible(true); } /**Add a another SM bridge to the list. * Returns the created bridge. 
* Does not do any 'connect'*/ private BridgePanel doAddBridge(String ip1, String ip2, boolean doPack) { BridgePanel bp = new BridgePanel(null, ip1, true, false, null, ip2, true, false); _pnlBridges.add(bp); if(doPack) specialPack(); return bp; } /**Disconnect and remove an SM bridge from the list*/ private void doRemoveBridge() { int cnt = _pnlBridges.getComponentCount(); if(cnt==0) return; //nothing to do Object choice=null; if(cnt==1) choice="1"; //if there is only one entry then already know which one to delete else if(cnt>1) { String[] values = new String[cnt]; for(int i=0; i<cnt; i++) values[i] = Integer.toString(i+1); MenuPromptDialog d = new MenuPromptDialog(this, "Remove Which Bridge?", values, -1, true); d.setVisible(true); choice = d.getSelectedValue(); } if(choice==null) return; //operation cancelled try { int which = Integer.parseInt(choice.toString())-1; if(which >=0) _pnlBridges.remove(which); _pnlBridges.invalidate(); specialPack(); } catch(Exception e) { Emit.out("*Nothing removed because "+e); } //debug } /**This is a 'pack()' that tries to keep the bottom of the Window in the same spot and all of window visible * Also, width is preserved.*/ private void specialPack() { Dimension oldSz = getSize(); //save current position and size Point oldLoc = getLocation(); pack(); Dimension scnSz = Toolkit.getDefaultToolkit().getScreenSize(); Dimension newSz = getSize(); newSz.width = oldSz.width; //preserve the old width (for now) int newY = oldLoc.y-(newSz.height-oldSz.height); if(newY<10) newY=10; int newX = oldLoc.x; if( (newX+newSz.width) > scnSz.width-10) newX = scnSz.width-newSz.width-10; if(newX < 10) newX=10; setSize(newSz); setLocation(newX, newY); //subtract new size from oldSize, adjust vpos by that amount } /**Determines whether the currently executing Java has the facilities to run Rtalk*/ private boolean okToRunRtalk() { if(_doRtalk==null) { //initialize values first time thru if(_disableLoadingOfRtalk) { _doRtalk = Boolean.FALSE; _noRtalkReason 
= "Note: Rtalk is disabled."; } else { int verno = getJdkVersion(); if(verno >= 7) //ok to run rtalk _doRtalk = Boolean.TRUE; else { //can't use rtalk on java 1.6 on down _doRtalk = Boolean.FALSE; if(verno==0) _noRtalkReason = "Rtalk not available because: Unable to determine current JDK runtime version (Rtalk requires JDK 7 or higher)."; else _noRtalkReason = "Rtalk not available because: Currently running JDK "+verno+". Rtalk requires JDK 7 or higher."; } } } return _doRtalk.booleanValue(); } /**If conditions are ok for running rtalk then this initializes (or re-initializes) it*/ private void doRestartRtalk() { if(okToRunRtalk()) { if(_rtalk!=null) //if has been previously initialized then need to shut down the old one _rtalk.shutdown(); _rtalk = new RtalkForRtServer(_cmdLineArgs); } else Emit.out(_noRtalkReason); } /**Returns the main jdk version number. Examples: JDK 1.4 ==> 4, 1.7 ==> 7 etc. * If unable to determine jdk version number then just returns 0.*/ private int getJdkVersion() { String ver = System.getProperty("java.runtime.version"); //i.e. 
"1.6.0_24-b07" if(ver==null) System.getProperty("java.version"); //just in case if(ver==null) return 0; //give up, unknown version number try { int p1 = ver.indexOf('.'); int p2 = ver.indexOf('.', p1+1); p1++; //point to one past the initial decimal if(p2<0) p2 = ver.indexOf('_', p1); //just in case if(p2<0) p2 = p1+1; //just in case return Integer.parseInt(ver.substring(p1, p2)); } catch(Exception e) { return 0; } //unknown version number } /**Sets up the web server with the required servlets*/ private void setupWebserver() throws Exception { if(!_webserverEnabled) return; //if the web server functionality is disabled _server = new Server(_httpPort); GuruConnection.useGuruIpAndPort(_guruIp, _guruPort); //set the ip and port that all servlets will use to access Guru //Map servlet classes to URI's: ServletContextHandler cth1 = new ServletContextHandler(ServletContextHandler.SESSIONS); cth1.setContextPath("/guru"); cth1.setDisplayName("guru servlet"); cth1.addServlet(ri.core.web.servlet.ServletGuruQuery7.class, "/query/*"); //guru/query servlet //cth1.addServlet(ri.core.web.servlet.ServletGuruRiri7.class, "/riri/*"); //guru/riri servlet cth1.addServlet(ri.core.web.servlet.ServletSm7.class, "/sm/*"); //guru/sm7 (websocket) servlet cth1.addServlet(ri.core.web.servlet.ServletRawSm.class, "/rawsm/*"); //guru/rawsm (websocket) servlet cth1.addServlet(ri.core.web.servlet.ServletEcho.class, "/echo/*"); //echo websocket servlet (debug*) //Use FileFetch servlet for file system location, e.g., C:\RiApps, D:\RiApps: ServletContextHandler cth2 = new ServletContextHandler(ServletContextHandler.SESSIONS); cth2.setContextPath("/riapps"); //define web application for context path //cth2.setResourceBase(getRiAppsDir()); <--make this work correctly so can elim the RiUriTypeFileFetch7.setFileBase kludge cth2.addServlet(ri.core.web.servlet.ServletFileFetch7.class, "/*"); //file fetch servlet //Use GuruFetch servlet for context path "/*": ServletContextHandler cth3 = new 
ServletContextHandler(ServletContextHandler.SESSIONS); cth3.setContextPath("/"); //define web application for context path cth3.addServlet(ri.core.web.servlet.ServletGuruFetch7.class, "/*"); //guru fetch servlet ContextHandlerCollection contextCollection = new ContextHandlerCollection(); contextCollection.setHandlers(new Handler[] {cth1, cth2, cth3}); _server.setHandler(contextCollection); } /**Start the web server*/ private void doServerStart(boolean verbose) { if(!_webserverEnabled) return; //if the web server functionality is disabled if(verbose) { try { Emit.out("Jetty version: "+_server.getClass().getPackage().getImplementationVersion()); } catch(Exception e) { Emit.out("Jetty version: <unknown> because "+e); } } if(_server.isStarted()) { if(verbose) Emit.out("Stopping Currently running server..."); try { _server.stop(); } catch(Exception e) {} } if(verbose) Emit.out("Starting Server..."); try { _server.start(); if(verbose) Emit.out("Server Started"); Emit.out("--- "+_title+" Server listening on port "+_httpPort+"\n"); } catch(Exception e) { Emit.out("\n--- Error starting server: "+e+"\n"); } ServletGuruSm.reInitialize(); try { //Do an initial SM query, just to get the SM servlet started up URL url = new URL("http://"+_guruIp+":"+_httpPort+"/guru/sm?"); BufferedReader in=new BufferedReader(new InputStreamReader(url.openStream())); while(in.readLine()!=null) ; //loop until all is read. in.close(); } catch(FileNotFoundException e) { Emit.out("> No SM connection"); } catch(Exception e) { Emit.out("Initial SM setup failed because: "+e); } ServletSm7.reInitialize(); try { //Do an initial SM query, just to get the SM servlet started up URL url = new URL("http://"+_guruIp+":"+_httpPort+"/guru/sm?"); BufferedReader in=new BufferedReader(new InputStreamReader(url.openStream())); while(in.readLine()!=null) ; //loop until all is read. 
in.close(); } catch(FileNotFoundException e) { Emit.out("> No SM connection"); } catch(Exception e) { Emit.out("Initial SM setup failed because: "+e); } ServletRawSm.reInitialize(); //currently not needed try { //Do an initial rawSM query, just to get the SM servlet started up URL url = new URL("http://"+_guruIp+":"+_httpPort+"/guru/rawsm?"); //note: functionality in rawsm for this is currently not implemented BufferedReader in=new BufferedReader(new InputStreamReader(url.openStream())); while(in.readLine()!=null) ; //loop until all is read. in.close(); } catch(FileNotFoundException e) { Emit.out("> No rawSM connection"); } catch(Exception e) { Emit.out("Initial rawSM setup failed because: "+e); } } /**Stop the web server*/ private void doServerStop(boolean verbose) { if(!_webserverEnabled) return; //if the web server functionality is disabled if(verbose) Emit.out("Stopping Server..."); try { _server.stop(); } catch(Exception e) {} if(verbose) Emit.out("Server Stopped"); GuruConnection.clear(); } /**Launch a web browser with Launch apps url*/ private void doLaunch() { if(!_webserverEnabled) return; //if the web server functionality is disabled String thisIp = "localhost"; //###determine this, don't use localhost String url = "http://"+thisIp+':'+_httpPort+"/index.html"; try { java.awt.Desktop.getDesktop().browse(java.net.URI.create(url)); //this way only works for java 1.6+ //BrowserLauncher.openURL(url); //manual way to launch a browser } catch(Exception e) { Emit.out("Unable to launch browser because: "+e); } } /**Used by servlets to determine whether to be verbose*/ public static boolean isVerboseServlets() { return _verboseServlets; } /**File | Exit action performed*/ private void doExit() { ririExitApplication(); _smRouter.closeDown(); if(_webserverEnabled) { //if the web server functionality is enabled doServerStop(false); System.out.println("Webserver terminated..."); } System.exit(0); } /**Override in subclasses to indicate application version number*/ 
private String getAppVersionNumber() { return _appVersionNo; } /**Displays the Help | About Dialog*/ private void doHelpAbout() { String[] info = new String[] { _title, "Version: " + getAppVersionNumber(), "Copyright Roos Instruments (c) "+AboutBox.getYearStringFor(2011), }; //String[] usage = Csv.toArray(getUsageString(), '\n'); String[] usage = new String[0]; //disable display of usage string String[] about = new String[info.length+usage.length]; int i=0; for(int j=0; j<info.length; i++, j++) about[i] = info[j]; for(int j=0; j<usage.length; i++, j++) about[i] = usage[j]; new AboutBox(this, about, null); } // ------------------------ // RiriApp related methods // ------------------------- /**Instructs a RiriApp to Exit * Note: Required for interface RiriApp*/ public void ririExitApplication() { try { GuruAccess.instance().unregisterRiriApp(this); } catch (Exception e) {} } /**Informs a RiriApp that the user has changed * Note: Required for interface RiriApp*/ public void ririUserChanged(String cid) {} /**Instructs a RiriApp to go to the top of the Zorder * Note: Required for interface RiriApp*/ public void ririZorderTop() { GuruHelper.doZorderTopFor(this); } /**System message (SM) from Guru * Note: Required for interface RiriApp*/ public void receiveGuruSystemMessage(GuruSystemMessage sm) { //Emit.out("WebServer got SM: ["+sm+"]"); if(sm==null) return; //just in case String channel = sm.getChannelAsString(); if(channel!=null && channel.equalsIgnoreCase("console")) { RtServerConsole.instance(this).log(sm); } } /**Overridden so can exit on System Close*/ protected void processWindowEvent(WindowEvent e) { super.processWindowEvent(e); if (e.getID() == WindowEvent.WINDOW_CLOSING) doExit(); } // ----------------------------------- // Methods required for SmEmitParent: // ----------------------------------- /**Returns current list of connection names * Required for interface SmEmitParent_*/ // public String[] getConnectionNames() { // int cnt = 
_pnlBridges.getComponentCount(); // ArrayList list = new ArrayList(); // for(int i=0; i<cnt; i++) { // Component co = _pnlBridges.getComponent(i); // if(co instanceof BridgePanel) { // BridgePanel bp = (BridgePanel)co; // list.add(bp.getName(0)); // list.add(bp.getName(1)); // } // } // cnt = list.size(); // String[] sa = new String[cnt]; // for(int i=0; i<cnt; i++) // sa[i] = list.get(i).toString(); // return sa; // } // // /**Returns specified sm connection. // * Required for interface SmEmitParent_*/ // public Connection_ getConnection(String name) { // if(name==null) return null; //just in case // int cnt = _pnlBridges.getComponentCount(); // for(int i=0; i<cnt; i++) { // Component co = _pnlBridges.getComponent(i); // if(co instanceof BridgePanel) { // BridgePanel bp = (BridgePanel)co; // if(name.equals(bp.getName(0))) return bp.getConnection(0); // if(name.equals(bp.getName(1))) return bp.getConnection(1); // } // } // return null; //no match // } // ----------------------------------- /**help on command line args*/ private String getUsageString() { return "\n"+ "Command line parameters: RtServer guruIp=ipAddress:port httpPort=portNo\n" + " webserver=true gui=false browser=false rtalk=false rtBoot=revisionId\n"+ " rtLoad=revisionId rtLaunch=package:method rtDebug help\n"+ "\n"+ "-guruIp=ipAddress:port: Specifies which Guru use (default is localhost)\n" + " Default port assumed if not specified.\n"+ " Examples: guru.roos.com, localhost, 192.168.1.143:50000, useSLP('criteria').\n"+ "-httpPort=portNo: port number to communicate with web browser (default is "+_httpPort+")\n"+ "-webserver=false: enables/disables the web server feature.\n"+ "-gui=false: turn off the web server gui display (i.e. 
run headless).\n"+ "-browser=false: Do not launch a web browser on startup\n"+ "-rtalk=false: Disable Rtalk.\n"+ "-rtBoot=revisionId: boots rtalk using the given ri.sys.RevisionId.\n"+ "-rtLoad=revisionId: defines the ri.sys.Revision to be loaded by rtalk on startup.\n"+ "-rtLaunch=methodName: initial method to be called on rtalk startup.\n"+ "-rtDebug: enables rtalk debug mode.\n"+ "Notes:\n"+ " Commands are case insensitive.\n"+ " Option: guruIp useSLP('criteria') not yet implemeted.\n"; } /*i.e. "guruip=192.168.1.143:50000 httpPort=7501 gui=false help"*/ private void decodeCmdlineArgs(String[] args) { int len = args==null ? 0 : args.length; for(int i=0; i<len; i++) { try { String cmd, val="", arg=args[i]; //extract "cmd" or "cmd=val" from each command line parameter int pos=arg.indexOf('='); if(pos<0) cmd = arg.trim(); //no equals means it's all cmd else { //get the cmd and value portions cmd = arg.substring(0, pos).trim(); if(pos < (arg.length()-1)) //ie: unless equals sign is the last char val = arg.substring(pos+1).trim(); } if(cmd.equalsIgnoreCase("guruip")) { //user defined guru IP address String guruIpAndPort = val; int p = guruIpAndPort.indexOf(':'); _guruPort=-1; _guruIp=null; if(p>=0) { _guruIp = guruIpAndPort.substring(0, p); try { _guruPort = Integer.parseInt(guruIpAndPort.substring(p+1)); } catch(Exception e) {} } else _guruIp = guruIpAndPort; if(_guruIp==null || _guruIp.length()==0) _guruIp="localhost"; if(_guruPort<0) { //i.e. 
if none specified if(_guruIp.equalsIgnoreCase("localhost")) _guruPort=50010; else _guruPort = 50000; } } else if(cmd.equalsIgnoreCase("httpport")) { try { _httpPort = Integer.parseInt(val); } catch(Exception e) {} //on error, leave the original setting } else if(cmd.equalsIgnoreCase("webserver")) { //enable/disable the web server feature if(val.equalsIgnoreCase("false")) _webserverEnabled=false; else _webserverEnabled=true; } else if(cmd.equalsIgnoreCase("gui")) { //disable web server gui display if(val.equalsIgnoreCase("false")) _guiEnabled=false; else _guiEnabled=true; } else if(cmd.equalsIgnoreCase("browser")) { //disable initial launch of web browser if(val.equalsIgnoreCase("false")) _initialBrowserLaunch=false; else _initialBrowserLaunch=true; } else if(cmd.equalsIgnoreCase("rtalk")) { //disable loading of Rtalk if(val.equalsIgnoreCase("false")) _disableLoadingOfRtalk=true; else _disableLoadingOfRtalk=false; } else if(cmd.equalsIgnoreCase("help") || cmd.equals("?")) { System.out.println(getUsageString()); System.exit(0); } } catch(Exception e) { System.out.println("Problem with command line parameter #"+(i+1)+"["+args[i]+"]: "+e.getMessage()); } } } /**Main*/ public static void main(String[] args) { if(args.length==0) args = Csv.toArray(DEFAULT_CMDLINE_ARGS, ' '); RiLookAndFeel.init(args); RtServer webServer=null; try { webServer = new RtServer(args, _title); } catch(Throwable t) { t.printStackTrace(); if(webServer!=null) webServer.doServerStop(false); //shutdown } } }
/* * Copyright (C) 2017 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.gapid.widgets; import static com.google.gapid.util.Logging.throttleLogRpcError; import com.google.common.base.Preconditions; import com.google.common.util.concurrent.ListenableFuture; import com.google.gapid.image.Images; import com.google.gapid.rpc.Rpc; import com.google.gapid.rpc.RpcException; import com.google.gapid.rpc.UiErrorCallback; import com.google.gapid.server.Client.DataUnavailableException; import com.google.gapid.util.Events; import com.google.gapid.util.Events.ListenerCollection; import org.eclipse.swt.SWT; import org.eclipse.swt.graphics.Image; import org.eclipse.swt.graphics.ImageData; import org.eclipse.swt.widgets.Widget; import java.util.concurrent.ExecutionException; import java.util.function.Supplier; import java.util.logging.Logger; /** * A widget that displays an {@link Image} that may need to be loaded. While the image is being * loaded, a loading indicator is drawn. 
*/
public class LoadableImage {
  protected static final Logger LOG = Logger.getLogger(LoadableImage.class.getName());

  // Listener collection created via Events.silentListeners.
  private final ListenerCollection<Listener> listeners = Events.silentListeners(Listener.class);
  // Number of load() calls not yet balanced by unload(); loading starts on the first load().
  private int loadCount = 0;
  protected final Widget widget;
  // Invoked on the first load() to obtain the future producing the Image/ImageData.
  private final Supplier<ListenableFuture<Object>> futureSupplier;
  private ListenableFuture<Object> future;
  protected final LoadingIndicator loading;
  private final LoadingIndicator.Repaintable repaintable;
  protected final ErrorStrategy errorStrategy;
  private State state;
  // image is set on success (LOADED); errorImage (possibly null) on failure (FAILED).
  private Image image, errorImage;

  protected LoadableImage(Widget widget, Supplier<ListenableFuture<Object>> futureSupplier,
      LoadingIndicator loading, LoadingIndicator.Repaintable repaintable,
      ErrorStrategy errorStrategy) {
    this.widget = widget;
    this.futureSupplier = futureSupplier;
    this.loading = loading;
    this.repaintable = repaintable;
    this.errorStrategy = errorStrategy;
    state = State.NOT_STARTED;
  }

  public static Builder newBuilder(LoadingIndicator loading) {
    return new Builder(loading);
  }

  /** Starts (or ref-counts an already started) load; the result is delivered on the UI thread. */
  public LoadableImage load() {
    loadCount++;
    if (loadCount > 1 || state != State.NOT_STARTED) {
      return this; // already loading/loaded/failed; just bump the ref count.
    }
    loadCount = 1;
    state = State.LOADING;
    listeners.fire().onLoadingStart();
    loading.scheduleForRedraw(repaintable);
    future = futureSupplier.get();
    Rpc.listen(future, new UiErrorCallback<Object, Object, Image>(widget, LOG) {
      @Override
      protected ResultOrError<Object, Image> onRpcThread(Rpc.Result<Object> result)
          throws RpcException, ExecutionException {
        try {
          return success(result.get());
        } catch (RpcException | ExecutionException e) {
          if (!widget.isDisposed()) {
            return error(errorStrategy.handleError(e));
          }
          return error(null); // widget is gone; no error icon needed.
        }
      }

      @Override
      protected void onUiThreadSuccess(Object result) {
        // The future may yield either a ready-made Image or raw ImageData.
        if (result instanceof Image) {
          updateImage((Image)result);
        } else {
          updateImage(Images.createNonScaledImage(widget.getDisplay(), (ImageData)result));
        }
      }

      @Override
      protected void onUiThreadError(Image errorIcon) {
        updateErrorImage(errorIcon);
      }
    });
    return this;
  }

  /** Decrements the load ref count; cancels the pending future when it reaches zero. */
  public LoadableImage unload() {
    loadCount--;
    if (loadCount > 0 || state != State.LOADING) {
      return this;
    }
    future.cancel(true);
    state = State.NOT_STARTED;
    return this;
  }

  /** Returns the image for the current state: loading frame, loaded image, or error icon. */
  public Image getImage() {
    switch (state) {
      case NOT_STARTED: return getLoadingImage();
      case LOADING:
        // Keep the loading animation going until the future completes.
        loading.scheduleForRedraw(repaintable);
        return getLoadingImage();
      case LOADED: return image;
      case FAILED: return errorImage;
      case DISPOSED:
        SWT.error(SWT.ERROR_WIDGET_DISPOSED);
        return null; // not reached; SWT.error throws.
      default:
        throw new AssertionError();
    }
  }

  public boolean hasFinished() {
    return (state != State.NOT_STARTED) && (state != State.LOADING);
  }

  protected Image getLoadingImage() {
    return loading.getCurrentFrame();
  }

  public void dispose() {
    if (image != null) {
      image.dispose();
    }
    // errorImage is intentionally not disposed here — presumably a shared icon owned by the
    // ErrorStrategy (e.g. theme.error()); confirm ownership before changing this.
    image = null;
    errorImage = null;
    state = State.DISPOSED;
  }

  /** @param result The loaded image, may not be null. */
  protected void updateImage(Image result) {
    if (state == State.LOADING) {
      state = State.LOADED;
      image = result;
      errorImage = null;
      listeners.fire().onLoaded(true);
    } else {
      // unload()ed or dispose()d while the load was in flight: discard the result.
      result.dispose();
    }
  }

  /** @param result The error icon to show, may be null. */
  protected void updateErrorImage(Image result) {
    if (state == State.LOADING) {
      state = State.FAILED;
      image = null;
      errorImage = result;
      listeners.fire().onLoaded(false);
    }
  }

  public void addListener(Listener listener) {
    listeners.addListener(listener);
  }

  public void removeListener(Listener listener) {
    listeners.removeListener(listener);
  }

  public static interface Listener extends Events.Listener {
    /**
     * Event indicating that the image has started to load.
     */
    public default void onLoadingStart() { /* empty */ }

    /**
     * Event indicating that the image has finished loading.
     * @param success whether the image was loaded successfully
     */
    public default void onLoaded(boolean success) { /* empty */ }
  }

  /**
   * Determines how to deal with image loading errors.
   */
  public static interface ErrorStrategy {
    public Image handleError(Exception e);
  }

  /**
   * Builder for {@link LoadableImage}. If built using a future, it is assumed
   * to already be loading, while if built with a supplier, the {@link #load()}
   * method needs to be invoked to start the loading process.
   */
  public static class Builder {
    private final LoadingIndicator loading;
    private Supplier<ListenableFuture<Object>> futureSupplier;
    private ErrorStrategy errorStrategy;
    private boolean small;

    protected Builder(LoadingIndicator loading) {
      this.loading = loading;
    }

    public Builder small() {
      this.small = true;
      return this;
    }

    public Builder large() {
      this.small = false;
      return this;
    }

    public Builder forImageData(ListenableFuture<ImageData> future) {
      this.futureSupplier = cast(supplier(future));
      return this;
    }

    public Builder forImageData(Supplier<ListenableFuture<ImageData>> future) {
      this.futureSupplier = cast(future);
      return this;
    }

    public Builder forImage(ListenableFuture<Image> future) {
      this.futureSupplier = cast(supplier(future));
      return this;
    }

    public Builder forImage(Supplier<ListenableFuture<Image>> future) {
      this.futureSupplier = cast(future);
      return this;
    }

    public Builder onErrorReturnNull() {
      this.errorStrategy = e -> {
        logImageError(e);
        return null;
      };
      return this;
    }

    public Builder onErrorShowErrorIcon(Theme theme) {
      this.errorStrategy = e -> {
        logImageError(e);
        return theme.error();
      };
      return this;
    }

    private static void logImageError(Exception e) {
      // DataUnavailableException is not logged — treated as an expected condition here.
      if (!(e instanceof DataUnavailableException)) {
        throttleLogRpcError(LOG, "Failed to load image", e);
      }
    }

    public LoadableImage build(Widget widget, LoadingIndicator.Repaintable repaintable) {
      Preconditions.checkState(futureSupplier != null);
      Preconditions.checkState(errorStrategy != null);
      LoadableImage result;
      if (small) {
        // Small variant only differs in which loading-indicator frame is shown.
        result = new LoadableImage(widget, futureSupplier, loading, repaintable, errorStrategy) {
          @Override
          protected Image getLoadingImage() {
            return loading.getCurrentSmallFrame();
          }
        };
      } else {
        result = new LoadableImage(widget, futureSupplier, loading, repaintable, errorStrategy);
      }
      return result.load();
    }

    private static <T> Supplier<ListenableFuture<T>> supplier(ListenableFuture<T> future) {
      return () -> future;
    }

    @SuppressWarnings("unchecked")
    private static Supplier<ListenableFuture<Object>> cast(Supplier<?> future) {
      return (Supplier<ListenableFuture<Object>>)future;
    }
  }

  private static enum State {
    NOT_STARTED, LOADING, LOADED, FAILED, DISPOSED;
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.beam.sdk.io.hadoop.inputformat; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import com.google.auto.value.AutoValue; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import com.google.common.util.concurrent.AtomicDouble; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.math.BigDecimal; import java.math.BigInteger; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.concurrent.atomic.AtomicLong; import javax.annotation.Nullable; import org.apache.beam.sdk.annotations.Experimental; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.KvCoder; import org.apache.beam.sdk.io.BoundedSource; import 
org.apache.beam.sdk.io.hadoop.SerializableConfiguration; import org.apache.beam.sdk.io.hadoop.WritableCoder; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.transforms.PTransform; import org.apache.beam.sdk.transforms.SimpleFunction; import org.apache.beam.sdk.transforms.display.DisplayData; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TypeDescriptor; import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.ObjectWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.TaskAttemptContext; import org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * A {@link HadoopInputFormatIO} is a Transform for reading data from any source which * implements Hadoop {@link InputFormat}. For example- Cassandra, Elasticsearch, HBase, Redis, * Postgres etc. {@link HadoopInputFormatIO} has to make several performance trade-offs in * connecting to {@link InputFormat}, so if there is another Beam IO Transform specifically for * connecting to your data source of choice, we would recommend using that one, but this IO * Transform allows you to connect to many data sources that do not yet have a Beam IO Transform. * * <p>You will need to pass a Hadoop {@link Configuration} with parameters specifying how the read * will occur. 
Many properties of the Configuration are optional, and some are required for certain * {@link InputFormat} classes, but the following properties must be set for all InputFormats: * <ul> * <li>{@code mapreduce.job.inputformat.class}: The {@link InputFormat} class used to connect to * your data source of choice.</li> * <li>{@code key.class}: The key class returned by the {@link InputFormat} in * {@code mapreduce.job.inputformat.class}.</li> * <li>{@code value.class}: The value class returned by the {@link InputFormat} in * {@code mapreduce.job.inputformat.class}.</li> * </ul> * For example: * * <pre> * { * Configuration myHadoopConfiguration = new Configuration(false); * // Set Hadoop InputFormat, key and value class in configuration * myHadoopConfiguration.setClass(&quot;mapreduce.job.inputformat.class&quot;, * MyDbInputFormatClass, InputFormat.class); * myHadoopConfiguration.setClass(&quot;key.class&quot;, MyDbInputFormatKeyClass, Object.class); * myHadoopConfiguration.setClass(&quot;value.class&quot;, * MyDbInputFormatValueClass, Object.class); * } * </pre> * * <p>You will need to check to see if the key and value classes output by the {@link InputFormat} * have a Beam {@link Coder} available. If not, you can use withKeyTranslation/withValueTranslation * to specify a method transforming instances of those classes into another class that is supported * by a Beam {@link Coder}. These settings are optional and you don't need to specify translation * for both key and value. If you specify a translation, you will need to make sure the K or V of * the read transform match the output type of the translation. * * <p>You will need to set appropriate InputFormat key and value class (i.e. "key.class" and * "value.class") in Hadoop {@link Configuration}. 
If you set different InputFormat key or * value class than InputFormat's actual key or value class then, it may result in an error like * "unexpected extra bytes after decoding" while the decoding process of key/value object happens. * Hence, it is important to set appropriate InputFormat key and value class. * * <h3>Reading using {@link HadoopInputFormatIO}</h3> * * <pre> * {@code * Pipeline p = ...; // Create pipeline. * // Read data only with Hadoop configuration. * p.apply("read", * HadoopInputFormatIO.<InputFormatKeyClass, InputFormatKeyClass>read() * .withConfiguration(myHadoopConfiguration); * } * // Read data with configuration and key translation (Example scenario: Beam Coder is not * available for key class hence key translation is required.). * SimpleFunction&lt;InputFormatKeyClass, MyKeyClass&gt; myOutputKeyType = * new SimpleFunction&lt;InputFormatKeyClass, MyKeyClass&gt;() { * public MyKeyClass apply(InputFormatKeyClass input) { * // ...logic to transform InputFormatKeyClass to MyKeyClass * } * }; * </pre> * * <pre> * {@code * p.apply("read", * HadoopInputFormatIO.<MyKeyClass, InputFormatKeyClass>read() * .withConfiguration(myHadoopConfiguration) * .withKeyTranslation(myOutputKeyType); * } * </pre> * * <p>// Read data with configuration and value translation (Example scenario: Beam Coder is not * available for value class hence value translation is required.). 
* * <pre> * {@code * SimpleFunction&lt;InputFormatValueClass, MyValueClass&gt; myOutputValueType = * new SimpleFunction&lt;InputFormatValueClass, MyValueClass&gt;() { * public MyValueClass apply(InputFormatValueClass input) { * // ...logic to transform InputFormatValueClass to MyValueClass * } * }; * } * </pre> * * <pre> * {@code * p.apply("read", * HadoopInputFormatIO.<InputFormatKeyClass, MyValueClass>read() * .withConfiguration(myHadoopConfiguration) * .withValueTranslation(myOutputValueType); * } * </pre> */ @Experimental(Experimental.Kind.SOURCE_SINK) public class HadoopInputFormatIO { private static final Logger LOG = LoggerFactory.getLogger(HadoopInputFormatIO.class); /** * Creates an uninitialized {@link HadoopInputFormatIO.Read}. Before use, the {@code Read} must * be initialized with a HadoopInputFormatIO.Read#withConfiguration(HadoopConfiguration) that * specifies the source. A key/value translation may also optionally be specified using * {@link HadoopInputFormatIO.Read#withKeyTranslation}/ * {@link HadoopInputFormatIO.Read#withValueTranslation}. */ public static <K, V> Read<K, V> read() { return new AutoValue_HadoopInputFormatIO_Read.Builder<K, V>().build(); } /** * A {@link PTransform} that reads from any data source which implements Hadoop InputFormat. For * e.g. Cassandra, Elasticsearch, HBase, Redis, Postgres, etc. See the class-level Javadoc on * {@link HadoopInputFormatIO} for more information. * @param <K> Type of keys to be read. * @param <V> Type of values to be read. * @see HadoopInputFormatIO */ @AutoValue public abstract static class Read<K, V> extends PTransform<PBegin, PCollection<KV<K, V>>> { // Returns the Hadoop Configuration which contains specification of source. 
@Nullable public abstract SerializableConfiguration getConfiguration(); @Nullable public abstract SimpleFunction<?, K> getKeyTranslationFunction(); @Nullable public abstract SimpleFunction<?, V> getValueTranslationFunction(); @Nullable public abstract TypeDescriptor<K> getKeyTypeDescriptor(); @Nullable public abstract TypeDescriptor<V> getValueTypeDescriptor(); @Nullable public abstract TypeDescriptor<?> getinputFormatClass(); @Nullable public abstract TypeDescriptor<?> getinputFormatKeyClass(); @Nullable public abstract TypeDescriptor<?> getinputFormatValueClass(); abstract Builder<K, V> toBuilder(); @AutoValue.Builder abstract static class Builder<K, V> { abstract Builder<K, V> setConfiguration(SerializableConfiguration configuration); abstract Builder<K, V> setKeyTranslationFunction(SimpleFunction<?, K> function); abstract Builder<K, V> setValueTranslationFunction(SimpleFunction<?, V> function); abstract Builder<K, V> setKeyTypeDescriptor(TypeDescriptor<K> keyTypeDescriptor); abstract Builder<K, V> setValueTypeDescriptor(TypeDescriptor<V> valueTypeDescriptor); abstract Builder<K, V> setInputFormatClass(TypeDescriptor<?> inputFormatClass); abstract Builder<K, V> setInputFormatKeyClass(TypeDescriptor<?> inputFormatKeyClass); abstract Builder<K, V> setInputFormatValueClass(TypeDescriptor<?> inputFormatValueClass); abstract Read<K, V> build(); } /** * Returns a new {@link HadoopInputFormatIO.Read} that will read from the source using the * options provided by the given configuration. * * <p>Does not modify this object. 
*/ public Read<K, V> withConfiguration(Configuration configuration) { validateConfiguration(configuration); TypeDescriptor<?> inputFormatClass = TypeDescriptor.of(configuration.getClass("mapreduce.job.inputformat.class", null)); TypeDescriptor<?> inputFormatKeyClass = TypeDescriptor.of(configuration.getClass("key.class", null)); TypeDescriptor<?> inputFormatValueClass = TypeDescriptor.of(configuration.getClass("value.class", null)); Builder<K, V> builder = toBuilder().setConfiguration(new SerializableConfiguration(configuration)); builder.setInputFormatClass(inputFormatClass); builder.setInputFormatKeyClass(inputFormatKeyClass); builder.setInputFormatValueClass(inputFormatValueClass); /* * Sets the output key class to InputFormat key class if withKeyTranslation() is not called * yet. */ if (getKeyTranslationFunction() == null) { builder.setKeyTypeDescriptor((TypeDescriptor<K>) inputFormatKeyClass); } /* * Sets the output value class to InputFormat value class if withValueTranslation() is not * called yet. */ if (getValueTranslationFunction() == null) { builder.setValueTypeDescriptor((TypeDescriptor<V>) inputFormatValueClass); } return builder.build(); } /** * Returns a new {@link HadoopInputFormatIO.Read} that will transform the keys read from the * source using the given key translation function. * * <p>Does not modify this object. */ public Read<K, V> withKeyTranslation(SimpleFunction<?, K> function) { checkNotNull(function, "function"); // Sets key class to key translation function's output class type. return toBuilder().setKeyTranslationFunction(function) .setKeyTypeDescriptor((TypeDescriptor<K>) function.getOutputTypeDescriptor()).build(); } /** * Returns a new {@link HadoopInputFormatIO.Read} that will transform the values read from the * source using the given value translation function. * * <p>Does not modify this object. 
*/ public Read<K, V> withValueTranslation(SimpleFunction<?, V> function) { checkNotNull(function, "function"); // Sets value class to value translation function's output class type. return toBuilder().setValueTranslationFunction(function) .setValueTypeDescriptor((TypeDescriptor<V>) function.getOutputTypeDescriptor()).build(); } @Override public PCollection<KV<K, V>> expand(PBegin input) { validateTransform(); // Get the key and value coders based on the key and value classes. CoderRegistry coderRegistry = input.getPipeline().getCoderRegistry(); Coder<K> keyCoder = getDefaultCoder(getKeyTypeDescriptor(), coderRegistry); Coder<V> valueCoder = getDefaultCoder(getValueTypeDescriptor(), coderRegistry); HadoopInputFormatBoundedSource<K, V> source = new HadoopInputFormatBoundedSource<K, V>( getConfiguration(), keyCoder, valueCoder, getKeyTranslationFunction(), getValueTranslationFunction()); return input.getPipeline().apply(org.apache.beam.sdk.io.Read.from(source)); } /** * Validates that the mandatory configuration properties such as InputFormat class, InputFormat * key and value classes are provided in the Hadoop configuration. */ private void validateConfiguration(Configuration configuration) { checkNotNull(configuration, "configuration"); checkNotNull(configuration.get("mapreduce.job.inputformat.class"), "configuration.get(\"mapreduce.job.inputformat.class\")"); checkNotNull(configuration.get("key.class"), "configuration.get(\"key.class\")"); checkNotNull(configuration.get("value.class"), "configuration.get(\"value.class\")"); } /** * Validates construction of this transform. */ @VisibleForTesting void validateTransform() { checkNotNull(getConfiguration(), "getConfiguration()"); // Validate that the key translation input type must be same as key class of InputFormat. 
validateTranslationFunction(getinputFormatKeyClass(), getKeyTranslationFunction(), "Key translation's input type is not same as hadoop InputFormat : %s key class : %s"); // Validate that the value translation input type must be same as value class of InputFormat. validateTranslationFunction(getinputFormatValueClass(), getValueTranslationFunction(), "Value translation's input type is not same as hadoop InputFormat : " + "%s value class : %s"); } /** * Validates translation function given for key/value translation. */ private void validateTranslationFunction(TypeDescriptor<?> inputType, SimpleFunction<?, ?> simpleFunction, String errorMsg) { if (simpleFunction != null) { if (!simpleFunction.getInputTypeDescriptor().equals(inputType)) { throw new IllegalArgumentException( String.format(errorMsg, getinputFormatClass().getRawType(), inputType.getRawType())); } } } /** * Returns the default coder for a given type descriptor. Coder Registry is queried for correct * coder, if not found in Coder Registry, then check if the type descriptor provided is of type * Writable, then WritableCoder is returned, else exception is thrown "Cannot find coder". */ public <T> Coder<T> getDefaultCoder(TypeDescriptor<?> typeDesc, CoderRegistry coderRegistry) { Class classType = typeDesc.getRawType(); try { return (Coder<T>) coderRegistry.getCoder(typeDesc); } catch (CannotProvideCoderException e) { if (Writable.class.isAssignableFrom(classType)) { return (Coder<T>) WritableCoder.of(classType); } throw new IllegalStateException(String.format("Cannot find coder for %s : ", typeDesc) + e.getMessage(), e); } } } /** * Bounded source implementation for {@link HadoopInputFormatIO}. * @param <K> Type of keys to be read. * @param <V> Type of values to be read. 
*/ public static class HadoopInputFormatBoundedSource<K, V> extends BoundedSource<KV<K, V>> implements Serializable { private final SerializableConfiguration conf; private final Coder<K> keyCoder; private final Coder<V> valueCoder; @Nullable private final SimpleFunction<?, K> keyTranslationFunction; @Nullable private final SimpleFunction<?, V> valueTranslationFunction; private final SerializableSplit inputSplit; private transient List<SerializableSplit> inputSplits; private long boundedSourceEstimatedSize = 0; private transient InputFormat<?, ?> inputFormatObj; private transient TaskAttemptContext taskAttemptContext; private static final Set<Class<?>> immutableTypes = new HashSet<Class<?>>( Arrays.asList( String.class, Byte.class, Short.class, Integer.class, Long.class, Float.class, Double.class, Boolean.class, BigInteger.class, BigDecimal.class)); HadoopInputFormatBoundedSource( SerializableConfiguration conf, Coder<K> keyCoder, Coder<V> valueCoder, @Nullable SimpleFunction<?, K> keyTranslationFunction, @Nullable SimpleFunction<?, V> valueTranslationFunction) { this(conf, keyCoder, valueCoder, keyTranslationFunction, valueTranslationFunction, null); } protected HadoopInputFormatBoundedSource( SerializableConfiguration conf, Coder<K> keyCoder, Coder<V> valueCoder, @Nullable SimpleFunction<?, K> keyTranslationFunction, @Nullable SimpleFunction<?, V> valueTranslationFunction, SerializableSplit inputSplit) { this.conf = conf; this.inputSplit = inputSplit; this.keyCoder = keyCoder; this.valueCoder = valueCoder; this.keyTranslationFunction = keyTranslationFunction; this.valueTranslationFunction = valueTranslationFunction; } public SerializableConfiguration getConfiguration() { return conf; } @Override public void validate() { checkNotNull(conf, "conf"); checkNotNull(keyCoder, "keyCoder"); checkNotNull(valueCoder, "valueCoder"); } @Override public void populateDisplayData(DisplayData.Builder builder) { super.populateDisplayData(builder); Configuration hadoopConfig = 
getConfiguration().get(); if (hadoopConfig != null) { builder.addIfNotNull(DisplayData.item("mapreduce.job.inputformat.class", hadoopConfig.get("mapreduce.job.inputformat.class")) .withLabel("InputFormat Class")); builder.addIfNotNull(DisplayData.item("key.class", hadoopConfig.get("key.class")) .withLabel("Key Class")); builder.addIfNotNull(DisplayData.item("value.class", hadoopConfig.get("value.class")) .withLabel("Value Class")); } } @Override public List<BoundedSource<KV<K, V>>> split(long desiredBundleSizeBytes, PipelineOptions options) throws Exception { // desiredBundleSizeBytes is not being considered as splitting based on this // value is not supported by inputFormat getSplits() method. if (inputSplit != null) { LOG.info("Not splitting source {} because source is already split.", this); return ImmutableList.of((BoundedSource<KV<K, V>>) this); } computeSplitsIfNecessary(); LOG.info("Generated {} splits. Size of first split is {} ", inputSplits.size(), inputSplits .get(0).getSplit().getLength()); return Lists.transform(inputSplits, new Function<SerializableSplit, BoundedSource<KV<K, V>>>() { @Override public BoundedSource<KV<K, V>> apply(SerializableSplit serializableInputSplit) { HadoopInputFormatBoundedSource<K, V> hifBoundedSource = new HadoopInputFormatBoundedSource<K, V>(conf, keyCoder, valueCoder, keyTranslationFunction, valueTranslationFunction, serializableInputSplit); return hifBoundedSource; } }); } @Override public long getEstimatedSizeBytes(PipelineOptions po) throws Exception { if (inputSplit == null) { // If there are no splits computed yet, then retrieve the splits. computeSplitsIfNecessary(); return boundedSourceEstimatedSize; } return inputSplit.getSplit().getLength(); } /** * This is a helper function to compute splits. This method will also calculate size of the * data being read. Note: This method is executed exactly once and the splits are retrieved * and cached in this. 
These splits are further used by split() and * getEstimatedSizeBytes(). */ @VisibleForTesting void computeSplitsIfNecessary() throws IOException, InterruptedException { if (inputSplits != null) { return; } createInputFormatInstance(); List<InputSplit> splits = inputFormatObj.getSplits(Job.getInstance(conf.get())); if (splits == null) { throw new IOException("Error in computing splits, getSplits() returns null."); } if (splits.isEmpty()) { throw new IOException("Error in computing splits, getSplits() returns a empty list"); } boundedSourceEstimatedSize = 0; inputSplits = new ArrayList<SerializableSplit>(); for (InputSplit inputSplit : splits) { if (inputSplit == null) { throw new IOException("Error in computing splits, split is null in InputSplits list " + "populated by getSplits() : "); } boundedSourceEstimatedSize += inputSplit.getLength(); inputSplits.add(new SerializableSplit(inputSplit)); } } /** * Creates instance of InputFormat class. The InputFormat class name is specified in the Hadoop * configuration. */ protected void createInputFormatInstance() throws IOException { if (inputFormatObj == null) { try { taskAttemptContext = new TaskAttemptContextImpl(conf.get(), new TaskAttemptID()); inputFormatObj = (InputFormat<?, ?>) conf .get() .getClassByName( conf.get().get("mapreduce.job.inputformat.class")) .newInstance(); /* * If InputFormat explicitly implements interface {@link Configurable}, then setConf() * method of {@link Configurable} needs to be explicitly called to set all the * configuration parameters. For example: InputFormat classes which implement Configurable * are {@link org.apache.hadoop.mapreduce.lib.db.DBInputFormat DBInputFormat}, {@link * org.apache.hadoop.hbase.mapreduce.TableInputFormat TableInputFormat}, etc. 
*/ if (Configurable.class.isAssignableFrom(inputFormatObj.getClass())) { ((Configurable) inputFormatObj).setConf(conf.get()); } } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) { throw new IOException("Unable to create InputFormat object: ", e); } } } @VisibleForTesting InputFormat<?, ?> getInputFormat(){ return inputFormatObj; } @VisibleForTesting void setInputFormatObj(InputFormat<?, ?> inputFormatObj) { this.inputFormatObj = inputFormatObj; } @Override public Coder<KV<K, V>> getOutputCoder() { return KvCoder.of(keyCoder, valueCoder); } @Override public BoundedReader<KV<K, V>> createReader(PipelineOptions options) throws IOException { this.validate(); if (inputSplit == null) { throw new IOException("Cannot create reader as source is not split yet."); } else { createInputFormatInstance(); return new HadoopInputFormatReader<>( this, keyTranslationFunction, valueTranslationFunction, inputSplit, inputFormatObj, taskAttemptContext); } } /** * BoundedReader for Hadoop InputFormat source. * * @param <K> Type of keys RecordReader emits. * @param <V> Type of values RecordReader emits. */ class HadoopInputFormatReader<T1, T2> extends BoundedSource.BoundedReader<KV<K, V>> { private final HadoopInputFormatBoundedSource<K, V> source; @Nullable private final SimpleFunction<T1, K> keyTranslationFunction; @Nullable private final SimpleFunction<T2, V> valueTranslationFunction; private final SerializableSplit split; private RecordReader<T1, T2> recordReader; private volatile boolean doneReading = false; private AtomicLong recordsReturned = new AtomicLong(); // Tracks the progress of the RecordReader. 
private AtomicDouble progressValue = new AtomicDouble(); private transient InputFormat<T1, T2> inputFormatObj; private transient TaskAttemptContext taskAttemptContext; private HadoopInputFormatReader(HadoopInputFormatBoundedSource<K, V> source, @Nullable SimpleFunction keyTranslationFunction, @Nullable SimpleFunction valueTranslationFunction, SerializableSplit split, InputFormat inputFormatObj, TaskAttemptContext taskAttemptContext) { this.source = source; this.keyTranslationFunction = keyTranslationFunction; this.valueTranslationFunction = valueTranslationFunction; this.split = split; this.inputFormatObj = inputFormatObj; this.taskAttemptContext = taskAttemptContext; } @Override public HadoopInputFormatBoundedSource<K, V> getCurrentSource() { return source; } @Override public boolean start() throws IOException { try { recordsReturned.set(0L); recordReader = (RecordReader<T1, T2>) inputFormatObj.createRecordReader(split.getSplit(), taskAttemptContext); if (recordReader != null) { recordReader.initialize(split.getSplit(), taskAttemptContext); progressValue.set(getProgress()); if (recordReader.nextKeyValue()) { recordsReturned.incrementAndGet(); doneReading = false; return true; } } else { throw new IOException(String.format("Null RecordReader object returned by %s", inputFormatObj.getClass())); } recordReader = null; } catch (InterruptedException e) { throw new IOException( "Could not read because the thread got interrupted while " + "reading the records with an exception: ", e); } doneReading = true; return false; } @Override public boolean advance() throws IOException { try { progressValue.set(getProgress()); if (recordReader.nextKeyValue()) { recordsReturned.incrementAndGet(); return true; } doneReading = true; } catch (InterruptedException e) { throw new IOException("Unable to read data: ", e); } return false; } @Override public KV<K, V> getCurrent() { K key = null; V value = null; try { // Transform key if translation function is provided. 
key = transformKeyOrValue((T1) recordReader.getCurrentKey(), keyTranslationFunction, keyCoder); // Transform value if translation function is provided. value = transformKeyOrValue((T2) recordReader.getCurrentValue(), valueTranslationFunction, valueCoder); } catch (IOException | InterruptedException e) { LOG.error("Unable to read data: " + "{}", e); throw new IllegalStateException("Unable to read data: " + "{}", e); } return KV.of(key, value); } /** * Returns the serialized output of transformed key or value object. * @throws ClassCastException * @throws CoderException */ private <T, T3> T3 transformKeyOrValue(T input, @Nullable SimpleFunction<T, T3> simpleFunction, Coder<T3> coder) throws CoderException, ClassCastException { T3 output; if (null != simpleFunction) { output = simpleFunction.apply(input); } else { output = (T3) input; } return cloneIfPossiblyMutable((T3) output, coder); } /** * Beam expects immutable objects, but the Hadoop InputFormats tend to re-use the same object * when returning them. Hence, mutable objects returned by Hadoop InputFormats are cloned. */ private <T> T cloneIfPossiblyMutable(T input, Coder<T> coder) throws CoderException, ClassCastException { // If the input object is not of known immutable type, clone the object. if (!isKnownImmutable(input)) { input = CoderUtils.clone(coder, input); } return input; } /** * Utility method to check if the passed object is of a known immutable type. 
*/ private boolean isKnownImmutable(Object o) { return immutableTypes.contains(o.getClass()); } @Override public void close() throws IOException { LOG.info("Closing reader after reading {} records.", recordsReturned); if (recordReader != null) { recordReader.close(); recordReader = null; } } @Override public Double getFractionConsumed() { if (doneReading) { return 1.0; } else if (recordReader == null || recordsReturned.get() == 0L) { return 0.0; } if (progressValue.get() == 0.0) { return null; } return progressValue.doubleValue(); } /** * Returns RecordReader's progress. * @throws IOException * @throws InterruptedException */ private Double getProgress() throws IOException, InterruptedException { try { float progress = recordReader.getProgress(); return (double) progress < 0 || progress > 1 ? 0.0 : progress; } catch (IOException e) { LOG.error( "Error in computing the fractions consumed as RecordReader.getProgress() throws an " + "exception : " + "{}", e); throw new IOException( "Error in computing the fractions consumed as RecordReader.getProgress() throws an " + "exception : " + e.getMessage(), e); } } @Override public final long getSplitPointsRemaining() { if (doneReading) { return 0; } /** * This source does not currently support dynamic work rebalancing, so remaining parallelism * is always 1. */ return 1; } } } /** * A wrapper to allow Hadoop {@link org.apache.hadoop.mapreduce.InputSplit} to be serialized using * Java's standard serialization mechanisms. 
*/ public static class SerializableSplit implements Serializable { InputSplit inputSplit; public SerializableSplit() {} public SerializableSplit(InputSplit split) { checkArgument(split instanceof Writable, String.format("Split is not of type Writable: %s", split)); this.inputSplit = split; } public InputSplit getSplit() { return inputSplit; } private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException { ObjectWritable ow = new ObjectWritable(); ow.setConf(new Configuration(false)); ow.readFields(in); this.inputSplit = (InputSplit) ow.get(); } private void writeObject(ObjectOutputStream out) throws IOException { new ObjectWritable(inputSplit).write(out); } } }
/* * DBeaver - Universal Database Manager * Copyright (C) 2010-2019 Serge Rider (serge@jkiss.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ui.dialogs; import org.eclipse.jface.dialogs.ControlEnableState; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.dialogs.IDialogPage; import org.eclipse.jface.dialogs.TitleAreaDialog; import org.eclipse.jface.operation.IRunnableWithProgress; import org.eclipse.jface.operation.ModalContext; import org.eclipse.jface.viewers.IStructuredSelection; import org.eclipse.jface.wizard.IWizard; import org.eclipse.jface.wizard.IWizardContainer; import org.eclipse.jface.wizard.IWizardPage; import org.eclipse.jface.wizard.ProgressMonitorPart; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.graphics.Point; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.*; import org.eclipse.ui.IWorkbenchWindow; import org.eclipse.ui.IWorkbenchWizard; import org.jkiss.dbeaver.ui.ICompositeDialogPage; import org.jkiss.dbeaver.ui.UIUtils; import org.jkiss.utils.ArrayUtils; import java.lang.reflect.InvocationTargetException; /** * MultiPageWizardDialog */ public class MultiPageWizardDialog extends TitleAreaDialog implements IWizardContainer { private IWizard wizard; private Composite pageArea; private Tree pagesTree; 
private IDialogPage prevPage; private ProgressMonitorPart monitorPart; private SashForm wizardSash; private volatile int runningOperations = 0; public MultiPageWizardDialog(IWorkbenchWindow window, IWizard wizard) { this(window, wizard, null); } public MultiPageWizardDialog(IWorkbenchWindow window, IWizard wizard, IStructuredSelection selection) { super(window.getShell()); this.wizard = wizard; this.wizard.setContainer(this); // Initialize wizard if (wizard instanceof IWorkbenchWizard) { if (selection == null) { if (window.getSelectionService().getSelection() instanceof IStructuredSelection) { selection = (IStructuredSelection)window.getSelectionService().getSelection(); } } ((IWorkbenchWizard)wizard).init(window.getWorkbench(), selection); } } public IWizard getWizard() { return wizard; } @Override protected boolean isResizable() { return true; } @Override protected int getShellStyle() { return SWT.TITLE | SWT.MAX | SWT.RESIZE | SWT.APPLICATION_MODAL; } @Override protected Control createContents(Composite parent) { Control contents = super.createContents(parent); updateButtons(); return contents; } @Override protected Control createDialogArea(Composite parent) { Composite composite = (Composite)super.createDialogArea(parent); wizard.addPages(); wizardSash = new SashForm(composite, SWT.HORIZONTAL); wizardSash.setLayoutData(new GridData(GridData.FILL_BOTH)); pagesTree = new Tree(wizardSash, SWT.SINGLE); pagesTree.setLayoutData(new GridData(GridData.FILL_BOTH)); Composite pageContainer = UIUtils.createPlaceholder(wizardSash, 2); // Vertical separator new Label(pageContainer, SWT.SEPARATOR | SWT.VERTICAL) .setLayoutData(new GridData(SWT.LEFT, SWT.FILL, false, true)); pageArea = UIUtils.createPlaceholder(pageContainer, 1); GridData gd = new GridData(GridData.FILL_BOTH); pageArea.setLayoutData(gd); pageArea.setLayout(new GridLayout(1, true)); wizardSash.setWeights(new int[]{300, 700}); Point maxSize = new Point(0, 0); IWizardPage[] pages = wizard.getPages(); for 
(IWizardPage page : pages) { addPage(null, page, maxSize); } pagesTree.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { changePage(); } }); // Select first page pagesTree.select(pagesTree.getItem(0)); changePage(); // Set title and image from first page IDialogPage firstPage = (IDialogPage) pagesTree.getItem(0).getData(); setTitle(firstPage.getTitle()); setTitleImage(firstPage.getImage()); setMessage(firstPage.getMessage()); // Horizontal separator new Label(composite, SWT.HORIZONTAL | SWT.SEPARATOR) .setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); // Progress monitor monitorPart = new ProgressMonitorPart(composite, null, true) { @Override public void setCanceled(boolean b) { super.setCanceled(b); if (b) { cancelCurrentOperation(); } } }; gd = new GridData(GridData.FILL_HORIZONTAL); gd.grabExcessHorizontalSpace = true; gd.horizontalIndent = 20; gd.verticalIndent = 0; monitorPart.setLayoutData(gd); monitorPart.setVisible(false); return composite; } protected void cancelCurrentOperation() { } private TreeItem addPage(TreeItem parentItem, IDialogPage page, Point maxSize) { TreeItem item = parentItem == null ? 
new TreeItem(pagesTree, SWT.NONE) : new TreeItem(parentItem, SWT.NONE); item.setText(page.getTitle()); item.setData(page); // Ad sub pages if (page instanceof ICompositeDialogPage) { IDialogPage[] subPages = ((ICompositeDialogPage) page).getSubPages(); if (!ArrayUtils.isEmpty(subPages)) { for (IDialogPage subPage : subPages) { addPage(item, subPage, maxSize); } item.setExpanded(true); } } return item; } private void changePage() { pageArea.setRedraw(false); try { TreeItem[] selection = pagesTree.getSelection(); if (selection.length != 1) { return; } TreeItem newItem = selection[0]; if (prevPage == newItem.getData()) { return; } GridData gd; if (prevPage != null) { gd = (GridData) prevPage.getControl().getLayoutData(); gd.exclude = true; prevPage.setVisible(false); if (prevPage instanceof ActiveWizardPage) { ((ActiveWizardPage) prevPage).deactivatePage(); } } boolean pageCreated = false; IDialogPage page = (IDialogPage) newItem.getData(); Control pageControl = page.getControl(); if (pageControl == null) { // Create page contents page.createControl(pageArea); pageControl = page.getControl(); //Point pageSize = pageControl.computeSize(SWT.DEFAULT, SWT.DEFAULT); //if (pageSize.x > maxSize.x) maxSize.x = pageSize.x; //if (pageSize.y > maxSize.y) maxSize.y = pageSize.y; gd = (GridData) pageControl.getLayoutData(); if (gd == null) { gd = new GridData(GridData.FILL_BOTH); pageControl.setLayoutData(gd); } gd.exclude = false; pageCreated = true; } gd = (GridData) pageControl.getLayoutData(); gd.exclude = false; page.setVisible(true); if (page instanceof ActiveWizardPage) { ((ActiveWizardPage) page).activatePage(); } setTitle(page.getTitle()); setMessage(page.getDescription()); prevPage = page; pageArea.layout(); if (pageCreated) { UIUtils.resizeShell(getWizard().getContainer().getShell()); } } finally { pageArea.setRedraw(true); } } @Override protected void buttonPressed(int buttonId) { if (buttonId == IDialogConstants.CANCEL_ID) { getWizard().performCancel(); } else if 
(buttonId == IDialogConstants.OK_ID) { if (!getWizard().performFinish()) { return; } } super.buttonPressed(buttonId); } @Override public IWizardPage getCurrentPage() { TreeItem[] selection = pagesTree.getSelection(); if (ArrayUtils.isEmpty(selection)) { return null; } IDialogPage page = (IDialogPage)selection[0].getData(); return page instanceof IWizardPage ? (IWizardPage) page : null; } @Override public void showPage(IWizardPage page) { for (TreeItem item : pagesTree.getItems()) { if (item.getData() == page) { pagesTree.setSelection(item); changePage(); break; } for (TreeItem child : item.getItems()) { if (child.getData() == page) { pagesTree.setSelection(child); changePage(); return; } } } } @Override public void updateButtons() { boolean complete = true; for (TreeItem item : pagesTree.getItems()) { if (item.getData() instanceof IWizardPage) { IWizardPage page = (IWizardPage) item.getData(); if (page.getControl() != null && !page.isPageComplete()) { complete = false; break; } } } Button button = getButton(IDialogConstants.OK_ID); if (button != null && !button.isDisposed()) { button.setEnabled(complete); } } @Override public void updateMessage() { } @Override public void updateTitleBar() { setTitleImage(getCurrentPage().getImage()); } @Override public void updateWindowTitle() { } public boolean close() { if (runningOperations > 0) { return false; } return super.close(); } @Override public void run(boolean fork, boolean cancelable, IRunnableWithProgress runnable) throws InvocationTargetException, InterruptedException { // Code copied from WizardDialog if (monitorPart != null) { monitorPart.setVisible(true); monitorPart.layout(); monitorPart.attachToCancelComponent(null); } ControlEnableState pageEnableState = ControlEnableState.disable(wizardSash); ControlEnableState buttonsEnableState = ControlEnableState.disable(getButtonBar()); try { runningOperations++; ModalContext.run(runnable, true, monitorPart, getShell().getDisplay()); } finally { runningOperations--; 
buttonsEnableState.restore(); pageEnableState.restore(); if (monitorPart != null) { monitorPart.done(); monitorPart.setVisible(false); } } } }
/*
 * Copyright 2014 LinkedIn Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package azkaban.executor;

import java.io.File;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.log4j.Logger;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;

import azkaban.executor.ExecutionOptions.FailureAction;
import azkaban.flow.Flow;
import azkaban.project.Project;
import azkaban.utils.DirectoryFlowLoader;
import azkaban.utils.JSONUtils;
import azkaban.utils.Props;

/**
 * Tests {@link ExecutableFlow}: construction from a project flow, JSON
 * round-tripping via {@code toObject}/{@code createExecutableFlowFromObject},
 * and incremental update objects ({@code toUpdateObject}/{@code
 * applyUpdateObject}). Fixtures are loaded from
 * {@code unit/executions/embedded}.
 */
public class ExecutableFlowTest {
  private Project project;

  /**
   * Loads the embedded-flow fixture project before each test and verifies the
   * loader reported no errors.
   */
  @Before
  public void setUp() throws Exception {
    project = new Project(11, "myTestProject");

    Logger logger = Logger.getLogger(this.getClass());
    DirectoryFlowLoader loader = new DirectoryFlowLoader(new Props(), logger);
    loader.loadProjectFlow(project, new File("unit/executions/embedded"));
    Assert.assertEquals(0, loader.getErrors().size());

    project.setFlows(loader.getFlowMap());
    project.setVersion(123);
  }

  @After
  public void tearDown() throws Exception {
  }

  /**
   * Verifies the executable node graph produced for flow "jobe": joba/jobe are
   * plain nodes while jobb/jobc/jobd are embedded sub-flows of "innerFlow".
   */
  @Ignore
  @Test
  public void testExecutorFlowCreation() throws Exception {
    Flow flow = project.getFlow("jobe");
    Assert.assertNotNull(flow);

    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
    Assert.assertNotNull(exFlow.getExecutableNode("joba"));
    Assert.assertNotNull(exFlow.getExecutableNode("jobb"));
    Assert.assertNotNull(exFlow.getExecutableNode("jobc"));
    Assert.assertNotNull(exFlow.getExecutableNode("jobd"));
    Assert.assertNotNull(exFlow.getExecutableNode("jobe"));

    Assert.assertFalse(exFlow.getExecutableNode("joba") instanceof ExecutableFlowBase);
    Assert.assertTrue(exFlow.getExecutableNode("jobb") instanceof ExecutableFlowBase);
    Assert.assertTrue(exFlow.getExecutableNode("jobc") instanceof ExecutableFlowBase);
    Assert.assertTrue(exFlow.getExecutableNode("jobd") instanceof ExecutableFlowBase);
    Assert.assertFalse(exFlow.getExecutableNode("jobe") instanceof ExecutableFlowBase);

    ExecutableFlowBase jobbFlow =
        (ExecutableFlowBase) exFlow.getExecutableNode("jobb");
    ExecutableFlowBase jobcFlow =
        (ExecutableFlowBase) exFlow.getExecutableNode("jobc");
    ExecutableFlowBase jobdFlow =
        (ExecutableFlowBase) exFlow.getExecutableNode("jobd");

    Assert.assertEquals("innerFlow", jobbFlow.getFlowId());
    Assert.assertEquals("jobb", jobbFlow.getId());
    Assert.assertEquals(4, jobbFlow.getExecutableNodes().size());

    Assert.assertEquals("innerFlow", jobcFlow.getFlowId());
    Assert.assertEquals("jobc", jobcFlow.getId());
    Assert.assertEquals(4, jobcFlow.getExecutableNodes().size());

    Assert.assertEquals("innerFlow", jobdFlow.getFlowId());
    Assert.assertEquals("jobd", jobdFlow.getId());
    Assert.assertEquals(4, jobdFlow.getExecutableNodes().size());
  }

  /**
   * Round-trips a freshly created ExecutableFlow through JSON and checks the
   * parsed copy is deep-equal to the original.
   */
  @Ignore
  @Test
  public void testExecutorFlowJson() throws Exception {
    Flow flow = project.getFlow("jobe");
    Assert.assertNotNull(flow);

    ExecutableFlow exFlow = new ExecutableFlow(project, flow);

    Object obj = exFlow.toObject();
    String exFlowJSON = JSONUtils.toJSON(obj);
    @SuppressWarnings("unchecked")
    Map<String, Object> flowObjMap =
        (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);

    ExecutableFlow parsedExFlow =
        ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
    testEquals(exFlow, parsedExFlow);
  }

  /**
   * Same JSON round-trip as {@link #testExecutorFlowJson()}, but with a fully
   * populated {@link ExecutionOptions} (including null entries in the email
   * and disabled-job lists) to exercise option serialization.
   */
  @Ignore
  @Test
  public void testExecutorFlowJson2() throws Exception {
    Flow flow = project.getFlow("jobe");
    Assert.assertNotNull(flow);

    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
    exFlow.setExecutionId(101);
    exFlow.setAttempt(2);
    exFlow.setDelayedExecution(1000);

    ExecutionOptions options = new ExecutionOptions();
    options.setConcurrentOption("blah");
    options.setDisabledJobs(Arrays.asList(new Object[] { "bee", null, "boo" }));
    options.setFailureAction(FailureAction.CANCEL_ALL);
    options
        .setFailureEmails(Arrays.asList(new String[] { "doo", null, "daa" }));
    options
        .setSuccessEmails(Arrays.asList(new String[] { "dee", null, "dae" }));
    options.setPipelineLevel(2);
    options.setPipelineExecutionId(3);
    options.setNotifyOnFirstFailure(true);
    options.setNotifyOnLastFailure(true);

    HashMap<String, String> flowProps = new HashMap<String, String>();
    flowProps.put("la", "fa");
    options.addAllFlowParameters(flowProps);
    exFlow.setExecutionOptions(options);

    Object obj = exFlow.toObject();
    String exFlowJSON = JSONUtils.toJSON(obj);
    @SuppressWarnings("unchecked")
    Map<String, Object> flowObjMap =
        (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);

    ExecutableFlow parsedExFlow =
        ExecutableFlow.createExecutableFlowFromObject(flowObjMap);
    testEquals(exFlow, parsedExFlow);
  }

  /**
   * Exercises the incremental-update protocol: mutating nodes in one flow,
   * extracting an update object since a given timestamp, and applying it to a
   * JSON-cloned copy so both flows converge to the same state.
   */
  @SuppressWarnings("rawtypes")
  @Ignore
  @Test
  public void testExecutorFlowUpdates() throws Exception {
    Flow flow = project.getFlow("jobe");
    ExecutableFlow exFlow = new ExecutableFlow(project, flow);
    exFlow.setExecutionId(101);

    // Create copy of flow
    Object obj = exFlow.toObject();
    String exFlowJSON = JSONUtils.toJSON(obj);
    @SuppressWarnings("unchecked")
    Map<String, Object> flowObjMap =
        (Map<String, Object>) JSONUtils.parseJSONFromString(exFlowJSON);
    ExecutableFlow copyFlow =
        ExecutableFlow.createExecutableFlowFromObject(flowObjMap);

    testEquals(exFlow, copyFlow);

    ExecutableNode joba = exFlow.getExecutableNode("joba");
    ExecutableFlowBase jobb =
        (ExecutableFlowBase) (exFlow.getExecutableNode("jobb"));
    ExecutableFlowBase jobc =
        (ExecutableFlowBase) (exFlow.getExecutableNode("jobc"));
    ExecutableFlowBase jobd =
        (ExecutableFlowBase) (exFlow.getExecutableNode("jobd"));
    ExecutableNode jobe = exFlow.getExecutableNode("jobe");
    assertNotNull(joba, jobb, jobc, jobd, jobe);

    ExecutableNode jobbInnerFlowA = jobb.getExecutableNode("innerJobA");
    ExecutableNode jobbInnerFlowB = jobb.getExecutableNode("innerJobB");
    ExecutableNode jobbInnerFlowC = jobb.getExecutableNode("innerJobC");
    ExecutableNode jobbInnerFlow = jobb.getExecutableNode("innerFlow");
    assertNotNull(jobbInnerFlowA, jobbInnerFlowB, jobbInnerFlowC,
        jobbInnerFlow);

    ExecutableNode jobcInnerFlowA = jobc.getExecutableNode("innerJobA");
    ExecutableNode jobcInnerFlowB = jobc.getExecutableNode("innerJobB");
    ExecutableNode jobcInnerFlowC = jobc.getExecutableNode("innerJobC");
    ExecutableNode jobcInnerFlow = jobc.getExecutableNode("innerFlow");
    assertNotNull(jobcInnerFlowA, jobcInnerFlowB, jobcInnerFlowC,
        jobcInnerFlow);

    ExecutableNode jobdInnerFlowA = jobd.getExecutableNode("innerJobA");
    ExecutableNode jobdInnerFlowB = jobd.getExecutableNode("innerJobB");
    ExecutableNode jobdInnerFlowC = jobd.getExecutableNode("innerJobC");
    ExecutableNode jobdInnerFlow = jobd.getExecutableNode("innerFlow");
    assertNotNull(jobdInnerFlowA, jobdInnerFlowB, jobdInnerFlowC,
        jobdInnerFlow);

    exFlow.setEndTime(1000);
    exFlow.setStartTime(500);
    exFlow.setStatus(Status.RUNNING);
    exFlow.setUpdateTime(133);

    // Change one job and see if it updates
    long time = System.currentTimeMillis();
    jobe.setEndTime(time);
    jobe.setUpdateTime(time);
    jobe.setStatus(Status.DISABLED);
    jobe.setStartTime(time - 1);

    // Should be one node that was changed
    Map<String, Object> updateObject = exFlow.toUpdateObject(0);
    Assert.assertEquals(1, ((List) (updateObject.get("nodes"))).size());

    // Reapplying should give equal results.
    copyFlow.applyUpdateObject(updateObject);
    testEquals(exFlow, copyFlow);

    // This update shouldn't provide any results
    updateObject = exFlow.toUpdateObject(System.currentTimeMillis());
    Assert.assertNull(updateObject.get("nodes"));

    // Change inner flow
    long currentTime = time + 1;
    jobbInnerFlowA.setEndTime(currentTime);
    jobbInnerFlowA.setUpdateTime(currentTime);
    jobbInnerFlowA.setStatus(Status.DISABLED);
    jobbInnerFlowA.setStartTime(currentTime - 100);

    // We should get 2 updates if we do a toUpdateObject using 0 as the start
    // time
    updateObject = exFlow.toUpdateObject(0);
    Assert.assertEquals(2, ((List) (updateObject.get("nodes"))).size());

    // This should provide 1 update. That we can apply
    updateObject = exFlow.toUpdateObject(jobe.getUpdateTime());
    Assert.assertNotNull(updateObject.get("nodes"));
    Assert.assertEquals(1, ((List) (updateObject.get("nodes"))).size());
    copyFlow.applyUpdateObject(updateObject);
    testEquals(exFlow, copyFlow);

    // This shouldn't give any results anymore
    updateObject = exFlow.toUpdateObject(jobbInnerFlowA.getUpdateTime());
    Assert.assertNull(updateObject.get("nodes"));
  }

  /** Asserts every node in the varargs list is non-null. */
  private void assertNotNull(ExecutableNode... nodes) {
    for (ExecutableNode node : nodes) {
      Assert.assertNotNull(node);
    }
  }

  /**
   * Deep-equality check for two executable nodes: flow-level fields when both
   * are {@link ExecutableFlow}s, recursive child comparison when both are
   * {@link ExecutableFlowBase}s, then the common node fields.
   */
  public static void testEquals(ExecutableNode a, ExecutableNode b) {
    if (a instanceof ExecutableFlow) {
      if (b instanceof ExecutableFlow) {
        ExecutableFlow exA = (ExecutableFlow) a;
        ExecutableFlow exB = (ExecutableFlow) b;

        Assert.assertEquals(exA.getScheduleId(), exB.getScheduleId());
        Assert.assertEquals(exA.getProjectId(), exB.getProjectId());
        Assert.assertEquals(exA.getVersion(), exB.getVersion());
        Assert.assertEquals(exA.getSubmitTime(), exB.getSubmitTime());
        Assert.assertEquals(exA.getSubmitUser(), exB.getSubmitUser());
        Assert.assertEquals(exA.getExecutionPath(), exB.getExecutionPath());

        testEquals(exA.getExecutionOptions(), exB.getExecutionOptions());
      } else {
        Assert.fail("A is ExecutableFlow, but B is not");
      }
    }

    if (a instanceof ExecutableFlowBase) {
      if (b instanceof ExecutableFlowBase) {
        ExecutableFlowBase exA = (ExecutableFlowBase) a;
        ExecutableFlowBase exB = (ExecutableFlowBase) b;

        Assert.assertEquals(exA.getFlowId(), exB.getFlowId());
        Assert.assertEquals(exA.getExecutableNodes().size(), exB
            .getExecutableNodes().size());

        for (ExecutableNode nodeA : exA.getExecutableNodes()) {
          ExecutableNode nodeB = exB.getExecutableNode(nodeA.getId());
          Assert.assertNotNull(nodeB);
          Assert.assertEquals(a, nodeA.getParentFlow());
          Assert.assertEquals(b, nodeB.getParentFlow());

          testEquals(nodeA, nodeB);
        }
      } else {
        Assert.fail("A is ExecutableFlowBase, but B is not");
      }
    }

    Assert.assertEquals(a.getId(), b.getId());
    Assert.assertEquals(a.getStatus(), b.getStatus());
    Assert.assertEquals(a.getStartTime(), b.getStartTime());
    Assert.assertEquals(a.getEndTime(), b.getEndTime());
    Assert.assertEquals(a.getUpdateTime(), b.getUpdateTime());
    Assert.assertEquals(a.getAttempt(), b.getAttempt());

    Assert.assertEquals(a.getJobSource(), b.getJobSource());
    Assert.assertEquals(a.getPropsSource(), b.getPropsSource());
    // BUGFIX: these previously compared a's node sets against themselves
    // (a vs a), so in/out-node equality between the two flows was never
    // actually checked. Compare a against b.
    Assert.assertEquals(a.getInNodes(), b.getInNodes());
    Assert.assertEquals(a.getOutNodes(), b.getOutNodes());
  }

  /** Field-by-field equality check for two {@link ExecutionOptions}. */
  public static void testEquals(ExecutionOptions optionsA,
      ExecutionOptions optionsB) {
    Assert.assertEquals(optionsA.getConcurrentOption(),
        optionsB.getConcurrentOption());
    Assert.assertEquals(optionsA.getNotifyOnFirstFailure(),
        optionsB.getNotifyOnFirstFailure());
    Assert.assertEquals(optionsA.getNotifyOnLastFailure(),
        optionsB.getNotifyOnLastFailure());
    Assert.assertEquals(optionsA.getFailureAction(),
        optionsB.getFailureAction());
    Assert.assertEquals(optionsA.getPipelineExecutionId(),
        optionsB.getPipelineExecutionId());
    Assert.assertEquals(optionsA.getPipelineLevel(),
        optionsB.getPipelineLevel());
    Assert.assertEquals(optionsA.isFailureEmailsOverridden(),
        optionsB.isFailureEmailsOverridden());
    Assert.assertEquals(optionsA.isSuccessEmailsOverridden(),
        optionsB.isSuccessEmailsOverridden());

    testDisabledEquals(optionsA.getDisabledJobs(), optionsB.getDisabledJobs());
    testEquals(optionsA.getSuccessEmails(), optionsB.getSuccessEmails());
    testEquals(optionsA.getFailureEmails(), optionsB.getFailureEmails());
    testEquals(optionsA.getFlowParameters(), optionsB.getFlowParameters());
  }

  /** Asserts two string sets contain the same members (order-insensitive). */
  public static void testEquals(Set<String> a, Set<String> b) {
    if (a == b) {
      return;
    }

    if (a == null || b == null) {
      Assert.fail();
    }

    Assert.assertEquals(a.size(), b.size());

    Iterator<String> iterA = a.iterator();
    while (iterA.hasNext()) {
      String aStr = iterA.next();
      Assert.assertTrue(b.contains(aStr));
    }
  }

  /** Asserts two string lists are element-wise equal (order-sensitive). */
  public static void testEquals(List<String> a, List<String> b) {
    if (a == b) {
      return;
    }

    if (a == null || b == null) {
      Assert.fail();
    }

    Assert.assertEquals(a.size(), b.size());

    Iterator<String> iterA = a.iterator();
    Iterator<String> iterB = b.iterator();
    while (iterA.hasNext()) {
      String aStr = iterA.next();
      String bStr = iterB.next();
      Assert.assertEquals(aStr, bStr);
    }
  }

  /**
   * Compares two disabled-job lists. Entries may be plain values or nested
   * maps of the form {id, children}; nested maps are compared recursively.
   */
  @SuppressWarnings("unchecked")
  public static void testDisabledEquals(List<Object> a, List<Object> b) {
    if (a == b) {
      return;
    }

    if (a == null || b == null) {
      Assert.fail();
    }

    Assert.assertEquals(a.size(), b.size());

    Iterator<Object> iterA = a.iterator();
    Iterator<Object> iterB = b.iterator();
    while (iterA.hasNext()) {
      Object aStr = iterA.next();
      Object bStr = iterB.next();

      if (aStr instanceof Map && bStr instanceof Map) {
        Map<String, Object> aMap = (Map<String, Object>) aStr;
        Map<String, Object> bMap = (Map<String, Object>) bStr;

        Assert.assertEquals((String) aMap.get("id"), (String) bMap.get("id"));
        testDisabledEquals((List<Object>) aMap.get("children"),
            (List<Object>) bMap.get("children"));
      } else {
        Assert.assertEquals(aStr, bStr);
      }
    }
  }

  /** Asserts two string maps hold the same key/value pairs. */
  public static void testEquals(Map<String, String> a, Map<String, String> b) {
    if (a == b) {
      return;
    }

    if (a == null || b == null) {
      Assert.fail();
    }

    Assert.assertEquals(a.size(), b.size());

    for (String key : a.keySet()) {
      Assert.assertEquals(a.get(key), b.get(key));
    }
  }
}
package redis.clients.jedis.tests.commands;

import static redis.clients.jedis.ScanParams.SCAN_POINTER_START;
import static redis.clients.jedis.ScanParams.SCAN_POINTER_START_BINARY;

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Set;

import org.junit.Test;

import redis.clients.jedis.ScanParams;
import redis.clients.jedis.ScanResult;
import redis.clients.jedis.Tuple;
import redis.clients.jedis.ZParams;
import redis.clients.jedis.params.sortedset.ZAddParams;
import redis.clients.jedis.params.sortedset.ZIncrByParams;
import redis.clients.util.SafeEncoder;

/**
 * Integration tests for the Redis sorted-set (Z*) commands. Each test covers
 * both the String and the binary (byte[]) API variant against the live Jedis
 * connection provided by {@link JedisCommandTestBase}.
 */
public class SortedSetCommandsTest extends JedisCommandTestBase {
  final byte[] bfoo = { 0x01, 0x02, 0x03, 0x04 };
  final byte[] bbar = { 0x05, 0x06, 0x07, 0x08 };
  final byte[] bcar = { 0x09, 0x0A, 0x0B, 0x0C };

  final byte[] ba = { 0x0A };
  final byte[] bb = { 0x0B };
  final byte[] bc = { 0x0C };

  // Lex-range endpoints: '[' = inclusive, '(' = exclusive, '-'/'+' = infinities
  final byte[] bInclusiveB = { 0x5B, 0x0B };
  final byte[] bExclusiveC = { 0x28, 0x0C };
  final byte[] bLexMinusInf = { 0x2D };
  final byte[] bLexPlusInf = { 0x2B };

  final byte[] bbar1 = { 0x05, 0x06, 0x07, 0x08, 0x0A };
  final byte[] bbar2 = { 0x05, 0x06, 0x07, 0x08, 0x0B };
  final byte[] bbar3 = { 0x05, 0x06, 0x07, 0x08, 0x0C };
  final byte[] bbarstar = { 0x05, 0x06, 0x07, 0x08, '*' };

  /** ZADD returns 1 for a new member and 0 when only updating a score. */
  @Test
  public void zadd() {
    long status = jedis.zadd("foo", 1d, "a");
    assertEquals(1, status);

    status = jedis.zadd("foo", 10d, "b");
    assertEquals(1, status);

    status = jedis.zadd("foo", 0.1d, "c");
    assertEquals(1, status);

    status = jedis.zadd("foo", 2d, "a");
    assertEquals(0, status);

    // Binary
    long bstatus = jedis.zadd(bfoo, 1d, ba);
    assertEquals(1, bstatus);

    bstatus = jedis.zadd(bfoo, 10d, bb);
    assertEquals(1, bstatus);

    bstatus = jedis.zadd(bfoo, 0.1d, bc);
    assertEquals(1, bstatus);

    bstatus = jedis.zadd(bfoo, 2d, ba);
    assertEquals(0, bstatus);
  }

  /** ZADD with XX (update only), NX (add only) and CH (count changed) flags. */
  @Test
  public void zaddWithParams() {
    jedis.del("foo");

    // xx: never add new member
    long status = jedis.zadd("foo", 1d, "a", ZAddParams.zAddParams().xx());
    assertEquals(0L, status);

    jedis.zadd("foo", 1d, "a");
    // nx: never update current member
    status = jedis.zadd("foo", 2d, "a", ZAddParams.zAddParams().nx());
    assertEquals(0L, status);
    assertEquals(Double.valueOf(1d), jedis.zscore("foo", "a"));

    Map<String, Double> scoreMembers = new HashMap<String, Double>();
    scoreMembers.put("a", 2d);
    scoreMembers.put("b", 1d);
    // ch: return count of members not only added, but also updated
    status = jedis.zadd("foo", scoreMembers, ZAddParams.zAddParams().ch());
    assertEquals(2L, status);

    // binary
    jedis.del(bfoo);

    // xx: never add new member
    status = jedis.zadd(bfoo, 1d, ba, ZAddParams.zAddParams().xx());
    assertEquals(0L, status);

    jedis.zadd(bfoo, 1d, ba);
    // nx: never update current member
    status = jedis.zadd(bfoo, 2d, ba, ZAddParams.zAddParams().nx());
    assertEquals(0L, status);
    assertEquals(Double.valueOf(1d), jedis.zscore(bfoo, ba));

    Map<byte[], Double> binaryScoreMembers = new HashMap<byte[], Double>();
    binaryScoreMembers.put(ba, 2d);
    binaryScoreMembers.put(bb, 1d);
    // ch: return count of members not only added, but also updated
    status = jedis.zadd(bfoo, binaryScoreMembers, ZAddParams.zAddParams().ch());
    assertEquals(2L, status);
  }

  /** ZRANGE returns members ordered by ascending score. */
  @Test
  public void zrange() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("c");
    expected.add("a");

    Set<String> range = jedis.zrange("foo", 0, 1);
    assertEquals(expected, range);

    expected.add("b");
    range = jedis.zrange("foo", 0, 100);
    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bc);
    bexpected.add(ba);

    Set<byte[]> brange = jedis.zrange(bfoo, 0, 1);
    assertEquals(bexpected, brange);

    bexpected.add(bb);
    brange = jedis.zrange(bfoo, 0, 100);
    assertEquals(bexpected, brange);
  }

  /** ZRANGEBYLEX with inclusive/exclusive bounds and LIMIT. */
  @Test
  public void zrangeByLex() {
    jedis.zadd("foo", 1, "aa");
    jedis.zadd("foo", 1, "c");
    jedis.zadd("foo", 1, "bb");
    jedis.zadd("foo", 1, "d");

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("bb");
    expected.add("c");

    // exclusive aa ~ inclusive c
    assertEquals(expected, jedis.zrangeByLex("foo", "(aa", "[c"));

    expected.clear();
    expected.add("bb");
    expected.add("c");

    // with LIMIT
    assertEquals(expected, jedis.zrangeByLex("foo", "-", "+", 1, 2));
  }

  /** Binary variant of {@link #zrangeByLex()}. */
  @Test
  public void zrangeByLexBinary() {
    // binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 1, bc);
    jedis.zadd(bfoo, 1, bb);

    Set<byte[]> bExpected = new LinkedHashSet<byte[]>();
    bExpected.add(bb);

    assertEquals(bExpected, jedis.zrangeByLex(bfoo, bInclusiveB, bExclusiveC));

    bExpected.clear();
    bExpected.add(ba);
    bExpected.add(bb);

    // with LIMIT
    assertEquals(bExpected, jedis.zrangeByLex(bfoo, bLexMinusInf, bLexPlusInf, 0, 2));
  }

  /** ZREVRANGEBYLEX: reverse lexicographic range with bounds and LIMIT. */
  @Test
  public void zrevrangeByLex() {
    jedis.zadd("foo", 1, "aa");
    jedis.zadd("foo", 1, "c");
    jedis.zadd("foo", 1, "bb");
    jedis.zadd("foo", 1, "d");

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("c");
    expected.add("bb");

    // exclusive aa ~ inclusive c
    assertEquals(expected, jedis.zrevrangeByLex("foo", "[c", "(aa"));

    expected.clear();
    expected.add("c");
    expected.add("bb");

    // with LIMIT
    assertEquals(expected, jedis.zrevrangeByLex("foo", "+", "-", 1, 2));
  }

  /** Binary variant of {@link #zrevrangeByLex()}. */
  @Test
  public void zrevrangeByLexBinary() {
    // binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 1, bc);
    jedis.zadd(bfoo, 1, bb);

    Set<byte[]> bExpected = new LinkedHashSet<byte[]>();
    bExpected.add(bb);

    assertEquals(bExpected, jedis.zrevrangeByLex(bfoo, bExclusiveC, bInclusiveB));

    bExpected.clear();
    bExpected.add(bb);
    bExpected.add(ba);

    // with LIMIT
    assertEquals(bExpected, jedis.zrevrangeByLex(bfoo, bLexPlusInf, bLexMinusInf, 0, 2));
  }

  /** ZREVRANGE returns members ordered by descending score. */
  @Test
  public void zrevrange() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("b");
    expected.add("a");

    Set<String> range = jedis.zrevrange("foo", 0, 1);
    assertEquals(expected, range);

    expected.add("c");
    range = jedis.zrevrange("foo", 0, 100);
    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bb);
    bexpected.add(ba);

    Set<byte[]> brange = jedis.zrevrange(bfoo, 0, 1);
    assertEquals(bexpected, brange);

    bexpected.add(bc);
    brange = jedis.zrevrange(bfoo, 0, 100);
    assertEquals(bexpected, brange);
  }

  /** ZREM removes an existing member (1) and reports 0 for a missing one. */
  @Test
  public void zrem() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 2d, "b");

    long status = jedis.zrem("foo", "a");

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("b");

    assertEquals(1, status);
    assertEquals(expected, jedis.zrange("foo", 0, 100));

    status = jedis.zrem("foo", "bar");
    assertEquals(0, status);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 2d, bb);

    long bstatus = jedis.zrem(bfoo, ba);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bb);

    assertEquals(1, bstatus);
    assertEquals(bexpected, jedis.zrange(bfoo, 0, 100));

    bstatus = jedis.zrem(bfoo, bbar);
    assertEquals(0, bstatus);
  }

  /** ZINCRBY adds to a member's score and returns the new score. */
  @Test
  public void zincrby() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 2d, "b");

    double score = jedis.zincrby("foo", 2d, "a");

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("a");
    expected.add("b");

    assertEquals(3d, score, 0);
    assertEquals(expected, jedis.zrange("foo", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 2d, bb);

    double bscore = jedis.zincrby(bfoo, 2d, ba);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bb);
    bexpected.add(ba);

    assertEquals(3d, bscore, 0);
    assertEquals(bexpected, jedis.zrange(bfoo, 0, 100));
  }

  /** ZINCRBY with XX/NX flags returns null when the operation is skipped. */
  @Test
  public void zincrbyWithParams() {
    jedis.del("foo");

    // xx: never add new member
    Double score = jedis.zincrby("foo", 2d, "a", ZIncrByParams.zIncrByParams().xx());
    assertNull(score);

    jedis.zadd("foo", 2d, "a");

    // nx: never update current member
    score = jedis.zincrby("foo", 1d, "a", ZIncrByParams.zIncrByParams().nx());
    assertNull(score);
    assertEquals(Double.valueOf(2d), jedis.zscore("foo", "a"));

    // Binary
    jedis.del(bfoo);

    // xx: never add new member
    score = jedis.zincrby(bfoo, 2d, ba, ZIncrByParams.zIncrByParams().xx());
    assertNull(score);

    jedis.zadd(bfoo, 2d, ba);

    // nx: never update current member
    score = jedis.zincrby(bfoo, 1d, ba, ZIncrByParams.zIncrByParams().nx());
    assertNull(score);
    assertEquals(Double.valueOf(2d), jedis.zscore(bfoo, ba));
  }

  /** ZRANK is the 0-based ascending rank; null for missing key/member. */
  @Test
  public void zrank() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 2d, "b");

    long rank = jedis.zrank("foo", "a");
    assertEquals(0, rank);

    rank = jedis.zrank("foo", "b");
    assertEquals(1, rank);

    assertNull(jedis.zrank("car", "b"));

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 2d, bb);

    long brank = jedis.zrank(bfoo, ba);
    assertEquals(0, brank);

    brank = jedis.zrank(bfoo, bb);
    assertEquals(1, brank);

    assertNull(jedis.zrank(bcar, bb));
  }

  /** ZREVRANK is the 0-based descending rank. */
  @Test
  public void zrevrank() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 2d, "b");

    long rank = jedis.zrevrank("foo", "a");
    assertEquals(1, rank);

    rank = jedis.zrevrank("foo", "b");
    assertEquals(0, rank);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 2d, bb);

    long brank = jedis.zrevrank(bfoo, ba);
    assertEquals(1, brank);

    brank = jedis.zrevrank(bfoo, bb);
    assertEquals(0, brank);
  }

  /** ZRANGE WITHSCORES returns (member, score) tuples in score order. */
  @Test
  public void zrangeWithScores() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("c", 0.1d));
    expected.add(new Tuple("a", 2d));

    Set<Tuple> range = jedis.zrangeWithScores("foo", 0, 1);
    assertEquals(expected, range);

    expected.add(new Tuple("b", 10d));
    range = jedis.zrangeWithScores("foo", 0, 100);
    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bc, 0.1d));
    bexpected.add(new Tuple(ba, 2d));

    Set<Tuple> brange = jedis.zrangeWithScores(bfoo, 0, 1);
    assertEquals(bexpected, brange);

    bexpected.add(new Tuple(bb, 10d));
    brange = jedis.zrangeWithScores(bfoo, 0, 100);
    assertEquals(bexpected, brange);
  }

  /** ZREVRANGE WITHSCORES returns tuples in descending score order. */
  @Test
  public void zrevrangeWithScores() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("b", 10d));
    expected.add(new Tuple("a", 2d));

    Set<Tuple> range = jedis.zrevrangeWithScores("foo", 0, 1);
    assertEquals(expected, range);

    expected.add(new Tuple("c", 0.1d));
    range = jedis.zrevrangeWithScores("foo", 0, 100);
    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bb, 10d));
    bexpected.add(new Tuple(ba, 2d));

    Set<Tuple> brange = jedis.zrevrangeWithScores(bfoo, 0, 1);
    assertEquals(bexpected, brange);

    bexpected.add(new Tuple(bc, 0.1d));
    brange = jedis.zrevrangeWithScores(bfoo, 0, 100);
    assertEquals(bexpected, brange);
  }

  /** ZCARD counts distinct members (score updates don't add members). */
  @Test
  public void zcard() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    long size = jedis.zcard("foo");
    assertEquals(3, size);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    long bsize = jedis.zcard(bfoo);
    assertEquals(3, bsize);
  }

  /** ZSCORE returns the member's score, or null when absent. */
  @Test
  public void zscore() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Double score = jedis.zscore("foo", "b");
    assertEquals((Double) 10d, score);

    score = jedis.zscore("foo", "c");
    assertEquals((Double) 0.1d, score);

    score = jedis.zscore("foo", "s");
    assertNull(score);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Double bscore = jedis.zscore(bfoo, bb);
    assertEquals((Double) 10d, bscore);

    bscore = jedis.zscore(bfoo, bc);
    assertEquals((Double) 0.1d, bscore);

    bscore = jedis.zscore(bfoo, SafeEncoder.encode("s"));
    assertNull(bscore);
  }

  /** ZCOUNT over numeric and string ("(0.01", "+inf") score bounds. */
  @Test
  public void zcount() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    long result = jedis.zcount("foo", 0.01d, 2.1d);
    assertEquals(2, result);

    result = jedis.zcount("foo", "(0.01", "+inf");
    assertEquals(3, result);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    long bresult = jedis.zcount(bfoo, 0.01d, 2.1d);
    assertEquals(2, bresult);

    bresult = jedis.zcount(bfoo, SafeEncoder.encode("(0.01"), SafeEncoder.encode("+inf"));
    assertEquals(3, bresult);
  }

  /** ZLEXCOUNT with inclusive/exclusive bounds and infinities. */
  @Test
  public void zlexcount() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 1, "b");
    jedis.zadd("foo", 1, "c");
    jedis.zadd("foo", 1, "aa");

    long result = jedis.zlexcount("foo", "[aa", "(c");
    assertEquals(2, result);

    result = jedis.zlexcount("foo", "-", "+");
    assertEquals(4, result);

    result = jedis.zlexcount("foo", "-", "(c");
    assertEquals(3, result);

    result = jedis.zlexcount("foo", "[aa", "+");
    assertEquals(3, result);
  }

  /** Binary variant of {@link #zlexcount()}. */
  @Test
  public void zlexcountBinary() {
    // Binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 1, bc);
    jedis.zadd(bfoo, 1, bb);

    long result = jedis.zlexcount(bfoo, bInclusiveB, bExclusiveC);
    assertEquals(1, result);

    result = jedis.zlexcount(bfoo, bLexMinusInf, bLexPlusInf);
    assertEquals(3, result);
  }

  /** ZRANGEBYSCORE with numeric bounds, offsets/counts and string bounds. */
  @Test
  public void zrangebyscore() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Set<String> range = jedis.zrangeByScore("foo", 0d, 2d);

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("c");
    expected.add("a");

    assertEquals(expected, range);

    range = jedis.zrangeByScore("foo", 0d, 2d, 0, 1);

    expected = new LinkedHashSet<String>();
    expected.add("c");

    assertEquals(expected, range);

    range = jedis.zrangeByScore("foo", 0d, 2d, 1, 1);
    Set<String> range2 = jedis.zrangeByScore("foo", "-inf", "(2");
    assertEquals(expected, range2);

    expected = new LinkedHashSet<String>();
    expected.add("a");

    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<byte[]> brange = jedis.zrangeByScore(bfoo, 0d, 2d);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bc);
    bexpected.add(ba);

    assertEquals(bexpected, brange);

    brange = jedis.zrangeByScore(bfoo, 0d, 2d, 0, 1);

    bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bc);

    assertEquals(bexpected, brange);

    brange = jedis.zrangeByScore(bfoo, 0d, 2d, 1, 1);
    Set<byte[]> brange2 =
        jedis.zrangeByScore(bfoo, SafeEncoder.encode("-inf"), SafeEncoder.encode("(2"));
    assertEquals(bexpected, brange2);

    bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(ba);

    assertEquals(bexpected, brange);
  }

  /** ZREVRANGEBYSCORE with offsets, counts and open-ended bounds. */
  @Test
  public void zrevrangebyscore() {
    jedis.zadd("foo", 1.0d, "a");
    jedis.zadd("foo", 2.0d, "b");
    jedis.zadd("foo", 3.0d, "c");
    jedis.zadd("foo", 4.0d, "d");
    jedis.zadd("foo", 5.0d, "e");

    Set<String> range = jedis.zrevrangeByScore("foo", 3d, Double.NEGATIVE_INFINITY, 0, 1);
    Set<String> expected = new LinkedHashSet<String>();
    expected.add("c");

    assertEquals(expected, range);

    range = jedis.zrevrangeByScore("foo", 3.5d, Double.NEGATIVE_INFINITY, 0, 2);
    expected = new LinkedHashSet<String>();
    expected.add("c");
    expected.add("b");

    assertEquals(expected, range);

    range = jedis.zrevrangeByScore("foo", 3.5d, Double.NEGATIVE_INFINITY, 1, 1);
    expected = new LinkedHashSet<String>();
    expected.add("b");

    assertEquals(expected, range);

    range = jedis.zrevrangeByScore("foo", 4d, 2d);
    expected = new LinkedHashSet<String>();
    expected.add("d");
    expected.add("c");
    expected.add("b");

    assertEquals(expected, range);

    range = jedis.zrevrangeByScore("foo", "+inf", "(4");
    expected = new LinkedHashSet<String>();
    expected.add("e");

    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<byte[]> brange = jedis.zrevrangeByScore(bfoo, 2d, 0d);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bc);
    bexpected.add(ba);

    assertEquals(bexpected, brange);

    brange = jedis.zrevrangeByScore(bfoo, 2d, 0d, 0, 1);

    bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(ba);

    assertEquals(bexpected, brange);

    Set<byte[]> brange2 =
        jedis.zrevrangeByScore(bfoo, SafeEncoder.encode("+inf"), SafeEncoder.encode("(2"));

    bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bb);

    assertEquals(bexpected, brange2);

    brange = jedis.zrevrangeByScore(bfoo, 2d, 0d, 1, 1);

    bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bc);

    assertEquals(bexpected, brange);
  }

  /** ZRANGEBYSCORE WITHSCORES with offsets and counts. */
  @Test
  public void zrangebyscoreWithScores() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    Set<Tuple> range = jedis.zrangeByScoreWithScores("foo", 0d, 2d);

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("c", 0.1d));
    expected.add(new Tuple("a", 2d));

    assertEquals(expected, range);

    range = jedis.zrangeByScoreWithScores("foo", 0d, 2d, 0, 1);

    expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("c", 0.1d));

    assertEquals(expected, range);

    range = jedis.zrangeByScoreWithScores("foo", 0d, 2d, 1, 1);

    expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("a", 2d));

    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<Tuple> brange = jedis.zrangeByScoreWithScores(bfoo, 0d, 2d);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bc, 0.1d));
    bexpected.add(new Tuple(ba, 2d));

    assertEquals(bexpected, brange);

    brange = jedis.zrangeByScoreWithScores(bfoo, 0d, 2d, 0, 1);

    bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bc, 0.1d));

    assertEquals(bexpected, brange);

    brange = jedis.zrangeByScoreWithScores(bfoo, 0d, 2d, 1, 1);

    bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(ba, 2d));

    assertEquals(bexpected, brange);
  }

  /** ZREVRANGEBYSCORE WITHSCORES with offsets and counts. */
  @Test
  public void zrevrangebyscoreWithScores() {
    jedis.zadd("foo", 1.0d, "a");
    jedis.zadd("foo", 2.0d, "b");
    jedis.zadd("foo", 3.0d, "c");
    jedis.zadd("foo", 4.0d, "d");
    jedis.zadd("foo", 5.0d, "e");

    Set<Tuple> range =
        jedis.zrevrangeByScoreWithScores("foo", 3d, Double.NEGATIVE_INFINITY, 0, 1);
    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("c", 3.0d));

    assertEquals(expected, range);

    range = jedis.zrevrangeByScoreWithScores("foo", 3.5d, Double.NEGATIVE_INFINITY, 0, 2);
    expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("c", 3.0d));
    expected.add(new Tuple("b", 2.0d));

    assertEquals(expected, range);

    range = jedis.zrevrangeByScoreWithScores("foo", 3.5d, Double.NEGATIVE_INFINITY, 1, 1);
    expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("b", 2.0d));

    assertEquals(expected, range);

    range = jedis.zrevrangeByScoreWithScores("foo", 4d, 2d);
    expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("d", 4.0d));
    expected.add(new Tuple("c", 3.0d));
    expected.add(new Tuple("b", 2.0d));

    assertEquals(expected, range);

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    Set<Tuple> brange = jedis.zrevrangeByScoreWithScores(bfoo, 2d, 0d);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bc, 0.1d));
    bexpected.add(new Tuple(ba, 2d));

    assertEquals(bexpected, brange);

    brange = jedis.zrevrangeByScoreWithScores(bfoo, 2d, 0d, 0, 1);

    bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(ba, 2d));

    assertEquals(bexpected, brange);

    brange = jedis.zrevrangeByScoreWithScores(bfoo, 2d, 0d, 1, 1);

    bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bc, 0.1d));

    assertEquals(bexpected, brange);
  }

  /** ZREMRANGEBYRANK removes members by ascending rank range. */
  @Test
  public void zremrangeByRank() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    long result = jedis.zremrangeByRank("foo", 0, 0);

    assertEquals(1, result);

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("a");
    expected.add("b");

    assertEquals(expected, jedis.zrange("foo", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    long bresult = jedis.zremrangeByRank(bfoo, 0, 0);

    assertEquals(1, bresult);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(ba);
    bexpected.add(bb);

    assertEquals(bexpected, jedis.zrange(bfoo, 0, 100));
  }

  /** ZREMRANGEBYSCORE removes members inside a score range. */
  @Test
  public void zremrangeByScore() {
    jedis.zadd("foo", 1d, "a");
    jedis.zadd("foo", 10d, "b");
    jedis.zadd("foo", 0.1d, "c");
    jedis.zadd("foo", 2d, "a");

    long result = jedis.zremrangeByScore("foo", 0, 2);

    assertEquals(2, result);

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("b");

    assertEquals(expected, jedis.zrange("foo", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1d, ba);
    jedis.zadd(bfoo, 10d, bb);
    jedis.zadd(bfoo, 0.1d, bc);
    jedis.zadd(bfoo, 2d, ba);

    long bresult = jedis.zremrangeByScore(bfoo, 0, 2);

    assertEquals(2, bresult);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(bb);

    assertEquals(bexpected, jedis.zrange(bfoo, 0, 100));
  }

  /** ZREMRANGEBYLEX removes members inside a lexicographic range. */
  @Test
  public void zremrangeByLex() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 1, "b");
    jedis.zadd("foo", 1, "c");
    jedis.zadd("foo", 1, "aa");

    long result = jedis.zremrangeByLex("foo", "[aa", "(c");

    assertEquals(2, result);

    Set<String> expected = new LinkedHashSet<String>();
    expected.add("a");
    expected.add("c");

    assertEquals(expected, jedis.zrangeByLex("foo", "-", "+"));
  }

  /** Binary variant of {@link #zremrangeByLex()}. */
  @Test
  public void zremrangeByLexBinary() {
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 1, bc);
    jedis.zadd(bfoo, 1, bb);

    long bresult = jedis.zremrangeByLex(bfoo, bInclusiveB, bExclusiveC);

    assertEquals(1, bresult);

    Set<byte[]> bexpected = new LinkedHashSet<byte[]>();
    bexpected.add(ba);
    bexpected.add(bc);

    assertEquals(bexpected, jedis.zrangeByLex(bfoo, bLexMinusInf, bLexPlusInf));
  }

  /** ZUNIONSTORE with default SUM aggregation of overlapping members. */
  @Test
  public void zunionstore() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 2, "b");
    jedis.zadd("bar", 2, "a");
    jedis.zadd("bar", 2, "b");

    long result = jedis.zunionstore("dst", "foo", "bar");

    assertEquals(2, result);

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    // Double.valueOf replaces the deprecated `new Double(...)` constructor.
    expected.add(new Tuple("b", Double.valueOf(4)));
    expected.add(new Tuple("a", Double.valueOf(3)));

    assertEquals(expected, jedis.zrangeWithScores("dst", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 2, bb);
    jedis.zadd(bbar, 2, ba);
    jedis.zadd(bbar, 2, bb);

    long bresult = jedis.zunionstore(SafeEncoder.encode("dst"), bfoo, bbar);

    assertEquals(2, bresult);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bb, Double.valueOf(4)));
    bexpected.add(new Tuple(ba, Double.valueOf(3)));

    assertEquals(bexpected, jedis.zrangeWithScores(SafeEncoder.encode("dst"), 0, 100));
  }

  /** ZUNIONSTORE with explicit WEIGHTS and AGGREGATE SUM parameters. */
  @Test
  public void zunionstoreParams() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 2, "b");
    jedis.zadd("bar", 2, "a");
    jedis.zadd("bar", 2, "b");

    ZParams params = new ZParams();
    params.weights(2, 2.5);
    params.aggregate(ZParams.Aggregate.SUM);
    long result = jedis.zunionstore("dst", params, "foo", "bar");

    assertEquals(2, result);

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("b", Double.valueOf(9)));
    expected.add(new Tuple("a", Double.valueOf(7)));

    assertEquals(expected, jedis.zrangeWithScores("dst", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 2, bb);
    jedis.zadd(bbar, 2, ba);
    jedis.zadd(bbar, 2, bb);

    ZParams bparams = new ZParams();
    bparams.weights(2, 2.5);
    bparams.aggregate(ZParams.Aggregate.SUM);
    long bresult = jedis.zunionstore(SafeEncoder.encode("dst"), bparams, bfoo, bbar);

    assertEquals(2, bresult);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(bb, Double.valueOf(9)));
    bexpected.add(new Tuple(ba, Double.valueOf(7)));

    assertEquals(bexpected, jedis.zrangeWithScores(SafeEncoder.encode("dst"), 0, 100));
  }

  /** ZINTERSTORE keeps only members present in every input set. */
  @Test
  public void zinterstore() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 2, "b");
    jedis.zadd("bar", 2, "a");

    long result = jedis.zinterstore("dst", "foo", "bar");

    assertEquals(1, result);

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("a", Double.valueOf(3)));

    assertEquals(expected, jedis.zrangeWithScores("dst", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 2, bb);
    jedis.zadd(bbar, 2, ba);

    long bresult = jedis.zinterstore(SafeEncoder.encode("dst"), bfoo, bbar);

    assertEquals(1, bresult);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(ba, Double.valueOf(3)));

    assertEquals(bexpected, jedis.zrangeWithScores(SafeEncoder.encode("dst"), 0, 100));
  }

  /** ZINTERSTORE with explicit WEIGHTS and AGGREGATE SUM parameters. */
  @Test
  public void zintertoreParams() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 2, "b");
    jedis.zadd("bar", 2, "a");

    ZParams params = new ZParams();
    params.weights(2, 2.5);
    params.aggregate(ZParams.Aggregate.SUM);
    long result = jedis.zinterstore("dst", params, "foo", "bar");

    assertEquals(1, result);

    Set<Tuple> expected = new LinkedHashSet<Tuple>();
    expected.add(new Tuple("a", Double.valueOf(7)));

    assertEquals(expected, jedis.zrangeWithScores("dst", 0, 100));

    // Binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 2, bb);
    jedis.zadd(bbar, 2, ba);

    ZParams bparams = new ZParams();
    bparams.weights(2, 2.5);
    bparams.aggregate(ZParams.Aggregate.SUM);
    long bresult = jedis.zinterstore(SafeEncoder.encode("dst"), bparams, bfoo, bbar);

    assertEquals(1, bresult);

    Set<Tuple> bexpected = new LinkedHashSet<Tuple>();
    bexpected.add(new Tuple(ba, Double.valueOf(7)));

    assertEquals(bexpected, jedis.zrangeWithScores(SafeEncoder.encode("dst"), 0, 100));
  }

  /** Tuple ordering: compareTo is driven by score, equal tuples compare 0. */
  @Test
  public void tupleCompare() {
    Tuple t1 = new Tuple("foo", 1d);
    Tuple t2 = new Tuple("bar", 2d);

    assertEquals(-1, t1.compareTo(t2));
    assertEquals(1, t2.compareTo(t1));
    assertEquals(0, t2.compareTo(t2));
  }

  /** ZSCAN completes in one pass for a small set (cursor back to start). */
  @Test
  public void zscan() {
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 2, "b");

    ScanResult<Tuple> result = jedis.zscan("foo", SCAN_POINTER_START);

    assertEquals(SCAN_POINTER_START, result.getCursor());
    assertFalse(result.getResult().isEmpty());

    // binary
    jedis.zadd(bfoo, 1, ba);
    jedis.zadd(bfoo, 1, bb);

    ScanResult<Tuple> bResult = jedis.zscan(bfoo, SCAN_POINTER_START_BINARY);

    assertArrayEquals(SCAN_POINTER_START_BINARY, bResult.getCursorAsBytes());
    assertFalse(bResult.getResult().isEmpty());
  }

  /** ZSCAN with a MATCH pattern filter. */
  @Test
  public void zscanMatch() {
    ScanParams params = new ScanParams();
    params.match("a*");

    jedis.zadd("foo", 2, "b");
    jedis.zadd("foo", 1, "a");
    jedis.zadd("foo", 11, "aa");
    ScanResult<Tuple> result = jedis.zscan("foo", SCAN_POINTER_START, params);

    assertEquals(SCAN_POINTER_START, result.getCursor());
    assertFalse(result.getResult().isEmpty());

    // binary
    params = new ScanParams();
    params.match(bbarstar);

    jedis.zadd(bfoo, 2, bbar1);
    jedis.zadd(bfoo, 1, bbar2);
    jedis.zadd(bfoo, 11, bbar3);
    ScanResult<Tuple> bResult = jedis.zscan(bfoo, SCAN_POINTER_START_BINARY, params);

    assertArrayEquals(SCAN_POINTER_START_BINARY, bResult.getCursorAsBytes());
    assertFalse(bResult.getResult().isEmpty());
  }

  /** ZSCAN with a COUNT hint. */
  @Test
  public void zscanCount() {
    ScanParams params = new ScanParams();
    params.count(2);

    jedis.zadd("foo", 1, "a1");
    jedis.zadd("foo", 2, "a2");
    jedis.zadd("foo", 3, "a3");
    jedis.zadd("foo", 4, "a4");
    jedis.zadd("foo", 5, "a5");

    ScanResult<Tuple> result = jedis.zscan("foo", SCAN_POINTER_START, params);

    assertFalse(result.getResult().isEmpty());

    // binary
    params = new ScanParams();
    params.count(2);

    jedis.zadd(bfoo, 2, bbar1);
    jedis.zadd(bfoo, 1, bbar2);
    jedis.zadd(bfoo, 11, bbar3);
    ScanResult<Tuple> bResult = jedis.zscan(bfoo, SCAN_POINTER_START_BINARY, params);

    assertFalse(bResult.getResult().isEmpty());
  }
}
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.runtime;

import com.google.devtools.build.lib.events.EventKind;
import com.google.devtools.build.lib.runtime.UiStateTracker.ProgressMode;
import com.google.devtools.common.options.Converter;
import com.google.devtools.common.options.Converters.CommaSeparatedOptionListConverter;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.OptionDocumentationCategory;
import com.google.devtools.common.options.OptionEffectTag;
import com.google.devtools.common.options.OptionMetadataTag;
import com.google.devtools.common.options.OptionsBase;
import com.google.devtools.common.options.OptionsParsingException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;

/** Command-line UI options. */
public class UiOptions extends OptionsBase {

  /** Enum to select whether color output is enabled or not. */
  public enum UseColor {
    YES,
    NO,
    AUTO
  }

  /** Enum to select whether curses output is enabled or not. */
  public enum UseCurses {
    YES,
    NO,
    AUTO
  }

  /**
   * Converter for {@link EventKind} filters.
   *
   * <p>Parses a comma-separated list of event-kind names, each optionally prefixed with {@code +}
   * (show this kind) or {@code -} (hide this kind). The converted value is the set of kinds to
   * FILTER OUT, so "+" removes from the result and "-" adds to it. Any un-prefixed name switches
   * to direct-assignment mode: start from "everything hidden" and un-hide the listed kinds.
   */
  public static class EventFiltersConverter implements Converter<List<EventKind>> {

    /** A converter for event kinds. */
    public static class EventKindConverter extends EnumConverter<EventKind> {

      public EventKindConverter(String typeName) {
        super(EventKind.class, typeName);
      }
    }

    private final CommaSeparatedOptionListConverter delegate;

    public EventFiltersConverter() {
      this.delegate = new CommaSeparatedOptionListConverter();
    }

    /**
     * Converts {@code input} into the set of event kinds to filter out.
     *
     * @param input comma-separated list of event kinds, optionally prefixed with +/-
     * @return the event kinds to suppress, as a list
     * @throws OptionsParsingException if a name does not match any {@link EventKind}
     */
    @Override
    public List<EventKind> convert(String input) throws OptionsParsingException {
      if (input.isEmpty()) {
        // This method is not called to convert the default value
        // Empty list means that the user wants to filter all events
        return new ArrayList<>(EventKind.ALL_EVENTS);
      }
      List<String> filters = this.delegate.convert(input);
      EnumConverter<EventKind> eventKindConverter = new EventKindConverter(input);

      HashSet<EventKind> filteredEvents = new HashSet<>();
      // A single un-prefixed filter means direct assignment: begin with every kind
      // suppressed, then remove (i.e. show) the kinds named without a prefix below.
      for (String filter : filters) {
        if (!filter.startsWith("+") && !filter.startsWith("-")) {
          filteredEvents.addAll(EventKind.ALL_EVENTS);
          break;
        }
      }

      for (String filter : filters) {
        if (filter.startsWith("+")) {
          // "+KIND": show KIND, i.e. stop suppressing it.
          filteredEvents.remove(eventKindConverter.convert(filter.substring(1)));
        } else if (filter.startsWith("-")) {
          // "-KIND": hide KIND, i.e. suppress it.
          filteredEvents.add(eventKindConverter.convert(filter.substring(1)));
        } else {
          // bare "KIND" in assignment mode: show KIND.
          filteredEvents.remove(eventKindConverter.convert(filter));
        }
      }

      return new ArrayList<>(filteredEvents);
    }

    @Override
    public String getTypeDescription() {
      return "Convert list of comma separated event kind to list of filters";
    }
  }

  /** Converter for {@link UseColor}. */
  public static class UseColorConverter extends EnumConverter<UseColor> {
    public UseColorConverter() {
      super(UseColor.class, "--color setting");
    }
  }

  /** Converter for {@link UseCurses}. */
  public static class UseCursesConverter extends EnumConverter<UseCurses> {
    public UseCursesConverter() {
      super(UseCurses.class, "--curses setting");
    }
  }

  /** Progress mode converter. */
  public static class ProgressModeConverter extends EnumConverter<ProgressMode> {
    public ProgressModeConverter() {
      super(ProgressMode.class, "--experimental_ui_mode setting");
    }
  }

  @Option(
      name = "show_progress",
      defaultValue = "true",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Display progress messages during a build.")
  public boolean showProgress;

  @Option(
      name = "show_task_finish",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Display progress messages when tasks complete, not just when they start.")
  public boolean showTaskFinish;

  @Option(
      name = "show_progress_rate_limit",
      defaultValue = "0.2", // A nice middle ground; snappy but not too spammy in logs.
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Minimum number of seconds between progress messages in the output.")
  public double showProgressRateLimit;

  @Option(
      name = "color",
      defaultValue = "auto",
      converter = UseColorConverter.class,
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Use terminal controls to colorize output.")
  public UseColor useColorEnum;

  @Option(
      name = "curses",
      defaultValue = "auto",
      converter = UseCursesConverter.class,
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Use terminal cursor controls to minimize scrolling output.")
  public UseCurses useCursesEnum;

  @Option(
      name = "terminal_columns",
      defaultValue = "80",
      metadataTags = {OptionMetadataTag.HIDDEN},
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "A system-generated parameter which specifies the terminal width in columns.")
  public int terminalColumns;

  @Option(
      name = "isatty",
      // TODO(b/137881511): Old name should be removed after 2020-01-01, or whenever is
      // reasonable.
      oldName = "is_stderr_atty",
      defaultValue = "false",
      metadataTags = {OptionMetadataTag.HIDDEN},
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "A system-generated parameter which is used to notify the "
              + "server whether this client is running in a terminal. "
              + "If this is set to false, then '--color=auto' will be treated as '--color=no'. "
              + "If this is set to true, then '--color=auto' will be treated as '--color=yes'.")
  public boolean isATty;

  // This lives here (as opposed to the more logical BuildRequest.Options)
  // because the client passes it to the server *always*. We don't want the
  // client to have to figure out when it should or shouldn't send it.
  @Option(
      name = "emacs",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "A system-generated parameter which is true iff EMACS=t or INSIDE_EMACS is set "
              + "in the environment of the client. This option controls certain display "
              + "features.")
  public boolean runningInEmacs;

  @Option(
      name = "show_timestamps",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Include timestamps in messages")
  public boolean showTimestamp;

  @Option(
      name = "progress_in_terminal_title",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help =
          "Show the command progress in the terminal title. "
              + "Useful to see what bazel is doing when having multiple terminal tabs.")
  public boolean progressInTermTitle;

  @Option(
      name = "attempt_to_print_relative_paths",
      oldName = "experimental_ui_attempt_to_print_relative_paths",
      defaultValue = "false",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
      help =
          "When printing the location part of messages, attempt to use a path relative to the "
              + "workspace directory or one of the directories specified by --package_path.")
  public boolean attemptToPrintRelativePaths;

  @Option(
      name = "experimental_ui_debug_all_events",
      defaultValue = "false",
      metadataTags = {OptionMetadataTag.HIDDEN},
      documentationCategory = OptionDocumentationCategory.UNDOCUMENTED,
      effectTags = {OptionEffectTag.UNKNOWN},
      help = "Report all events known to the Bazel UI.")
  public boolean experimentalUiDebugAllEvents;

  @Option(
      name = "ui_event_filters",
      converter = EventFiltersConverter.class,
      defaultValue = "null",
      documentationCategory = OptionDocumentationCategory.LOGGING,
      effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
      help =
          "Specifies which events to show in the UI. It is possible to add or remove events "
              + "to the default ones using leading +/-, or override the default "
              + "set completely with direct assignment. The set of supported event kinds "
              + "include INFO, DEBUG, ERROR and more.",
      allowMultiple = true)
  public List<EventKind> eventFilters;

  @Option(
      name = "experimental_ui_mode",
      defaultValue = "oldest_actions",
      converter = ProgressModeConverter.class,
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
      help =
          "Determines what kind of data is shown in the detailed progress bar. By default, it is "
              // Fixed typo in user-visible help text: "mode-dependend" -> "mode-dependent".
              + "set to show the oldest actions and their running time. The underlying data "
              + "source is usually sampled in a mode-dependent way to fit within the number of "
              + "lines given by --ui_actions_shown.")
  public ProgressMode uiProgressMode;

  @Option(
      name = "ui_actions_shown",
      oldName = "experimental_ui_actions_shown",
      defaultValue = "8",
      documentationCategory = OptionDocumentationCategory.UNCATEGORIZED,
      effectTags = {OptionEffectTag.TERMINAL_OUTPUT},
      help =
          "Number of concurrent actions shown in the detailed progress bar; each "
              + "action is shown on a separate line. The progress bar always shows "
              // Fixed duplicated word in user-visible help text: "at least one one" -> "at least one".
              + "at least one, all numbers less than 1 are mapped to 1. "
              + "This option has no effect if --noui is set.")
  public int uiSamplesShown;

  /** Returns true if color escape sequences should be emitted, resolving AUTO via isatty. */
  public boolean useColor() {
    return useColorEnum == UseColor.YES || (useColorEnum == UseColor.AUTO && isATty);
  }

  /** Returns true if cursor-control sequences should be emitted, resolving AUTO via isatty. */
  public boolean useCursorControl() {
    return useCursesEnum == UseCurses.YES || (useCursesEnum == UseCurses.AUTO && isATty);
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.compute.model;

/**
 * Represents a Nat resource. It enables the VMs within the specified subnetworks to access Internet
 * without external IP addresses. It specifies a list of subnetworks (and the ranges within) that
 * want to use NAT. Customers can also provide the external IPs that would be used for NAT. GCP
 * would auto-allocate ephemeral IPs if no external IPs are provided.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
// NOTE(review): generated code — each @Key field mirrors one property of the Compute
// API RouterNat JSON schema; do not hand-edit fields, regenerate instead.
@SuppressWarnings("javadoc")
public final class RouterNat extends com.google.api.client.json.GenericJson {

  // ---------------------------------------------------------------------------
  // Fields (JSON-mapped via @Key)
  // ---------------------------------------------------------------------------

  /**
   * A list of URLs of the IP resources to be drained. These IPs must be valid static external IPs
   * that have been assigned to the NAT. These IPs should be used for updating/patching a NAT only.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> drainNatIps;

  /**
   * Enable Dynamic Port Allocation. If not specified, it is disabled by default. If set to true, -
   * Dynamic Port Allocation will be enabled on this NAT config. - enableEndpointIndependentMapping
   * cannot be set to true. - If minPorts is set, minPortsPerVm must be set to a power of two
   * greater than or equal to 32. If minPortsPerVm is not set, a minimum of 32 ports will be
   * allocated to a VM from this NAT config.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean enableDynamicPortAllocation;

  /**
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean enableEndpointIndependentMapping;

  /**
   * List of Natted endpoint types supported by the Nat Gateway. If the list is empty, then it will
   * be equivalent to include ENDPOINT_TYPE_VM
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> endpointTypes;

  /**
   * Timeout (in seconds) for ICMP connections. Defaults to 30s if not set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer icmpIdleTimeoutSec;

  /**
   * Configure logging on this NAT.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private RouterNatLogConfig logConfig;

  /**
   * Maximum number of ports allocated to a VM from this NAT config when Dynamic Port Allocation is
   * enabled. If Dynamic Port Allocation is not enabled, this field has no effect. If Dynamic Port
   * Allocation is enabled, and this field is set, it must be set to a power of two greater than
   * minPortsPerVm, or 64 if minPortsPerVm is not set. If Dynamic Port Allocation is enabled and
   * this field is not set, a maximum of 65536 ports will be allocated to a VM from this NAT config.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer maxPortsPerVm;

  /**
   * Minimum number of ports allocated to a VM from this NAT config. If not set, a default number of
   * ports is allocated to a VM. This is rounded up to the nearest power of 2. For example, if the
   * value of this field is 50, at least 64 ports are allocated to a VM.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer minPortsPerVm;

  /**
   * Unique name of this Nat service. The name must be 1-63 characters long and comply with RFC1035.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * Specify the NatIpAllocateOption, which can take one of the following values: - MANUAL_ONLY:
   * Uses only Nat IP addresses provided by customers. When there are not enough specified Nat IPs,
   * the Nat service fails for new VMs. - AUTO_ONLY: Nat IPs are allocated by Google Cloud Platform;
   * customers can't specify any Nat IPs. When choosing AUTO_ONLY, then nat_ip should be empty.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String natIpAllocateOption;

  /**
   * A list of URLs of the IP resources used for this Nat service. These IP addresses must be valid
   * static external IP addresses assigned to the project.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<java.lang.String> natIps;

  /**
   * A list of rules associated with this NAT.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<RouterNatRule> rules;

  /**
   * Specify the Nat option, which can take one of the following values: -
   * ALL_SUBNETWORKS_ALL_IP_RANGES: All of the IP ranges in every Subnetwork are allowed to Nat. -
   * ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES: All of the primary IP ranges in every Subnetwork are
   * allowed to Nat. - LIST_OF_SUBNETWORKS: A list of Subnetworks are allowed to Nat (specified in
   * the field subnetwork below) The default is SUBNETWORK_IP_RANGE_TO_NAT_OPTION_UNSPECIFIED. Note
   * that if this field contains ALL_SUBNETWORKS_ALL_IP_RANGES or
   * ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES, then there should not be any other Router.Nat section in
   * any Router for this network in this region.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceSubnetworkIpRangesToNat;

  /**
   * A list of Subnetwork resources whose traffic should be translated by NAT Gateway. It is used
   * only when LIST_OF_SUBNETWORKS is selected for the SubnetworkIpRangeToNatOption above.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<RouterNatSubnetworkToNat> subnetworks;

  /**
   * Timeout (in seconds) for TCP established connections. Defaults to 1200s if not set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer tcpEstablishedIdleTimeoutSec;

  /**
   * Timeout (in seconds) for TCP connections that are in TIME_WAIT state. Defaults to 120s if not
   * set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer tcpTimeWaitTimeoutSec;

  /**
   * Timeout (in seconds) for TCP transitory connections. Defaults to 30s if not set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer tcpTransitoryIdleTimeoutSec;

  /**
   * Indicates whether this NAT is used for public or private IP translation. If unspecified, it
   * defaults to PUBLIC.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String type;

  /**
   * Timeout (in seconds) for UDP connections. Defaults to 30s if not set.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Integer udpIdleTimeoutSec;

  // ---------------------------------------------------------------------------
  // Accessors (generated getter/fluent-setter pairs, one per field above)
  // ---------------------------------------------------------------------------

  /**
   * A list of URLs of the IP resources to be drained. These IPs must be valid static external IPs
   * that have been assigned to the NAT. These IPs should be used for updating/patching a NAT only.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getDrainNatIps() {
    return drainNatIps;
  }

  /**
   * A list of URLs of the IP resources to be drained. These IPs must be valid static external IPs
   * that have been assigned to the NAT. These IPs should be used for updating/patching a NAT only.
   * @param drainNatIps drainNatIps or {@code null} for none
   */
  public RouterNat setDrainNatIps(java.util.List<java.lang.String> drainNatIps) {
    this.drainNatIps = drainNatIps;
    return this;
  }

  /**
   * Enable Dynamic Port Allocation. If not specified, it is disabled by default. If set to true, -
   * Dynamic Port Allocation will be enabled on this NAT config. - enableEndpointIndependentMapping
   * cannot be set to true. - If minPorts is set, minPortsPerVm must be set to a power of two
   * greater than or equal to 32. If minPortsPerVm is not set, a minimum of 32 ports will be
   * allocated to a VM from this NAT config.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableDynamicPortAllocation() {
    return enableDynamicPortAllocation;
  }

  /**
   * Enable Dynamic Port Allocation. If not specified, it is disabled by default. If set to true, -
   * Dynamic Port Allocation will be enabled on this NAT config. - enableEndpointIndependentMapping
   * cannot be set to true. - If minPorts is set, minPortsPerVm must be set to a power of two
   * greater than or equal to 32. If minPortsPerVm is not set, a minimum of 32 ports will be
   * allocated to a VM from this NAT config.
   * @param enableDynamicPortAllocation enableDynamicPortAllocation or {@code null} for none
   */
  public RouterNat setEnableDynamicPortAllocation(java.lang.Boolean enableDynamicPortAllocation) {
    this.enableDynamicPortAllocation = enableDynamicPortAllocation;
    return this;
  }

  /**
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEnableEndpointIndependentMapping() {
    return enableEndpointIndependentMapping;
  }

  /**
   * @param enableEndpointIndependentMapping enableEndpointIndependentMapping or {@code null} for none
   */
  public RouterNat setEnableEndpointIndependentMapping(java.lang.Boolean enableEndpointIndependentMapping) {
    this.enableEndpointIndependentMapping = enableEndpointIndependentMapping;
    return this;
  }

  /**
   * List of Natted endpoint types supported by the Nat Gateway. If the list is empty, then it will
   * be equivalent to include ENDPOINT_TYPE_VM
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getEndpointTypes() {
    return endpointTypes;
  }

  /**
   * List of Natted endpoint types supported by the Nat Gateway. If the list is empty, then it will
   * be equivalent to include ENDPOINT_TYPE_VM
   * @param endpointTypes endpointTypes or {@code null} for none
   */
  public RouterNat setEndpointTypes(java.util.List<java.lang.String> endpointTypes) {
    this.endpointTypes = endpointTypes;
    return this;
  }

  /**
   * Timeout (in seconds) for ICMP connections. Defaults to 30s if not set.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getIcmpIdleTimeoutSec() {
    return icmpIdleTimeoutSec;
  }

  /**
   * Timeout (in seconds) for ICMP connections. Defaults to 30s if not set.
   * @param icmpIdleTimeoutSec icmpIdleTimeoutSec or {@code null} for none
   */
  public RouterNat setIcmpIdleTimeoutSec(java.lang.Integer icmpIdleTimeoutSec) {
    this.icmpIdleTimeoutSec = icmpIdleTimeoutSec;
    return this;
  }

  /**
   * Configure logging on this NAT.
   * @return value or {@code null} for none
   */
  public RouterNatLogConfig getLogConfig() {
    return logConfig;
  }

  /**
   * Configure logging on this NAT.
   * @param logConfig logConfig or {@code null} for none
   */
  public RouterNat setLogConfig(RouterNatLogConfig logConfig) {
    this.logConfig = logConfig;
    return this;
  }

  /**
   * Maximum number of ports allocated to a VM from this NAT config when Dynamic Port Allocation is
   * enabled. If Dynamic Port Allocation is not enabled, this field has no effect. If Dynamic Port
   * Allocation is enabled, and this field is set, it must be set to a power of two greater than
   * minPortsPerVm, or 64 if minPortsPerVm is not set. If Dynamic Port Allocation is enabled and
   * this field is not set, a maximum of 65536 ports will be allocated to a VM from this NAT config.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getMaxPortsPerVm() {
    return maxPortsPerVm;
  }

  /**
   * Maximum number of ports allocated to a VM from this NAT config when Dynamic Port Allocation is
   * enabled. If Dynamic Port Allocation is not enabled, this field has no effect. If Dynamic Port
   * Allocation is enabled, and this field is set, it must be set to a power of two greater than
   * minPortsPerVm, or 64 if minPortsPerVm is not set. If Dynamic Port Allocation is enabled and
   * this field is not set, a maximum of 65536 ports will be allocated to a VM from this NAT config.
   * @param maxPortsPerVm maxPortsPerVm or {@code null} for none
   */
  public RouterNat setMaxPortsPerVm(java.lang.Integer maxPortsPerVm) {
    this.maxPortsPerVm = maxPortsPerVm;
    return this;
  }

  /**
   * Minimum number of ports allocated to a VM from this NAT config. If not set, a default number of
   * ports is allocated to a VM. This is rounded up to the nearest power of 2. For example, if the
   * value of this field is 50, at least 64 ports are allocated to a VM.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getMinPortsPerVm() {
    return minPortsPerVm;
  }

  /**
   * Minimum number of ports allocated to a VM from this NAT config. If not set, a default number of
   * ports is allocated to a VM. This is rounded up to the nearest power of 2. For example, if the
   * value of this field is 50, at least 64 ports are allocated to a VM.
   * @param minPortsPerVm minPortsPerVm or {@code null} for none
   */
  public RouterNat setMinPortsPerVm(java.lang.Integer minPortsPerVm) {
    this.minPortsPerVm = minPortsPerVm;
    return this;
  }

  /**
   * Unique name of this Nat service. The name must be 1-63 characters long and comply with RFC1035.
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * Unique name of this Nat service. The name must be 1-63 characters long and comply with RFC1035.
   * @param name name or {@code null} for none
   */
  public RouterNat setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * Specify the NatIpAllocateOption, which can take one of the following values: - MANUAL_ONLY:
   * Uses only Nat IP addresses provided by customers. When there are not enough specified Nat IPs,
   * the Nat service fails for new VMs. - AUTO_ONLY: Nat IPs are allocated by Google Cloud Platform;
   * customers can't specify any Nat IPs. When choosing AUTO_ONLY, then nat_ip should be empty.
   * @return value or {@code null} for none
   */
  public java.lang.String getNatIpAllocateOption() {
    return natIpAllocateOption;
  }

  /**
   * Specify the NatIpAllocateOption, which can take one of the following values: - MANUAL_ONLY:
   * Uses only Nat IP addresses provided by customers. When there are not enough specified Nat IPs,
   * the Nat service fails for new VMs. - AUTO_ONLY: Nat IPs are allocated by Google Cloud Platform;
   * customers can't specify any Nat IPs. When choosing AUTO_ONLY, then nat_ip should be empty.
   * @param natIpAllocateOption natIpAllocateOption or {@code null} for none
   */
  public RouterNat setNatIpAllocateOption(java.lang.String natIpAllocateOption) {
    this.natIpAllocateOption = natIpAllocateOption;
    return this;
  }

  /**
   * A list of URLs of the IP resources used for this Nat service. These IP addresses must be valid
   * static external IP addresses assigned to the project.
   * @return value or {@code null} for none
   */
  public java.util.List<java.lang.String> getNatIps() {
    return natIps;
  }

  /**
   * A list of URLs of the IP resources used for this Nat service. These IP addresses must be valid
   * static external IP addresses assigned to the project.
   * @param natIps natIps or {@code null} for none
   */
  public RouterNat setNatIps(java.util.List<java.lang.String> natIps) {
    this.natIps = natIps;
    return this;
  }

  /**
   * A list of rules associated with this NAT.
   * @return value or {@code null} for none
   */
  public java.util.List<RouterNatRule> getRules() {
    return rules;
  }

  /**
   * A list of rules associated with this NAT.
   * @param rules rules or {@code null} for none
   */
  public RouterNat setRules(java.util.List<RouterNatRule> rules) {
    this.rules = rules;
    return this;
  }

  /**
   * Specify the Nat option, which can take one of the following values: -
   * ALL_SUBNETWORKS_ALL_IP_RANGES: All of the IP ranges in every Subnetwork are allowed to Nat. -
   * ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES: All of the primary IP ranges in every Subnetwork are
   * allowed to Nat. - LIST_OF_SUBNETWORKS: A list of Subnetworks are allowed to Nat (specified in
   * the field subnetwork below) The default is SUBNETWORK_IP_RANGE_TO_NAT_OPTION_UNSPECIFIED. Note
   * that if this field contains ALL_SUBNETWORKS_ALL_IP_RANGES or
   * ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES, then there should not be any other Router.Nat section in
   * any Router for this network in this region.
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceSubnetworkIpRangesToNat() {
    return sourceSubnetworkIpRangesToNat;
  }

  /**
   * Specify the Nat option, which can take one of the following values: -
   * ALL_SUBNETWORKS_ALL_IP_RANGES: All of the IP ranges in every Subnetwork are allowed to Nat. -
   * ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES: All of the primary IP ranges in every Subnetwork are
   * allowed to Nat. - LIST_OF_SUBNETWORKS: A list of Subnetworks are allowed to Nat (specified in
   * the field subnetwork below) The default is SUBNETWORK_IP_RANGE_TO_NAT_OPTION_UNSPECIFIED. Note
   * that if this field contains ALL_SUBNETWORKS_ALL_IP_RANGES or
   * ALL_SUBNETWORKS_ALL_PRIMARY_IP_RANGES, then there should not be any other Router.Nat section in
   * any Router for this network in this region.
   * @param sourceSubnetworkIpRangesToNat sourceSubnetworkIpRangesToNat or {@code null} for none
   */
  public RouterNat setSourceSubnetworkIpRangesToNat(java.lang.String sourceSubnetworkIpRangesToNat) {
    this.sourceSubnetworkIpRangesToNat = sourceSubnetworkIpRangesToNat;
    return this;
  }

  /**
   * A list of Subnetwork resources whose traffic should be translated by NAT Gateway. It is used
   * only when LIST_OF_SUBNETWORKS is selected for the SubnetworkIpRangeToNatOption above.
   * @return value or {@code null} for none
   */
  public java.util.List<RouterNatSubnetworkToNat> getSubnetworks() {
    return subnetworks;
  }

  /**
   * A list of Subnetwork resources whose traffic should be translated by NAT Gateway. It is used
   * only when LIST_OF_SUBNETWORKS is selected for the SubnetworkIpRangeToNatOption above.
   * @param subnetworks subnetworks or {@code null} for none
   */
  public RouterNat setSubnetworks(java.util.List<RouterNatSubnetworkToNat> subnetworks) {
    this.subnetworks = subnetworks;
    return this;
  }

  /**
   * Timeout (in seconds) for TCP established connections. Defaults to 1200s if not set.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getTcpEstablishedIdleTimeoutSec() {
    return tcpEstablishedIdleTimeoutSec;
  }

  /**
   * Timeout (in seconds) for TCP established connections. Defaults to 1200s if not set.
   * @param tcpEstablishedIdleTimeoutSec tcpEstablishedIdleTimeoutSec or {@code null} for none
   */
  public RouterNat setTcpEstablishedIdleTimeoutSec(java.lang.Integer tcpEstablishedIdleTimeoutSec) {
    this.tcpEstablishedIdleTimeoutSec = tcpEstablishedIdleTimeoutSec;
    return this;
  }

  /**
   * Timeout (in seconds) for TCP connections that are in TIME_WAIT state. Defaults to 120s if not
   * set.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getTcpTimeWaitTimeoutSec() {
    return tcpTimeWaitTimeoutSec;
  }

  /**
   * Timeout (in seconds) for TCP connections that are in TIME_WAIT state. Defaults to 120s if not
   * set.
   * @param tcpTimeWaitTimeoutSec tcpTimeWaitTimeoutSec or {@code null} for none
   */
  public RouterNat setTcpTimeWaitTimeoutSec(java.lang.Integer tcpTimeWaitTimeoutSec) {
    this.tcpTimeWaitTimeoutSec = tcpTimeWaitTimeoutSec;
    return this;
  }

  /**
   * Timeout (in seconds) for TCP transitory connections. Defaults to 30s if not set.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getTcpTransitoryIdleTimeoutSec() {
    return tcpTransitoryIdleTimeoutSec;
  }

  /**
   * Timeout (in seconds) for TCP transitory connections. Defaults to 30s if not set.
   * @param tcpTransitoryIdleTimeoutSec tcpTransitoryIdleTimeoutSec or {@code null} for none
   */
  public RouterNat setTcpTransitoryIdleTimeoutSec(java.lang.Integer tcpTransitoryIdleTimeoutSec) {
    this.tcpTransitoryIdleTimeoutSec = tcpTransitoryIdleTimeoutSec;
    return this;
  }

  /**
   * Indicates whether this NAT is used for public or private IP translation. If unspecified, it
   * defaults to PUBLIC.
   * @return value or {@code null} for none
   */
  public java.lang.String getType() {
    return type;
  }

  /**
   * Indicates whether this NAT is used for public or private IP translation. If unspecified, it
   * defaults to PUBLIC.
   * @param type type or {@code null} for none
   */
  public RouterNat setType(java.lang.String type) {
    this.type = type;
    return this;
  }

  /**
   * Timeout (in seconds) for UDP connections. Defaults to 30s if not set.
   * @return value or {@code null} for none
   */
  public java.lang.Integer getUdpIdleTimeoutSec() {
    return udpIdleTimeoutSec;
  }

  /**
   * Timeout (in seconds) for UDP connections. Defaults to 30s if not set.
   * @param udpIdleTimeoutSec udpIdleTimeoutSec or {@code null} for none
   */
  public RouterNat setUdpIdleTimeoutSec(java.lang.Integer udpIdleTimeoutSec) {
    this.udpIdleTimeoutSec = udpIdleTimeoutSec;
    return this;
  }

  // Covariant overrides so fluent chains keep the RouterNat type.
  @Override
  public RouterNat set(String fieldName, Object value) {
    return (RouterNat) super.set(fieldName, value);
  }

  @Override
  public RouterNat clone() {
    return (RouterNat) super.clone();
  }
}
/* * $Id$ */ /* Copyright (c) 2013 Board of Trustees of Leland Stanford Jr. University, all rights reserved. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL STANFORD UNIVERSITY BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. Except as contained in this notice, the name of Stanford University shall not be used in advertising or otherwise to promote the sale, use or other dealings in this Software without prior written authorization from Stanford University. 
 */

package org.lockss.test;

import java.util.List;
import org.apache.commons.collections.map.LinkedMap;
import org.lockss.alert.AlertManager;
import org.lockss.account.AccountManager;
import org.lockss.app.*;
import org.lockss.config.*;
import org.lockss.crawler.CrawlManager;
import org.lockss.daemon.*;
import org.lockss.daemon.status.StatusService;
import org.lockss.db.DbManager;
import org.lockss.exporter.counter.CounterReportsManager;
import org.lockss.hasher.HashService;
import org.lockss.mail.MailService;
import org.lockss.metadata.MetadataManager;
import org.lockss.plugin.*;
import org.lockss.truezip.*;
import org.lockss.poller.PollManager;
import org.lockss.protocol.*;
import org.lockss.protocol.psm.*;
import org.lockss.proxy.ProxyManager;
import org.lockss.proxy.icp.IcpManager;
import org.lockss.remote.RemoteApi;
import org.lockss.repository.*;
import org.lockss.scheduler.SchedService;
import org.lockss.servlet.*;
import org.lockss.state.*;
import org.lockss.subscription.SubscriptionManager;
import org.lockss.util.*;
import org.lockss.clockss.*;
import org.lockss.safenet.*;

/**
 * Test double for {@link LockssDaemon}.  Managers are created lazily on
 * first access (mimicking what the real daemon's startup does) and each
 * getter caches its instance in a field as well as in {@code managerMap};
 * the corresponding setters let tests inject mock managers instead.
 * {@link #startDaemon()} deliberately does nothing so tests control
 * exactly which managers exist.
 */
public class MockLockssDaemon extends LockssDaemon {
  private static Logger log = Logger.getLogger("MockLockssDaemon");

  // Lazily-created manager instances.  null until first requested via the
  // corresponding getter, or explicitly injected via the setter.
  ResourceManager resourceManager = null;
  WatchdogService wdogService = null;
  MailService mailService = null;
  AlertManager alertManager = null;
  AccountManager accountManager = null;
  RandomManager randomManager = null;
  LockssKeyStoreManager keystoreManager = null;
  HashService hashService = null;
  SchedService schedService = null;
  SystemMetrics systemMetrics = null;
  PollManager pollManager = null;
  PsmManager psmManager = null;
  LcapDatagramComm commManager = null;
  LcapStreamComm scommManager = null;
  LcapDatagramRouter datagramRouterManager = null;
  LcapRouter routerManager = null;
  ProxyManager proxyManager = null;
  ServletManager servletManager = null;
  CrawlManager crawlManager = null;
  RepositoryManager repositoryManager = null;
  NodeManagerManager nodeManagerManager = null;
  PluginManager pluginManager = null;
  MetadataManager metadataManager = null;
  IdentityManager identityManager = null;
  TrueZipManager tzipManager = null;
  StatusService statusService = null;
  RemoteApi remoteApi = null;
  IcpManager icpManager = null;
  ClockssParams clockssParams = null;
  DbManager dbManager = null;
  CounterReportsManager counterReportsManager = null;
  SubscriptionManager subscriptionManager = null;
  Cron cron = null;
  EntitlementRegistryClient entitlementRegistryClient = null;

  // When true (the default), AU managers are NOT started on AU creation;
  // see suppressStartAuManagers(boolean).
  private boolean suppressStartAuManagers = true;

  /** Unit tests that need a MockLockssDaemon should use {@link
   * LockssTestCase#getMockLockssDaemon()} rather than calling this
   * directly.  Some utilities (not descended from LockssTestCase) also
   * need one, so this constructor is protected to allow them to directly
   * create an instance (of their own subclass). */
  protected MockLockssDaemon() {
    this(null);
  }

  private MockLockssDaemon(List<String> urls) {
    super(urls);
    // Forward config changes to setConfig(), as the real daemon does.
    ConfigManager mgr = ConfigManager.getConfigManager();
    mgr.registerConfigurationCallback(new Configuration.Callback() {
	public void configurationChanged(Configuration newConfig,
					 Configuration prevConfig,
					 Configuration.Differences changedKeys) {
	  setConfig(newConfig, prevConfig, changedKeys);
	}
      });
  }

  protected void setConfig(Configuration config, Configuration prevConfig,
			   Configuration.Differences changedKeys) {
    super.setConfig(config, prevConfig, changedKeys);
  }

  /** Does nothing */
  public void startDaemon() throws Exception {
  }

  /** Clears cached AU managers and nulls out most daemon-level manager
   * fields so a subsequent test starts from a clean slate.  Note that
   * the real daemon's stopDaemon() is intentionally NOT invoked. */
  public void stopDaemon() {
    auManagerMaps.clear();

    wdogService = null;
    hashService = null;
    schedService = null;
    pollManager = null;
    psmManager = null;
    commManager = null;
    scommManager = null;
    proxyManager = null;
    crawlManager = null;
    pluginManager = null;
    metadataManager = null;
    identityManager = null;
    statusService = null;
    icpManager = null;
    dbManager = null;
    counterReportsManager = null;
    subscriptionManager = null;
    cron = null;

    //super.stopDaemon();
  }

  /** Set the testing mode.  (Normally done through config and daemon
   * startup.) */
  public void setTestingMode(String mode) {
    testingMode = mode;
  }

  ManagerDesc findManagerDesc(String key) {
    return findDesc(managerDescs, key);
  }

  ManagerDesc findAuManagerDesc(String key) {
    return findDesc(getAuManagerDescs(), key);
  }

  // Linear scan of the descriptor array; returns null if key not found.
  ManagerDesc findDesc(ManagerDesc[] descs, String key) {
    for(int i=0; i< descs.length; i++) {
      ManagerDesc desc = descs[i];
      if (key.equals(desc.getKey())) {
	return desc;
      }
    }
    return null;
  }

  /** Create a manager instance, mimicking what LockssDaemon does */
  LockssManager newManager(String key) {
    log.debug2("Loading manager: " + key);
    ManagerDesc desc = findManagerDesc(key);
    if (desc == null) {
      throw new LockssAppException("No ManagerDesc for: " + key);
    }
    if (log.isDebug2()) {
      log.debug2("Manager class: " + getManagerClassName(desc));
    }
    try {
      return initManager(desc);
    } catch (Exception e) {
      log.error("Error creating manager", e);
      throw new LockssAppException("Can't load manager: " + e.toString());
    }
  }

  /**
   * return the watchdog service instance
   * @return the WatchdogService
   */
  public WatchdogService getWatchdogService() {
    if (wdogService == null) {
      wdogService = (WatchdogService)newManager(LockssDaemon.WATCHDOG_SERVICE);
      managerMap.put(LockssDaemon.WATCHDOG_SERVICE, wdogService);
    }
    return wdogService;
  }

  /**
   * return the mail manager instance
   * @return the MailService
   */
  public MailService getMailService() {
    if (mailService == null) {
      // NOTE(review): unlike the other getters this does not go through
      // newManager(); tests get a no-op NullMailService by default.
      mailService = new NullMailService();
      managerMap.put(LockssDaemon.MAIL_SERVICE, mailService);
    }
    return mailService;
  }

  /**
   * return the resource manager instance
   * @return the ResourceManager
   */
  public ResourceManager getResourceManager() {
    if (resourceManager == null) {
      resourceManager = (ResourceManager)newManager(RESOURCE_MANAGER);
      managerMap.put(RESOURCE_MANAGER, resourceManager);
    }
    return resourceManager;
  }

  /**
   * return the alert manager instance
   * @return the AlertManager
   */
  public AlertManager getAlertManager() {
    if (alertManager == null) {
      // No-op alert manager by default, like getMailService() above.
      alertManager = new NullAlertManager();
      managerMap.put(LockssDaemon.ALERT_MANAGER, alertManager);
    }
    return alertManager;
  }

  /**
   * return the account manager instance
   * @return the AccountManager
   */
  public AccountManager getAccountManager() {
    if (accountManager == null) {
      accountManager = (AccountManager)newManager(ACCOUNT_MANAGER);
      managerMap.put(LockssDaemon.ACCOUNT_MANAGER, accountManager);
    }
    return accountManager;
  }

  /**
   * return the random manager instance
   * @return the RandomManager
   */
  public RandomManager getRandomManager() {
    if (randomManager == null) {
      randomManager = (RandomManager)newManager(RANDOM_MANAGER);
      managerMap.put(LockssDaemon.RANDOM_MANAGER, randomManager);
    }
    return randomManager;
  }

  /**
   * return the keystore manager instance
   * @return the KeystoreManager
   */
  public LockssKeyStoreManager getKeystoreManager() {
    if (keystoreManager == null) {
      keystoreManager = (LockssKeyStoreManager)newManager(KEYSTORE_MANAGER);
      managerMap.put(LockssDaemon.KEYSTORE_MANAGER, keystoreManager);
    }
    return keystoreManager;
  }

  /**
   * return the hash service instance
   * @return the HashService
   */
  public HashService getHashService() {
    if (hashService == null) {
      hashService = (HashService)newManager(LockssDaemon.HASH_SERVICE);
      managerMap.put(LockssDaemon.HASH_SERVICE, hashService);
    }
    return hashService;
  }

  /**
   * return the sched service instance
   * @return the SchedService
   */
  public SchedService getSchedService() {
    if (schedService == null) {
      schedService = (SchedService)newManager(LockssDaemon.SCHED_SERVICE);
      managerMap.put(LockssDaemon.SCHED_SERVICE, schedService);
    }
    return schedService;
  }

  /**
   * return the SystemMetrics instance
   * @return the SystemMetrics
   */
  public SystemMetrics getSystemMetrics() {
    if (systemMetrics == null) {
      systemMetrics = (SystemMetrics)newManager(LockssDaemon.SYSTEM_METRICS);
      managerMap.put(LockssDaemon.SYSTEM_METRICS, systemMetrics);
    }
    return systemMetrics;
  }

  /**
   * return the poll manager instance
   * @return the PollManager
   */
  public PollManager getPollManager() {
    if (pollManager == null) {
      pollManager = (PollManager)newManager(LockssDaemon.POLL_MANAGER);
      managerMap.put(LockssDaemon.POLL_MANAGER, pollManager);
    }
    return pollManager;
  }

  /**
   * return the psm manager instance
   * @return the PsmManager
   */
  public PsmManager getPsmManager() {
    if (psmManager == null) {
      psmManager = (PsmManager)newManager(LockssDaemon.PSM_MANAGER);
      managerMap.put(LockssDaemon.PSM_MANAGER, psmManager);
    }
    return psmManager;
  }

  /**
   * return the datagram communication manager instance
   * @return the LcapDatagramComm
   */
  public LcapDatagramComm getDatagramCommManager() {
    if (commManager == null) {
      commManager =
	(LcapDatagramComm)newManager(LockssDaemon.DATAGRAM_COMM_MANAGER);
      managerMap.put(LockssDaemon.DATAGRAM_COMM_MANAGER, commManager);
    }
    return commManager;
  }

  /**
   * return the stream communication manager instance
   * @return the LcapStreamComm
   */
  public LcapStreamComm getStreamCommManager() {
    if (scommManager == null) {
      scommManager =
	(LcapStreamComm)newManager(LockssDaemon.STREAM_COMM_MANAGER);
      managerMap.put(LockssDaemon.STREAM_COMM_MANAGER, scommManager);
    }
    return scommManager;
  }

  /**
   * return the datagram router manager instance
   * @return the LcapDatagramRouter
   */
  public LcapDatagramRouter getDatagramRouterManager() {
    if (datagramRouterManager == null) {
      datagramRouterManager =
	(LcapDatagramRouter)newManager(LockssDaemon.DATAGRAM_ROUTER_MANAGER);
      managerMap.put(LockssDaemon.DATAGRAM_ROUTER_MANAGER,
		     datagramRouterManager);
    }
    return datagramRouterManager;
  }

  /**
   * return the router manager instance
   * @return the LcapRouter
   */
  public LcapRouter getRouterManager() {
    if (routerManager == null) {
      routerManager = (LcapRouter)newManager(LockssDaemon.ROUTER_MANAGER);
      managerMap.put(LockssDaemon.ROUTER_MANAGER, routerManager);
    }
    return routerManager;
  }

  /**
   * return the proxy manager instance
   * @return the ProxyManager
   */
  public ProxyManager getProxyManager() {
    if (proxyManager == null) {
      proxyManager = (ProxyManager)newManager(LockssDaemon.PROXY_MANAGER);
      managerMap.put(LockssDaemon.PROXY_MANAGER, proxyManager);
    }
    return proxyManager;
  }

  /**
   * return the servlet manager instance
   * @return the ServletManager
   */
  public ServletManager getServletManager() {
    if (servletManager == null) {
      servletManager =
	(ServletManager)newManager(LockssDaemon.SERVLET_MANAGER);
      managerMap.put(LockssDaemon.SERVLET_MANAGER, servletManager);
    }
    return servletManager;
  }

  /**
   * return the TrueZip manager instance
   * @return the TrueZipManager
   */
  public TrueZipManager getTrueZipManager() {
    if (tzipManager == null) {
      tzipManager = (TrueZipManager)newManager(LockssDaemon.TRUEZIP_MANAGER);
      managerMap.put(LockssDaemon.TRUEZIP_MANAGER, tzipManager);
    }
    return tzipManager;
  }

  /**
   * return the crawl manager instance
   * @return the CrawlManager
   */
  public CrawlManager getCrawlManager() {
    if (crawlManager == null) {
      crawlManager = (CrawlManager)newManager(LockssDaemon.CRAWL_MANAGER);
      managerMap.put(LockssDaemon.CRAWL_MANAGER, crawlManager);
    }
    return crawlManager;
  }

  /**
   * return the node manager manager instance
   * @return the NodeManagerManager
   */
  public NodeManagerManager getNodeManagerManager() {
    if (nodeManagerManager == null) {
      nodeManagerManager =
	(NodeManagerManager)newManager(LockssDaemon.NODE_MANAGER_MANAGER);
      managerMap.put(LockssDaemon.NODE_MANAGER_MANAGER, nodeManagerManager);
    }
    return nodeManagerManager;
  }

  /**
   * return the repository manager instance
   * @return the RepositoryManager
   */
  public RepositoryManager getRepositoryManager() {
    if (repositoryManager == null) {
      repositoryManager =
	(RepositoryManager)newManager(LockssDaemon.REPOSITORY_MANAGER);
      managerMap.put(LockssDaemon.REPOSITORY_MANAGER, repositoryManager);
    }
    return repositoryManager;
  }

  /**
   * return the plugin manager instance
   * @return the PluginManager
   */
  public PluginManager getPluginManager() {
    if (pluginManager == null) {
      pluginManager = (PluginManager)newManager(LockssDaemon.PLUGIN_MANAGER);
      managerMap.put(LockssDaemon.PLUGIN_MANAGER, pluginManager);
    }
    return pluginManager;
  }

  /**
   * return the metadata manager instance
   * @return the MetadataManager
   */
  public MetadataManager getMetadataManager() {
    if (metadataManager == null) {
      metadataManager =
	(MetadataManager)newManager(LockssDaemon.METADATA_MANAGER);
      managerMap.put(LockssDaemon.METADATA_MANAGER, metadataManager);
    }
    return metadataManager;
  }

  /**
   * return the Identity Manager
   * @return IdentityManager
   */
  public IdentityManager getIdentityManager() {
    if (identityManager == null) {
      identityManager =
	(IdentityManager)newManager(LockssDaemon.IDENTITY_MANAGER);
      managerMap.put(LockssDaemon.IDENTITY_MANAGER, identityManager);
    }
    return identityManager;
  }

  // True iff an IdentityManager has already been created or injected;
  // never triggers lazy creation.
  public boolean hasIdentityManager() {
    return identityManager != null;
  }

  /**
   * return the database manager instance
   * @return the DbManager
   */
  public DbManager getDbManager() {
    if (dbManager == null) {
      dbManager = (DbManager)newManager(LockssDaemon.DB_MANAGER);
      managerMap.put(LockssDaemon.DB_MANAGER, dbManager);
    }
    return dbManager;
  }

  /**
   * return the COUNTER reports manager instance
   * @return the CounterReportsManager
   */
  public CounterReportsManager getCounterReportsManager() {
    if (counterReportsManager == null) {
      counterReportsManager =
	(CounterReportsManager)newManager(LockssDaemon.COUNTER_REPORTS_MANAGER);
      managerMap.put(LockssDaemon.COUNTER_REPORTS_MANAGER,
		     counterReportsManager);
    }
    return counterReportsManager;
  }

  /**
   * return the subscription manager instance.
   * NOTE(review): method name is misspelled ("Susbcription") but kept for
   * compatibility with existing callers.
   * @return the SubscriptionManager
   */
  public SubscriptionManager getSusbcriptionManager() {
    if (subscriptionManager == null) {
      subscriptionManager =
	(SubscriptionManager)newManager(LockssDaemon.SUBSCRIPTION_MANAGER);
      managerMap.put(LockssDaemon.SUBSCRIPTION_MANAGER, subscriptionManager);
    }
    return subscriptionManager;
  }

  /**
   * return the cron instance
   * @return the Cron
   */
  public Cron getCron() {
    if (cron == null) {
      cron = (Cron)newManager(LockssDaemon.CRON);
      managerMap.put(LockssDaemon.CRON, cron);
    }
    return cron;
  }

  /**
   * return the status service instance
   * @return the StatusService
   */
  public StatusService getStatusService() {
    if (statusService == null) {
      statusService = (StatusService)newManager(LockssDaemon.STATUS_SERVICE);
      managerMap.put(LockssDaemon.STATUS_SERVICE, statusService);
    }
    return statusService;
  }

  /**
   * return the RemoteApi instance
   * @return the RemoteApi
   */
  public RemoteApi getRemoteApi() {
    if (remoteApi == null) {
      remoteApi = (RemoteApi)newManager(LockssDaemon.REMOTE_API);
      managerMap.put(LockssDaemon.REMOTE_API, remoteApi);
    }
    return remoteApi;
  }

  /**
   * return the ClockssParams instance
   * @return the ClockssParams
   */
  public ClockssParams getClockssParams() {
    if (clockssParams == null) {
      clockssParams = (ClockssParams)newManager(LockssDaemon.CLOCKSS_PARAMS);
      managerMap.put(LockssDaemon.CLOCKSS_PARAMS, clockssParams);
    }
    return clockssParams;
  }

  // Test override: when set true, isClockss() reports true regardless of
  // the real daemon's configuration.
  private boolean forceIsClockss = false;

  public void setClockss(boolean val) {
    forceIsClockss = val;
  }

  public boolean isClockss() {
    return forceIsClockss || super.isClockss();
  }

  /**
   * Set the datagram CommManager
   * @param commMan the new manager
   */
  public void setDatagramCommManager(LcapDatagramComm commMan) {
    commManager = commMan;
    managerMap.put(LockssDaemon.DATAGRAM_COMM_MANAGER, commManager);
  }

  /**
   * Set the stream CommManager
   * @param scommMan the new manager
   */
  public void setStreamCommManager(LcapStreamComm scommMan) {
    scommManager = scommMan;
    managerMap.put(LockssDaemon.STREAM_COMM_MANAGER, scommManager);
  }

  /**
   * Set the DatagramRouterManager
   * @param datagramRouterMan the new manager
   */
  public void setDatagramRouterManager(LcapDatagramRouter datagramRouterMan) {
    datagramRouterManager = datagramRouterMan;
    managerMap.put(LockssDaemon.DATAGRAM_ROUTER_MANAGER,
		   datagramRouterManager);
  }

  /**
   * Set the RouterManager
   * @param routerMan the new manager
   */
  public void setRouterManager(LcapRouter routerMan) {
    routerManager = routerMan;
    managerMap.put(LockssDaemon.ROUTER_MANAGER, routerManager);
  }

  /**
   * Set the CrawlManager
   * @param crawlMan the new manager
   */
  public void setCrawlManager(CrawlManager crawlMan) {
    crawlManager = crawlMan;
    managerMap.put(LockssDaemon.CRAWL_MANAGER, crawlManager);
  }

  /**
   * Set the RepositoryManager
   * @param repositoryMan the new manager
   */
  public void setRepositoryManager(RepositoryManager repositoryMan) {
    repositoryManager = repositoryMan;
    managerMap.put(LockssDaemon.REPOSITORY_MANAGER, repositoryManager);
  }

  /**
   * Set the NodeManagerManager
   * @param nodeManMan the new manager
   */
  public void setNodeManagerManager(NodeManagerManager nodeManMan) {
    nodeManagerManager = nodeManMan;
    managerMap.put(LockssDaemon.NODE_MANAGER_MANAGER, nodeManMan);
  }

  /**
   * Set the WatchdogService
   * @param wdogService the new service
   */
  public void setWatchdogService(WatchdogService wdogService) {
    this.wdogService = wdogService;
    managerMap.put(LockssDaemon.WATCHDOG_SERVICE, wdogService);
  }

  /**
   * Set the MailService
   * @param mailMan the new manager
   */
  public void setMailService(MailService mailMan) {
    mailService = mailMan;
    managerMap.put(LockssDaemon.MAIL_SERVICE, mailService);
  }

  /**
   * Set the AlertManager
   * @param alertMan the new manager
   */
  public void setAlertManager(AlertManager alertMan) {
    alertManager = alertMan;
    managerMap.put(LockssDaemon.ALERT_MANAGER, alertManager);
  }

  /**
   * Set the AccountManager
   * @param accountMan the new manager
   */
  public void setAccountManager(AccountManager accountMan) {
    accountManager = accountMan;
    managerMap.put(LockssDaemon.ACCOUNT_MANAGER, accountManager);
  }

  /**
   * Set the RandomManager
   * @param randomMan the new manager
   */
  public void setRandomManager(RandomManager randomMan) {
    randomManager = randomMan;
    managerMap.put(LockssDaemon.RANDOM_MANAGER, randomManager);
  }

  /**
   * Set the KeystoreManager
   * @param keystoreMan the new manager
   */
  public void setKeystoreManager(LockssKeyStoreManager keystoreMan) {
    keystoreManager = keystoreMan;
    managerMap.put(LockssDaemon.KEYSTORE_MANAGER, keystoreManager);
  }

  /**
   * Set the HashService
   * @param hashServ the new service
   */
  public void setHashService(HashService hashServ) {
    hashService = hashServ;
    managerMap.put(LockssDaemon.HASH_SERVICE, hashService);
  }

  /**
   * Set the SchedService
   * @param schedServ the new service
   */
  public void setSchedService(SchedService schedServ) {
    schedService = schedServ;
    managerMap.put(LockssDaemon.SCHED_SERVICE, schedService);
  }

  /**
   * Set the IdentityManager
   * @param idMan the new manager
   */
  public void setIdentityManager(IdentityManager idMan) {
    identityManager = idMan;
    managerMap.put(LockssDaemon.IDENTITY_MANAGER, identityManager);
  }

  /**
   * Set the MetadataManager
   * @param metadataMan the new manager
   */
  public void setMetadataManager(MetadataManager metadataMan) {
    metadataManager = metadataMan;
    managerMap.put(LockssDaemon.METADATA_MANAGER, metadataManager);
  }

  /**
   * Set the PluginManager
   * @param pluginMan the new manager
   */
  public void setPluginManager(PluginManager pluginMan) {
    pluginManager = pluginMan;
    managerMap.put(LockssDaemon.PLUGIN_MANAGER, pluginManager);
  }

  /**
   * Set the PollManager
   * @param pollMan the new manager
   */
  public void setPollManager(PollManager pollMan) {
    pollManager = pollMan;
    managerMap.put(LockssDaemon.POLL_MANAGER, pollManager);
  }

  /**
   * Set the ProxyManager
   * @param proxyMgr the new manager
   */
  public void setProxyManager(ProxyManager proxyMgr) {
    proxyManager = proxyMgr;
    managerMap.put(LockssDaemon.PROXY_MANAGER, proxyManager);
  }

  /**
   * Set the ServletManager
   * @param servletMgr the new manager
   */
  public void setServletManager(ServletManager servletMgr) {
    servletManager = servletMgr;
    managerMap.put(LockssDaemon.SERVLET_MANAGER, servletManager);
  }

  /**
   * Set the TrueZipManager
   * @param tzMgr the new manager
   */
  public void setTrueZipManager(TrueZipManager tzMgr) {
    tzipManager = tzMgr;
    managerMap.put(LockssDaemon.TRUEZIP_MANAGER, tzipManager);
  }

  /**
   * Set the DbManager
   * @param dbMan the new manager
   */
  public void setDbManager(DbManager dbMan) {
    dbManager = dbMan;
    managerMap.put(LockssDaemon.DB_MANAGER, dbManager);
  }

  /**
   * Set the CounterReportsManager
   * @param counterReportsMan the new manager
   */
  public void setCounterReportsManager(CounterReportsManager counterReportsMan) {
    counterReportsManager = counterReportsMan;
    managerMap.put(LockssDaemon.COUNTER_REPORTS_MANAGER,
		   counterReportsManager);
  }

  /**
   * Set the SubscriptionManager
   * @param subscriptionMan the new manager
   */
  public void setSubscriptionManager(SubscriptionManager subscriptionMan) {
    subscriptionManager = subscriptionMan;
    managerMap.put(LockssDaemon.SUBSCRIPTION_MANAGER, subscriptionManager);
  }

  /**
   * Set the SystemMetrics
   * @param sysMetrics the new metrics
   */
  public void setSystemMetrics(SystemMetrics sysMetrics) {
    systemMetrics = sysMetrics;
    managerMap.put(LockssDaemon.SYSTEM_METRICS, sysMetrics);
  }

  /**
   * Set the RemoteApi
   * @param sysMetrics the new RemoteApi
   */
  public void setRemoteApi(RemoteApi sysMetrics) {
    remoteApi = sysMetrics;
    managerMap.put(LockssDaemon.REMOTE_API, sysMetrics);
  }

  /**
   * Set the Cron
   * @param cron the new cron
   */
  public void setCron(Cron cron) {
    this.cron = cron;
    managerMap.put(LockssDaemon.CRON, cron);
  }

  /**
   * Set the EntitlementRegistryClient
   * @param entitlementRegistryClient the new client
   */
  public void setEntitlementRegistryClient(EntitlementRegistryClient entitlementRegistryClient) {
    this.entitlementRegistryClient = entitlementRegistryClient;
    managerMap.put(LockssDaemon.SAFENET_MANAGER, entitlementRegistryClient);
  }

  // AU managers

  /** Create an AU manager instance, mimicking what LockssDaemon does */
  public LockssAuManager newAuManager(String key, ArchivalUnit au) {
    ManagerDesc desc = findAuManagerDesc(key);
    if (desc == null) {
      throw new LockssAppException("No AU ManagerDesc for: " + key);
    }
    log.debug2("Loading manager: " + desc.getKey() + " for " + au);
    try {
      LockssAuManager mgr = initAuManager(desc, au);
      setAuManager(desc, au, mgr);
      return mgr;
    } catch (Exception e) {
      log.error("Error starting au manager", e);
      throw new LockssAppException("Can't load au manager: " + e.toString());
    }
  }

  public void setAuManager(String key, ArchivalUnit au, LockssAuManager mgr) {
    setAuManager(findAuManagerDesc(key), au, mgr);
  }

  // Registers mgr as the AU manager for (au, desc.getKey()), creating the
  // per-AU map on first use.
  void setAuManager(ManagerDesc desc, ArchivalUnit au, LockssAuManager mgr) {
    LinkedMap auMgrMap = (LinkedMap)auManagerMaps.get(au);
    if (auMgrMap == null) {
      auMgrMap = new LinkedMap();
      auManagerMaps.put(au, auMgrMap);
    }
    auMgrMap.put(desc.getKey(), mgr);
  }

  /** AU managers are normally not started on AU creation.  Call this with
   * false to cause them to be started. */
  public void suppressStartAuManagers(boolean val) {
    suppressStartAuManagers = val;
  }

  /** Overridden to prevent managers from being started.  See {@link
   * #suppressStartAuManagers(boolean)} to cause them to be started. */
  public void startOrReconfigureAuManagers(ArchivalUnit au,
					   Configuration auConfig)
      throws Exception {
    if (!suppressStartAuManagers) {
      super.startOrReconfigureAuManagers(au, auConfig);
    }
  }

  /** For tests that override startOrReconfigureAuManagers and want to
   * conditionally start them. */
  public void reallyStartOrReconfigureAuManagers(ArchivalUnit au,
						 Configuration auConfig)
      throws Exception {
    super.startOrReconfigureAuManagers(au, auConfig);
  }

  /** Return ActivityRegulator for AU */
  public ActivityRegulator getActivityRegulator(ArchivalUnit au) {
    try {
      return super.getActivityRegulator(au);
    } catch (IllegalArgumentException e) {
      // Not yet registered for this AU: create it on demand.
      return (ActivityRegulator)newAuManager(LockssDaemon.ACTIVITY_REGULATOR,
					     au);
    }
  }

  /** Return LockssRepository for AU */
  public LockssRepository getLockssRepository(ArchivalUnit au) {
    try {
      return super.getLockssRepository(au);
    } catch (IllegalArgumentException e) {
      return (LockssRepository)newAuManager(LockssDaemon.LOCKSS_REPOSITORY,
					    au);
    }
  }

  /** Return NodeManager for AU */
  public NodeManager getNodeManager(ArchivalUnit au) {
    try {
      return super.getNodeManager(au);
    } catch (IllegalArgumentException e) {
      return (NodeManager)newAuManager(LockssDaemon.NODE_MANAGER, au);
    }
  }

  /** Return HistoryRepository for AU */
  public HistoryRepository getHistoryRepository(ArchivalUnit au) {
    try {
      return super.getHistoryRepository(au);
    } catch (IllegalArgumentException e) {
      return (HistoryRepository)newAuManager(LockssDaemon.HISTORY_REPOSITORY,
					     au);
    }
  }

  /**
   * Set the ActivityRegulator for a given AU.
   * @param actReg the new regulator
   * @param au the ArchivalUnit
   */
  public void setActivityRegulator(ActivityRegulator actReg,
				   ArchivalUnit au) {
    setAuManager(ACTIVITY_REGULATOR, au, actReg);
  }

  /**
   * Set the LockssRepository for a given AU.
   * @param repo the new repository
   * @param au the ArchivalUnit
   */
  public void setLockssRepository(LockssRepository repo, ArchivalUnit au) {
    setAuManager(LOCKSS_REPOSITORY, au, repo);
  }

  /**
   * Set the NodeManager for a given AU.
   * @param nodeMan the new manager
   * @param au the ArchivalUnit
   */
  public void setNodeManager(NodeManager nodeMan, ArchivalUnit au) {
    setAuManager(NODE_MANAGER, au, nodeMan);
  }

  /**
   * Set the HistoryRepository for a given AU.
   * @param histRepo the new repository
   * @param au the ArchivalUnit
   */
  public void setHistoryRepository(HistoryRepository histRepo,
				   ArchivalUnit au) {
    setAuManager(HISTORY_REPOSITORY, au, histRepo);
  }

  /**
   * <p>Forcibly sets the ICP manager to a new value.</p>
   * @param icpManager A new ICP manager to use.
   */
  public void setIcpManager(IcpManager icpManager) {
    this.icpManager = icpManager;
    managerMap.put(LockssDaemon.ICP_MANAGER, icpManager);
  }

  // Lifecycle flags controllable by tests; the real daemon derives these
  // from actual manager startup.
  private boolean daemonInited = false;
  private boolean daemonRunning = false;

  /**
   * @return true iff all managers have been inited
   */
  public boolean isDaemonInited() {
    return daemonInited;
  }

  // need to override this one too, inherited from LockssApp
  public boolean isAppInited() {
    return isDaemonInited();
  }

  /**
   * @return true iff all managers have been started
   */
  public boolean isDaemonRunning() {
    return daemonRunning;
  }

  // need to override this one too, inherited from LockssApp
  public boolean isAppRunning() {
    return isDaemonRunning();
  }

  /** set daemonInited
   * @param val true if inited
   */
  public void setDaemonInited(boolean val) {
    daemonInited = val;
  }

  /** set daemonRunning
   * @param val true if running
   */
  public void setDaemonRunning(boolean val) {
    daemonRunning = val;
  }

  // Mark AUs as started (fills the semaphore) or reset to not-started.
  public void setAusStarted(boolean val) {
    if (val) {
      ausStarted.fill();
    } else {
      ausStarted = new OneShotSemaphore();
    }
  }
}
/** * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.tool; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HBaseAdmin; /** * HBase Canary Tool, that that can be used to do * "canary monitoring" of a running HBase cluster. * * Foreach region tries to get one row per column family * and outputs some information about failure or latency. 
*/ public final class Canary implements Tool { // Sink interface used by the canary to outputs information public interface Sink { public void publishReadFailure(HRegionInfo region); public void publishReadFailure(HRegionInfo region, HColumnDescriptor column); public void publishReadTiming(HRegionInfo region, HColumnDescriptor column, long msTime); } // Simple implementation of canary sink that allows to plot on // file or standard output timings or failures. public static class StdOutSink implements Sink { @Override public void publishReadFailure(HRegionInfo region) { LOG.error(String.format("read from region %s failed", region.getRegionNameAsString())); } @Override public void publishReadFailure(HRegionInfo region, HColumnDescriptor column) { LOG.error(String.format("read from region %s column family %s failed", region.getRegionNameAsString(), column.getNameAsString())); } @Override public void publishReadTiming(HRegionInfo region, HColumnDescriptor column, long msTime) { LOG.info(String.format("read from region %s column family %s in %dms", region.getRegionNameAsString(), column.getNameAsString(), msTime)); } } private static final long DEFAULT_INTERVAL = 6000; private static final Log LOG = LogFactory.getLog(Canary.class); private Configuration conf = null; private HBaseAdmin admin = null; private long interval = 0; private Sink sink = null; public Canary() { this(new StdOutSink()); } public Canary(Sink sink) { this.sink = sink; } @Override public Configuration getConf() { return conf; } @Override public void setConf(Configuration conf) { this.conf = conf; } @Override public int run(String[] args) throws Exception { int tables_index = -1; // Process command line args for (int i = 0; i < args.length; i++) { String cmd = args[i]; if (cmd.startsWith("-")) { if (tables_index >= 0) { // command line args must be in the form: [opts] [table 1 [table 2 ...]] System.err.println("Invalid command line options"); printUsageAndExit(); } if (cmd.equals("-help")) { // user 
asked for help, print the help and quit. printUsageAndExit(); } else if (cmd.equals("-daemon") && interval == 0) { // user asked for daemon mode, set a default interval between checks interval = DEFAULT_INTERVAL; } else if (cmd.equals("-interval")) { // user has specified an interval for canary breaths (-interval N) i++; if (i == args.length) { System.err.println("-interval needs a numeric value argument."); printUsageAndExit(); } try { interval = Long.parseLong(args[i]) * 1000; } catch (NumberFormatException e) { System.err.println("-interval needs a numeric value argument."); printUsageAndExit(); } } else { // no options match System.err.println(cmd + " options is invalid."); printUsageAndExit(); } } else if (tables_index < 0) { // keep track of first table name specified by the user tables_index = i; } } // initialize HBase conf and admin if (conf == null) conf = HBaseConfiguration.create(); admin = new HBaseAdmin(conf); // lets the canary monitor the cluster do { if (admin.isAborted()) { LOG.error("HBaseAdmin aborted"); return(1); } if (tables_index >= 0) { for (int i = tables_index; i < args.length; i++) { sniff(args[i]); } } else { sniff(); } Thread.sleep(interval); } while (interval > 0); return(0); } private void printUsageAndExit() { System.err.printf("Usage: bin/hbase %s [opts] [table 1 [table 2...]]\n", getClass().getName()); System.err.println(" where [opts] are:"); System.err.println(" -help Show this help and exit."); System.err.println(" -daemon Continuous check at defined intervals."); System.err.println(" -interval <N> Interval between checks (sec)"); System.exit(1); } /* * canary entry point to monitor all the tables. */ private void sniff() throws Exception { for (HTableDescriptor table : admin.listTables()) { sniff(table); } } /* * canary entry point to monitor specified table. 
*/ private void sniff(String tableName) throws Exception { if (admin.isTableAvailable(tableName)) { sniff(admin.getTableDescriptor(tableName.getBytes())); } else { LOG.warn(String.format("Table %s is not available", tableName)); } } /* * Loops over regions that owns this table, * and output some information abouts the state. */ private void sniff(HTableDescriptor tableDesc) throws Exception { HTable table = null; try { table = new HTable(admin.getConfiguration(), tableDesc.getName()); } catch (TableNotFoundException e) { return; } for (HRegionInfo region : admin.getTableRegions(tableDesc.getName())) { try { sniffRegion(region, table); } catch (Exception e) { sink.publishReadFailure(region); } } } /* * For each column family of the region tries to get one row * and outputs the latency, or the failure. */ private void sniffRegion(HRegionInfo region, HTable table) throws Exception { HTableDescriptor tableDesc = table.getTableDescriptor(); for (HColumnDescriptor column : tableDesc.getColumnFamilies()) { Get get = new Get(region.getStartKey()); get.addFamily(column.getName()); try { long startTime = System.currentTimeMillis(); table.get(get); long time = System.currentTimeMillis() - startTime; sink.publishReadTiming(region, column, time); } catch (Exception e) { sink.publishReadFailure(region, column); } } } public static void main(String[] args) throws Exception { int exitCode = ToolRunner.run(new Canary(), args); System.exit(exitCode); } }
/*
 * Copyright 2015-2020 OpenCB
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.opencb.opencga.catalog.db.mongodb;

import com.mongodb.MongoClient;
import com.mongodb.client.ClientSession;
import com.mongodb.client.model.Filters;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.lang3.StringUtils;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.opencb.commons.datastore.core.*;
import org.opencb.commons.datastore.mongodb.MongoDBCollection;
import org.opencb.commons.datastore.mongodb.MongoDBIterator;
import org.opencb.opencga.catalog.db.api.DBIterator;
import org.opencb.opencga.catalog.db.api.FileDBAdaptor;
import org.opencb.opencga.catalog.db.api.JobDBAdaptor;
import org.opencb.opencga.catalog.db.api.StudyDBAdaptor;
import org.opencb.opencga.catalog.db.mongodb.converters.JobConverter;
import org.opencb.opencga.catalog.db.mongodb.iterators.JobCatalogMongoDBIterator;
import org.opencb.opencga.catalog.exceptions.CatalogAuthorizationException;
import org.opencb.opencga.catalog.exceptions.CatalogDBException;
import org.opencb.opencga.catalog.exceptions.CatalogException;
import org.opencb.opencga.catalog.exceptions.CatalogParameterException;
import org.opencb.opencga.catalog.utils.Constants;
import org.opencb.opencga.catalog.utils.ParamUtils;
import org.opencb.opencga.catalog.utils.UuidUtils;
import org.opencb.opencga.core.api.ParamConstants;
import org.opencb.opencga.core.common.TimeUtils;
import org.opencb.opencga.core.config.Configuration;
import org.opencb.opencga.core.models.common.Enums;
import org.opencb.opencga.core.models.common.InternalStatus;
import org.opencb.opencga.core.models.job.Job;
import org.opencb.opencga.core.models.job.JobAclEntry;
import org.opencb.opencga.core.models.job.JobInternalWebhook;
import org.opencb.opencga.core.models.job.ToolInfo;
import org.opencb.opencga.core.models.study.Study;
import org.opencb.opencga.core.response.OpenCGAResult;
import org.slf4j.LoggerFactory;

import java.util.*;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import static org.opencb.opencga.catalog.db.api.ClinicalAnalysisDBAdaptor.QueryParams.MODIFICATION_DATE;
import static org.opencb.opencga.catalog.db.mongodb.AuthorizationMongoDBUtils.getQueryForAuthorisedEntries;
import static org.opencb.opencga.catalog.db.mongodb.MongoDBUtils.*;

/**
 * MongoDB-backed adaptor for Job entries. Live jobs are kept in one collection
 * and logically-deleted jobs are moved to a separate "deleted" collection.
 *
 * Created by pfurio on 08/01/16.
 */
public class JobMongoDBAdaptor extends MongoDBAdaptor implements JobDBAdaptor {

    private final MongoDBCollection jobCollection;
    private final MongoDBCollection deletedJobCollection;
    private JobConverter jobConverter;

    // Internal (non-API) field names stored alongside each job document:
    // priority mirrored as an integer so MongoDB can sort it numerically,
    // and the list of study uids the job is visible from.
    private static final String PRIVATE_PRIORITY = "_priority";
    private static final String PRIVATE_STUDY_UIDS = "_studyUids";

    public JobMongoDBAdaptor(MongoDBCollection jobCollection, MongoDBCollection deletedJobCollection, Configuration configuration,
                             MongoDBAdaptorFactory dbAdaptorFactory) {
        super(configuration, LoggerFactory.getLogger(JobMongoDBAdaptor.class));
        this.dbAdaptorFactory = dbAdaptorFactory;
        this.jobCollection = jobCollection;
        this.deletedJobCollection = deletedJobCollection;
        this.jobConverter = new JobConverter();
    }

    /**
     * @return MongoDB connection to the job collection.
     */
    public MongoDBCollection getJobCollection() {
        return jobCollection;
    }

    /** Returns true when a job with the given uid exists (count > 0). */
    public boolean exists(ClientSession clientSession, long jobUid)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return count(clientSession, new Query(QueryParams.UID.key(), jobUid)).getNumMatches() > 0;
    }

    /** Inserts a raw map as a job document, bypassing the Job model/converter. */
    @Override
    public OpenCGAResult nativeInsert(Map<String, Object> job, String userId) throws CatalogDBException {
        Document document = getMongoDBDocument(job, "job");
        return new OpenCGAResult(jobCollection.insert(document, null));
    }

    /**
     * Inserts a job in the given study inside a transaction; the study id is
     * validated before writing.
     */
    @Override
    public OpenCGAResult insert(long studyId, Job job, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        try {
            return runTransaction(clientSession -> {
                long tmpStartTime = startQuery();
                logger.debug("Starting job insert transaction for job id '{}'", job.getId());
                dbAdaptorFactory.getCatalogStudyDBAdaptor().checkId(clientSession, studyId);
                insert(clientSession, studyId, job);
                return endWrite(tmpStartTime, 1, 1, 0, 0, null);
            });
        } catch (Exception e) {
            logger.error("Could not create job {}: {}", job.getId(), e.getMessage());
            throw e;
        }
    }

    /**
     * Transaction body of {@link #insert(long, Job, QueryOptions)}: checks id
     * uniqueness within the study, assigns uid/uuid/creationDate/priority
     * defaults, adds the private fields and writes the document.
     *
     * @return the uid assigned to the new job.
     */
    long insert(ClientSession clientSession, long studyId, Job job) throws CatalogDBException {
        List<Bson> filterList = new ArrayList<>();
        filterList.add(Filters.eq(QueryParams.ID.key(), job.getId()));
        filterList.add(Filters.eq(PRIVATE_STUDY_UID, studyId));

        Bson bson = Filters.and(filterList);
        DataResult<Long> count = jobCollection.count(clientSession, bson);
        if (count.getNumMatches() > 0) {
            throw new CatalogDBException("Job { id: '" + job.getId() + "'} already exists.");
        }

        long jobUid = getNewUid();
        job.setUid(jobUid);
        job.setStudyUid(studyId);
        if (StringUtils.isEmpty(job.getUuid())) {
            job.setUuid(UuidUtils.generateOpenCgaUuid(UuidUtils.Entity.JOB));
        }
        if (StringUtils.isEmpty(job.getCreationDate())) {
            job.setCreationDate(TimeUtils.getTime());
        }
        if (job.getPriority() == null) {
            job.setPriority(Enums.Priority.LOW);
        }

        Document jobObject = jobConverter.convertToStorageType(job);
        // Private fields kept out of the public Job model.
        jobObject.put(PRIVATE_CREATION_DATE, TimeUtils.toDate(job.getCreationDate()));
        jobObject.put(PRIVATE_MODIFICATION_DATE, TimeUtils.toDate(job.getModificationDate()));
        jobObject.put(PERMISSION_RULES_APPLIED, Collections.emptyList());
        jobObject.put(PRIVATE_PRIORITY, job.getPriority().getValue());
        jobObject.put(PRIVATE_STUDY_UIDS, Collections.singletonList(studyId));

        logger.debug("Inserting job '{}' ({})...", job.getId(), job.getUid());
        jobCollection.insert(clientSession, jobObject, null);
        logger.debug("Job '{}' successfully inserted", job.getId());
        return jobUid;
    }

    /** Returns every job belonging to the given study. */
    @Override
    public OpenCGAResult<Job> getAllInStudy(long studyId, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        // Check the studyId first and throw an Exception is not found
        dbAdaptorFactory.getCatalogStudyDBAdaptor().checkId(studyId);

        // Retrieve and return Jobs
        Query query = new Query(QueryParams.STUDY_UID.key(), studyId);
        return get(query, options);
    }

    @Override
    public String getStatus(long jobId, String sessionId) throws CatalogDBException {
        // TODO remove?
        throw new UnsupportedOperationException("Not implemented method");
    }

    /** Resolves the uid of the study a job belongs to from its private field. */
    @Override
    public long getStudyId(long jobId) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Query query = new Query(QueryParams.UID.key(), jobId);
        QueryOptions queryOptions = new QueryOptions(QueryOptions.INCLUDE, PRIVATE_STUDY_UID);
        OpenCGAResult<Document> queryResult = nativeGet(query, queryOptions);

        if (queryResult.getNumResults() != 0) {
            // The stored value may be numeric or a string depending on how it was written.
            Object id = queryResult.first().get(PRIVATE_STUDY_UID);
            return id instanceof Number ?
                    ((Number) id).longValue() : Long.parseLong(id.toString());
        } else {
            throw CatalogDBException.uidNotFound("Job", jobId);
        }
    }

    @Override
    public OpenCGAResult unmarkPermissionRule(long studyId, String permissionRuleId) throws CatalogException {
        return unmarkPermissionRule(jobCollection, studyId, permissionRuleId);
    }

    @Override
    public OpenCGAResult<Long> count(Query query)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return count(null, query);
    }

    /** Counts jobs matching the query, optionally inside a client session. */
    OpenCGAResult<Long> count(ClientSession clientSession, Query query)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Bson bsonDocument = parseQuery(query, QueryOptions.empty());
        return new OpenCGAResult<>(jobCollection.count(clientSession, bsonDocument));
    }

    /** Counts jobs matching the query, restricted to what {@code user} may see. */
    @Override
    public OpenCGAResult<Long> count(Query query, String user)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Bson bson = parseQuery(query, QueryOptions.empty(), user);
        // NOTE(review): the format string has two placeholders ("query" and "dbTime") but
        // only one argument is supplied, so "dbTime" is never filled in. Left untouched
        // in this documentation-only pass.
        logger.debug("Job count: query : {}, dbTime: {}",
                bson.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        return new OpenCGAResult<>(jobCollection.count(bson));
    }

    @Override
    public OpenCGAResult stats(Query query) {
        return null;
    }

    /** Updates a single job located by uid; the actual write runs in a transaction. */
    @Override
    public OpenCGAResult update(long jobUid, ObjectMap parameters, QueryOptions queryOptions)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        QueryOptions options = new QueryOptions(QueryOptions.INCLUDE,
                Arrays.asList(QueryParams.ID.key(), QueryParams.UID.key(), QueryParams.STUDY_UID.key()));
        OpenCGAResult<Job> dataResult = get(jobUid, options);

        if (dataResult.getNumResults() == 0) {
            throw new CatalogDBException("Could not update job. Job uid '" + jobUid + "' not found.");
        }

        try {
            return runTransaction(session -> privateUpdate(session, dataResult.first(), parameters, queryOptions));
        } catch (CatalogDBException e) {
            logger.error("Could not update job {}: {}", dataResult.first().getId(), e.getMessage(), e);
            throw new CatalogDBException("Could not update job " + dataResult.first().getId() + ": " + e.getMessage(), e.getCause());
        }
    }

    /**
     * Updates every job matching the query, one transaction per job; failures
     * are collected as ERROR events instead of aborting the whole batch.
     */
    @Override
    public OpenCGAResult update(Query query, ObjectMap parameters, QueryOptions queryOptions)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        if (parameters.containsKey(QueryParams.ID.key())) {
            // We need to check that the update is only performed over 1 single job
            if (count(query).getNumMatches() != 1) {
                throw new CatalogDBException("Operation not supported: '" + QueryParams.ID.key()
                        + "' can only be updated for one job");
            }
        }

        QueryOptions options = new QueryOptions(QueryOptions.INCLUDE,
                Arrays.asList(QueryParams.ID.key(), QueryParams.UID.key(), QueryParams.STUDY_UID.key()));
        DBIterator<Job> iterator = iterator(query, options);

        OpenCGAResult<Job> result = OpenCGAResult.empty();

        while (iterator.hasNext()) {
            Job job = iterator.next();
            try {
                result.append(runTransaction(session -> privateUpdate(session, job, parameters, queryOptions)));
            } catch (CatalogDBException | CatalogParameterException | CatalogAuthorizationException e) {
                logger.error("Could not update job {}: {}", job.getId(), e.getMessage(), e);
                result.getEvents().add(new Event(Event.Type.ERROR, job.getId(), e.getMessage()));
                result.setNumMatches(result.getNumMatches() + 1);
            }
        }
        return result;
    }

    /**
     * Transaction body shared by both update entry points: validates the
     * parameters, builds the Mongo update document and applies it to the
     * single job identified by (studyUid, uid).
     */
    OpenCGAResult<Object> privateUpdate(ClientSession clientSession, Job job, ObjectMap parameters, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        long tmpStartTime = startQuery();
        Document jobParameters = parseAndValidateUpdateParams(parameters, options).toFinalUpdateDocument();

        if (jobParameters.isEmpty()) {
            if (!parameters.isEmpty()) {
                logger.error("Non-processed update parameters: {}", parameters.keySet());
            }
            throw new CatalogDBException("Nothing to update. Empty 'parameters' object");
        }

        Query tmpQuery = new Query()
                .append(QueryParams.STUDY_UID.key(), job.getStudyUid())
                .append(QueryParams.UID.key(), job.getUid());
        Bson finalQuery = parseQuery(tmpQuery, options);

        logger.debug("Job update: query : {}, update: {}",
                finalQuery.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                jobParameters.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        DataResult result = jobCollection.update(clientSession, finalQuery, jobParameters, null);

        if (result.getNumMatches() == 0) {
            throw new CatalogDBException("Job " + job.getId() + " not found");
        }
        List<Event> events = new ArrayList<>();
        if (result.getNumUpdated() == 0) {
            // Matched but nothing changed: report as a warning, not an error.
            events.add(new Event(Event.Type.WARNING, job.getId(), "Job was already updated"));
        }
        logger.debug("Job {} successfully updated", job.getId());

        return endWrite(tmpStartTime, 1, 1, events);
    }

    /** Logically deletes one job (moves it to the deleted collection) in a transaction. */
    @Override
    public OpenCGAResult delete(Job job) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        try {
            Query query = new Query()
                    .append(QueryParams.UID.key(), job.getUid())
                    .append(QueryParams.STUDY_UID.key(), job.getStudyUid());
            OpenCGAResult<Document> result = nativeGet(query, new QueryOptions());
            if (result.getNumResults() == 0) {
                throw new CatalogDBException("Could not find job " + job.getId() + " with uid " + job.getUid());
            }
            return runTransaction(clientSession -> privateDelete(clientSession, result.first()));
        } catch (CatalogDBException e) {
            logger.error("Could not delete job {}: {}", job.getId(), e.getMessage(), e);
            throw new CatalogDBException("Could not delete job " + job.getId() + ": " + e.getMessage(), e.getCause());
        }
    }

    /**
     * Logically deletes every job matching the query, one transaction per job;
     * failures are collected as ERROR events instead of aborting the batch.
     */
    @Override
    public OpenCGAResult delete(Query query) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        DBIterator<Document> iterator = nativeIterator(query, QueryOptions.empty());

        OpenCGAResult<Job> result = OpenCGAResult.empty();

        while (iterator.hasNext()) {
            Document job = iterator.next();
            String jobId = job.getString(QueryParams.ID.key());
            try {
                result.append(runTransaction(clientSession -> privateDelete(clientSession, job)));
            } catch (CatalogDBException | CatalogParameterException | CatalogAuthorizationException e) {
                logger.error("Could not delete job {}: {}", jobId, e.getMessage(), e);
                result.getEvents().add(new Event(Event.Type.ERROR, jobId, e.getMessage()));
                result.setNumMatches(result.getNumMatches() + 1);
            }
        }

        return result;
    }

    /**
     * Transaction body of the delete entry points: marks the document as
     * DELETED, upserts it into the deleted collection, then removes it from
     * the live job collection.
     */
    OpenCGAResult<Object> privateDelete(ClientSession clientSession, Document jobDocument) throws CatalogDBException {
        long tmpStartTime = startQuery();

        String jobId = jobDocument.getString(QueryParams.ID.key());
        long jobUid = jobDocument.getLong(PRIVATE_UID);
        long studyUid = jobDocument.getLong(PRIVATE_STUDY_UID);

        logger.debug("Deleting job {} ({})", jobId, jobUid);

        // Add status DELETED
        nestedPut(QueryParams.INTERNAL_STATUS.key(), getMongoDBDocument(new InternalStatus(InternalStatus.DELETED), "status"),
                jobDocument);

        // Upsert the document into the DELETED collection
        Bson query = new Document()
                .append(QueryParams.ID.key(), jobId)
                .append(PRIVATE_STUDY_UID, studyUid);
        deletedJobCollection.update(clientSession, query, new Document("$set", jobDocument),
                new QueryOptions(MongoDBCollection.UPSERT, true));

        // Delete the document from the main job collection
        // (original comment said "COHORT" - likely copy/paste from CohortMongoDBAdaptor).
        query = new Document()
                .append(PRIVATE_UID, jobUid)
                .append(PRIVATE_STUDY_UID, studyUid);
        DataResult remove = jobCollection.remove(clientSession, query, null);
        if (remove.getNumMatches() == 0) {
            throw new CatalogDBException("Job " + jobId + " not found");
        }
        if (remove.getNumDeleted() == 0) {
            throw new CatalogDBException("Job " + jobId + " could not be deleted");
        }
        logger.debug("Job {} successfully deleted", jobId);
        return endWrite(tmpStartTime, 1, 0, 0, 1, null);
    }

    private UpdateDocument
            // Translates a generic parameters map into a typed Mongo UpdateDocument,
            // validating/converting each accepted field; unknown keys are ignored
            // (privateUpdate logs them afterwards).
            parseAndValidateUpdateParams(ObjectMap parameters, QueryOptions options) throws CatalogDBException {
        UpdateDocument document = new UpdateDocument();

        // Plain string fields copied verbatim into $set.
        String[] acceptedParams = {QueryParams.USER_ID.key(), QueryParams.DESCRIPTION.key(), QueryParams.COMMAND_LINE.key()};
        filterStringParams(parameters, document.getSet(), acceptedParams);

        String[] acceptedBooleanParams = {QueryParams.VISITED.key()};
        filterBooleanParams(parameters, document.getSet(), acceptedBooleanParams);

        String[] acceptedStringListParams = {QueryParams.TAGS.key()};
        filterStringListParams(parameters, document.getSet(), acceptedStringListParams);

        // TOOL may arrive as a typed ToolInfo or as a pre-built map/document.
        if (parameters.containsKey(QueryParams.TOOL.key())) {
            if (parameters.get(QueryParams.TOOL.key()) instanceof ToolInfo) {
                document.getSet().put(QueryParams.TOOL.key(), getMongoDBDocument(parameters.get(QueryParams.TOOL.key()),
                        ToolInfo.class.getName()));
            } else {
                document.getSet().put(QueryParams.TOOL.key(), parameters.get(QueryParams.TOOL.key()));
            }
        }

        // Any status change also refreshes the status date.
        if (parameters.containsKey(QueryParams.INTERNAL_STATUS_ID.key())) {
            document.getSet().put(QueryParams.INTERNAL_STATUS_ID.key(), parameters.get(QueryParams.INTERNAL_STATUS_ID.key()));
            document.getSet().put(QueryParams.INTERNAL_STATUS_DATE.key(), TimeUtils.getTime());
        }
        if (parameters.containsKey(QueryParams.INTERNAL_STATUS_DESCRIPTION.key())) {
            document.getSet().put(QueryParams.INTERNAL_STATUS_DESCRIPTION.key(),
                    parameters.get(QueryParams.INTERNAL_STATUS_DESCRIPTION.key()));
            document.getSet().put(QueryParams.INTERNAL_STATUS_DATE.key(), TimeUtils.getTime());
        }

        if (parameters.containsKey(QueryParams.INTERNAL_WEBHOOK.key())) {
            Object value = parameters.get(QueryParams.INTERNAL_WEBHOOK.key());
            if (value instanceof JobInternalWebhook) {
                document.getSet().put(QueryParams.INTERNAL_WEBHOOK.key(), getMongoDBDocument(value, "JobInternalWebhook"));
            } else {
                document.getSet().put(QueryParams.INTERNAL_WEBHOOK.key(), value);
            }
        }

        // INTERNAL_EVENTS honours the SET/REMOVE/ADD action requested in options
        // (default ADD -> $addToSet).
        if (parameters.containsKey(QueryParams.INTERNAL_EVENTS.key())) {
            Map<String, Object> actionMap = options.getMap(Constants.ACTIONS, new HashMap<>());
            ParamUtils.BasicUpdateAction operation = ParamUtils.BasicUpdateAction.from(actionMap, QueryParams.INTERNAL_EVENTS.key(),
                    ParamUtils.BasicUpdateAction.ADD);
            String[] acceptedObjectParams = new String[]{QueryParams.INTERNAL_EVENTS.key()};
            switch (operation) {
                case SET:
                    filterObjectParams(parameters, document.getSet(), acceptedObjectParams);
                    break;
                case REMOVE:
                    filterObjectParams(parameters, document.getPullAll(), acceptedObjectParams);
                    break;
                case ADD:
                    filterObjectParams(parameters, document.getAddToSet(), acceptedObjectParams);
                    break;
                default:
                    throw new IllegalStateException("Unknown operation " + operation);
            }
        }

        // File references are stored as reduced file documents via the converter.
        if (parameters.containsKey(QueryParams.INPUT.key())) {
            List<Object> fileList = parameters.getList(QueryParams.INPUT.key());
            document.getSet().put(QueryParams.INPUT.key(), jobConverter.convertFilesToDocument(fileList));
        }
        if (parameters.containsKey(QueryParams.OUTPUT.key())) {
            List<Object> fileList = parameters.getList(QueryParams.OUTPUT.key());
            document.getSet().put(QueryParams.OUTPUT.key(), jobConverter.convertFilesToDocument(fileList));
        }
        if (parameters.containsKey(QueryParams.OUT_DIR.key())) {
            document.getSet().put(QueryParams.OUT_DIR.key(),
                    jobConverter.convertFileToDocument(parameters.get(QueryParams.OUT_DIR.key())));
        }
        if (parameters.containsKey(QueryParams.STDOUT.key())) {
            document.getSet().put(QueryParams.STDOUT.key(),
                    jobConverter.convertFileToDocument(parameters.get(QueryParams.STDOUT.key())));
        }
        if (parameters.containsKey(QueryParams.STDERR.key())) {
            document.getSet().put(QueryParams.STDERR.key(),
                    jobConverter.convertFileToDocument(parameters.get(QueryParams.STDERR.key())));
        }

        // Priority is stored twice: the public string and the private integer
        // used for numeric sorting (see fixOptions).
        if (parameters.containsKey(QueryParams.PRIORITY.key())) {
            document.getSet().put(QueryParams.PRIORITY.key(), parameters.getString(QueryParams.PRIORITY.key()));
            document.getSet().put(PRIVATE_PRIORITY,
                    Enums.Priority.getPriority(parameters.getString(QueryParams.PRIORITY.key())).getValue());
        }

        String[] acceptedObjectParams = {QueryParams.EXECUTION.key(), QueryParams.STUDY.key()};
        filterObjectParams(parameters, document.getSet(), acceptedObjectParams);
        if (document.getSet().containsKey(QueryParams.STUDY.key())) {
            // Changing the study list requires resolving every FQN to its uid.
            List<String> studyFqns = new LinkedList<>();
            studyFqns.add(parameters.getString(QueryParams.STUDY_ID.key()));
            studyFqns.addAll(parameters.getAsStringList(QueryParams.STUDY_OTHERS.key()));
            Query query = new Query(StudyDBAdaptor.QueryParams.FQN.key(), studyFqns);
            QueryOptions queryOptions = new QueryOptions(QueryOptions.INCLUDE, StudyDBAdaptor.QueryParams.UID.key());
            OpenCGAResult<Study> studyResults = dbAdaptorFactory.getCatalogStudyDBAdaptor().get(query, queryOptions);
            if (studyResults.getNumResults() < studyFqns.size()) {
                throw new CatalogDBException("Unable to find some studies from '" + studyFqns + "'");
            }
            // Add uids to others array
            document.getSet().put(PRIVATE_STUDY_UIDS,
                    studyResults.getResults().stream().map(Study::getUid).collect(Collectors.toList()));
        }

        String[] acceptedMapParams = {QueryParams.ATTRIBUTES.key()};
        filterMapParams(parameters, document.getSet(), acceptedMapParams);

        if (!document.toFinalUpdateDocument().isEmpty()) {
            String time = TimeUtils.getTime();
            if (StringUtils.isEmpty(parameters.getString(MODIFICATION_DATE.key()))) {
                // Update modificationDate param
                Date date = TimeUtils.toDate(time);
                document.getSet().put(QueryParams.MODIFICATION_DATE.key(), time);
                document.getSet().put(PRIVATE_MODIFICATION_DATE, date);
            }
            document.getSet().put(INTERNAL_LAST_MODIFIED, time);
        }

        return document;
    }

    /** Physically removes the job with the given uid from the live collection. */
    public OpenCGAResult clean(int id) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Query query = new Query(QueryParams.UID.key(), id);
        OpenCGAResult<Job> jobDataResult = get(query, null);
        if (jobDataResult.getResults().size() == 1) {
            DataResult delete = jobCollection.remove(parseQuery(query, QueryOptions.empty()), null);
            // NOTE(review): this checks getNumUpdated() after a remove; presumably
            // getNumDeleted() was intended - verify against DataResult semantics.
            if (delete.getNumUpdated() == 0) {
                throw CatalogDBException.newInstance("Job id '{}' has not been deleted", id);
            }
            return new OpenCGAResult(delete);
        } else {
            throw CatalogDBException.uidNotFound("Job id '{}' does not exist (or there are too many)", id);
        }
    }

    @Override
    public OpenCGAResult remove(long id, QueryOptions queryOptions) throws CatalogDBException {
        throw new UnsupportedOperationException("Remove not yet implemented.");
    }

    @Override
    public OpenCGAResult remove(Query query, QueryOptions queryOptions) throws CatalogDBException {
        throw new UnsupportedOperationException("Remove not yet implemented.");
    }

    @Override
    public OpenCGAResult restore(Query query, QueryOptions queryOptions) throws CatalogDBException {
        throw new NotImplementedException("Not yet implemented");
    }

    @Override
    public OpenCGAResult restore(long id, QueryOptions queryOptions) throws CatalogDBException {
        throw new NotImplementedException("Not yet implemented");
    }

    /**
     * At the moment it does not clean external references to itself.
     */
    @Override
    public OpenCGAResult<Job> get(long jobId, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        checkId(jobId);
        Query query = new Query(QueryParams.UID.key(), jobId)
                .append(QueryParams.STUDY_UID.key(), getStudyId(jobId));
        return get(query, options);
    }

    /** User-aware get: only jobs visible to {@code user} within the study. */
    @Override
    public OpenCGAResult<Job> get(long studyUid, Query query, QueryOptions options, String user)
            throws CatalogDBException, CatalogAuthorizationException, CatalogParameterException {
        long startTime = startQuery();
        try (DBIterator<Job> dbIterator = iterator(studyUid, query, options, user)) {
            return endQuery(startTime, dbIterator);
        }
    }

    @Override
    public OpenCGAResult<Job> get(Query query, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        long startTime = startQuery();
        try (DBIterator<Job> dbIterator = iterator(query, options)) {
            return endQuery(startTime, dbIterator);
        }
    }

    /** Same as get but returns raw Documents, bypassing the Job converter. */
    @Override
    public OpenCGAResult nativeGet(Query query, QueryOptions options) throws CatalogDBException, CatalogParameterException,
            CatalogAuthorizationException {
        long startTime = startQuery();
        try (DBIterator<Document> dbIterator = nativeIterator(query, options)) {
            return endQuery(startTime, dbIterator);
        }
    }

    /** User-aware native get: raw Documents restricted to what {@code user} may see. */
    @Override
    public OpenCGAResult nativeGet(long studyUid, Query query, QueryOptions options, String user)
            throws CatalogDBException, CatalogAuthorizationException, CatalogParameterException {
        long startTime = startQuery();
        try (DBIterator<Document> dbIterator = nativeIterator(studyUid, query, options, user)) {
            return endQuery(startTime, dbIterator);
        }
    }

    /** Iterator over converted Job objects matching the query. */
    @Override
    public DBIterator<Job> iterator(Query query, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        MongoDBIterator<Document> mongoCursor = getMongoCursor(query, options);
        return new JobCatalogMongoDBIterator(mongoCursor, null, jobConverter, this, dbAdaptorFactory.getCatalogFileDBAdaptor(), options);
    }

    /** Iterator over raw Documents (converter disabled via NATIVE_QUERY). */
    @Override
    public DBIterator nativeIterator(Query query, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        QueryOptions queryOptions = options != null ? new QueryOptions(options) : new QueryOptions();
        queryOptions.put(NATIVE_QUERY, true);
        MongoDBIterator<Document> mongoCursor = getMongoCursor(query, queryOptions);
        return new JobCatalogMongoDBIterator(mongoCursor, null, null, this, dbAdaptorFactory.getCatalogFileDBAdaptor(), options);
    }

    /** User-aware Job iterator scoped to one study. */
    @Override
    public DBIterator<Job> iterator(long studyUid, Query query, QueryOptions options, String user)
            throws CatalogDBException, CatalogAuthorizationException, CatalogParameterException {
        query.put(PRIVATE_STUDY_UID, studyUid);
        MongoDBIterator<Document> mongoCursor = getMongoCursor(query, options, user);
        return new JobCatalogMongoDBIterator(mongoCursor, null, jobConverter, this, dbAdaptorFactory.getCatalogFileDBAdaptor(), options,
                studyUid, user);
    }

    /** User-aware raw Document iterator scoped to one study. */
    @Override
    public DBIterator nativeIterator(long studyUid, Query query, QueryOptions options, String user)
            throws CatalogDBException, CatalogAuthorizationException, CatalogParameterException {
        QueryOptions queryOptions = options != null ? new QueryOptions(options) : new QueryOptions();
        queryOptions.put(NATIVE_QUERY, true);
        query.put(PRIVATE_STUDY_UID, studyUid);
        MongoDBIterator<Document> mongoCursor = getMongoCursor(query, queryOptions, user);
        return new JobCatalogMongoDBIterator(mongoCursor, null, null, this, dbAdaptorFactory.getCatalogFileDBAdaptor(), options,
                studyUid, user);
    }

    private MongoDBIterator<Document> getMongoCursor(Query query, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return getMongoCursor(query, options, null);
    }

    /**
     * Builds the Mongo cursor every iterator is based on. Picks the live or
     * the deleted collection depending on the DELETED query flag.
     */
    private MongoDBIterator<Document> getMongoCursor(Query query, QueryOptions options, String user)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        QueryOptions qOptions;
        if (options != null) {
            qOptions = new QueryOptions(options);
        } else {
            qOptions = new QueryOptions();
        }
        qOptions = fixOptions(qOptions);

        Bson bson = parseQuery(query, options, user);
        logger.debug("Job get: query : {}", bson.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        if (!query.getBoolean(QueryParams.DELETED.key())) {
            return jobCollection.iterator(bson, qOptions);
        } else {
            return deletedJobCollection.iterator(bson, qOptions);
        }
    }

    /**
     * Normalises query options: applies the job projection filter and rewrites
     * any sort on "priority" to the private integer field so ordering is
     * numeric rather than lexicographic.
     */
    private QueryOptions fixOptions(QueryOptions queryOptions) {
        QueryOptions options = new QueryOptions(queryOptions);

        filterOptions(options, FILTER_ROUTE_JOBS);
        fixAclProjection(options);
        if (options.containsKey(QueryOptions.SORT)) {
            // If the user is sorting by priority, we will point to the private priority stored as integers to properly sort
            List<String> sortList = options.getAsStringList(QueryOptions.SORT);
            List<String> fixedSortList = new ArrayList<>(sortList.size());
            for (String key : sortList) {
                if (key.startsWith(QueryParams.PRIORITY.key())) {
                    String[] priorityArray = key.split(":");
                    if (priorityArray.length == 1) {
                        fixedSortList.add(PRIVATE_PRIORITY);
                    } else {
                        // The order (ascending or descending) should be in priorityArray[1]
                        fixedSortList.add(PRIVATE_PRIORITY + ":" + priorityArray[1]);
                    }
                } else {
                    fixedSortList.add(key);
                }
            }
            // Add new fixed sort list
            options.put(QueryOptions.SORT, fixedSortList);
        }

        return options;
    }

    @Override
    public OpenCGAResult rank(Query query, String field, int numResults, boolean asc)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Bson bsonQuery = parseQuery(query, QueryOptions.empty());
        return rank(jobCollection, bsonQuery, field, "name", numResults, asc);
    }

    @Override
    public OpenCGAResult groupBy(Query query, String field, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Bson bsonQuery = parseQuery(query, options);
        return groupBy(jobCollection, bsonQuery, field, "name", fixOptions(options));
    }

    @Override
    public OpenCGAResult groupBy(Query query, List<String> fields, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Bson bsonQuery =
                parseQuery(query, options);
        return groupBy(jobCollection, bsonQuery, fields, "name", fixOptions(options));
    }

    /** User-aware multi-field groupBy (grouping id is the job ID here). */
    @Override
    public OpenCGAResult groupBy(Query query, List<String> fields, QueryOptions options, String user)
            throws CatalogDBException, CatalogAuthorizationException, CatalogParameterException {
        Bson bsonQuery = parseQuery(query, options, user);
        return groupBy(jobCollection, bsonQuery, fields, QueryParams.ID.key(), fixOptions(options));
    }

    /** Distinct values of {@code field} among the jobs the user may see in the study. */
    @Override
    public <T> OpenCGAResult<T> distinct(long studyUid, String field, Query query, String userId, Class<T> clazz)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Query finalQuery = query != null ? new Query(query) : new Query();
        finalQuery.put(QueryParams.STUDY_UID.key(), studyUid);
        Bson bson = parseQuery(finalQuery, null, userId);
        return new OpenCGAResult(jobCollection.distinct(field, bson, clazz));
    }

    /** Applies {@code action} to every Job matching the query. */
    @Override
    public void forEach(Query query, Consumer<? super Object> action, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        Objects.requireNonNull(action);
        try (DBIterator<Job> catalogDBIterator = iterator(query, options)) {
            while (catalogDBIterator.hasNext()) {
                action.accept(catalogDBIterator.next());
            }
        }
    }

    /**
     * Scrubs references to a deleted file from every job in the study: the file
     * is pulled from input/output lists (or OUT_DIR is reset to uid -1) and a
     * copy of the reduced file document is archived under the job's private
     * OpenCGA attributes.
     */
    void removeFileReferences(ClientSession clientSession, long studyUid, long fileUid, Document file) {
        UpdateDocument document = new UpdateDocument();

        String prefix = QueryParams.ATTRIBUTES.key() + "." + Constants.PRIVATE_OPENCGA_ATTRIBUTES + ".";

        // INPUT
        Document query = new Document()
                .append(PRIVATE_STUDY_UID, studyUid)
                .append(QueryParams.INPUT_UID.key(), fileUid);
        document.getPullAll().put(QueryParams.INPUT.key(),
                Collections.singletonList(new Document(FileDBAdaptor.QueryParams.UID.key(), fileUid)));
        document.getPush().put(prefix + Constants.JOB_DELETED_INPUT_FILES, file);
        Document updateDocument = document.toFinalUpdateDocument();

        logger.debug("Removing file from job '{}' field. Query: {}, Update: {}", QueryParams.INPUT.key(),
                query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                updateDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        DataResult result = jobCollection.update(clientSession, query, updateDocument, QueryOptions.empty());
        logger.debug("File '{}' removed from {} jobs", fileUid, result.getNumUpdated());

        // OUTPUT
        query = new Document()
                .append(PRIVATE_STUDY_UID, studyUid)
                .append(QueryParams.OUTPUT_UID.key(), fileUid);
        document = new UpdateDocument();
        document.getPullAll().put(QueryParams.OUTPUT.key(),
                Collections.singletonList(new Document(FileDBAdaptor.QueryParams.UID.key(), fileUid)));
        document.getPush().put(prefix + Constants.JOB_DELETED_OUTPUT_FILES, file);
        updateDocument = document.toFinalUpdateDocument();

        logger.debug("Removing file from job '{}' field. Query: {}, Update: {}", QueryParams.OUTPUT.key(),
                query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                updateDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        result = jobCollection.update(clientSession, query, updateDocument, QueryOptions.empty());
        logger.debug("File '{}' removed from {} jobs", fileUid, result.getNumUpdated());

        // OUT DIR
        query = new Document()
                .append(PRIVATE_STUDY_UID, studyUid)
                .append(QueryParams.OUT_DIR_UID.key(), fileUid);
        document = new UpdateDocument();
        // uid -1 marks "no output directory" rather than pulling from a list.
        document.getSet().put(QueryParams.OUT_DIR.key(), new Document(FileDBAdaptor.QueryParams.UID.key(), -1));
        document.getSet().put(prefix + Constants.JOB_DELETED_OUTPUT_DIRECTORY, file);
        updateDocument = document.toFinalUpdateDocument();

        logger.debug("Removing file from job '{}' field. Query: {}, Update: {}", QueryParams.OUT_DIR.key(),
                query.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()),
                updateDocument.toBsonDocument(Document.class, MongoClient.getDefaultCodecRegistry()));
        result = jobCollection.update(clientSession, query, updateDocument, QueryOptions.empty());
        logger.debug("File '{}' removed from {} jobs", fileUid, result.getNumUpdated());
    }

    private Bson parseQuery(Query query, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return parseQuery(query, null, options, null);
    }

    private Bson parseQuery(Query query, QueryOptions options, String user)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return parseQuery(query, null, options, user);
    }

    protected Bson parseQuery(Query query, Document extraQuery, QueryOptions options)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        return parseQuery(query, extraQuery, options, null);
    }

    /**
     * Builds the Bson filter for a catalog query, prepending permission
     * filters when a user or an ACL parameter is present.
     * NOTE(review): this method continues beyond the end of the visible chunk.
     */
    private Bson parseQuery(Query query, Document extraQuery, QueryOptions options, String user)
            throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException {
        List<Bson> andBsonList = new ArrayList<>();
        boolean mainStudy = true;
        if (options != null) {
            mainStudy = !options.getBoolean(ParamConstants.OTHER_STUDIES_FLAG, false);
        }

        if (query.containsKey(QueryParams.STUDY_UID.key())
                && (StringUtils.isNotEmpty(user) || query.containsKey(ParamConstants.ACL_PARAM))) {
            Document studyDocument = getStudyDocument(null, query.getLong(QueryParams.STUDY_UID.key()));
            if (query.containsKey(ParamConstants.ACL_PARAM)) {
                andBsonList.addAll(AuthorizationMongoDBUtils.parseAclQuery(studyDocument, query, Enums.Resource.JOB, user,
                        configuration));
            } else {
                // Get the document query needed to check the permissions as well
                andBsonList.add(getQueryForAuthorisedEntries(studyDocument, user, JobAclEntry.JobPermissions.VIEW.name(),
                        Enums.Resource.JOB, configuration));
            }
query.remove(ParamConstants.ACL_PARAM); } Query queryCopy = new Query(query); queryCopy.remove(QueryParams.DELETED.key()); for (Map.Entry<String, Object> entry : queryCopy.entrySet()) { String key = entry.getKey().split("\\.")[0]; QueryParams queryParam = (QueryParams.getParam(entry.getKey()) != null) ? QueryParams.getParam(entry.getKey()) : QueryParams.getParam(key); if (queryParam == null) { throw new CatalogDBException("Unexpected parameter " + entry.getKey() + ". The parameter does not exist or cannot be " + "queried for."); } try { switch (queryParam) { case UID: addAutoOrQuery(PRIVATE_UID, queryParam.key(), queryCopy, queryParam.type(), andBsonList); break; case STUDY_UID: if (mainStudy) { addAutoOrQuery(PRIVATE_STUDY_UID, queryParam.key(), queryCopy, queryParam.type(), andBsonList); } else { addAutoOrQuery(PRIVATE_STUDY_UIDS, queryParam.key(), queryCopy, queryParam.type(), andBsonList); } break; case TOOL: case TOOL_ID: addAutoOrQuery(QueryParams.TOOL_ID.key(), queryParam.key(), queryCopy, QueryParams.TOOL_ID.type(), andBsonList); break; case INPUT_UID: addAutoOrQuery(QueryParams.INPUT_UID.key(), queryParam.key(), queryCopy, queryParam.type(), andBsonList); break; case OUTPUT_UID: addAutoOrQuery(QueryParams.OUTPUT_UID.key(), queryParam.key(), queryCopy, queryParam.type(), andBsonList); break; case CREATION_DATE: addAutoOrQuery(PRIVATE_CREATION_DATE, queryParam.key(), queryCopy, queryParam.type(), andBsonList); break; case MODIFICATION_DATE: addAutoOrQuery(PRIVATE_MODIFICATION_DATE, queryParam.key(), queryCopy, queryParam.type(), andBsonList); break; case STATUS: case STATUS_ID: addAutoOrQuery(QueryParams.STATUS_ID.key(), queryParam.key(), queryCopy, QueryParams.STATUS_ID.type(), andBsonList); break; case INTERNAL_STATUS: case INTERNAL_STATUS_ID: // Convert the status to a positive status queryCopy.put(queryParam.key(), InternalStatus.getPositiveStatus(Enums.ExecutionStatus.STATUS_LIST, queryCopy.getString(queryParam.key()))); 
addAutoOrQuery(QueryParams.INTERNAL_STATUS_ID.key(), queryParam.key(), queryCopy, QueryParams.INTERNAL_STATUS_ID.type(), andBsonList); break; case ID: case UUID: case USER_ID: case TOOL_TYPE: case PRIORITY: // TODO: This filter is not indexed. We should change it and query _priority instead. // case START_TIME: // case END_TIME: // case OUTPUT_ERROR: // case EXECUTION_START: // case EXECUTION_END: // case COMMAND_LINE: case VISITED: case RELEASE: case OUT_DIR_UID: case TAGS: addAutoOrQuery(queryParam.key(), queryParam.key(), queryCopy, queryParam.type(), andBsonList); break; default: throw new CatalogDBException("Cannot query by parameter " + queryParam.key()); } } catch (Exception e) { throw new CatalogDBException(e); } } if (extraQuery != null && extraQuery.size() > 0) { andBsonList.add(extraQuery); } if (andBsonList.size() > 0) { return Filters.and(andBsonList); } else { return new Document(); } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.catalina.session;

import java.io.IOException;
import java.security.AccessController;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.catalina.Lifecycle;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.LifecycleState;
import org.apache.catalina.Session;
import org.apache.catalina.Store;
import org.apache.catalina.StoreManager;
import org.apache.catalina.security.SecurityUtil;
import org.apache.juli.logging.Log;
import org.apache.juli.logging.LogFactory;

/**
 * Extends the <b>ManagerBase</b> class to implement most of the
 * functionality required by a Manager which supports any kind of
 * persistence, even if only for restarts.
 * <p>
 * <b>IMPLEMENTATION NOTE</b>: Correct behavior of session storing and
 * reloading depends upon external calls to the <code>start()</code> and
 * <code>stop()</code> methods of this class at the correct times.
 *
 * @author Craig R. McClanahan
 */
public abstract class PersistentManagerBase extends ManagerBase
        implements StoreManager {

    private static final Log log = LogFactory.getLog(PersistentManagerBase.class);

    // ---------------------------------------------------- Security Classes

    // The Privileged* wrappers below exist so that Store operations can run with this
    // class's own permissions when a SecurityManager with package protection is active.

    /** Privileged wrapper around {@code store.clear()}. */
    private class PrivilegedStoreClear
        implements PrivilegedExceptionAction<Void> {

        PrivilegedStoreClear() {
            // NOOP
        }

        @Override
        public Void run() throws Exception{
           store.clear();
           return null;
        }
    }

    /** Privileged wrapper around {@code store.remove(id)}. */
    private class PrivilegedStoreRemove
        implements PrivilegedExceptionAction<Void> {

        private String id;

        PrivilegedStoreRemove(String id) {
            this.id = id;
        }

        @Override
        public Void run() throws Exception{
           store.remove(id);
           return null;
        }
    }

    /** Privileged wrapper around {@code store.load(id)}. */
    private class PrivilegedStoreLoad
        implements PrivilegedExceptionAction<Session> {

        private String id;

        PrivilegedStoreLoad(String id) {
            this.id = id;
        }

        @Override
        public Session run() throws Exception{
           return store.load(id);
        }
    }

    /** Privileged wrapper around {@code store.save(session)}. */
    private class PrivilegedStoreSave
        implements PrivilegedExceptionAction<Void> {

        private Session session;

        PrivilegedStoreSave(Session session) {
            this.session = session;
        }

        @Override
        public Void run() throws Exception{
           store.save(session);
           return null;
        }
    }

    /** Privileged wrapper around {@code store.keys()}. */
    private class PrivilegedStoreKeys
        implements PrivilegedExceptionAction<String[]> {

        PrivilegedStoreKeys() {
            // NOOP
        }

        @Override
        public String[] run() throws Exception{
           return store.keys();
        }
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * The descriptive name of this Manager implementation (for logging).
     */
    private static final String name = "PersistentManagerBase";

    /**
     * Store object which will manage the Session store.
     */
    protected Store store = null;

    /**
     * Whether to save and reload sessions when the Manager <code>unload</code>
     * and <code>load</code> methods are called.
     */
    protected boolean saveOnRestart = true;

    /**
     * How long a session must be idle before it should be backed up.
     * -1 means sessions won't be backed up.
     */
    protected int maxIdleBackup = -1;

    /**
     * Minimum time a session must be idle before it is swapped to disk.
     * This overrides maxActiveSessions, to prevent thrashing if there are lots
     * of active sessions. Setting to -1 means it's ignored.
     */
    protected int minIdleSwap = -1;

    /**
     * The maximum time a session may be idle before it should be swapped
     * to file just on general principle. Setting this to -1 means sessions
     * should not be forced out.
     */
    protected int maxIdleSwap = -1;

    /**
     * Sessions currently being swapped in and the associated locks.
     * Guarded by synchronizing on {@code this}; see swapIn().
     */
    private final Map<String,Object> sessionSwapInLocks = new HashMap<>();

    // ------------------------------------------------------------- Properties

    /**
     * Indicates how many seconds old a session can get, after its last use in a
     * request, before it should be backed up to the store. -1 means sessions
     * are not backed up.
     */
    public int getMaxIdleBackup() {
        return maxIdleBackup;
    }

    /**
     * Sets the option to back sessions up to the Store after they
     * are used in a request. Sessions remain available in memory
     * after being backed up, so they are not passivated as they are
     * when swapped out. The value set indicates how old a session
     * may get (since its last use) before it must be backed up: -1
     * means sessions are not backed up.
     * <p>
     * Note that this is not a hard limit: sessions are checked
     * against this age limit periodically according to <b>processExpiresFrequency</b>.
     * This value should be considered to indicate when a session is
     * ripe for backing up.
     * <p>
     * So it is possible that a session may be idle for maxIdleBackup +
     * processExpiresFrequency * engine.backgroundProcessorDelay seconds, plus the time it takes to handle other
     * session expiration, swapping, etc. tasks.
     *
     * @param backup The number of seconds after their last accessed
     * time when they should be written to the Store.
     */
    public void setMaxIdleBackup (int backup) {
        if (backup == this.maxIdleBackup)
            return;
        int oldBackup = this.maxIdleBackup;
        this.maxIdleBackup = backup;
        support.firePropertyChange("maxIdleBackup",
                                   Integer.valueOf(oldBackup),
                                   Integer.valueOf(this.maxIdleBackup));
    }

    /**
     * The time in seconds after which a session should be swapped out of
     * memory to disk.
     */
    public int getMaxIdleSwap() {
        return maxIdleSwap;
    }

    /**
     * Sets the time in seconds after which a session should be swapped out of
     * memory to disk.
     */
    public void setMaxIdleSwap(int max) {
        if (max == this.maxIdleSwap)
            return;
        int oldMaxIdleSwap = this.maxIdleSwap;
        this.maxIdleSwap = max;
        support.firePropertyChange("maxIdleSwap",
                                   Integer.valueOf(oldMaxIdleSwap),
                                   Integer.valueOf(this.maxIdleSwap));
    }

    /**
     * The minimum time in seconds that a session must be idle before
     * it can be swapped out of memory, or -1 if it can be swapped out
     * at any time.
     */
    public int getMinIdleSwap() {
        return minIdleSwap;
    }

    /**
     * Sets the minimum time in seconds that a session must be idle before
     * it can be swapped out of memory due to maxActiveSession. Set it to -1
     * if it can be swapped out at any time.
     */
    public void setMinIdleSwap(int min) {
        if (this.minIdleSwap == min)
            return;
        int oldMinIdleSwap = this.minIdleSwap;
        this.minIdleSwap = min;
        support.firePropertyChange("minIdleSwap",
                                   Integer.valueOf(oldMinIdleSwap),
                                   Integer.valueOf(this.minIdleSwap));
    }

    /**
     * Return true, if the session id is loaded in memory
     * otherwise false is returned
     *
     * @param id The session id for the session to be searched for
     */
    public boolean isLoaded( String id ){
        try {
            if ( super.findSession(id) != null )
                return true;
        } catch (IOException e) {
            log.error("checking isLoaded for id, " + id + ", "+e.getMessage(), e);
        }
        return false;
    }

    /**
     * Return the descriptive short name of this Manager implementation.
     */
    @Override
    public String getName() {
        return (name);
    }

    /**
     * Set the Store object which will manage persistent Session
     * storage for this Manager.
     *
     * @param store the associated Store
     */
    public void setStore(Store store) {
        this.store = store;
        store.setManager(this);
    }

    /**
     * Return the Store object which manages persistent Session
     * storage for this Manager.
     */
    @Override
    public Store getStore() {
        return (this.store);
    }

    /**
     * Indicates whether sessions are saved when the Manager is shut down
     * properly. This requires the unload() method to be called.
     */
    public boolean getSaveOnRestart() {
        return saveOnRestart;
    }

    /**
     * Set the option to save sessions to the Store when the Manager is
     * shut down, then loaded when the Manager starts again. If set to
     * false, any sessions found in the Store may still be picked up when
     * the Manager is started again.
     *
     * @param saveOnRestart true if sessions should be saved on restart, false if
     *     they should be ignored.
     */
    public void setSaveOnRestart(boolean saveOnRestart) {
        if (saveOnRestart == this.saveOnRestart)
            return;
        boolean oldSaveOnRestart = this.saveOnRestart;
        this.saveOnRestart = saveOnRestart;
        support.firePropertyChange("saveOnRestart",
                                   Boolean.valueOf(oldSaveOnRestart),
                                   Boolean.valueOf(this.saveOnRestart));
    }

    // --------------------------------------------------------- Public Methods

    /**
     * Clear all sessions from the Store.
     */
    public void clearStore() {
        if (store == null)
            return;
        try {
            if (SecurityUtil.isPackageProtectionEnabled()){
                try{
                    AccessController.doPrivileged(new PrivilegedStoreClear());
                }catch(PrivilegedActionException ex){
                    Exception exception = ex.getException();
                    log.error("Exception clearing the Store: " + exception,
                            exception);
                }
            } else {
                store.clear();
            }
        } catch (IOException e) {
            log.error("Exception clearing the Store: " + e, e);
        }
    }

    /**
     * Implements the Manager interface, direct call to processExpires and processPersistenceChecks
     */
    @Override
    public void processExpires() {

        long timeNow = System.currentTimeMillis();
        Session sessions[] = findSessions();
        int expireHere = 0 ;
        if(log.isDebugEnabled())
            log.debug("Start expire sessions " + getName() + " at " + timeNow + " sessioncount " + sessions.length);
        for (int i = 0; i < sessions.length; i++) {
            // NOTE(review): no explicit expire() here — presumably isValid() itself expires
            // an overdue StandardSession as a side effect; confirm against ManagerBase.
            if (!sessions[i].isValid()) {
                expiredSessions.incrementAndGet();
                expireHere++;
            }
        }
        processPersistenceChecks();
        if ((getStore() != null) && (getStore() instanceof StoreBase)) {
            ((StoreBase) getStore()).processExpires();
        }
        long timeEnd = System.currentTimeMillis();
        if(log.isDebugEnabled())
            log.debug("End expire sessions " + getName() + " processingTime " + (timeEnd - timeNow) + " expired sessions: " + expireHere);
        processingTime += (timeEnd - timeNow);
    }

    /**
     * Called by the background thread after active sessions have been checked
     * for expiration, to allow sessions to be swapped out, backed up, etc.
     */
    public void processPersistenceChecks() {
        processMaxIdleSwaps();
        processMaxActiveSwaps();
        processMaxIdleBackups();
    }

    /**
     * Return the active Session, associated with this Manager, with the
     * specified session id (if any); otherwise return <code>null</code>.
     * This method checks the persistence store if persistence is enabled,
     * otherwise just uses the functionality from ManagerBase.
     *
     * @param id The session id for the session to be returned
     *
     * @exception IllegalStateException if a new session cannot be
     *  instantiated for any reason
     * @exception IOException if an input/output error occurs while
     *  processing this request
     */
    @Override
    public Session findSession(String id) throws IOException {

        Session session = super.findSession(id);
        // OK, at this point, we're not sure if another thread is trying to
        // remove the session or not so the only way around this is to lock it
        // (or attempt to) and then try to get it by this session id again. If
        // the other code ran swapOut, then we should get a null back during
        // this run, and if not, we lock it out so we can access the session
        // safely.
        if(session != null) {
            synchronized(session){
                session = super.findSession(session.getIdInternal());
                if(session != null){
                   // To keep any external calling code from messing up the
                   // concurrency.
                   session.access();
                   session.endAccess();
                }
            }
        }
        if (session != null)
            return (session);

        // See if the Session is in the Store
        session = swapIn(id);
        return (session);
    }

    /**
     * Remove this Session from the active Sessions for this Manager,
     * but not from the Store. (Used by the PersistentValve)
     *
     * @param session Session to be removed
     */
    @Override
    public void removeSuper(Session session) {
        super.remove (session);
    }

    /**
     * Load all sessions found in the persistence mechanism, assuming
     * they are marked as valid and have not passed their expiration
     * limit. If persistence is not supported, this method returns
     * without doing anything.
     * <p>
     * Note that by default, this method is not called by the MiddleManager
     * class. In order to use it, a subclass must specifically call it,
     * for example in the start() and/or processPersistenceChecks() methods.
     */
    @Override
    public void load() {

        // Initialize our internal data structures
        sessions.clear();

        if (store == null)
            return;

        String[] ids = null;
        try {
            if (SecurityUtil.isPackageProtectionEnabled()){
                try{
                    ids = AccessController.doPrivileged(
                            new PrivilegedStoreKeys());
                }catch(PrivilegedActionException ex){
                    Exception exception = ex.getException();
                    log.error("Exception in the Store during load: "
                              + exception, exception);
                    return;
                }
            } else {
                ids = store.keys();
            }
        } catch (IOException e) {
            log.error("Can't load sessions from store, " + e.getMessage(), e);
            return;
        }

        int n = ids.length;
        if (n == 0)
            return;

        if (log.isDebugEnabled())
            log.debug(sm.getString("persistentManager.loading", String.valueOf(n)));

        for (int i = 0; i < n; i++)
            try {
                swapIn(ids[i]);
            } catch (IOException e) {
                log.error("Failed load session from store, " + e.getMessage(), e);
            }
    }

    /**
     * Remove this Session from the active Sessions for this Manager,
     * and from the Store.
     *
     * @param session Session to be removed
     */
    @Override
    public void remove(Session session, boolean update) {

        super.remove (session, update);

        if (store != null){
            removeSession(session.getIdInternal());
        }
    }

    /**
     * Remove this Session from the active Sessions for this Manager,
     * and from the Store.
     *
     * @param id Session's id to be removed
     */
    protected void removeSession(String id){
        try {
            if (SecurityUtil.isPackageProtectionEnabled()){
                try{
                    AccessController.doPrivileged(new PrivilegedStoreRemove(id));
                }catch(PrivilegedActionException ex){
                    Exception exception = ex.getException();
                    log.error("Exception in the Store during removeSession: "
                              + exception, exception);
                }
            } else {
                 store.remove(id);
            }
        } catch (IOException e) {
            log.error("Exception removing session  " + e.getMessage(), e);
        }
    }

    /**
     * Save all currently active sessions in the appropriate persistence
     * mechanism, if any. If persistence is not supported, this method
     * returns without doing anything.
     * <p>
     * Note that by default, this method is not called by the MiddleManager
     * class. In order to use it, a subclass must specifically call it,
     * for example in the stop() and/or processPersistenceChecks() methods.
     */
    @Override
    public void unload() {

        if (store == null)
            return;

        Session sessions[] = findSessions();
        int n = sessions.length;
        if (n == 0)
            return;

        if (log.isDebugEnabled())
            log.debug(sm.getString("persistentManager.unloading",
                                   String.valueOf(n)));

        for (int i = 0; i < n; i++)
            try {
                swapOut(sessions[i]);
            } catch (IOException e) {
                // This is logged in writeSession()
            }
    }

    @Override
    public int getActiveSessionsFull() {
        // In memory session count
        int result = getActiveSessions();
        try {
            // Store session count
            result += getStore().getSize();
        } catch (IOException ioe) {
            log.warn(sm.getString("persistentManager.storeSizeException"));
        }
        return result;
    }

    @Override
    public Set<String> getSessionIdsFull() {
        Set<String> sessionIds = new HashSet<>();
        // In memory session ID list
        sessionIds.addAll(sessions.keySet());
        // Store session ID list
        String[] storeKeys;
        try {
            storeKeys = getStore().keys();
            for (String storeKey : storeKeys) {
                sessionIds.add(storeKey);
            }
        } catch (IOException e) {
            log.warn(sm.getString("persistentManager.storeKeysException"));
        }
        return sessionIds;
    }

    // ------------------------------------------------------ Protected Methods

    /**
     * Look for a session in the Store and, if found, restore
     * it in the Manager's list of active sessions if appropriate.
     * The session will be removed from the Store after swapping
     * in, but will not be added to the active session list if it
     * is invalid or past its expiration.
     */
    protected Session swapIn(String id) throws IOException {

        if (store == null)
            return null;

        Object swapInLock = null;

        /*
         * The purpose of this sync and these locks is to make sure that a
         * session is only loaded once. It doesn't matter if the lock is removed
         * and then another thread enters this method and tries to load the same
         * session. That thread will re-create a swapIn lock for that session,
         * quickly find that the session is already in sessions, use it and
         * carry on.
         */
        synchronized (this) {
            swapInLock = sessionSwapInLocks.get(id);
            if (swapInLock == null) {
                swapInLock = new Object();
                sessionSwapInLocks.put(id, swapInLock);
            }
        }

        Session session = null;

        synchronized (swapInLock) {
            // First check to see if another thread has loaded the session into
            // the manager
            session = sessions.get(id);

            if (session == null) {
                try {
                    if (SecurityUtil.isPackageProtectionEnabled()){
                        try {
                            session = AccessController.doPrivileged(
                                    new PrivilegedStoreLoad(id));
                        } catch (PrivilegedActionException ex) {
                            Exception e = ex.getException();
                            log.error(sm.getString(
                                    "persistentManager.swapInException", id),
                                    e);
                            if (e instanceof IOException){
                                throw (IOException)e;
                            } else if (e instanceof ClassNotFoundException) {
                                throw (ClassNotFoundException)e;
                            }
                        }
                    } else {
                         session = store.load(id);
                    }
                } catch (ClassNotFoundException e) {
                    String msg = sm.getString(
                            "persistentManager.deserializeError", id);
                    log.error(msg, e);
                    throw new IllegalStateException(msg, e);
                }

                // An expired session found in the Store is discarded there too.
                if (session != null && !session.isValid()) {
                    log.error(sm.getString(
                            "persistentManager.swapInInvalid", id));
                    session.expire();
                    removeSession(id);
                    session = null;
                }

                if (session != null) {
                    if(log.isDebugEnabled())
                        log.debug(sm.getString("persistentManager.swapIn", id));

                    session.setManager(this);
                    // make sure the listeners know about it.
                    ((StandardSession)session).tellNew();
                    add(session);
                    ((StandardSession)session).activate();
                    // endAccess() to ensure timeouts happen correctly.
                    // access() to keep access count correct or it will end up
                    // negative
                    session.access();
                    session.endAccess();
                }
            }
        }

        // Make sure the lock is removed
        synchronized (this) {
            sessionSwapInLocks.remove(id);
        }

        return (session);
    }

    /**
     * Remove the session from the Manager's list of active
     * sessions and write it out to the Store. If the session
     * is past its expiration or invalid, this method does
     * nothing.
     *
     * @param session The Session to write out.
     */
    protected void swapOut(Session session) throws IOException {

        if (store == null || !session.isValid()) {
            return;
        }

        ((StandardSession)session).passivate();
        writeSession(session);
        super.remove(session, true);
        session.recycle();
    }

    /**
     * Write the provided session to the Store without modifying
     * the copy in memory or triggering passivation events. Does
     * nothing if the session is invalid or past its expiration.
     */
    protected void writeSession(Session session) throws IOException {

        if (store == null || !session.isValid()) {
            return;
        }

        try {
            if (SecurityUtil.isPackageProtectionEnabled()){
                try{
                    AccessController.doPrivileged(new PrivilegedStoreSave(session));
                }catch(PrivilegedActionException ex){
                    Exception exception = ex.getException();
                    if (exception instanceof IOException) {
                        throw (IOException) exception;
                    }
                    log.error("Exception in the Store during writeSession: "
                              + exception, exception);
                }
            } else {
                 store.save(session);
            }
        } catch (IOException e) {
            log.error(sm.getString
                ("persistentManager.serializeError", session.getIdInternal(), e));
            throw e;
        }
    }

    /**
     * Start this component and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#startInternal()}.
     *
     * @exception LifecycleException if this component detects a fatal error
     *  that prevents this component from being used
     */
    @Override
    protected synchronized void startInternal() throws LifecycleException {

        super.startInternal();

        if (store == null)
            log.error("No Store configured, persistence disabled");
        else if (store instanceof Lifecycle)
            ((Lifecycle)store).start();

        setState(LifecycleState.STARTING);
    }

    /**
     * Stop this component and implement the requirements
     * of {@link org.apache.catalina.util.LifecycleBase#stopInternal()}.
     *
     * @exception LifecycleException if this component detects a fatal error
     *  that prevents this component from being used
     */
    @Override
    protected synchronized void stopInternal() throws LifecycleException {

        if (log.isDebugEnabled())
            log.debug("Stopping");

        setState(LifecycleState.STOPPING);

        if (getStore() != null && saveOnRestart) {
            unload();
        } else {
            // Expire all active sessions
            Session sessions[] = findSessions();
            for (int i = 0; i < sessions.length; i++) {
                StandardSession session = (StandardSession) sessions[i];
                if (!session.isValid())
                    continue;
                session.expire();
            }
        }

        if (getStore() != null && getStore() instanceof Lifecycle)
            ((Lifecycle)getStore()).stop();

        // Require a new random number generator if we are restarted
        super.stopInternal();
    }

    // ------------------------------------------------------ Protected Methods

    /**
     * Swap idle sessions out to Store if they are idle too long.
     */
    protected void processMaxIdleSwaps() {

        if (!getState().isAvailable() || maxIdleSwap < 0)
            return;

        Session sessions[] = findSessions();

        // Swap out all sessions idle longer than maxIdleSwap
        if (maxIdleSwap >= 0) {
            for (int i = 0; i < sessions.length; i++) {
                StandardSession session = (StandardSession) sessions[i];
                synchronized (session) {
                    if (!session.isValid())
                        continue;
                    int timeIdle = (int) (session.getIdleTime() / 1000L);
                    if (timeIdle > maxIdleSwap && timeIdle > minIdleSwap) {
                        if (session.accessCount != null &&
                                session.accessCount.get() > 0) {
                            // Session is currently being accessed - skip it
                            continue;
                        }
                        if (log.isDebugEnabled())
                            log.debug(sm.getString
                                ("persistentManager.swapMaxIdle",
                                 session.getIdInternal(),
                                 Integer.valueOf(timeIdle)));
                        try {
                            swapOut(session);
                        } catch (IOException e) {
                            // This is logged in writeSession()
                        }
                    }
                }
            }
        }
    }

    /**
     * Swap idle sessions out to Store if too many are active
     */
    protected void processMaxActiveSwaps() {

        if (!getState().isAvailable() || getMaxActiveSessions() < 0)
            return;

        Session sessions[] = findSessions();

        // FIXME: Smarter algorithm (LRU)
        if (getMaxActiveSessions() >= sessions.length)
            return;

        if(log.isDebugEnabled())
            log.debug(sm.getString
                ("persistentManager.tooManyActive",
                 Integer.valueOf(sessions.length)));

        int toswap = sessions.length - getMaxActiveSessions();

        for (int i = 0; i < sessions.length && toswap > 0; i++) {
            StandardSession session =  (StandardSession) sessions[i];
            synchronized (session) {
                int timeIdle = (int) (session.getIdleTime() / 1000L);
                if (timeIdle > minIdleSwap) {
                    if (session.accessCount != null &&
                            session.accessCount.get() > 0) {
                        // Session is currently being accessed - skip it
                        continue;
                    }
                    if(log.isDebugEnabled())
                        log.debug(sm.getString
                            ("persistentManager.swapTooManyActive",
                             session.getIdInternal(),
                             Integer.valueOf(timeIdle)));
                    try {
                        swapOut(session);
                    } catch (IOException e) {
                        // This is logged in writeSession()
                    }
                    toswap--;
                }
            }
        }
    }

    /**
     * Back up idle sessions.
     */
    protected void processMaxIdleBackups() {

        if (!getState().isAvailable() || maxIdleBackup < 0)
            return;

        Session sessions[] = findSessions();

        // Back up all sessions idle longer than maxIdleBackup
        if (maxIdleBackup >= 0) {
            for (int i = 0; i < sessions.length; i++) {
                StandardSession session = (StandardSession) sessions[i];
                synchronized (session) {
                    if (!session.isValid())
                        continue;
                    int timeIdle = (int) (session.getIdleTime() / 1000L);
                    if (timeIdle > maxIdleBackup) {
                        if (log.isDebugEnabled())
                            log.debug(sm.getString
                                ("persistentManager.backupMaxIdle",
                                session.getIdInternal(),
                                Integer.valueOf(timeIdle)));

                        try {
                            writeSession(session);
                        } catch (IOException e) {
                            // This is logged in writeSession()
                        }
                    }
                }
            }
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.processing.store.writer.v3; import java.io.IOException; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import org.apache.carbondata.common.logging.LogService; import org.apache.carbondata.common.logging.LogServiceFactory; import org.apache.carbondata.core.constants.CarbonCommonConstants; import org.apache.carbondata.core.constants.CarbonV3DataFormatConstants; import org.apache.carbondata.core.datastore.blocklet.BlockletEncodedColumnPage; import org.apache.carbondata.core.datastore.blocklet.EncodedBlocklet; import org.apache.carbondata.core.datastore.exception.CarbonDataWriterException; import org.apache.carbondata.core.datastore.page.encoding.EncodedColumnPage; import org.apache.carbondata.core.metadata.blocklet.BlockletInfo; import org.apache.carbondata.core.metadata.blocklet.index.BlockletBTreeIndex; import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex; import org.apache.carbondata.core.metadata.index.BlockIndexInfo; import org.apache.carbondata.core.util.CarbonMetadataUtil; import org.apache.carbondata.core.util.CarbonProperties; import org.apache.carbondata.core.util.CarbonUtil; import 
org.apache.carbondata.core.util.DataFileFooterConverterV3;
import org.apache.carbondata.format.BlockletInfo3;
import org.apache.carbondata.format.FileFooter3;
import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
import org.apache.carbondata.processing.store.TablePage;
import org.apache.carbondata.processing.store.writer.AbstractFactDataWriter;

/**
 * Below class will be used to write the data in V3 format
 * <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
 * <Column2 Data ChunkV3><Column2<Page1><Page2><Page3><Page4>>
 * <Column3 Data ChunkV3><Column3<Page1><Page2><Page3><Page4>>
 * <Column4 Data ChunkV3><Column4<Page1><Page2><Page3><Page4>>
 */
public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {

  private static final LogService LOGGER =
      LogServiceFactory.getLogService(CarbonFactDataWriterImplV3.class.getName());

  /**
   * persist the page data to be written in the file
   */
  private BlockletDataHolder blockletDataHolder;

  /**
   * Threshold of blocklet size in MB
   */
  private long blockletSizeThreshold;

  /**
   * Creates a V3 writer for the given model. The configured blocklet size (MB) is
   * converted to bytes and capped at the carbon data file size, so a blocklet can
   * never span beyond a single file.
   *
   * @param model fact data handler model carrying table/schema configuration
   */
  public CarbonFactDataWriterImplV3(CarbonFactDataHandlerModel model) {
    super(model);
    // property value is in MB; multiply twice by 1024 to get bytes
    blockletSizeThreshold = Long.parseLong(CarbonProperties.getInstance()
        .getProperty(CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB,
            CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB_DEFAULT_VALUE))
        * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR
        * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR;
    if (blockletSizeThreshold > fileSizeInBytes) {
      // a blocklet may not be larger than the file it lives in
      blockletSizeThreshold = fileSizeInBytes;
      LOGGER.info("Blocklet size configure for table is: " + blockletSizeThreshold);
    }
    blockletDataHolder = new BlockletDataHolder(fallbackExecutorService);
  }

  /**
   * Serializes the thrift file footer (blocklet metadata + index) and appends it to
   * the data file, followed by an 8-byte long holding the footer's start offset so
   * readers can locate the footer from the end of the file.
   *
   * @throws CarbonDataWriterException wraps any IOException raised while writing
   */
  @Override protected void writeBlockletInfoToFile()
      throws CarbonDataWriterException {
    try {
      // get the current file position
      long currentPosition = currentOffsetInFile;
      // get thrift file footer instance
      FileFooter3 convertFileMeta = CarbonMetadataUtil
          .convertFileFooterVersion3(blockletMetadata, blockletIndex, localCardinality,
              thriftColumnSchemaList.size());
      // fill the carbon index details
      fillBlockIndexInfoDetails(convertFileMeta.getNum_rows(), carbonDataFileName,
          currentPosition);
      // write the footer
      byte[] byteArray = CarbonUtil.getByteArray(convertFileMeta);
      ByteBuffer buffer =
          ByteBuffer.allocate(byteArray.length + CarbonCommonConstants.LONG_SIZE_IN_BYTE);
      buffer.put(byteArray);
      // trailing long records where the footer starts in this file
      buffer.putLong(currentPosition);
      buffer.flip();
      currentOffsetInFile += fileChannel.write(buffer);
    } catch (IOException e) {
      LOGGER.error(e, "Problem while writing the carbon file");
      throw new CarbonDataWriterException("Problem while writing the carbon file: ", e);
    }
  }

  /**
   * Below method will be used to write one table page data, invoked by Consumer.
   * Non-last pages are buffered in {@link #blockletDataHolder}; when adding a page
   * would reach the blocklet size threshold the accumulated blocklet is flushed to
   * file first. The last page forces a flush of whatever has been accumulated.
   *
   * @param tablePage page of encoded table data to add
   */
  @Override public void writeTablePage(TablePage tablePage)
      throws CarbonDataWriterException, IOException {
    // condition for writting all the pages
    if (!tablePage.isLastPage()) {
      boolean isAdded = false;
      // check if size more than blocklet size then write the page to file
      if (blockletDataHolder.getSize() + tablePage.getEncodedTablePage().getEncodedSize()
          >= blockletSizeThreshold) {
        // if blocklet size exceeds threshold, write blocklet data
        if (blockletDataHolder.getNumberOfPagesAdded() == 0) {
          // holder is empty: this single page already exceeds the threshold,
          // so it must go into the blocklet before flushing
          isAdded = true;
          addPageData(tablePage);
        }
        LOGGER.info("Number of Pages for blocklet is: "
            + blockletDataHolder.getNumberOfPagesAdded()
            + " :Rows Added: " + blockletDataHolder.getTotalRows());
        // write the data
        writeBlockletToFile();
      }
      if (!isAdded) {
        addPageData(tablePage);
      }
    } else {
      //for last blocklet check if the last page will exceed the blocklet size then write
      // existing pages and then last page
      if (tablePage.getPageSize() > 0) {
        addPageData(tablePage);
      }
      if (blockletDataHolder.getNumberOfPagesAdded() > 0) {
        LOGGER.info("Number of Pages for blocklet is: "
            + blockletDataHolder.getNumberOfPagesAdded()
            + " :Rows Added: " + blockletDataHolder.getTotalRows());
        writeBlockletToFile();
      }
    }
  }

  /**
   * Adds one page to the current blocklet holder and, when a page-event listener is
   * registered for this table (matched by database + table name, case-insensitive),
   * fires onBlockletStart (for the first page) and onPageAdded callbacks.
   */
  private void addPageData(TablePage tablePage) throws IOException {
    blockletDataHolder.addPage(tablePage);
    if (listener != null) {
      if (model.getDatabaseName().equalsIgnoreCase(listener.getTblIdentifier().getDatabaseName())
          && model.getTableName().equalsIgnoreCase(listener.getTblIdentifier().getTableName())) {
        if (pageId == 0) {
          listener.onBlockletStart(blockletId);
        }
        listener.onPageAdded(blockletId, pageId++, tablePage);
      }
    }
  }

  /**
   * Write the collect blocklet data (blockletDataHolder) to file
   */
  private void writeBlockletToFile() {
    // get the list of all encoded table page
    EncodedBlocklet encodedBlocklet = blockletDataHolder.getEncodedBlocklet();
    int numDimensions = encodedBlocklet.getNumberOfDimension();
    int numMeasures = encodedBlocklet.getNumberOfMeasure();
    // get data chunks for all the column
    byte[][] dataChunkBytes = new byte[numDimensions + numMeasures][];
    long metadataSize = fillDataChunk(encodedBlocklet, dataChunkBytes);
    // calculate the total size of data to be written
    long blockletSize = blockletDataHolder.getSize() + metadataSize;
    // to check if data size will exceed the block size then create a new file
    createNewFileIfReachThreshold(blockletSize);
    // write data to file
    try {
      if (currentOffsetInFile == 0) {
        // write the header if file is empty
        writeHeaderToFile();
      }
      writeBlockletToFile(dataChunkBytes);
      if (listener != null) {
        if (model.getDatabaseName().equalsIgnoreCase(listener.getTblIdentifier().getDatabaseName())
            && model.getTableName().equalsIgnoreCase(listener.getTblIdentifier().getTableName())) {
          listener.onBlockletEnd(blockletId++);
        }
      }
      // reset page counter for the next blocklet
      pageId = 0;
    } catch (IOException e) {
      LOGGER.error(e, "Problem while writing file");
      throw new CarbonDataWriterException("Problem while writing file", e);
    } finally {
      // clear the data holder
      blockletDataHolder.clear();
    }
  }

  /**
   * Fill dataChunkBytes and return total size of page metadata.
   * Dimension chunks occupy indexes [0, numDimensions); measure chunks follow them.
   */
  private long fillDataChunk(EncodedBlocklet encodedBlocklet, byte[][] dataChunkBytes) {
    int size = 0;
    int numDimensions = encodedBlocklet.getNumberOfDimension();
    int numMeasures = encodedBlocklet.getNumberOfMeasure();
    // measure chunks are stored after all dimension chunks
    int measureStartIndex = numDimensions;
    // calculate the size of data chunks
    for (int i = 0; i < numDimensions; i++) {
      dataChunkBytes[i] =
          CarbonUtil.getByteArray(CarbonMetadataUtil.getDimensionDataChunk3(encodedBlocklet, i));
      size += dataChunkBytes[i].length;
    }
    for (int i = 0; i < numMeasures; i++) {
      dataChunkBytes[measureStartIndex] =
          CarbonUtil.getByteArray(CarbonMetadataUtil.getMeasureDataChunk3(encodedBlocklet, i));
      size += dataChunkBytes[measureStartIndex].length;
      measureStartIndex++;
    }
    return size;
  }

  /**
   * write file header
   */
  private void writeHeaderToFile() throws IOException {
    byte[] fileHeader = CarbonUtil.getByteArray(
        CarbonMetadataUtil.getFileHeader(
            true, thriftColumnSchemaList, model.getSchemaUpdatedTimeStamp()));
    ByteBuffer buffer = ByteBuffer.wrap(fileHeader);
    currentOffsetInFile += fileChannel.write(buffer);
  }

  /**
   * Write one blocklet data into file.
   * File format:
   * <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
   * <Column2 Data ChunkV3><Column2<Page1><Page2><Page3><Page4>>
   * <Column3 Data ChunkV3><Column3<Page1><Page2><Page3><Page4>>
   * <Column4 Data ChunkV3><Column4<Page1><Page2><Page3><Page4>>
   * Records the offset/length of every data chunk plus the dimension/measure split
   * offsets into a BlockletInfo3 and appends it to blockletMetadata.
   */
  private void writeBlockletToFile(byte[][] dataChunkBytes) throws IOException {
    long offset = currentOffsetInFile;
    // to maintain the offset of each data chunk in blocklet
    List<Long> currentDataChunksOffset = new ArrayList<>();
    // to maintain the length of each data chunk in blocklet
    List<Integer> currentDataChunksLength = new ArrayList<>();
    EncodedBlocklet encodedBlocklet = blockletDataHolder.getEncodedBlocklet();
    int numberOfDimension = encodedBlocklet.getNumberOfDimension();
    int numberOfMeasures = encodedBlocklet.getNumberOfMeasure();
    ByteBuffer buffer = null;
    long dimensionOffset = 0;
    long measureOffset = 0;
    // dimension columns: chunk header first, then every encoded page of the column
    for (int i = 0; i < numberOfDimension; i++) {
      currentDataChunksOffset.add(offset);
      currentDataChunksLength.add(dataChunkBytes[i].length);
      buffer = ByteBuffer.wrap(dataChunkBytes[i]);
      currentOffsetInFile += fileChannel.write(buffer);
      offset += dataChunkBytes[i].length;
      BlockletEncodedColumnPage blockletEncodedColumnPage =
          encodedBlocklet.getEncodedDimensionColumnPages().get(i);
      for (EncodedColumnPage dimensionPage : blockletEncodedColumnPage
          .getEncodedColumnPageList()) {
        buffer = dimensionPage.getEncodedData();
        // capture limit before write() advances the buffer position
        int bufferSize = buffer.limit();
        currentOffsetInFile += fileChannel.write(buffer);
        offset += bufferSize;
      }
    }
    // everything before this offset is dimension data
    dimensionOffset = offset;
    int dataChunkStartIndex = encodedBlocklet.getNumberOfDimension();
    // measure columns: same layout, chunks stored after the dimension chunks
    for (int i = 0; i < numberOfMeasures; i++) {
      currentDataChunksOffset.add(offset);
      currentDataChunksLength.add(dataChunkBytes[dataChunkStartIndex].length);
      buffer = ByteBuffer.wrap(dataChunkBytes[dataChunkStartIndex]);
      currentOffsetInFile += fileChannel.write(buffer);
      offset += dataChunkBytes[dataChunkStartIndex].length;
      dataChunkStartIndex++;
      BlockletEncodedColumnPage blockletEncodedColumnPage =
          encodedBlocklet.getEncodedMeasureColumnPages().get(i);
      for (EncodedColumnPage measurePage : blockletEncodedColumnPage
          .getEncodedColumnPageList()) {
        buffer = measurePage.getEncodedData();
        int bufferSize = buffer.limit();
        currentOffsetInFile += fileChannel.write(buffer);
        offset += bufferSize;
      }
    }
    measureOffset = offset;
    blockletIndex.add(
        CarbonMetadataUtil.getBlockletIndex(
            encodedBlocklet, model.getSegmentProperties().getMeasures()));
    BlockletInfo3 blockletInfo3 =
        new BlockletInfo3(encodedBlocklet.getBlockletSize(), currentDataChunksOffset,
            currentDataChunksLength, dimensionOffset, measureOffset,
            encodedBlocklet.getNumberOfPages());
    blockletMetadata.add(blockletInfo3);
  }

  /**
   * Below method will be used to fill the block info details
   *
   * @param numberOfRows number of rows in file
   * @param carbonDataFileName The name of carbonData file
   * @param currentPosition current offset
   */
  @Override protected void fillBlockIndexInfoDetails(long numberOfRows,
      String carbonDataFileName, long currentPosition) {
    int i = 0;
    DataFileFooterConverterV3 converterV3 = new DataFileFooterConverterV3();
    // one BlockIndexInfo per blocklet; blockletIndex and blockletMetadata are parallel lists
    for (org.apache.carbondata.format.BlockletIndex index : blockletIndex) {
      BlockletInfo3 blockletInfo3 = blockletMetadata.get(i);
      BlockletInfo blockletInfo = converterV3.getBlockletInfo(blockletInfo3,
          model.getSegmentProperties().getDimensions().size());
      BlockletBTreeIndex bTreeIndex = new BlockletBTreeIndex(index.b_tree_index.getStart_key(),
          index.b_tree_index.getEnd_key());
      BlockletMinMaxIndex minMaxIndex = new BlockletMinMaxIndex();
      minMaxIndex.setMinValues(toByteArray(index.getMin_max_index().getMin_values()));
      minMaxIndex.setMaxValues(toByteArray(index.getMin_max_index().getMax_values()));
      // fully-qualified: the thrift BlockletIndex type is also imported in this loop
      org.apache.carbondata.core.metadata.blocklet.index.BlockletIndex bIndex =
          new org.apache.carbondata.core.metadata.blocklet.index.BlockletIndex(bTreeIndex,
              minMaxIndex);
      BlockIndexInfo biInfo =
          new BlockIndexInfo(numberOfRows, carbonDataFileName, currentPosition, bIndex,
              blockletInfo);
      blockIndexInfoList.add(biInfo);
      i++;
    }
  }

  // Unwraps a list of ByteBuffers into their backing byte arrays.
  private byte[][] toByteArray(List<ByteBuffer> buffers) {
    byte[][] arrays = new byte[buffers.size()][];
    for (int i = 0; i < arrays.length; i++) {
      arrays[i] = buffers.get(i).array();
    }
    return arrays;
  }

  /**
   * Method will be used to close the open file channel
   *
   * @throws CarbonDataWriterException
   */
  public void closeWriter() throws CarbonDataWriterException {
    commitCurrentFile(true);
    try {
      writeIndexFile();
    } catch (IOException e) {
      LOGGER.error(e, "Problem while writing the index file");
      throw new CarbonDataWriterException("Problem while writing the index file", e);
    }
    closeExecutorService();
  }

  // Footer is only written when at least one blocklet was produced.
  @Override public void writeFooterToFile() throws CarbonDataWriterException {
    if (this.blockletMetadata.size() > 0) {
      writeBlockletInfoToFile();
    }
  }
}
/*
 *  Licensed to GraphHopper GmbH under one or more contributor
 *  license agreements. See the NOTICE file distributed with this work for
 *  additional information regarding copyright ownership.
 *
 *  GraphHopper GmbH licenses this file to you under the Apache License,
 *  Version 2.0 (the "License"); you may not use this file except in
 *  compliance with the License. You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */
package com.graphhopper.routing.ch;

import com.graphhopper.routing.Dijkstra;
import com.graphhopper.routing.DijkstraBidirectionCH;
import com.graphhopper.routing.Path;
import com.graphhopper.routing.ev.BooleanEncodedValue;
import com.graphhopper.routing.ev.EncodedValue;
import com.graphhopper.routing.ev.SimpleBooleanEncodedValue;
import com.graphhopper.routing.util.AllCHEdgesIterator;
import com.graphhopper.routing.util.CarFlagEncoder;
import com.graphhopper.routing.util.EncodingManager;
import com.graphhopper.routing.util.TraversalMode;
import com.graphhopper.routing.weighting.FastestWeighting;
import com.graphhopper.routing.weighting.ShortestWeighting;
import com.graphhopper.routing.weighting.Weighting;
import com.graphhopper.storage.*;
import com.graphhopper.util.CHEdgeIteratorState;
import com.graphhopper.util.EdgeIteratorState;
import com.graphhopper.util.PMap;
import org.junit.Test;

import java.util.Arrays;
import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

import static org.junit.Assert.*;

/**
 * Unit tests for node-based CH node contraction: verifies which shortcuts are
 * (and are not) created when contracting nodes of small hand-built graphs.
 */
public class NodeBasedNodeContractorTest {
    // TODO integrate this into CHGraphImpl somehow
    // shared "shortcut access" encoded value used to read shortcut direction flags
    public final static BooleanEncodedValue SC_ACCESS = new SimpleBooleanEncodedValue("sc_access", true);

    static {
        SC_ACCESS.init(new EncodedValue.InitializerConfig());
    }

    // default test fixture: car encoder + shortest-path weighting on a fresh CH graph
    private final CarFlagEncoder encoder = new CarFlagEncoder();
    private final EncodingManager encodingManager = EncodingManager.create(encoder);
    private final Weighting weighting = new ShortestWeighting(encoder);
    private final GraphHopperStorage graph = new GraphBuilder(encodingManager).setCHConfigs(CHConfig.nodeBased("profile", weighting)).create();
    private final CHGraph lg = graph.getCHGraph();
    private final PrepareCHGraph pg = PrepareCHGraph.nodeBased(lg, weighting);

    // Builds a contractor for the default prepare graph.
    private NodeContractor createNodeContractor() {
        return createNodeContractor(pg);
    }

    // Builds and initializes a contractor for the given prepare graph.
    private NodeContractor createNodeContractor(PrepareCHGraph chGraph) {
        NodeContractor nodeContractor = new NodeBasedNodeContractor(chGraph, new PMap());
        nodeContractor.initFromGraph();
        nodeContractor.prepareContraction();
        return nodeContractor;
    }

    // Small bidirectional example graph used by the witness-path-searcher tests.
    private void createExampleGraph() {
        //5-1-----2
        //   \ __/|
        //    0   |
        //   /    |
        //  4-----3
        //
        graph.edge(0, 1, 1, true);
        graph.edge(0, 2, 1, true);
        graph.edge(0, 4, 3, true);
        graph.edge(1, 2, 3, true);
        graph.edge(2, 3, 1, true);
        graph.edge(4, 3, 2, true);
        graph.edge(5, 1, 2, true);
        graph.freeze();
    }

    @Test
    public void testShortestPathSkipNode() {
        createExampleGraph();
        final double normalDist = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED).calcPath(4, 2).getDistance();
        NodeBasedWitnessPathSearcher algo = new NodeBasedWitnessPathSearcher(pg);
        setMaxLevelOnAllNodes();

        // ignoring node 3 must force a detour, so the witness weight exceeds the normal distance
        algo.ignoreNode(3);
        algo.setWeightLimit(100);
        int nodeEntry = algo.findEndNode(4, 2);
        assertTrue(algo.getWeight(nodeEntry) > normalDist);

        algo.clear();
        // with only one visited node allowed the target cannot be reached
        algo.setMaxVisitedNodes(1);
        nodeEntry = algo.findEndNode(4, 2);
        assertEquals(-1, nodeEntry);
    }

    @Test
    public void testShortestPathSkipNode2() {
        createExampleGraph();
        final double normalDist = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED).calcPath(4, 2).getDistance();
        assertEquals(3, normalDist, 1e-5);
        NodeBasedWitnessPathSearcher algo = new NodeBasedWitnessPathSearcher(pg);
        setMaxLevelOnAllNodes();

        // detour around node 3 (4-0-2 / 4-0-1) has weight 4
        algo.ignoreNode(3);
        algo.setWeightLimit(10);
        int nodeEntry = algo.findEndNode(4, 2);
        assertEquals(4, algo.getWeight(nodeEntry), 1e-5);

        nodeEntry = algo.findEndNode(4, 1);
        assertEquals(4, algo.getWeight(nodeEntry), 1e-5);
    }

    @Test
    public void testShortestPathLimit() {
        createExampleGraph();
        NodeBasedWitnessPathSearcher algo = new NodeBasedWitnessPathSearcher(pg);
        setMaxLevelOnAllNodes();

        algo.ignoreNode(0);
        // weight limit 2 is too small to reach node 1 while avoiding node 0
        algo.setWeightLimit(2);
        int endNode = algo.findEndNode(4, 1);
        // did not reach endNode
        assertNotEquals(1, endNode);
    }

    @Test
    public void testDirectedGraph() {
        //5 6 7
        // \|/
        //4-3_1<-\ 10
        //     \_|/
        //   0___2_11

        graph.edge(0, 2, 2, true);
        graph.edge(10, 2, 2, true);
        graph.edge(11, 2, 2, true);
        // create a longer one directional edge => no longish one-dir shortcut should be created
        final EdgeIteratorState edge2to1bidirected = graph.edge(2, 1, 2, true);
        final EdgeIteratorState edge2to1directed = graph.edge(2, 1, 10, false);
        final EdgeIteratorState edge1to3 = graph.edge(1, 3, 2, true);
        graph.edge(3, 4, 2, true);
        graph.edge(3, 5, 2, true);
        graph.edge(3, 6, 2, true);
        graph.edge(3, 7, 2, true);
        graph.freeze();
        setMaxLevelOnAllNodes();

        // find all shortcuts if we contract node 1
        NodeContractor nodeContractor = createNodeContractor();
        nodeContractor.contractNode(1);
        checkShortcuts(
                expectedShortcut(3, 2, edge1to3, edge2to1bidirected, true, true),
                expectedShortcut(2, 3, edge2to1directed, edge1to3, true, false)
        );
    }

    @Test
    public void testFindShortcuts_Roundabout() {
        // 1 -- 3 -- 4 ---> 5 ---> 6 -- 7
        //            \              /
        //             <--- 8 <---
        final EdgeIteratorState iter1to3 = graph.edge(1, 3, 1, true);
        final EdgeIteratorState iter3to4 = graph.edge(3, 4, 1, true);
        final EdgeIteratorState iter4to5 = graph.edge(4, 5, 1, false);
        final EdgeIteratorState iter5to6 = graph.edge(5, 6, 1, false);
        final EdgeIteratorState iter6to8 = graph.edge(6, 8, 2, false);
        final EdgeIteratorState iter8to4 = graph.edge(8, 4, 1, false);
        graph.edge(6, 7, 1, true);
        graph.freeze();

        // manually insert the shortcuts that contracting nodes 3, 5 and 8 would create
        int sc1to4 = lg.shortcut(1, 4, PrepareEncoder.getScDirMask(), 2, iter1to3.getEdge(), iter3to4.getEdge());
        int sc4to6 = lg.shortcut(4, 6, PrepareEncoder.getScFwdDir(), 2, iter4to5.getEdge(), iter5to6.getEdge());
        int sc6to4 = lg.shortcut(6, 4, PrepareEncoder.getScFwdDir(), 3, iter6to8.getEdge(), iter8to4.getEdge());
        setMaxLevelOnAllNodes();
        lg.setLevel(3, 3);
        lg.setLevel(5, 5);
        lg.setLevel(7, 7);
        lg.setLevel(8, 8);

        Shortcut manualSc1 = expectedShortcut(1, 4, iter1to3, iter3to4, true, true);
        Shortcut manualSc2 = expectedShortcut(4, 6, iter4to5, iter5to6, true, false);
        Shortcut manualSc3 = expectedShortcut(6, 4, iter6to8, iter8to4, true, false);
        checkShortcuts(manualSc1, manualSc2, manualSc3);

        // after 'manual contraction' of nodes 3, 5, 8 the graph looks like:
        // 1 -- 4 -->-- 6 -- 7
        //       \      |
        //        --<----

        // contract node 4!
        NodeContractor nodeContractor = createNodeContractor();
        nodeContractor.contractNode(4);
        checkShortcuts(manualSc1, manualSc2, manualSc3,
                // there should be two different shortcuts for both directions!
                expectedShortcut(1, 6, lg.getEdgeIteratorState(sc1to4, 4), lg.getEdgeIteratorState(sc4to6, 6), true, false),
                expectedShortcut(6, 1, lg.getEdgeIteratorState(sc6to4, 4), lg.getEdgeIteratorState(sc1to4, 1), true, false)
        );
    }

    @Test
    public void testShortcutMergeBug() {
        // We refer to this real world situation http://www.openstreetmap.org/#map=19/52.71205/-1.77326
        // assume the following graph:
        //
        // ---1---->----2-----3
        //    \--------/
        //
        // where there are two roads from 1 to 2 and the directed road has a smaller weight
        // leading to two shortcuts sc1 (unidir) and sc2 (bidir) where the second should NOT be rejected due to the larger weight
        final EdgeIteratorState edge1to2bidirected = graph.edge(1, 2, 1, true);
        final EdgeIteratorState edge1to2directed = graph.edge(1, 2, 1, false);
        final EdgeIteratorState edge2to3 = graph.edge(2, 3, 1, true);
        graph.freeze();
        setMaxLevelOnAllNodes();
        NodeContractor nodeContractor = createNodeContractor();
        nodeContractor.contractNode(2);
        checkShortcuts(
                expectedShortcut(3, 1, edge2to3, edge1to2bidirected, true, false),
                expectedShortcut(1, 3, edge1to2directed, edge2to3, true, false)
        );
    }

    @Test
    public void testContractNode_directed_shortcutRequired() {
        // 0 --> 1 --> 2
        final EdgeIteratorState edge1 = graph.edge(0, 1, 1, false);
        final EdgeIteratorState edge2 = graph.edge(1, 2, 2, false);
        graph.freeze();
        setMaxLevelOnAllNodes();
        createNodeContractor().contractNode(1);
        checkShortcuts(expectedShortcut(0, 2, edge1, edge2, true, false));
    }

    @Test
    public void testContractNode_directed_shortcutRequired_reverse() {
        // 0 <-- 1 <-- 2
        final EdgeIteratorState edge1 = graph.edge(2, 1, 1, false);
        final EdgeIteratorState edge2 = graph.edge(1, 0, 2, false);
        graph.freeze();
        setMaxLevelOnAllNodes();
        createNodeContractor().contractNode(1);
        checkShortcuts(expectedShortcut(2, 0, edge1, edge2, true, false));
    }

    @Test
    public void testContractNode_bidirected_shortcutsRequired() {
        // 0 -- 1 -- 2
        final EdgeIteratorState edge1 = graph.edge(0, 1, 1, true);
        final EdgeIteratorState edge2 = graph.edge(1, 2, 2, true);
        graph.freeze();
        setMaxLevelOnAllNodes();
        createNodeContractor().contractNode(1);
        // a single bidirectional shortcut covers both directions
        checkShortcuts(expectedShortcut(2, 0, edge2, edge1, true, true));
    }

    @Test
    public void testContractNode_directed_withWitness() {
        // 0 --> 1 --> 2
        //  \_________/
        graph.edge(0, 1, 1, false);
        graph.edge(1, 2, 2, false);
        graph.edge(0, 2, 1, false);
        graph.freeze();
        setMaxLevelOnAllNodes();
        // the direct 0->2 edge is a witness, so no shortcut may be added
        createNodeContractor().contractNode(1);
        checkNoShortcuts();
    }

    @Test
    public void testNodeContraction_shortcutDistanceRounding() {
        assertTrue("this test was constructed assuming we are using the ShortestWeighting", weighting instanceof ShortestWeighting);
        // 0 ------------> 4
        //  \             /
        //   1 --> 2 --> 3
        double[] distances = {4.019, 1.006, 1.004, 1.006, 1.004};
        graph.edge(0, 4, distances[0], false);
        EdgeIteratorState edge1 = graph.edge(0, 1, distances[1], false);
        EdgeIteratorState edge2 = graph.edge(1, 2, distances[2], false);
        EdgeIteratorState edge3 = graph.edge(2, 3, distances[3], false);
        EdgeIteratorState edge4 = graph.edge(3, 4, distances[4], false);
        graph.freeze();
        setMaxLevelOnAllNodes();

        // make sure that distances do not get changed in storage (they might get truncated)
        AllCHEdgesIterator iter = lg.getAllEdges();
        double[] storedDistances = new double[iter.length()];
        int count = 0;
        while (iter.next()) {
            storedDistances[count++] = iter.getDistance();
        }
        assertArrayEquals(distances, storedDistances, 1.e-6);

        // perform CH contraction
        contractInOrder(1, 3, 2, 0, 4);

        // first we compare dijkstra with CH to make sure they produce the same results
        int from = 0;
        int to = 4;
        Dijkstra dikstra = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED);
        Path dijkstraPath = dikstra.calcPath(from, to);

        DijkstraBidirectionCH ch = new DijkstraBidirectionCH(new RoutingCHGraphImpl(lg, weighting));
        Path chPath = ch.calcPath(from, to);
        assertEquals(dijkstraPath.calcNodes(), chPath.calcNodes());
        assertEquals(dijkstraPath.getDistance(), chPath.getDistance(), 1.e-6);
        assertEquals(dijkstraPath.getWeight(), chPath.getWeight(), 1.e-6);

        // on a more detailed level we check that the right shortcuts were added
        // contracting nodes 1&3 will always introduce shortcuts, but contracting node 2 should not because going from
        // 0 to 4 directly via edge 4 is cheaper. however, if shortcut distances get truncated it appears as if going
        // via node 2 is better. here we check that this does not happen.
        checkShortcuts(
                expectedShortcut(0, 2, edge1, edge2, true, false),
                expectedShortcut(2, 4, edge3, edge4, true, false)
        );
    }

    /**
     * similar to the previous test, but using the fastest weighting
     */
    @Test
    public void testNodeContraction_shortcutWeightRounding() {
        // local fixture with FastestWeighting (shadows the class-level fields on purpose)
        CarFlagEncoder encoder = new CarFlagEncoder();
        EncodingManager encodingManager = EncodingManager.create(encoder);
        Weighting weighting = new FastestWeighting(encoder);
        GraphHopperStorage graph = new GraphBuilder(encodingManager).setCHConfigs(CHConfig.nodeBased("p1", weighting)).create();
        CHGraph lg = graph.getCHGraph();
        PrepareCHGraph pg = PrepareCHGraph.nodeBased(lg, weighting);
        // 0 ------------> 4
        //  \             /
        //   1 --> 2 --> 3
        // scale distances so the fastest weights mirror the distances of the previous test
        double fac = 60 / 3.6;
        double[] distances = {fac * 4.019, fac * 1.006, fac * 1.004, fac * 1.006, fac * 1.004};
        graph.edge(0, 4, distances[0], false);
        graph.edge(0, 1, distances[1], false);
        graph.edge(1, 2, distances[2], false);
        graph.edge(2, 3, distances[3], false);
        graph.edge(3, 4, distances[4], false);
        graph.freeze();
        setMaxLevelOnAllNodes(pg);

        // perform CH contraction
        contractInOrder(pg, 1, 3, 2, 0, 4);

        // first we compare dijkstra with CH to make sure they produce the same results
        int from = 0;
        int to = 4;
        Dijkstra dikstra = new Dijkstra(graph, weighting, TraversalMode.NODE_BASED);
        Path dijkstraPath = dikstra.calcPath(from, to);

        DijkstraBidirectionCH ch = new DijkstraBidirectionCH(new RoutingCHGraphImpl(lg, weighting));
        Path chPath = ch.calcPath(from, to);
        assertEquals(dijkstraPath.calcNodes(), chPath.calcNodes());
        assertEquals(dijkstraPath.getDistance(), chPath.getDistance(), 1.e-6);
        assertEquals(dijkstraPath.getWeight(), chPath.getWeight(), 1.e-6);
    }

    @Test
    public void testNodeContraction_preventUnnecessaryShortcutWithLoop() {
        // there should not be shortcuts where one of the skipped edges is a loop at the node to be contracted,
        // see also #1583
        CarFlagEncoder encoder = new CarFlagEncoder();
        EncodingManager encodingManager = EncodingManager.create(encoder);
        Weighting weighting = new FastestWeighting(encoder);
        GraphHopperStorage graph = new GraphBuilder(encodingManager).setCHConfigs(CHConfig.nodeBased("p1", weighting)).create();
        CHGraph lg = graph.getCHGraph();
        PrepareCHGraph pg = PrepareCHGraph.nodeBased(lg, weighting);
        // 0 - 1 - 2 - 3
        // o           o   (loop edges at nodes 0 and 3)
        graph.edge(0, 1, 1, true);
        graph.edge(1, 2, 1, true);
        graph.edge(2, 3, 1, true);
        graph.edge(0, 0, 1, true);
        graph.edge(3, 3, 1, true);
        graph.freeze();
        setMaxLevelOnAllNodes(pg);
        NodeContractor nodeContractor = createNodeContractor(pg);
        nodeContractor.contractNode(0);
        nodeContractor.contractNode(3);
        checkNoShortcuts(pg);
    }

    // Contracts nodes of the default prepare graph in the given order.
    private void contractInOrder(int... nodeIds) {
        contractInOrder(pg, nodeIds);
    }

    // Contracts the given nodes in order, assigning increasing levels as it goes.
    private void contractInOrder(PrepareCHGraph chGraph, int... nodeIds) {
        NodeContractor nodeContractor = createNodeContractor(chGraph);
        int level = 0;
        for (int n : nodeIds) {
            nodeContractor.contractNode(n);
            chGraph.setLevel(n, level);
            level++;
        }
    }

    /**
     * Queries the ch graph and checks if the graph's shortcuts match the given expected shortcuts.
     */
    private void checkShortcuts(Shortcut... expectedShortcuts) {
        checkShortcuts(pg, expectedShortcuts);
    }

    // Collects every shortcut edge of the given graph and compares the set against the expectation.
    private void checkShortcuts(PrepareCHGraph chGraph, Shortcut... expectedShortcuts) {
        Set<Shortcut> expected = setOf(expectedShortcuts);
        if (expected.size() != expectedShortcuts.length) {
            fail("was given duplicate shortcuts");
        }
        AllCHEdgesIterator iter = chGraph.getAllEdges();
        Set<Shortcut> given = new HashSet<>();
        while (iter.next()) {
            if (iter.isShortcut()) {
                given.add(new Shortcut(
                        iter.getBaseNode(), iter.getAdjNode(), iter.getWeight(),
                        iter.get(SC_ACCESS), iter.getReverse(SC_ACCESS),
                        iter.getSkippedEdge1(), iter.getSkippedEdge2()));
            }
        }
        assertEquals(expected, given);
    }

    private void checkNoShortcuts() {
        checkShortcuts(pg);
    }

    private void checkNoShortcuts(PrepareCHGraph chGraph) {
        checkShortcuts(chGraph);
    }

    // Builds the Shortcut we expect contraction to create for the two skipped edges.
    private Shortcut expectedShortcut(int baseNode, int adjNode, EdgeIteratorState edge1, EdgeIteratorState edge2,
                                      boolean fwd, boolean bwd) {
        //todo: weight calculation might have to be adjusted for different encoders/weightings/reverse speed
        double weight1 = getWeight(edge1);
        double weight2 = getWeight(edge2);
        return new Shortcut(baseNode, adjNode, weight1 + weight2, fwd, bwd, edge1.getEdge(), edge2.getEdge());
    }

    // Shortcut edges carry their own weight; plain edges are weighted via the weighting.
    private double getWeight(EdgeIteratorState edge) {
        if (edge instanceof CHEdgeIteratorState) {
            return ((CHEdgeIteratorState) edge).getWeight();
        } else {
            return weighting.calcEdgeWeight(edge, false);
        }
    }

    private Set<Shortcut> setOf(Shortcut... shortcuts) {
        return new HashSet<>(Arrays.asList(shortcuts));
    }

    private void setMaxLevelOnAllNodes() {
        setMaxLevelOnAllNodes(pg);
    }

    // Marks every node as "not yet contracted" by assigning the maximum level.
    private void setMaxLevelOnAllNodes(PrepareCHGraph chGraph) {
        int nodes = chGraph.getNodes();
        for (int node = 0; node < nodes; node++) {
            chGraph.setLevel(node, nodes);
        }
    }

    /**
     * Value object describing one expected shortcut: endpoints, weight, direction
     * flags and the two skipped edge ids. equals/hashCode make it usable in sets.
     */
    private static class Shortcut {
        int baseNode;
        int adjNode;
        double weight;
        boolean fwd;
        boolean bwd;
        int skipEdge1;
        int skipEdge2;

        Shortcut(int baseNode, int adjNode, double weight, boolean fwd, boolean bwd, int skipEdge1, int skipEdge2) {
            this.baseNode = baseNode;
            this.adjNode = adjNode;
            this.weight = weight;
            this.fwd = fwd;
            this.bwd = bwd;
            this.skipEdge1 = skipEdge1;
            this.skipEdge2 = skipEdge2;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null || getClass() != obj.getClass()) return false;
            Shortcut shortcut = (Shortcut) obj;
            return baseNode == shortcut.baseNode &&
                    adjNode == shortcut.adjNode &&
                    Double.compare(shortcut.weight, weight) == 0 &&
                    fwd == shortcut.fwd &&
                    bwd == shortcut.bwd &&
                    skipEdge1 == shortcut.skipEdge1 &&
                    skipEdge2 == shortcut.skipEdge2;
        }

        @Override
        public int hashCode() {
            return Objects.hash(baseNode, adjNode, weight, fwd, bwd, skipEdge1, skipEdge2);
        }

        @Override
        public String toString() {
            return "Shortcut{" +
                    "baseNode=" + baseNode +
                    ", adjNode=" + adjNode +
                    ", weight=" + weight +
                    ", fwd=" + fwd +
                    ", bwd=" + bwd +
                    ", skipEdge1=" + skipEdge1 +
                    ", skipEdge2=" + skipEdge2 +
                    '}';
        }
    }
}
/** * Generated with Acceleo */ package org.wso2.developerstudio.eclipse.gmf.esb.parts.impl; // Start of user code for imports import java.util.ArrayList; import java.util.List; import org.eclipse.emf.ecore.EObject; import org.eclipse.emf.eef.runtime.api.component.IPropertiesEditionComponent; import org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent; import org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart; import org.eclipse.emf.eef.runtime.impl.notify.PropertiesEditionEvent; import org.eclipse.emf.eef.runtime.impl.parts.CompositePropertiesEditionPart; import org.eclipse.emf.eef.runtime.ui.parts.PartComposer; import org.eclipse.emf.eef.runtime.ui.parts.sequence.BindingCompositionSequence; import org.eclipse.emf.eef.runtime.ui.parts.sequence.CompositionSequence; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable; import org.eclipse.emf.eef.runtime.ui.widgets.ReferencesTable.ReferencesTableListener; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableContentProvider; import org.eclipse.emf.eef.runtime.ui.widgets.referencestable.ReferencesTableSettings; import org.eclipse.jface.viewers.ViewerFilter; import org.eclipse.swt.SWT; import org.eclipse.swt.events.SelectionAdapter; import org.eclipse.swt.events.SelectionEvent; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.Composite; import org.eclipse.swt.widgets.Group; import org.wso2.developerstudio.eclipse.gmf.esb.parts.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart; import org.wso2.developerstudio.eclipse.gmf.esb.parts.EsbViewsRepository; import org.wso2.developerstudio.eclipse.gmf.esb.providers.EsbMessages; // End of user code /** * * */ public class EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl extends CompositePropertiesEditionPart implements ISWTPropertiesEditionPart, EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart { protected 
// EEF-generated SWT properties-edition part for the EntitlementMediator obligations
// output connector. Only the "commentMediators" reference table is editable here.
// NOTE(review): this looks like EEF-generated code — the "Start/End of user code"
// markers delimit the only regions safe to hand-edit on regeneration.

// Table widget backing the comment-mediators EReference; created lazily in
// createCommentMediatorsAdvancedTableComposition.
ReferencesTable commentMediators;
// Filters applied to the table's viewer. Business filters are recorded but, per the
// visible code, never installed on the widget; viewer filters are installed eagerly.
protected List<ViewerFilter> commentMediatorsBusinessFilters = new ArrayList<ViewerFilter>();
protected List<ViewerFilter> commentMediatorsFilters = new ArrayList<ViewerFilter>();

/**
 * Default constructor
 * @param editionComponent the {@link IPropertiesEditionComponent} that manage this part
 */
public EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl(IPropertiesEditionComponent editionComponent) {
    super(editionComponent);
}

/**
 * Creates the root composite for this part with a 3-column grid and builds its controls.
 *
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
 *      createFigure(org.eclipse.swt.widgets.Composite)
 */
public Composite createFigure(final Composite parent) {
    view = new Composite(parent, SWT.NONE);
    GridLayout layout = new GridLayout();
    layout.numColumns = 3;
    view.setLayout(layout);
    createControls(view);
    return view;
}

/**
 * Declares the composition sequence (Properties group, then the commentMediators table)
 * and composes it into the given view via a PartComposer that maps each step key to its
 * creation method.
 *
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.ISWTPropertiesEditionPart#
 *      createControls(org.eclipse.swt.widgets.Composite)
 */
public void createControls(Composite view) {
    CompositionSequence entitlementMediatorObligationsOutputConnectorStep = new BindingCompositionSequence(propertiesEditionComponent);
    entitlementMediatorObligationsOutputConnectorStep
            .addStep(EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.class)
            .addStep(EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators);
    composer = new PartComposer(entitlementMediatorObligationsOutputConnectorStep) {
        @Override
        public Composite addToPart(Composite parent, Object key) {
            // Identity comparison against the repository keys is the EEF convention here.
            if (key == EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.class) {
                return createPropertiesGroup(parent);
            }
            if (key == EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators) {
                return createCommentMediatorsAdvancedTableComposition(parent);
            }
            return parent;
        }
    };
    composer.compose(view);
}

/**
 * Creates the titled "Properties" group that spans all three grid columns and itself
 * lays children out in three columns.
 */
protected Composite createPropertiesGroup(Composite parent) {
    Group propertiesGroup = new Group(parent, SWT.NONE);
    propertiesGroup.setText(EsbMessages.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart_PropertiesGroupLabel);
    GridData propertiesGroupData = new GridData(GridData.FILL_HORIZONTAL);
    propertiesGroupData.horizontalSpan = 3;
    propertiesGroup.setLayoutData(propertiesGroupData);
    GridLayout propertiesGroupLayout = new GridLayout();
    propertiesGroupLayout.numColumns = 3;
    propertiesGroup.setLayout(propertiesGroupLayout);
    return propertiesGroup;
}

/**
 * Builds the advanced table composition for the commentMediators reference. Each table
 * action (add/edit/move/remove) fires a COMMIT PropertiesEditionEvent to the edition
 * component and then refreshes the table; row selection fires a CHANGE event.
 *
 * @param parent the container composite
 */
protected Composite createCommentMediatorsAdvancedTableComposition(Composite parent) {
    this.commentMediators = new ReferencesTable(getDescription(EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators, EsbMessages.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart_CommentMediatorsLabel), new ReferencesTableListener() {
        public void handleAdd() {
            propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl.this,
                    EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators,
                    PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.ADD, null, null));
            commentMediators.refresh();
        }
        public void handleEdit(EObject element) {
            propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl.this,
                    EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators,
                    PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.EDIT, null, element));
            commentMediators.refresh();
        }
        public void handleMove(EObject element, int oldIndex, int newIndex) {
            // The moved element and its new index are passed in the event's value slots.
            propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl.this,
                    EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators,
                    PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.MOVE, element, newIndex));
            commentMediators.refresh();
        }
        public void handleRemove(EObject element) {
            propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl.this,
                    EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators,
                    PropertiesEditionEvent.COMMIT, PropertiesEditionEvent.REMOVE, null, element));
            commentMediators.refresh();
        }
        // Navigation is intentionally a no-op for this part.
        public void navigateTo(EObject element) { }
    });
    // Install any filters registered before the widget existed.
    for (ViewerFilter filter : this.commentMediatorsFilters) {
        this.commentMediators.addFilter(filter);
    }
    this.commentMediators.setHelpText(propertiesEditionComponent.getHelpContent(EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators, EsbViewsRepository.SWT_KIND));
    this.commentMediators.createControls(parent);
    this.commentMediators.addSelectionListener(new SelectionAdapter() {
        public void widgetSelected(SelectionEvent e) {
            if (e.item != null && e.item.getData() instanceof EObject) {
                propertiesEditionComponent.firePropertiesChanged(new PropertiesEditionEvent(EntitlementMediatorObligationsOutputConnectorPropertiesEditionPartImpl.this,
                        EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators,
                        PropertiesEditionEvent.CHANGE, PropertiesEditionEvent.SELECTION_CHANGED, null, e.item.getData()));
            }
        }
    });
    GridData commentMediatorsData = new GridData(GridData.FILL_HORIZONTAL);
    commentMediatorsData.horizontalSpan = 3;
    this.commentMediators.setLayoutData(commentMediatorsData);
    // 0..* multiplicity: no lower bound, unbounded upper bound (-1 = unlimited).
    this.commentMediators.setLowerBound(0);
    this.commentMediators.setUpperBound(-1);
    commentMediators.setID(EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators);
    commentMediators.setEEFType("eef::AdvancedTableComposition"); //$NON-NLS-1$
    // Start of user code for createCommentMediatorsAdvancedTableComposition
    // End of user code
    return parent;
}

/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionListener#firePropertiesChanged(org.eclipse.emf.eef.runtime.api.notify.IPropertiesEditionEvent)
 */
public void firePropertiesChanged(IPropertiesEditionEvent event) {
    // Start of user code for tab synchronization
    // End of user code
}

/**
 * Wires the table to its model input and applies the read-only state.
 * NOTE(review): `current` and `resourceSet` are presumably inherited from the EEF
 * superclass (not visible in this chunk) — confirm against the base part class.
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart#initCommentMediators(EObject current, EReference containingFeature, EReference feature)
 */
public void initCommentMediators(ReferencesTableSettings settings) {
    if (current.eResource() != null && current.eResource().getResourceSet() != null)
        this.resourceSet = current.eResource().getResourceSet();
    ReferencesTableContentProvider contentProvider = new ReferencesTableContentProvider();
    commentMediators.setContentProvider(contentProvider);
    commentMediators.setInput(settings);
    boolean eefElementEditorReadOnlyState = isReadOnly(EsbViewsRepository.EntitlementMediatorObligationsOutputConnector.Properties.commentMediators);
    // Only toggle enablement when the desired state differs from the current one.
    if (eefElementEditorReadOnlyState && commentMediators.isEnabled()) {
        commentMediators.setEnabled(false);
        commentMediators.setToolTipText(EsbMessages.EntitlementMediatorObligationsOutputConnector_ReadOnly);
    } else if (!eefElementEditorReadOnlyState && !commentMediators.isEnabled()) {
        commentMediators.setEnabled(true);
    }
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart#updateCommentMediators()
 */
public void updateCommentMediators() {
    commentMediators.refresh();
}

/**
 * Registers a viewer filter, installing it immediately if the table widget already exists.
 *
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart#addFilterCommentMediators(ViewerFilter filter)
 */
public void addFilterToCommentMediators(ViewerFilter filter) {
    commentMediatorsFilters.add(filter);
    if (this.commentMediators != null) {
        this.commentMediators.addFilter(filter);
    }
}

/**
 * Records a business filter. Per the visible code it is stored only, never applied
 * to the widget here.
 *
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart#addBusinessFilterCommentMediators(ViewerFilter filter)
 */
public void addBusinessFilterToCommentMediators(ViewerFilter filter) {
    commentMediatorsBusinessFilters.add(filter);
}

/**
 * {@inheritDoc}
 *
 * @see org.wso2.developerstudio.eclipse.gmf.esb.parts.EntitlementMediatorObligationsOutputConnectorPropertiesEditionPart#isContainedInCommentMediatorsTable(EObject element)
 */
public boolean isContainedInCommentMediatorsTable(EObject element) {
    return ((ReferencesTableSettings)commentMediators.getInput()).contains(element);
}

/**
 * {@inheritDoc}
 *
 * @see org.eclipse.emf.eef.runtime.api.parts.IPropertiesEditionPart#getTitle()
 */
public String getTitle() {
    return EsbMessages.EntitlementMediatorObligationsOutputConnector_Part_Title;
}

// Start of user code additional methods
// End of user code

}
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.net;

import static io.netty.buffer.Unpooled.copiedBuffer;
import static io.netty.buffer.Unpooled.unreleasableBuffer;
import static io.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static io.netty.handler.codec.http.HttpResponseStatus.OK;
import static io.netty.handler.logging.LogLevel.INFO;

import org.chromium.base.Log;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.concurrent.CountDownLatch;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufUtil;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.http2.AbstractHttp2ConnectionHandlerBuilder;
import io.netty.handler.codec.http2.DefaultHttp2Headers;
import io.netty.handler.codec.http2.Http2ConnectionDecoder;
import io.netty.handler.codec.http2.Http2ConnectionEncoder;
import io.netty.handler.codec.http2.Http2ConnectionHandler;
import io.netty.handler.codec.http2.Http2Exception;
import io.netty.handler.codec.http2.Http2Flags;
import io.netty.handler.codec.http2.Http2FrameListener;
import io.netty.handler.codec.http2.Http2FrameLogger;
import io.netty.handler.codec.http2.Http2Headers;
import io.netty.handler.codec.http2.Http2Settings;
import io.netty.util.CharsetUtil;

/**
 * HTTP/2 test handler for Cronet BidirectionalStream tests.
 *
 * <p>Each inbound HEADERS frame is dispatched (by URL path prefix) to a
 * {@link RequestResponder}; responders for streams that are not yet finished are kept in
 * {@link #mResponderMap} so subsequent DATA frames reach the same responder.
 */
public final class Http2TestHandler
        extends Http2ConnectionHandler implements Http2FrameListener {
    // Some Url Paths that have special meaning.
    public static final String ECHO_ALL_HEADERS_PATH = "/echoallheaders";
    public static final String ECHO_HEADER_PATH = "/echoheader";
    public static final String ECHO_METHOD_PATH = "/echomethod";
    public static final String ECHO_STREAM_PATH = "/echostream";
    public static final String ECHO_TRAILERS_PATH = "/echotrailers";
    public static final String SERVE_SIMPLE_BROTLI_RESPONSE = "/simplebrotli";
    public static final String REPORTING_COLLECTOR_PATH = "/reporting-collector";
    public static final String SUCCESS_WITH_NEL_HEADERS_PATH = "/success-with-nel";
    public static final String COMBINED_HEADERS_PATH = "/combinedheaders";
    public static final String HANGING_REQUEST_PATH = "/hanging-request";

    private static final String TAG = Http2TestHandler.class.getSimpleName();
    private static final Http2FrameLogger sLogger =
            new Http2FrameLogger(INFO, Http2TestHandler.class);
    // Shared default response body; unreleasable so duplicates can be written repeatedly.
    private static final ByteBuf RESPONSE_BYTES =
            unreleasableBuffer(copiedBuffer("HTTP/2 Test Server", CharsetUtil.UTF_8));

    // Maps an open stream id to the responder handling it; entries are removed on
    // end-of-stream.
    private HashMap<Integer, RequestResponder> mResponderMap = new HashMap<>();
    private ReportingCollector mReportingCollector;
    private String mServerUrl;
    private CountDownLatch mHangingUrlLatch;

    /**
     * Builder for HTTP/2 test handler.
     */
    public static final class Builder
            extends AbstractHttp2ConnectionHandlerBuilder<Http2TestHandler, Builder> {
        public Builder() {
            frameLogger(sLogger);
        }

        public Builder setReportingCollector(ReportingCollector reportingCollector) {
            mReportingCollector = reportingCollector;
            return this;
        }

        public Builder setServerUrl(String serverUrl) {
            mServerUrl = serverUrl;
            return this;
        }

        public Builder setHangingUrlLatch(CountDownLatch hangingUrlLatch) {
            mHangingUrlLatch = hangingUrlLatch;
            return this;
        }

        @Override
        public Http2TestHandler build() {
            return super.build();
        }

        @Override
        protected Http2TestHandler build(Http2ConnectionDecoder decoder,
                Http2ConnectionEncoder encoder, Http2Settings initialSettings) {
            Http2TestHandler handler = new Http2TestHandler(decoder, encoder, initialSettings,
                    mReportingCollector, mServerUrl, mHangingUrlLatch);
            // The handler listens to its own frames.
            frameListener(handler);
            return handler;
        }

        private ReportingCollector mReportingCollector;
        private String mServerUrl;
        private CountDownLatch mHangingUrlLatch;
    }

    /**
     * Default responder: echoes request headers back as response headers and echoes any
     * request body, always closing the stream after the first DATA frame.
     */
    private class RequestResponder {
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            encoder().writeHeaders(ctx, streamId,
                    createResponseHeadersFromRequestHeaders(headers), 0, endOfStream,
                    ctx.newPromise());
            ctx.flush();
        }

        int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
                boolean endOfStream) {
            int processed = data.readableBytes() + padding;
            // retain() because writeData takes ownership of the buffer.
            encoder().writeData(ctx, streamId, data.retain(), 0, true, ctx.newPromise());
            ctx.flush();
            return processed;
        }

        void sendResponseString(ChannelHandlerContext ctx, int streamId, String responseString) {
            ByteBuf content = ctx.alloc().buffer();
            ByteBufUtil.writeAscii(content, responseString);
            encoder().writeHeaders(
                    ctx, streamId, createDefaultResponseHeaders(), 0, false, ctx.newPromise());
            encoder().writeData(ctx, streamId, content, 0, true, ctx.newPromise());
            ctx.flush();
        }
    }

    /**
     * Echoes request data frame-by-frame, only closing the response when the request
     * stream ends (unlike the default responder, which closes on the first DATA frame).
     */
    private class EchoStreamResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            // Send a frame for the response headers.
            encoder().writeHeaders(ctx, streamId,
                    createResponseHeadersFromRequestHeaders(headers), 0, endOfStream,
                    ctx.newPromise());
            ctx.flush();
        }

        @Override
        int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
                boolean endOfStream) {
            int processed = data.readableBytes() + padding;
            encoder().writeData(ctx, streamId, data.retain(), 0, endOfStream, ctx.newPromise());
            ctx.flush();
            return processed;
        }
    }

    /** Responds with a duplicated header name to test client-side header joining. */
    private class CombinedHeadersResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            ByteBuf content = ctx.alloc().buffer();
            ByteBufUtil.writeAscii(content, "GET");
            Http2Headers responseHeaders = new DefaultHttp2Headers().status(OK.codeAsText());
            // Upon receiving, the following two headers will be jointed by '\0'.
            responseHeaders.add("foo", "bar");
            responseHeaders.add("foo", "bar2");
            encoder().writeHeaders(ctx, streamId, responseHeaders, 0, false, ctx.newPromise());
            encoder().writeData(ctx, streamId, content, 0, true, ctx.newPromise());
            ctx.flush();
        }
    }

    /**
     * Never responds; blocks on {@link #mHangingUrlLatch} until the test counts it down,
     * leaving the request hanging.
     */
    private class HangingRequestResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            try {
                mHangingUrlLatch.await();
            } catch (InterruptedException e) {
                // Restore the interrupt status instead of swallowing it, so the event-loop
                // thread can observe the interruption and shut down cleanly.
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Echoes the value of the request header named in the query string
     * (e.g. /echoheader?foo returns the value of header "foo").
     */
    private class EchoHeaderResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            String[] splitPath = headers.path().toString().split("\\?");
            if (splitPath.length <= 1) {
                sendResponseString(ctx, streamId, "Header name not found.");
                return;
            }

            // HTTP/2 header names are lowercase on the wire.
            String headerName = splitPath[1].toLowerCase(Locale.US);
            if (headers.get(headerName) == null) {
                sendResponseString(ctx, streamId, "Header not found:" + headerName);
                return;
            }

            sendResponseString(ctx, streamId, headers.get(headerName).toString());
        }
    }

    /** Echoes every request header as a "name: value" line in the response body. */
    private class EchoAllHeadersResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            StringBuilder response = new StringBuilder();
            for (Map.Entry<CharSequence, CharSequence> header : headers) {
                response.append(header.getKey() + ": " + header.getValue() + "\r\n");
            }
            sendResponseString(ctx, streamId, response.toString());
        }
    }

    /** Echoes the request method (":method" pseudo-header) as the response body. */
    private class EchoMethodResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            sendResponseString(ctx, streamId, headers.method().toString());
        }
    }

    /** Sends headers, a body, then trailers that echo the request headers. */
    private class EchoTrailersResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            encoder().writeHeaders(
                    ctx, streamId, createDefaultResponseHeaders(), 0, false, ctx.newPromise());
            encoder().writeData(
                    ctx, streamId, RESPONSE_BYTES.duplicate(), 0, false, ctx.newPromise());
            Http2Headers responseTrailers = createResponseHeadersFromRequestHeaders(headers).add(
                    "trailer", "value1", "Value2");
            // endStream=true marks these headers as trailers.
            encoder().writeHeaders(ctx, streamId, responseTrailers, 0, true, ctx.newPromise());
            ctx.flush();
        }
    }

    // A RequestResponder that serves a simple Brotli-encoded response.
    private class ServeSimpleBrotliResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            Http2Headers responseHeaders = new DefaultHttp2Headers().status(OK.codeAsText());
            // Pre-compressed Brotli payload for "The quick brown fox jumps over the lazy dog".
            byte[] quickfoxCompressed = {0x0b, 0x15, -0x80, 0x54, 0x68, 0x65, 0x20, 0x71, 0x75,
                    0x69, 0x63, 0x6b, 0x20, 0x62, 0x72, 0x6f, 0x77, 0x6e, 0x20, 0x66, 0x6f, 0x78,
                    0x20, 0x6a, 0x75, 0x6d, 0x70, 0x73, 0x20, 0x6f, 0x76, 0x65, 0x72, 0x20, 0x74,
                    0x68, 0x65, 0x20, 0x6c, 0x61, 0x7a, 0x79, 0x20, 0x64, 0x6f, 0x67, 0x03};
            ByteBuf content = copiedBuffer(quickfoxCompressed);
            responseHeaders.add("content-encoding", "br");
            encoder().writeHeaders(ctx, streamId, responseHeaders, 0, false, ctx.newPromise());
            encoder().writeData(ctx, streamId, content, 0, true, ctx.newPromise());
            ctx.flush();
        }
    }

    // A RequestResponder that implements a Reporting collector.
    private class ReportingCollectorResponder extends RequestResponder {
        // Accumulates the request body across DATA frames until end-of-stream.
        private ByteArrayOutputStream mPartialPayload = new ByteArrayOutputStream();

        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {}

        @Override
        int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
                boolean endOfStream) {
            int processed = data.readableBytes() + padding;
            try {
                data.readBytes(mPartialPayload, data.readableBytes());
            } catch (IOException ignored) {
                // ByteArrayOutputStream never throws; the signature forces this catch.
            }
            if (endOfStream) {
                processPayload(ctx, streamId);
            }
            return processed;
        }

        // Hands the complete payload to the collector and answers 200 or 400.
        private void processPayload(ChannelHandlerContext ctx, int streamId) {
            boolean succeeded = false;
            try {
                String payload = mPartialPayload.toString(CharsetUtil.UTF_8.name());
                succeeded = mReportingCollector.addReports(payload);
            } catch (UnsupportedEncodingException ignored) {
                // UTF-8 is always supported; fall through to a 400 response.
            }
            Http2Headers responseHeaders;
            if (succeeded) {
                responseHeaders = new DefaultHttp2Headers().status(OK.codeAsText());
            } else {
                responseHeaders = new DefaultHttp2Headers().status(BAD_REQUEST.codeAsText());
            }
            encoder().writeHeaders(ctx, streamId, responseHeaders, 0, true, ctx.newPromise());
            ctx.flush();
        }
    }

    // A RequestResponder that serves a successful response with Reporting and NEL headers
    private class SuccessWithNELHeadersResponder extends RequestResponder {
        @Override
        void onHeadersRead(ChannelHandlerContext ctx, int streamId, boolean endOfStream,
                Http2Headers headers) {
            Http2Headers responseHeaders = new DefaultHttp2Headers().status(OK.codeAsText());
            responseHeaders.add("report-to", getReportToHeader());
            responseHeaders.add("nel", getNELHeader());
            encoder().writeHeaders(ctx, streamId, responseHeaders, 0, true, ctx.newPromise());
            ctx.flush();
        }

        @Override
        int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
                boolean endOfStream) {
            // Request body is consumed and ignored.
            int processed = data.readableBytes() + padding;
            return processed;
        }

        // Report-To header pointing back at this server's reporting collector endpoint.
        private String getReportToHeader() {
            return String.format("{\"group\": \"nel\", \"max_age\": 86400, "
                            + "\"endpoints\": [{\"url\": \"%s%s\"}]}",
                    mServerUrl, REPORTING_COLLECTOR_PATH);
        }

        private String getNELHeader() {
            return "{\"report_to\": \"nel\", \"max_age\": 86400, \"success_fraction\": 1.0}";
        }
    }

    /** Returns plain 200 response headers. */
    private static Http2Headers createDefaultResponseHeaders() {
        return new DefaultHttp2Headers().status(OK.codeAsText());
    }

    /**
     * Returns 200 response headers echoing each non-pseudo request header as
     * "echo-&lt;name&gt;", plus an "echo-method" header.
     */
    private static Http2Headers createResponseHeadersFromRequestHeaders(
            Http2Headers requestHeaders) {
        // Create response headers by echoing request headers.
        Http2Headers responseHeaders = new DefaultHttp2Headers().status(OK.codeAsText());
        for (Map.Entry<CharSequence, CharSequence> header : requestHeaders) {
            // Skip pseudo-headers (":method", ":path", ...).
            if (!header.getKey().toString().startsWith(":")) {
                responseHeaders.add("echo-" + header.getKey(), header.getValue());
            }
        }
        responseHeaders.add("echo-method", requestHeaders.get(":method").toString());
        return responseHeaders;
    }

    private Http2TestHandler(Http2ConnectionDecoder decoder, Http2ConnectionEncoder encoder,
            Http2Settings initialSettings, ReportingCollector reportingCollector,
            String serverUrl, CountDownLatch hangingUrlLatch) {
        super(decoder, encoder, initialSettings);
        mReportingCollector = reportingCollector;
        mServerUrl = serverUrl;
        mHangingUrlLatch = hangingUrlLatch;
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
        super.exceptionCaught(ctx, cause);
        Log.e(TAG, "An exception was caught", cause);
        ctx.close();
        // Deliberately rethrow so the enclosing test fails loudly instead of hanging.
        throw new Exception("Exception Caught", cause);
    }

    @Override
    public int onDataRead(ChannelHandlerContext ctx, int streamId, ByteBuf data, int padding,
            boolean endOfStream) throws Http2Exception {
        // The responder was registered by onHeadersRead; a well-formed peer always sends
        // HEADERS before DATA on a stream.
        RequestResponder responder = mResponderMap.get(streamId);
        if (endOfStream) {
            mResponderMap.remove(streamId);
        }
        return responder.onDataRead(ctx, streamId, data, padding, endOfStream);
    }

    @Override
    public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers,
            int padding, boolean endOfStream) throws Http2Exception {
        // Dispatch to a responder based on the URL path prefix.
        String path = headers.path().toString();
        RequestResponder responder;
        if (path.startsWith(ECHO_STREAM_PATH)) {
            responder = new EchoStreamResponder();
        } else if (path.startsWith(ECHO_TRAILERS_PATH)) {
            responder = new EchoTrailersResponder();
        } else if (path.startsWith(ECHO_ALL_HEADERS_PATH)) {
            responder = new EchoAllHeadersResponder();
        } else if (path.startsWith(ECHO_HEADER_PATH)) {
            responder = new EchoHeaderResponder();
        } else if (path.startsWith(ECHO_METHOD_PATH)) {
            responder = new EchoMethodResponder();
        } else if (path.startsWith(SERVE_SIMPLE_BROTLI_RESPONSE)) {
            responder = new ServeSimpleBrotliResponder();
        } else if (path.startsWith(REPORTING_COLLECTOR_PATH)) {
            responder = new ReportingCollectorResponder();
        } else if (path.startsWith(SUCCESS_WITH_NEL_HEADERS_PATH)) {
            responder = new SuccessWithNELHeadersResponder();
        } else if (path.startsWith(COMBINED_HEADERS_PATH)) {
            responder = new CombinedHeadersResponder();
        } else if (path.startsWith(HANGING_REQUEST_PATH)) {
            responder = new HangingRequestResponder();
        } else {
            responder = new RequestResponder();
        }

        responder.onHeadersRead(ctx, streamId, endOfStream, headers);

        // Keep the responder around only if more frames are expected on this stream.
        if (!endOfStream) {
            mResponderMap.put(streamId, responder);
        }
    }

    @Override
    public void onHeadersRead(ChannelHandlerContext ctx, int streamId, Http2Headers headers,
            int streamDependency, short weight, boolean exclusive, int padding,
            boolean endOfStream) throws Http2Exception {
        // Priority information is ignored; delegate to the simpler overload.
        onHeadersRead(ctx, streamId, headers, padding, endOfStream);
    }

    @Override
    public void onPriorityRead(ChannelHandlerContext ctx, int streamId, int streamDependency,
            short weight, boolean exclusive) throws Http2Exception {}

    @Override
    public void onRstStreamRead(ChannelHandlerContext ctx, int streamId, long errorCode)
            throws Http2Exception {}

    @Override
    public void onSettingsAckRead(ChannelHandlerContext ctx) throws Http2Exception {}

    @Override
    public void onSettingsRead(ChannelHandlerContext ctx, Http2Settings settings)
            throws Http2Exception {}

    @Override
    public void onPingRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {}

    @Override
    public void onPingAckRead(ChannelHandlerContext ctx, ByteBuf data) throws Http2Exception {}

    @Override
    public void onPushPromiseRead(ChannelHandlerContext ctx, int streamId, int promisedStreamId,
            Http2Headers headers, int padding) throws Http2Exception {}

    @Override
    public void onGoAwayRead(ChannelHandlerContext ctx, int lastStreamId, long errorCode,
            ByteBuf debugData) throws Http2Exception {}

    @Override
    public void onWindowUpdateRead(ChannelHandlerContext ctx, int streamId,
            int windowSizeIncrement) throws Http2Exception {}

    @Override
    public void onUnknownFrame(ChannelHandlerContext ctx, byte frameType, int streamId,
            Http2Flags flags, ByteBuf payload) throws Http2Exception {}
}
/* * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/featurestore_service.proto package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Details of operations that perform create EntityType. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata} */ public final class CreateEntityTypeOperationMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) CreateEntityTypeOperationMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use CreateEntityTypeOperationMetadata.newBuilder() to construct. 
private CreateEntityTypeOperationMetadata( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateEntityTypeOperationMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateEntityTypeOperationMetadata(); } @java.lang.Override public final com.google.protobuf.UnknownFieldSet getUnknownFields() { return this.unknownFields; } private CreateEntityTypeOperationMetadata( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { this(); if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder(); try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder subBuilder = null; if (genericMetadata_ != null) { subBuilder = genericMetadata_.toBuilder(); } genericMetadata_ = input.readMessage( com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.parser(), extensionRegistry); if (subBuilder != null) { subBuilder.mergeFrom(genericMetadata_); genericMetadata_ = subBuilder.buildPartial(); } break; } default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { done = true; } break; } } } } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(this); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this); } finally { this.unknownFields = unknownFields.build(); makeExtensionsImmutable(); } } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto 
.internal_static_google_cloud_aiplatform_v1beta1_CreateEntityTypeOperationMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateEntityTypeOperationMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata.class, com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata.Builder.class); } public static final int GENERIC_METADATA_FIELD_NUMBER = 1; private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_; /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> * * @return Whether the genericMetadata field is set. */ @java.lang.Override public boolean hasGenericMetadata() { return genericMetadata_ != null; } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> * * @return The genericMetadata. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() { return genericMetadata_ == null ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance() : genericMetadata_; } /** * * * <pre> * Operation metadata for EntityType. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder getGenericMetadataOrBuilder() { return getGenericMetadata(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (genericMetadata_ != null) { output.writeMessage(1, getGenericMetadata()); } unknownFields.writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (genericMetadata_ != null) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getGenericMetadata()); } size += unknownFields.getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata other = (com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) obj; if (hasGenericMetadata() != other.hasGenericMetadata()) return false; if (hasGenericMetadata()) { if (!getGenericMetadata().equals(other.getGenericMetadata())) return false; } if (!unknownFields.equals(other.unknownFields)) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasGenericMetadata()) { hash = (37 * hash) + GENERIC_METADATA_FIELD_NUMBER; hash = (53 * hash) + 
getGenericMetadata().hashCode(); } hash = (29 * hash) + unknownFields.hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Details of operations that perform create EntityType. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateEntityTypeOperationMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateEntityTypeOperationMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata.class, com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata.Builder.class); } // Construct using // com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {} } @java.lang.Override public Builder clear() { super.clear(); if (genericMetadataBuilder_ == null) { genericMetadata_ = null; } else { 
genericMetadata_ = null; genericMetadataBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.FeaturestoreServiceProto .internal_static_google_cloud_aiplatform_v1beta1_CreateEntityTypeOperationMetadata_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata .getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata build() { com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata buildPartial() { com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata result = new com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata(this); if (genericMetadataBuilder_ == null) { result.genericMetadata_ = genericMetadata_; } else { result.genericMetadata_ = genericMetadataBuilder_.build(); } onBuilt(); return result; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return 
super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) { return mergeFrom( (com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata other) { if (other == com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata .getDefaultInstance()) return this; if (other.hasGenericMetadata()) { mergeGenericMetadata(other.getGenericMetadata()); } this.mergeUnknownFields(other.unknownFields); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata parsedMessage = null; try { parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { parsedMessage = (com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) e.getUnfinishedMessage(); throw e.unwrapIOException(); } finally { if (parsedMessage != null) { mergeFrom(parsedMessage); } } return this; } private com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata genericMetadata_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata, com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder, com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder> 
genericMetadataBuilder_; /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> * * @return Whether the genericMetadata field is set. */ public boolean hasGenericMetadata() { return genericMetadataBuilder_ != null || genericMetadata_ != null; } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> * * @return The genericMetadata. */ public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata getGenericMetadata() { if (genericMetadataBuilder_ == null) { return genericMetadata_ == null ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance() : genericMetadata_; } else { return genericMetadataBuilder_.getMessage(); } } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder setGenericMetadata( com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata value) { if (genericMetadataBuilder_ == null) { if (value == null) { throw new NullPointerException(); } genericMetadata_ = value; onChanged(); } else { genericMetadataBuilder_.setMessage(value); } return this; } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder setGenericMetadata( com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder builderForValue) { if (genericMetadataBuilder_ == null) { genericMetadata_ = builderForValue.build(); onChanged(); } else { genericMetadataBuilder_.setMessage(builderForValue.build()); } return this; } /** * * * <pre> * Operation metadata for EntityType. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder mergeGenericMetadata( com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata value) { if (genericMetadataBuilder_ == null) { if (genericMetadata_ != null) { genericMetadata_ = com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.newBuilder( genericMetadata_) .mergeFrom(value) .buildPartial(); } else { genericMetadata_ = value; } onChanged(); } else { genericMetadataBuilder_.mergeFrom(value); } return this; } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ public Builder clearGenericMetadata() { if (genericMetadataBuilder_ == null) { genericMetadata_ = null; onChanged(); } else { genericMetadata_ = null; genericMetadataBuilder_ = null; } return this; } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder getGenericMetadataBuilder() { onChanged(); return getGenericMetadataFieldBuilder().getBuilder(); } /** * * * <pre> * Operation metadata for EntityType. * </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ public com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder getGenericMetadataOrBuilder() { if (genericMetadataBuilder_ != null) { return genericMetadataBuilder_.getMessageOrBuilder(); } else { return genericMetadata_ == null ? com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.getDefaultInstance() : genericMetadata_; } } /** * * * <pre> * Operation metadata for EntityType. 
* </pre> * * <code>.google.cloud.aiplatform.v1beta1.GenericOperationMetadata generic_metadata = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata, com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder, com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder> getGenericMetadataFieldBuilder() { if (genericMetadataBuilder_ == null) { genericMetadataBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata, com.google.cloud.aiplatform.v1beta1.GenericOperationMetadata.Builder, com.google.cloud.aiplatform.v1beta1.GenericOperationMetadataOrBuilder>( getGenericMetadata(), getParentForChildren(), isClean()); genericMetadata_ = null; } return genericMetadataBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata) private static final com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata(); } public static com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateEntityTypeOperationMetadata> PARSER = new com.google.protobuf.AbstractParser<CreateEntityTypeOperationMetadata>() { @java.lang.Override public CreateEntityTypeOperationMetadata parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return new CreateEntityTypeOperationMetadata(input, extensionRegistry); } }; public static com.google.protobuf.Parser<CreateEntityTypeOperationMetadata> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateEntityTypeOperationMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CreateEntityTypeOperationMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
/*
 * Copyright (C) 2013 Jongha Kim
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.wisedog.android.whooing.ui;

import java.net.MalformedURLException;
import java.net.URL;

import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;

import net.wisedog.android.whooing.Define;
import net.wisedog.android.whooing.R;
import net.wisedog.android.whooing.activity.BbsWriteFragment;
import net.wisedog.android.whooing.activity.MainFragmentActivity;
import net.wisedog.android.whooing.network.ThreadRestAPI;
import net.wisedog.android.whooing.network.ThreadThumbnailLoader;
import net.wisedog.android.whooing.utils.DateUtil;
import android.annotation.SuppressLint;
import android.app.AlertDialog;
import android.app.Fragment;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.util.Log;
import android.view.View;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Custom view that renders one BBS reply ("Comment" in Whooing terminology) from its JSON
 * representation: writer profile/name/level, date, contents, and an inline comment composer.
 * Owner-only delete/modify buttons are shown when the writer matches {@link Define#USER_ID}.
 *
 * <p>All network work ({@link ThreadRestAPI}, {@link ThreadThumbnailLoader}) reports back via
 * {@link #mHandler}, which updates this view's children on completion.
 */
public class BbsReplyEntity extends LinearLayout {
    private Context mContext;
    /** ID of the board article this reply belongs to. */
    private int mBbsId;
    /** Board type (free, counseling, ...). */
    private int mBoardType;
    /** Reply ID. A "reply" here is a Comment in Whooing. */
    private String mCommentId;
    /** Shown while a delete request is in flight; dismissed by {@link #mHandler}. */
    private ProgressDialog mProgress;

    public BbsReplyEntity(Context context) {
        super(context);
        mContext = context;
    }

    /**
     * @param context            context used for inflation, dialogs and toasts; expected to be a
     *                           {@link MainFragmentActivity} for the modify action — TODO confirm
     * @param bbsArticleFragment owning fragment (currently unused, kept for interface stability)
     * @param bbs_id             ID of the board article this reply belongs to
     * @param boardType          board type (free, counseling, ...)
     */
    public BbsReplyEntity(Context context, Fragment bbsArticleFragment, int bbs_id, int boardType) {
        super(context);
        mContext = context;
        mBbsId = bbs_id;
        mBoardType = boardType;
    }

    /**
     * Inflates the reply layout into this view and binds the given JSON data.
     *
     * @param obj       JSON for this reply ("comment_id", "writer", "timestamp", "contents",
     *                  "additions", ...); if {@code null} the layout is inflated and binding skipped
     * @param objResult JSON of the surrounding article result ("category", "bbs_id"); may be null —
     *                  only read inside click/setup paths that post or fetch comments
     * @throws JSONException if a required key is missing from {@code obj} or its "writer" object
     */
    public void setupReply(final JSONObject obj, final JSONObject objResult) throws JSONException {
        inflate(mContext, R.layout.bbs_article_chunk, this);
        if (obj == null) {
            // objResult may be null, but it's acceptance
            return;
        }
        mCommentId = obj.getString("comment_id");
        JSONObject objWriter = obj.getJSONObject("writer");

        // Writer profile image: fall back to the anonymous drawable on any failure.
        ImageView profileImage = (ImageView) findViewById(R.id.bbs_article_chunk_img);
        String profileUrl = objWriter.getString("image_url");
        if (profileImage != null) {
            URL url = null;
            try {
                url = new URL(profileUrl);
            } catch (MalformedURLException e) {
                profileImage.setImageResource(R.drawable.profile_anonymous);
                e.printStackTrace();
                Toast.makeText(mContext, "Error - Reply-04", Toast.LENGTH_LONG).show();
                return;
            }
            // Loader posts the Bitmap (or null) back to mHandler with msg.what == 0.
            ThreadThumbnailLoader thread = new ThreadThumbnailLoader(mHandler, url);
            try {
                thread.start();
            } catch (IllegalThreadStateException e) {
                profileImage.setImageResource(R.drawable.profile_anonymous);
            }
        }

        TextView textName = (TextView) findViewById(R.id.bbs_article_chunk_name);
        if (textName != null) {
            textName.setText(objWriter.getString("username"));
        }
        TextView textLabel = (TextView) findViewById(R.id.bbs_article_chunk_level);
        if (textLabel != null) {
            textLabel.setText(String.valueOf(objWriter.getInt("level")));
        }
        TextView textDate = (TextView) findViewById(R.id.bbs_article_chunk_date);
        if (textDate != null) {
            // Server timestamp is in seconds; DateUtil expects milliseconds.
            String dateString = DateUtil.getDateWithTimestamp(obj.getLong("timestamp") * 1000);
            textDate.setText(dateString);
        }
        final TextView textContents = (TextView) findViewById(R.id.bbs_article_chunk_contents);
        if (textContents != null) {
            textContents.setText(obj.getString("contents"));
        }
        TextView textComments = (TextView) findViewById(R.id.bbs_article_chunk_comment_num);
        if (textComments != null) {
            textComments.setText(obj.getInt("additions") + " comments");
        }

        // Owner-only actions: delete (with confirmation dialog) and modify.
        if (Define.USER_ID == objWriter.getInt("user_id")) {
            ImageButton delImg = (ImageButton) findViewById(R.id.bbs_article_chunk_delete);
            if (delImg != null) {
                delImg.setVisibility(View.VISIBLE);
                delImg.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        AlertDialog.Builder alertDialogBuilder = new AlertDialog.Builder(mContext);
                        alertDialogBuilder.setTitle(mContext.getString(R.string.bbs_delete_alert_title));
                        alertDialogBuilder.setMessage(mContext.getString(R.string.bbs_delete_alert_message))
                                .setCancelable(true)
                                .setPositiveButton(mContext.getString(R.string.text_yes),
                                        new DialogInterface.OnClickListener() {
                                            @Override
                                            public void onClick(DialogInterface dialog, int which) {
                                                Bundle b = new Bundle();
                                                b.putInt("board_type", mBoardType);
                                                b.putInt("bbs_id", mBbsId);
                                                try {
                                                    b.putString("comment_id", obj.getString("comment_id"));
                                                } catch (JSONException e) {
                                                    e.printStackTrace();
                                                    Toast.makeText(mContext, "Error - Reply-03",
                                                            Toast.LENGTH_LONG).show();
                                                    return;
                                                }
                                                mProgress = ProgressDialog.show(mContext, "",
                                                        mContext.getString(R.string.text_deleting));
                                                ThreadRestAPI thread = new ThreadRestAPI(mHandler,
                                                        Define.API_DELETE_BOARD_REPLY, b);
                                                thread.start();
                                            }
                                        })
                                .setNegativeButton(mContext.getString(R.string.text_no),
                                        new DialogInterface.OnClickListener() {
                                            @Override
                                            public void onClick(DialogInterface dialog, int which) {
                                                dialog.cancel();
                                            }
                                        });
                        AlertDialog alertDialog = alertDialogBuilder.create();
                        alertDialog.show();
                    }
                });
            }
            ImageButton modifyImg = (ImageButton) findViewById(R.id.bbs_article_chunk_modify);
            if (modifyImg != null) {
                modifyImg.setVisibility(View.VISIBLE);
                modifyImg.setOnClickListener(new OnClickListener() {
                    @SuppressLint("NewApi")
                    @Override
                    public void onClick(View v) {
                        // NOTE(review): assumes mContext is a MainFragmentActivity; a plain
                        // Context would throw ClassCastException here — confirm against callers.
                        MainFragmentActivity activity = (MainFragmentActivity) mContext;
                        try {
                            activity.addBbsWriteFragment(BbsWriteFragment.MODE_MODIFY_REPLY, null,
                                    textContents.getText().toString(), mBbsId,
                                    obj.getString("comment_id"), 0);
                        } catch (JSONException e) {
                            e.printStackTrace();
                            return;
                        }
                    }
                });
            }
        }

        // Comment composer: posts a new comment under this reply.
        Button confirmBtn = (Button) findViewById(R.id.bbs_article_chunk_comment_confirm_btn);
        // Fix: guard like every other lookup above — NPE if the layout lacks this button.
        if (confirmBtn != null) {
            confirmBtn.setOnClickListener(new OnClickListener() {
                @Override
                public void onClick(View v) {
                    setLoadingStatus(true);
                    EditText editText = (EditText) findViewById(R.id.bbs_article_chunk_comment_edittext);
                    // Fix: composer field may be absent; undo the loading state and bail.
                    if (editText == null) {
                        setLoadingStatus(false);
                        return;
                    }
                    Bundle b = new Bundle();
                    try {
                        b.putString("category", objResult.getString("category"));
                        b.putInt("bbs_id", objResult.getInt("bbs_id"));
                        b.putString("comment_id", obj.getString("comment_id"));
                    } catch (JSONException e) {
                        Toast.makeText(mContext, "Error - Reply-01", Toast.LENGTH_LONG).show();
                        e.printStackTrace();
                        return;
                    }
                    b.putString("contents", editText.getText().toString());
                    ThreadRestAPI thread = new ThreadRestAPI(mHandler, Define.API_POST_BOARD_COMMENT, b);
                    thread.start();
                }
            });
        }

        // If this reply already has comments ("additions"), fetch them now.
        if (obj.getInt("additions") > 0) {
            Bundle b = new Bundle();
            try {
                b.putString("category", objResult.getString("category"));
                b.putInt("bbs_id", objResult.getInt("bbs_id"));
                b.putString("comment_id", obj.getString("comment_id"));
            } catch (JSONException e) {
                e.printStackTrace();
                return;
            }
            ThreadRestAPI thread = new ThreadRestAPI(mHandler, Define.API_GET_BOARD_COMMENT, b);
            thread.start();
        }
    }

    /**
     * Toggles the comment composer between idle and in-flight states: disables the confirm
     * button and edit field and shows the progress spinner while {@code loading} is true.
     *
     * @param loading true while a comment post is in flight
     */
    public void setLoadingStatus(boolean loading) {
        Button confirmBtn = (Button) findViewById(R.id.bbs_article_chunk_comment_confirm_btn);
        // Fix: all three lookups guarded for consistency with setupReply's null-check style.
        if (confirmBtn != null) {
            confirmBtn.setEnabled(!loading);
        }
        EditText editText = (EditText) findViewById(R.id.bbs_article_chunk_comment_edittext);
        if (editText != null) {
            editText.setEnabled(!loading);
        }
        ProgressBar progress = (ProgressBar) findViewById(R.id.bbs_article_chunk_comment_progress);
        if (progress != null) {
            if (loading) {
                progress.setVisibility(View.VISIBLE);
            } else {
                progress.setVisibility(View.INVISIBLE);
            }
        }
    }

    /**
     * Receives results from {@link ThreadRestAPI} (msg.what == {@link Define#MSG_API_OK}, API id
     * in msg.arg1) and from {@link ThreadThumbnailLoader} (msg.what == 0, Bitmap or null in
     * msg.obj — presumably the loader's completion message; verify against the loader).
     *
     * <p>NOTE(review): anonymous Handler on the default looper holds an implicit reference to
     * this view; messages delivered after detach will still touch it — TODO confirm lifecycle.
     */
    protected Handler mHandler = new Handler() {
        @Override
        public void handleMessage(Message msg) {
            if (msg.what == Define.MSG_API_OK) {
                JSONObject obj = (JSONObject) msg.obj;
                if (msg.arg1 == Define.API_POST_BOARD_COMMENT) {
                    // A comment we posted was accepted: prepend it and clear the composer.
                    setLoadingStatus(false);
                    LinearLayout ll = (LinearLayout) findViewById(R.id.bbs_article_chunk_comment_container);
                    BbsCommentEntity entity = new BbsCommentEntity(mContext, mBoardType, mBbsId, mCommentId);
                    try {
                        entity.setup(obj.getJSONObject("results"));
                        // Fix: container may be absent from the layout.
                        if (ll != null) {
                            ll.addView(entity, 0);
                        }
                    } catch (JSONException e) {
                        e.printStackTrace();
                        Toast.makeText(mContext, "Error - Reply-02", Toast.LENGTH_LONG).show();
                    }
                    EditText editText = (EditText) findViewById(R.id.bbs_article_chunk_comment_edittext);
                    if (editText != null) {
                        editText.setText("");
                    }
                } else if (msg.arg1 == Define.API_GET_BOARD_COMMENT) {
                    // Existing comments fetched: append one view per row.
                    LinearLayout ll = (LinearLayout) findViewById(R.id.bbs_article_chunk_comment_container);
                    try {
                        JSONObject objResults = obj.getJSONObject("results");
                        JSONArray commentArray = objResults.getJSONArray("rows");
                        int len = commentArray.length();
                        for (int i = 0; i < len; i++) {
                            BbsCommentEntity entity =
                                    new BbsCommentEntity(mContext, mBoardType, mBbsId, mCommentId);
                            entity.setup(commentArray.getJSONObject(i));
                            // Fix: container may be absent from the layout.
                            if (ll != null) {
                                ll.addView(entity);
                            }
                        }
                    } catch (JSONException e) {
                        e.printStackTrace();
                    }
                } else if (msg.arg1 == Define.API_DELETE_BOARD_REPLY) {
                    if (Define.DEBUG) {
                        Log.d("wisedog", "API_DELETE_BOARD_REPLY : " + obj.toString());
                    }
                    // Fix: mProgress is only set when the delete dialog was shown; a stray
                    // delete result must not NPE here.
                    if (mProgress != null) {
                        mProgress.dismiss();
                    }
                }
            } else if (msg.what == 0) {
                // Thumbnail loader result: null means the download failed.
                ImageView image = (ImageView) findViewById(R.id.bbs_article_chunk_img);
                if (image != null) {
                    if (msg.obj == null) {
                        image.setImageResource(R.drawable.profile_anonymous);
                    } else {
                        image.setImageBitmap((Bitmap) msg.obj);
                    }
                }
            }
            super.handleMessage(msg);
        }
    };
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.beam.examples.complete.game;

import static org.hamcrest.Matchers.hasItem;
import static org.junit.Assert.assertThat;

import com.google.common.collect.ImmutableMap;
import java.io.Serializable;
import org.apache.beam.examples.complete.game.LeaderBoard.CalculateTeamScores;
import org.apache.beam.examples.complete.game.LeaderBoard.CalculateUserScores;
import org.apache.beam.examples.complete.game.UserScore.GameActionInfo;
import org.apache.beam.sdk.coders.AvroCoder;
import org.apache.beam.sdk.options.PipelineOptionsFactory;
import org.apache.beam.sdk.testing.PAssert;
import org.apache.beam.sdk.testing.TestPipeline;
import org.apache.beam.sdk.testing.TestStream;
import org.apache.beam.sdk.transforms.PTransform;
import org.apache.beam.sdk.transforms.SerializableFunction;
import org.apache.beam.sdk.transforms.windowing.BoundedWindow;
import org.apache.beam.sdk.transforms.windowing.GlobalWindow;
import org.apache.beam.sdk.transforms.windowing.IntervalWindow;
import org.apache.beam.sdk.values.KV;
import org.apache.beam.sdk.values.PCollection;
import org.apache.beam.sdk.values.TimestampedValue;
import org.joda.time.Duration;
import org.joda.time.Instant;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Tests for {@link LeaderBoard}.
 */
@RunWith(JUnit4.class)
public class LeaderBoardTest implements Serializable {

  // Lateness tolerance passed to CalculateTeamScores in every team-score test below.
  private static final Duration ALLOWED_LATENESS = Duration.standardHours(1);
  // Fixed-window size passed to CalculateTeamScores in every team-score test below.
  private static final Duration TEAM_WINDOW_DURATION = Duration.standardMinutes(20);
  // All event timestamps in these tests are expressed as offsets from the epoch.
  private Instant baseTime = new Instant(0);

  // Fresh pipeline per test; each test drives it with a TestStream and runs it to completion.
  @Rule
  public TestPipeline p = TestPipeline.create();

  /**
   * Some example users, on two separate teams.
   */
  private enum TestUser {
    RED_ONE("scarlet", "red"),
    RED_TWO("burgundy", "red"),
    BLUE_ONE("navy", "blue"),
    BLUE_TWO("sky", "blue");

    private final String userName;
    private final String teamName;

    TestUser(String userName, String teamName) {
      this.userName = userName;
      this.teamName = teamName;
    }

    public String getUser() {
      return userName;
    }

    public String getTeam() {
      return teamName;
    }
  }

  /**
   * A test of the {@link CalculateTeamScores} {@link PTransform} when all of the elements arrive
   * on time (ahead of the watermark).
   */
  @Test
  public void testTeamScoresOnTime() {

    TestStream<GameActionInfo> createEvents = TestStream.create(AvroCoder.of(GameActionInfo.class))
        // Start at the epoch
        .advanceWatermarkTo(baseTime)
        // add some elements ahead of the watermark
        .addElements(event(TestUser.BLUE_ONE, 3, Duration.standardSeconds(3)),
            event(TestUser.BLUE_ONE, 2, Duration.standardMinutes(1)),
            event(TestUser.RED_TWO, 3, Duration.standardSeconds(22)),
            event(TestUser.BLUE_TWO, 5, Duration.standardMinutes(3)))
        // The watermark advances slightly, but not past the end of the window
        .advanceWatermarkTo(baseTime.plus(Duration.standardMinutes(3)))
        // Add some more on time elements
        .addElements(event(TestUser.RED_ONE, 1, Duration.standardMinutes(4)),
            event(TestUser.BLUE_ONE, 2, Duration.standardSeconds(270)))
        // The window should close and emit an ON_TIME pane
        .advanceWatermarkToInfinity();

    PCollection<KV<String, Integer>> teamScores = p.apply(createEvents)
        .apply(new CalculateTeamScores(TEAM_WINDOW_DURATION, ALLOWED_LATENESS));

    String blueTeam = TestUser.BLUE_ONE.getTeam();
    String redTeam = TestUser.RED_ONE.getTeam();

    // On-time totals: blue 3+2+5+2 = 12, red 3+1 = 4.
    PAssert.that(teamScores)
        .inOnTimePane(new IntervalWindow(baseTime, TEAM_WINDOW_DURATION))
        .containsInAnyOrder(KV.of(blueTeam, 12), KV.of(redTeam, 4));

    p.run().waitUntilFinish();
  }

  /**
   * A test of the {@link CalculateTeamScores} {@link PTransform} when all of the elements arrive
   * on time, and the processing time advances far enough for speculative panes.
   */
  @Test
  public void testTeamScoresSpeculative() {

    TestStream<GameActionInfo> createEvents = TestStream.create(AvroCoder.of(GameActionInfo.class))
        // Start at the epoch
        .advanceWatermarkTo(baseTime)
        .addElements(event(TestUser.BLUE_ONE, 3, Duration.standardSeconds(3)),
            event(TestUser.BLUE_ONE, 2, Duration.standardMinutes(1)))
        // Some time passes within the runner, which causes a speculative pane containing the blue
        // team's score to be emitted
        .advanceProcessingTime(Duration.standardMinutes(10))
        .addElements(event(TestUser.RED_TWO, 5, Duration.standardMinutes(3)))
        // Some additional time passes and we get a speculative pane for the red team
        .advanceProcessingTime(Duration.standardMinutes(12))
        .addElements(event(TestUser.BLUE_TWO, 3, Duration.standardSeconds(22)))
        // More time passes and a speculative pane containing a refined value for the blue pane is
        // emitted
        .advanceProcessingTime(Duration.standardMinutes(10))
        // Some more events occur
        .addElements(event(TestUser.RED_ONE, 4, Duration.standardMinutes(4)),
            event(TestUser.BLUE_TWO, 2, Duration.standardMinutes(2)))
        // The window closes and we get an ON_TIME pane that contains all of the updates
        .advanceWatermarkToInfinity();

    PCollection<KV<String, Integer>> teamScores = p.apply(createEvents)
        .apply(new CalculateTeamScores(TEAM_WINDOW_DURATION, ALLOWED_LATENESS));

    String blueTeam = TestUser.BLUE_ONE.getTeam();
    String redTeam = TestUser.RED_ONE.getTeam();

    IntervalWindow window = new IntervalWindow(baseTime, TEAM_WINDOW_DURATION);
    // The window contains speculative panes alongside the on-time pane
    PAssert.that(teamScores)
        .inWindow(window)
        .containsInAnyOrder(KV.of(blueTeam, 10) /* The on-time blue pane */,
            KV.of(redTeam, 9) /* The on-time red pane */,
            KV.of(blueTeam, 5) /* The first blue speculative pane */,
            KV.of(blueTeam, 8) /* The second blue speculative pane */,
            KV.of(redTeam, 5) /* The red speculative pane */);
    PAssert.that(teamScores)
        .inOnTimePane(window)
        .containsInAnyOrder(KV.of(blueTeam, 10),
            KV.of(redTeam, 9));

    p.run().waitUntilFinish();
  }

  /**
   * A test where elements arrive behind the watermark (late data), but before the end of the
   * window. These elements are emitted on time.
   */
  @Test
  public void testTeamScoresUnobservablyLate() {

    BoundedWindow window = new IntervalWindow(baseTime, TEAM_WINDOW_DURATION);
    TestStream<GameActionInfo> createEvents = TestStream.create(AvroCoder.of(GameActionInfo.class))
        .advanceWatermarkTo(baseTime)
        .addElements(event(TestUser.BLUE_ONE, 3, Duration.standardSeconds(3)),
            event(TestUser.BLUE_TWO, 5, Duration.standardMinutes(8)),
            event(TestUser.RED_ONE, 4, Duration.standardMinutes(2)),
            event(TestUser.BLUE_ONE, 3, Duration.standardMinutes(5)))
        // Watermark moves to one minute before the window's end.
        .advanceWatermarkTo(baseTime.plus(TEAM_WINDOW_DURATION).minus(Duration.standardMinutes(1)))
        // These events are late, but the window hasn't closed yet, so the elements are in the
        // on-time pane
        .addElements(event(TestUser.RED_TWO, 2, Duration.ZERO),
            event(TestUser.RED_TWO, 5, Duration.standardMinutes(1)),
            event(TestUser.BLUE_TWO, 2, Duration.standardSeconds(90)),
            event(TestUser.RED_TWO, 3, Duration.standardMinutes(3)))
        .advanceWatermarkTo(baseTime.plus(TEAM_WINDOW_DURATION).plus(Duration.standardMinutes(1)))
        .advanceWatermarkToInfinity();

    PCollection<KV<String, Integer>> teamScores = p.apply(createEvents)
        .apply(new CalculateTeamScores(TEAM_WINDOW_DURATION, ALLOWED_LATENESS));

    String blueTeam = TestUser.BLUE_ONE.getTeam();
    String redTeam = TestUser.RED_ONE.getTeam();

    // The On Time pane contains the late elements that arrived before the end of the window
    PAssert.that(teamScores)
        .inOnTimePane(window)
        .containsInAnyOrder(KV.of(redTeam, 14), KV.of(blueTeam, 13));

    p.run().waitUntilFinish();
  }

  /**
   * A test where elements arrive behind the watermark (late data) after the watermark passes the
   * end of the window, but before the maximum allowed lateness. These elements are emitted in a
   * late pane.
   */
  @Test
  public void testTeamScoresObservablyLate() {

    Instant firstWindowCloses = baseTime.plus(ALLOWED_LATENESS).plus(TEAM_WINDOW_DURATION);
    TestStream<GameActionInfo> createEvents = TestStream.create(AvroCoder.of(GameActionInfo.class))
        .advanceWatermarkTo(baseTime)
        .addElements(event(TestUser.BLUE_ONE, 3, Duration.standardSeconds(3)),
            event(TestUser.BLUE_TWO, 5, Duration.standardMinutes(8)))
        .advanceProcessingTime(Duration.standardMinutes(10))
        .advanceWatermarkTo(baseTime.plus(Duration.standardMinutes(3)))
        .addElements(event(TestUser.RED_ONE, 3, Duration.standardMinutes(1)),
            event(TestUser.RED_ONE, 4, Duration.standardMinutes(2)),
            event(TestUser.BLUE_ONE, 3, Duration.standardMinutes(5)))
        .advanceWatermarkTo(firstWindowCloses.minus(Duration.standardMinutes(1)))
        // These events are late but should still appear in a late pane
        .addElements(event(TestUser.RED_TWO, 2, Duration.ZERO),
            event(TestUser.RED_TWO, 5, Duration.standardMinutes(1)),
            event(TestUser.RED_TWO, 3, Duration.standardMinutes(3)))
        // A late refinement is emitted due to the advance in processing time, but the window has
        // not yet closed because the watermark has not advanced
        .advanceProcessingTime(Duration.standardMinutes(12))
        // These elements should appear in the final pane
        .addElements(event(TestUser.RED_TWO, 9, Duration.standardMinutes(1)),
            event(TestUser.RED_TWO, 1, Duration.standardMinutes(3)))
        .advanceWatermarkToInfinity();

    PCollection<KV<String, Integer>> teamScores = p.apply(createEvents)
        .apply(new CalculateTeamScores(TEAM_WINDOW_DURATION, ALLOWED_LATENESS));

    BoundedWindow window = new IntervalWindow(baseTime, TEAM_WINDOW_DURATION);
    String blueTeam = TestUser.BLUE_ONE.getTeam();
    String redTeam = TestUser.RED_ONE.getTeam();

    PAssert.that(teamScores)
        .inWindow(window)
        .satisfies((SerializableFunction<Iterable<KV<String, Integer>>, Void>) input -> {
          // The final sums need not exist in the same pane, but must appear in the output
          // PCollection
          assertThat(input, hasItem(KV.of(blueTeam, 11)));
          assertThat(input, hasItem(KV.of(redTeam, 27)));
          return null;
        });
    PAssert.thatMap(teamScores)
        // The closing behavior of CalculateTeamScores precludes an inFinalPane matcher
        .inOnTimePane(window)
        .isEqualTo(ImmutableMap.<String, Integer>builder().put(redTeam, 7)
            .put(blueTeam, 11)
            .build());

    // No final pane is emitted for the blue team, as all of their updates have been taken into
    // account in earlier panes
    PAssert.that(teamScores).inFinalPane(window).containsInAnyOrder(KV.of(redTeam, 27));

    p.run().waitUntilFinish();
  }

  /**
   * A test where elements arrive beyond the maximum allowed lateness. These elements are dropped
   * within {@link CalculateTeamScores} and do not impact the final result.
   */
  @Test
  public void testTeamScoresDroppablyLate() {

    BoundedWindow window = new IntervalWindow(baseTime, TEAM_WINDOW_DURATION);
    TestStream<GameActionInfo> infos = TestStream.create(AvroCoder.of(GameActionInfo.class))
        .addElements(event(TestUser.BLUE_ONE, 12, Duration.ZERO),
            event(TestUser.RED_ONE, 3, Duration.ZERO))
        .advanceWatermarkTo(window.maxTimestamp())
        .addElements(event(TestUser.RED_ONE, 4, Duration.standardMinutes(2)),
            event(TestUser.BLUE_TWO, 3, Duration.ZERO),
            event(TestUser.BLUE_ONE, 3, Duration.standardMinutes(3)))
        // Move the watermark to the end of the window to output on time
        .advanceWatermarkTo(baseTime.plus(TEAM_WINDOW_DURATION))
        // Move the watermark past the end of the allowed lateness plus the end of the window
        .advanceWatermarkTo(baseTime.plus(ALLOWED_LATENESS)
            .plus(TEAM_WINDOW_DURATION).plus(Duration.standardMinutes(1)))
        // These elements within the expired window are droppably late, and will not appear in the
        // output
        .addElements(
            event(TestUser.BLUE_TWO, 3, TEAM_WINDOW_DURATION.minus(Duration.standardSeconds(5))),
            event(TestUser.RED_ONE, 7, Duration.standardMinutes(4)))
        .advanceWatermarkToInfinity();

    PCollection<KV<String, Integer>> teamScores = p.apply(infos)
        .apply(new CalculateTeamScores(TEAM_WINDOW_DURATION, ALLOWED_LATENESS));

    String blueTeam = TestUser.BLUE_ONE.getTeam();
    String redTeam = TestUser.RED_ONE.getTeam();

    // Only one on-time pane and no late panes should be emitted
    PAssert.that(teamScores)
        .inWindow(window)
        .containsInAnyOrder(KV.of(redTeam, 7), KV.of(blueTeam, 18));
    // No elements are added before the watermark passes the end of the window plus the allowed
    // lateness, so no refinement should be emitted
    PAssert.that(teamScores).inFinalPane(window).empty();

    p.run().waitUntilFinish();
  }

  /**
   * A test where elements arrive both on-time and late in {@link CalculateUserScores}, which emits
   * output into the {@link GlobalWindow}. All elements that arrive should be taken into account,
   * even if they arrive later than the maximum allowed lateness.
   */
  @Test
  public void testUserScore() {

    TestStream<GameActionInfo> infos = TestStream.create(AvroCoder.of(GameActionInfo.class))
        .addElements(
            event(TestUser.BLUE_ONE, 12, Duration.ZERO),
            event(TestUser.RED_ONE, 3, Duration.ZERO))
        .advanceProcessingTime(Duration.standardMinutes(7))
        .addElements(
            event(TestUser.RED_ONE, 4, Duration.standardMinutes(2)),
            event(TestUser.BLUE_TWO, 3, Duration.ZERO),
            event(TestUser.BLUE_ONE, 3, Duration.standardMinutes(3)))
        .advanceProcessingTime(Duration.standardMinutes(5))
        .advanceWatermarkTo(baseTime.plus(ALLOWED_LATENESS).plus(Duration.standardHours(12)))
        // Late elements are always observable within the global window - they arrive before
        // the window closes, so they will appear in a pane, even if they arrive after the
        // allowed lateness, and are taken into account alongside on-time elements
        .addElements(
            event(TestUser.RED_ONE, 3, Duration.standardMinutes(7)),
            event(TestUser.RED_ONE, 2, (ALLOWED_LATENESS).plus(Duration.standardHours(13))))
        .advanceProcessingTime(Duration.standardMinutes(6))
        .addElements(event(TestUser.BLUE_TWO, 5, Duration.standardMinutes(12)))
        .advanceProcessingTime(Duration.standardMinutes(20))
        .advanceWatermarkToInfinity();

    PCollection<KV<String, Integer>> userScores = p.apply(infos).apply(new
CalculateUserScores(ALLOWED_LATENESS)); // User scores are emitted in speculative panes in the Global Window - this matcher choice // ensures that panes emitted by the watermark advancing to positive infinity are not included, // as that will not occur outside of tests PAssert.that(userScores) .inEarlyGlobalWindowPanes() .containsInAnyOrder(KV.of(TestUser.BLUE_ONE.getUser(), 15), KV.of(TestUser.RED_ONE.getUser(), 7), KV.of(TestUser.RED_ONE.getUser(), 12), KV.of(TestUser.BLUE_TWO.getUser(), 3), KV.of(TestUser.BLUE_TWO.getUser(), 8)); p.run().waitUntilFinish(); } @Test public void testLeaderBoardOptions() { PipelineOptionsFactory.as(LeaderBoard.Options.class); } private TimestampedValue<GameActionInfo> event( TestUser user, int score, Duration baseTimeOffset) { return TimestampedValue.of(new GameActionInfo(user.getUser(), user.getTeam(), score, baseTime.plus(baseTimeOffset).getMillis()), baseTime.plus(baseTimeOffset)); } }
package org.jgroups.tests;

import org.jgroups.*;
import org.jgroups.AnycastAddress;
import org.jgroups.util.Util;

import javax.management.*;
import java.io.*;
import java.lang.management.ManagementFactory;
import java.util.*;

/**
 * Runs the Total Order Anycast protocol and saves the messages delivered
 *
 * Note: this is used for debugging
 * Note2: this needs to be clean :)
 *
 * @author Pedro Ruivo
 * @since 3.1
 */
public class TestToaOrder {

    // Default protocol stack file and cluster name.
    private static final String PROPS = "toa.xml";
    private static final String CLUSTER = "test-toa-cluster";
    // Each node writes its delivered messages to "<address><suffix>".
    private static final String OUTPUT_FILE_SUFFIX = "-messages.txt";
    private static final String JMX_DOMAIN = "org.jgroups";

    private JChannel jChannel;
    private MyReceiver receiver;
    private int numberOfNodes;
    private int numberOfMessages;
    // Members that have not yet reported FINISH; guarded by its own monitor.
    private final List<Address> members = new LinkedList<Address>();
    private long start; // send-phase timestamps, System.nanoTime()
    private long stop;
    private long sentBytes = 0;
    private long sentMessages = 0;
    private String config;

    public static void main(String[] args) throws InterruptedException {
        System.out.println("==============");
        System.out.println("Test TOA Order");
        System.out.println("==============");
        ArgumentsParser argumentsParser = new ArgumentsParser(args);
        if (argumentsParser.isHelp()) {
            helpAndExit();
        } else if(argumentsParser.isTestOrder()) {
            // (commented-out offline order-verification code removed during review; see VCS
            //  history. NOTE(review): the parser never sets testOrder, so this branch is dead.)
        }
        TestToaOrder test = new TestToaOrder(
                argumentsParser.getNumberOfNodes(),
                argumentsParser.getNumberOfMessages(),
                argumentsParser.getConfig());
        try {
            test.startTest();
        } catch (Exception e) {
            System.err.println("Error while executing the test: " + e.getMessage());
            e.printStackTrace();
            System.exit(1);
        } finally {
            test.closeJChannel();
            System.out.println("============= FINISHED =============");
        }
        System.exit(0);
    }

    // Prints usage and terminates the JVM with status 1.
    private static void helpAndExit() {
        System.out.println("usage: " + TestToaOrder.class.getCanonicalName() + " <option>");
        System.out.println("Options:");
        System.out.println(" -h \tshow this message");
        System.out.println(" -nr-nodes <value> \tnumber of nodes");
        System.out.println(" -nr-messages <values> \tnumber of messages to send by each node");
        System.out.println(" -config <file> \tthe JGroup's configuration file");
        System.exit(1);
    }

    // ====================== arguments parser ======================
    /** Parses command-line flags; exits the JVM on invalid input. */
    private static class ArgumentsParser {
        private static final int NR_NODES = 4;
        private static final int NR_MESSAGES = 1000;

        private String[] args;
        private int numberOfNodes = -1;
        private int numberOfMessages = -1;
        private boolean help = false;
        private boolean testOrder = false; // NOTE(review): never set to true anywhere.
        private String[] filesPath = null; // NOTE(review): never assigned; getter always null.
        private String config = PROPS;

        public ArgumentsParser(String[] args) {
            this.args = args;
            parse();
            checkConfig();
        }

        private void parse() {
            try {
                for (int i = 0; i < args.length; ++i) {
                    if ("-h".equals(args[i])) {
                        help = true;
                    } else if ("-nr-nodes".equals(args[i])) {
                        numberOfNodes = Integer.parseInt(args[++i]);
                        if (numberOfNodes < NR_NODES) {
                            System.err.println("Number of nodes must be greater or equal to " + NR_NODES);
                            System.exit(1);
                        }
                    } else if ("-nr-messages".equals(args[i])) {
                        numberOfMessages = Integer.parseInt(args[++i]);
                        if (numberOfMessages <= 0) {
                            System.err.println("Number of messages must be greater than 0");
                            System.exit(1);
                        }
                    } else if ("-config".equals(args[i])) {
                        config = args[++i];
                    } else {
                        System.err.println("Unknown argument: " +args[i]);
                        helpAndExit();
                    }
                }
            } catch (Throwable t) {
                // Catches both NumberFormatException and missing-value AIOOBE from args[++i].
                System.err.println("Error processing arguments: " + t.getMessage());
                t.printStackTrace();
                System.exit(1);
            }
        }

        // Applies defaults for flags that were not supplied.
        private void checkConfig() {
            if (numberOfNodes == -1) {
                numberOfNodes = NR_NODES;
            }
            if (numberOfMessages == -1) {
                numberOfMessages = NR_MESSAGES;
            }
        }

        public boolean isHelp() {
            return help;
        }

        public boolean isTestOrder() {
            return testOrder;
        }

        public int getNumberOfNodes() {
            return numberOfNodes;
        }

        public int getNumberOfMessages() {
            return numberOfMessages;
        }

        public String[] getFilesPath() {
            return filesPath;
        }

        public String getConfig() {
            return config;
        }
    }

    // ====================== receiver ======================
    /** Collects delivered payloads and tracks view changes / FINISH notifications. */
    private static class MyReceiver extends ReceiverAdapter {
        private int expectedMembers;
        private int members = 0; // current view size; guarded by this
        private final List<String> messageList; // delivered payloads; guarded by itself
        private final TestToaOrder testGroupMulticastOrder;
        private long start = 0; // receive-phase timestamps, System.nanoTime()
        private long stop = 0;
        private long receivedBytes = 0;
        private int receivedMsgs = 0;

        public MyReceiver(int expectedMembers, TestToaOrder testGroupMulticastOrder) {
            this.expectedMembers = expectedMembers;
            this.testGroupMulticastOrder = testGroupMulticastOrder;
            this.messageList = new LinkedList<String>();
        }

        @Override
        public void receive(Message msg) {
            DataMessage dataMessage = (DataMessage) msg.getObject();
            switch (dataMessage.type) {
                case DataMessage.FINISH:
                    testGroupMulticastOrder.memberFinished(msg.getSrc());
                    break;
                case DataMessage.DATA:
                    if (start == 0) {
                        start = System.nanoTime(); // first DATA message starts the clock
                    }
                    synchronized (messageList) {
                        messageList.add(dataMessage.data);
                    }
                    // +1 mirrors the sender-side accounting.
                    // NOTE(review): getBytes() uses the platform default charset — confirm intent.
                    receivedBytes += (dataMessage.data.getBytes().length + 1);
                    receivedMsgs++;
                    stop = System.nanoTime();
                    break;
                default:
                    break;
            }
        }

        @Override
        public void viewAccepted(View view) {
            System.out.println("New View: " + view);
            super.viewAccepted(view);
            synchronized (this) {
                members = view.getMembers().size();
                // NOTE(review): notify() wakes one waiter; fine while only the test thread
                // waits in waitUntilClusterIsFormed — confirm if more waiters are added.
                this.notify();
            }
        }

        /** Blocks until the view contains at least {@code expectedMembers} nodes. */
        public synchronized void waitUntilClusterIsFormed() throws InterruptedException {
            while (members < expectedMembers) {
                System.out.println("Number of members is not the expected: " + members + " of " + expectedMembers);
                this.wait();
            }
        }

        /** Polls every 10s until {@code expectedMessages} payloads have been delivered. */
        public void await(int expectedMessages) throws InterruptedException {
            int actualSize;
            while (true) {
                synchronized (messageList) {
                    actualSize = messageList.size();
                }
                if (actualSize < expectedMessages) {
                    System.out.println("waiting messages... " + actualSize + " of " + expectedMessages);
                    Thread.sleep(10000);
                } else {
                    break;
                }
            }
        }

        // NOTE(review): exposes the internal list without a copy; callers must not
        // iterate it while messages may still arrive.
        public List<String> getMessageList() {
            return messageList;
        }

        /** Prints duration/throughput stats for the receive phase. */
        public void printReceiverInfo() {
            System.out.println("+++ Receiver Information +++");
            double duration = stop - start;
            duration /= 1000000.0; //nano to millis
            System.out.println("+ Duration (msec) = " + duration);
            System.out.println("+ Received Bytes = " + receivedBytes);
            System.out.println("+ Received Messages = " + receivedMsgs);
            duration /= 1000.0; //millis to sec
            System.out.println("---------------------");
            System.out.println("+ Receiving Throughput (bytes/sec) = " + (receivedBytes / duration));
            System.out.println("+ Receiving Messages (messages/sec) = " + (receivedMsgs / duration));
            System.out.println("-------------------------------------");
        }
    }

    // ====================== messages info (deliver before and after) ================
    // (two commented-out helper classes, MessageInfo and MessageInfo2, used by the dead
    //  order-verification path, removed during review; see VCS history)

    //======================= thread processing each input file =====================
    // (commented-out ProcessFile worker thread removed during review; see VCS history)

    //======================= data message =======================
    /** Wire payload: either a FINISH marker or a DATA message carrying a string. */
    private static class DataMessage implements Serializable {
        public transient static final byte FINISH = 1; //1 << 0
        public transient static final byte DATA = 1 << 1;
        private static final long serialVersionUID=5946678490588947910L;
        private byte type;
        private String data;
    }

    // ====================== other methods ======================
    public TestToaOrder(int numberOfNodes, int numberOfMessages, String config) {
        this.numberOfNodes = numberOfNodes;
        this.numberOfMessages = numberOfMessages;
        this.config = config;
    }

    /** Connects to the cluster, waits for the full view and registers JMX beans. */
    private void createJChannel() throws Exception {
        System.out.println("Creating Channel");
        receiver = new MyReceiver(numberOfNodes, this);
        jChannel = new JChannel(config);
        jChannel.setReceiver(receiver);
        jChannel.connect(CLUSTER);
        receiver.waitUntilClusterIsFormed();
        Util.registerChannel(jChannel, JMX_DOMAIN);
        members.addAll(jChannel.getView().getMembers());
    }

    // Builds an anycast destination of three consecutive members (wrapping around),
    // starting at this node's own position in the view.
    private AnycastAddress getDestinations(List<Address> members) {
        int rand = members.indexOf(jChannel.getAddress());
        AnycastAddress address = new AnycastAddress();
        address.add(members.get(rand++ % members.size()),
                members.get(rand++ % members.size()),
                members.get(rand % members.size()));
        return address;
    }

    /** Sends {@code numberOfMessages} DATA messages, tracking bytes/time for stats. */
    private void sendMessages() throws Exception {
        System.out.println("Start sending messages...");
        String address = jChannel.getAddressAsString();
        List<Address> mbrs = jChannel.getView().getMembers();
        start = System.nanoTime();
        for (int i = 0; i < numberOfMessages; ++i) {
            AnycastAddress dst = getDestinations(mbrs);
            Message message = new Message();
            message.setDest(dst);
            DataMessage dataMessage = new DataMessage();
            dataMessage.type = DataMessage.DATA;
            dataMessage.data = address + ":" + i; // payload is "<sender>:<seq>"
            message.setObject(dataMessage);
            jChannel.send(message);
            sentBytes += (dataMessage.data.getBytes().length + 1);
            sentMessages++;
        }
        stop = System.nanoTime();
        System.out.println("Finish sending messages...");
    }

    // Each message is anycast to 3 members, so each node expects 3x the per-node count.
    private void awaitUntilAllMessagesAreReceived() throws InterruptedException {
        int expectedMessages = 3 * numberOfMessages;
        receiver.await(expectedMessages);
    }

    /** Broadcasts FINISH and blocks until every member has reported FINISH back. */
    private void awaitUntilAllFinishes() throws Exception {
        DataMessage dataMessage = new DataMessage();
        dataMessage.type = DataMessage.FINISH;
        dataMessage.data = null;
        jChannel.send(null, dataMessage); // null destination = cluster-wide send
        synchronized (members) {
            if (!members.isEmpty()) {
                members.wait();
            }
        }
    }

    /** Prints duration/throughput stats for the send phase. */
    public void printSenderInfo() {
        System.out.println("+++ Sender Information +++");
        double duration = stop - start;
        duration /= 1000000.0; //nano to millis
        System.out.println("+ Duration (msec) = " + duration);
        System.out.println("+ Sent Bytes = " + sentBytes);
        System.out.println("+ Sent Messages = " + sentMessages);
        duration /= 1000.0; //millis to sec
        System.out.println("-------------------");
        System.out.println("+ Sent Throughput (bytes/sec) = " + (sentBytes / duration));
        System.out.println("+ Sent Messages (messages/sec) = " + (sentMessages / duration));
        System.out.println("--------------------------------");
    }

    /** Called from the receiver when {@code addr} reports FINISH; wakes the waiter when none remain. */
    public void memberFinished(Address addr) {
        synchronized (members) {
            members.remove(addr);
            if (members.isEmpty()) {
                members.notify();
            }
        }
    }

    public void closeJChannel() {
        System.out.println("Close channel");
        jChannel.close();
    }

    /** Full test run: connect, send, await delivery, dump messages to disk, print stats. */
    public void startTest() throws Exception {
        System.out.println("Start testing...");
        createJChannel();
        sendMessages();
        awaitUntilAllMessagesAreReceived();
        String filePath = jChannel.getAddressAsString() + OUTPUT_FILE_SUFFIX;
        System.out.println("Writing messages in " + filePath);
        // NOTE(review): the writer is not closed if a write throws — consider
        // try-with-resources. FileWriter also uses the platform default charset.
        FileWriter fileWriter = new FileWriter(filePath);
        for (String s : receiver.getMessageList()) {
            fileWriter.write(s);
            fileWriter.write("\n");
        }
        fileWriter.flush();
        fileWriter.close();
        System.out.println("All done!");
        awaitUntilAllFinishes();
        printSenderInfo();
        receiver.printReceiverInfo();
        printJMXStats();
    }

    /** Dumps all numeric (double/int) JMX attributes of the TOA protocol bean. */
    private static void printJMXStats() {
        MBeanServer mBeanServer = ManagementFactory.getPlatformMBeanServer();
        ObjectName groupMulticast = getGroupMulticastObjectName(mBeanServer);
        if (groupMulticast == null) {
            System.err.println("Unable to find the GROUP_MULTICAST protocol");
            return ;
        }
        try {
            System.out.println("======== JMX STATS =========");
            for (MBeanAttributeInfo mBeanAttributeInfo : mBeanServer.getMBeanInfo(groupMulticast).getAttributes()) {
                String attribute = mBeanAttributeInfo.getName();
                String type = mBeanAttributeInfo.getType();
                if (!type.equals("double") && !type.equals("int")) {
                    continue;
                }
                System.out.println(attribute + "=" + mBeanServer.getAttribute(groupMulticast, attribute));
            }
            System.out.println("======== JMX STATS =========");
        } catch (Exception e) {
            System.err.println("Error collecting stats" + e.getLocalizedMessage());
        }
    }

    // Finds the MBean registered for the "TOA" protocol within the JGroups JMX domain.
    private static ObjectName getGroupMulticastObjectName(MBeanServer mBeanServer) {
        for(ObjectName name : mBeanServer.queryNames(null, null)) {
            if(name.getDomain().equals(JMX_DOMAIN)) {
                if ("TOA".equals(name.getKeyProperty("protocol"))) {
                    return name;
                }
            }
        }
        return null;
    }
}
/*
 * Copyright 2016 JBoss, by Red Hat, Inc
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.uberfire.ext.widgets.table.client;

import java.util.List;

import org.gwtbootstrap3.client.ui.Button;
import org.gwtbootstrap3.client.ui.Label;
import org.uberfire.ext.widgets.table.client.resources.UFTableResources;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.Style;
import com.google.gwt.event.shared.HandlerRegistration;
import com.google.gwt.uibinder.client.UiBinder;
import com.google.gwt.uibinder.client.UiField;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.ColumnSortEvent;
import com.google.gwt.user.cellview.client.ColumnSortList;
import com.google.gwt.user.cellview.client.RowStyles;
import com.google.gwt.user.client.ui.Composite;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.HTML;
import com.google.gwt.user.client.ui.HasWidgets;
import com.google.gwt.user.client.ui.HorizontalPanel;
import com.google.gwt.user.client.ui.Widget;
import com.google.gwt.view.client.CellPreviewEvent;
import com.google.gwt.view.client.HasData;
import com.google.gwt.view.client.ProvidesKey;
import com.google.gwt.view.client.Range;
import com.google.gwt.view.client.RangeChangeEvent;
import com.google.gwt.view.client.RowCountChangeEvent;
import com.google.gwt.view.client.SelectionModel;

/**
 * A composite Widget that shows rows of data (not-paged) and a "column picker"
 * to allow columns to be hidden from view. Columns can also be sorted.
 * <p>
 * Most {@link HasData} methods simply delegate to the wrapped {@link DataGrid}.
 * Field names annotated with {@code @UiField} are bound by UiBinder and must not
 * be renamed without updating the corresponding ui.xml template.
 */
public class UberfireSimpleTable<T> extends Composite
        implements HasData<T> {

    interface Binder extends UiBinder<Widget, UberfireSimpleTable> {

    }

    private static final Binder uiBinder = GWT.create( Binder.class );

    @UiField(provided = true)
    public Button columnPickerButton;

    @UiField(provided = true)
    public DataGrid<T> dataGrid;

    @UiField
    public HorizontalPanel toolbarContainer;

    @UiField
    public HorizontalPanel rightToolbar;

    @UiField
    public FlowPanel rightActionsToolbar;

    @UiField
    public FlowPanel leftToolbar;

    @UiField
    public FlowPanel centerToolbar;

    // Caption shown when the grid has no rows; null/blank means "use the placeholder".
    private String emptyTableCaption;

    protected UberfireColumnPicker<T> columnPicker;

    public UberfireSimpleTable() {
        setupDataGrid( null );
        setupGridTable();
    }

    public UberfireSimpleTable( final ProvidesKey<T> providesKey ) {
        setupDataGrid( providesKey );
        setupGridTable();
    }

    /** Applies grid styling, wires the column picker and binds the UI template. */
    protected void setupGridTable() {
        setupDataGrid();
        setEmptyTableWidget();

        setupColumnPicker();
        columnPickerButton = getColumnPicker().createToggleButton();

        initWidget( makeWidget() );
    }

    protected void setupColumnPicker() {
        setColumnPicker( new UberfireColumnPicker<>( dataGrid ) );
    }

    protected UberfireColumnPicker<T> getColumnPicker() {
        return columnPicker;
    }

    protected void setColumnPicker( UberfireColumnPicker<T> columnPicker ) {
        this.columnPicker = columnPicker;
    }

    /**
     * Creates the backing grid, optionally keyed.
     * @param providesKey key provider for row identity, or null for index-based identity
     */
    protected void setupDataGrid( ProvidesKey<T> providesKey ) {
        if ( providesKey != null ) {
            dataGrid = new DataGrid<T>( providesKey );
        } else {
            dataGrid = new DataGrid<T>();
        }
    }

    /** Sets the caption shown when the table is empty and refreshes the empty-table widget. */
    public void setEmptyTableCaption( final String emptyTableCaption ) {
        this.emptyTableCaption = emptyTableCaption;
        setEmptyTableWidget();
    }

    protected void setupDataGrid() {
        dataGrid.setSkipRowHoverCheck( false );
        dataGrid.setSkipRowHoverStyleUpdate( false );
        dataGrid.addStyleName( UFTableResources.INSTANCE.CSS().dataGridMain() );
        dataGrid.addStyleName( UFTableResources.INSTANCE.CSS().dataGrid() );
        dataGrid.setRowStyles( ( row, rowIndex ) -> UFTableResources.INSTANCE.CSS().dataGridRow() );
        addDataGridStyles( dataGrid.getElement(),
                           UFTableResources.INSTANCE.CSS().dataGridHeader(),
                           UFTableResources.INSTANCE.CSS().dataGridContent() );
    }

    protected void setEmptyTableWidget() {
        // Fall back to a placeholder unless a non-blank caption was provided.
        String caption = "-----";
        if ( emptyCaptionIsDefined() ) {
            caption = emptyTableCaption;
        }
        dataGrid.setEmptyTableWidget( new Label( caption ) );
    }

    /**
     * @return true when a non-blank empty-table caption has been set.
     * Fixed during review: this helper previously returned the OPPOSITE of what its
     * name implied (true when the caption was null/blank), forcing a confusing
     * double negation at the call site. Semantics inverted here together with the
     * caller above; net behavior is unchanged.
     */
    private boolean emptyCaptionIsDefined() {
        return emptyTableCaption != null && !emptyTableCaption.trim().isEmpty();
    }

    // JSNI: tags the grid's header/content tables with the given CSS classes.
    protected static native void addDataGridStyles( final JavaScriptObject grid,
                                                    final String header,
                                                    final String content )/*-{
        $wnd.jQuery(grid).find('table:first').addClass(header);
        $wnd.jQuery(grid).find('table:last').addClass(content);
    }-*/;

    protected Widget makeWidget() {
        return uiBinder.createAndBindUi( this );
    }

    public void redraw() {
        dataGrid.redraw();
        dataGrid.flush();
    }

    /** Forces a data reload for the current visible range. */
    public void refresh() {
        dataGrid.setVisibleRangeAndClearData( dataGrid.getVisibleRange(),
                                              true );
    }

    // --- HasData and handler registration: straight delegation to the DataGrid ---

    @Override
    public HandlerRegistration addCellPreviewHandler( final CellPreviewEvent.Handler<T> handler ) {
        return dataGrid.addCellPreviewHandler( handler );
    }

    @Override
    public HandlerRegistration addRangeChangeHandler( final RangeChangeEvent.Handler handler ) {
        return dataGrid.addRangeChangeHandler( handler );
    }

    @Override
    public HandlerRegistration addRowCountChangeHandler( final RowCountChangeEvent.Handler handler ) {
        return dataGrid.addRowCountChangeHandler( handler );
    }

    public int getColumnIndex( final Column<T, ?> column ) {
        return dataGrid.getColumnIndex( column );
    }

    public HandlerRegistration addColumnSortHandler( final ColumnSortEvent.Handler handler ) {
        return this.dataGrid.addColumnSortHandler( handler );
    }

    @Override
    public int getRowCount() {
        return dataGrid.getRowCount();
    }

    @Override
    public Range getVisibleRange() {
        return dataGrid.getVisibleRange();
    }

    @Override
    public boolean isRowCountExact() {
        return dataGrid.isRowCountExact();
    }

    @Override
    public void setRowCount( final int count ) {
        dataGrid.setRowCount( count );
    }

    @Override
    public void setRowCount( final int count,
                             final boolean isExact ) {
        dataGrid.setRowCount( count,
                              isExact );
    }

    @Override
    public void setVisibleRange( final int start,
                                 final int length ) {
        dataGrid.setVisibleRange( start,
                                  length );
    }

    @Override
    public void setVisibleRange( final Range range ) {
        dataGrid.setVisibleRange( range );
    }

    @Override
    public SelectionModel<? super T> getSelectionModel() {
        return dataGrid.getSelectionModel();
    }

    @Override
    public T getVisibleItem( final int indexOnPage ) {
        return dataGrid.getVisibleItem( indexOnPage );
    }

    @Override
    public int getVisibleItemCount() {
        return dataGrid.getVisibleItemCount();
    }

    @Override
    public Iterable<T> getVisibleItems() {
        return dataGrid.getVisibleItems();
    }

    @Override
    public void setRowData( final int start,
                            final List<? extends T> values ) {
        dataGrid.setRowData( start,
                             values );
        redraw();
    }

    public void setRowData( final List<? extends T> values ) {
        dataGrid.setRowData( values );
        redraw();
    }

    @Override
    public void setSelectionModel( final SelectionModel<? super T> selectionModel ) {
        dataGrid.setSelectionModel( selectionModel );
    }

    public void setSelectionModel( final SelectionModel<? super T> selectionModel,
                                   final CellPreviewEvent.Handler<T> selectionEventManager ) {
        dataGrid.setSelectionModel( selectionModel,
                                    selectionEventManager );
    }

    @Override
    public void setVisibleRangeAndClearData( final Range range,
                                             final boolean forceRangeChangeEvent ) {
        dataGrid.setVisibleRangeAndClearData( range,
                                              forceRangeChangeEvent );
    }

    @Override
    public void setHeight( final String height ) {
        dataGrid.setHeight( height );
    }

    @Override
    public void setPixelSize( final int width,
                              final int height ) {
        dataGrid.setPixelSize( width,
                               height );
    }

    @Override
    public void setSize( final String width,
                         final String height ) {
        dataGrid.setSize( width,
                          height );
    }

    @Override
    public void setWidth( final String width ) {
        dataGrid.setWidth( width );
    }

    /** Sets a column's width and lets the picker re-balance the remaining columns. */
    public void setColumnWidth( final Column<T, ?> column,
                                final double width,
                                final Style.Unit unit ) {
        dataGrid.setColumnWidth( column,
                                 width,
                                 unit );
        getColumnPicker().adjustColumnWidths();
    }

    public void setToolBarVisible( final boolean visible ) {
        toolbarContainer.setVisible( visible );
    }

    public ColumnSortList getColumnSortList() {
        return dataGrid.getColumnSortList();
    }

    public HasWidgets getToolbar() {
        return toolbarContainer;
    }

    public HasWidgets getRightToolbar() {
        return rightToolbar;
    }

    public HasWidgets getRightActionsToolbar() {
        return rightActionsToolbar;
    }

    public HasWidgets getLeftToolbar() {
        return leftToolbar;
    }

    public HasWidgets getCenterToolbar() {
        return centerToolbar;
    }

    public void setRowStyles( final RowStyles<T> styles ) {
        dataGrid.setRowStyles( styles );
    }

    public void addTableTitle( String tableTitle ) {
        getLeftToolbar().add( new HTML( "<h4>" + tableTitle + "</h4>" ) );
    }

    public void setAlwaysShowScrollBars( boolean alwaysShowScrollBars ) {
        dataGrid.setAlwaysShowScrollBars( alwaysShowScrollBars );
    }

    /** Adds a visible column with the given header caption. */
    public void addColumn( final Column<T, ?> column,
                           final String caption ) {
        addColumn( column,
                   caption,
                   true );
    }

    public void addColumn( final Column<T, ?> column,
                           final String caption,
                           final boolean visible ) {
        final ColumnMeta<T> columnMeta = new ColumnMeta<T>( column,
                                                            caption,
                                                            visible );
        addColumn( columnMeta );
    }

    /** Adds several columns at once, creating default headers where missing. */
    public void addColumns( final List<ColumnMeta<T>> columnMetas ) {
        for ( ColumnMeta<T> columnMeta : columnMetas ) {
            if ( columnMeta.getHeader() == null ) {
                columnMeta.setHeader( getColumnHeader( columnMeta.getCaption(),
                                                       columnMeta.getColumn() ) );
            }
        }
        getColumnPicker().addColumns( columnMetas );
    }

    protected void addColumn( final ColumnMeta<T> columnMeta ) {
        if ( columnMeta.getHeader() == null ) {
            columnMeta.setHeader( getColumnHeader( columnMeta.getCaption(),
                                                   columnMeta.getColumn() ) );
        }
        getColumnPicker().addColumn( columnMeta );
    }

    /** Builds a draggable/resizable header that notifies {@link #afterColumnChangedHandler()}. */
    protected ResizableMovableHeader<T> getColumnHeader( final String caption,
                                                         final Column column ) {
        final ResizableMovableHeader header = new ResizableMovableHeader<T>( caption,
                                                                             dataGrid,
                                                                             columnPicker,
                                                                             column ) {
            @Override
            protected int getTableBodyHeight() {
                return dataGrid.getOffsetHeight();
            }
        };
        header.addColumnChangedHandler( new ColumnChangedHandler() {

            @Override
            public void afterColumnChanged() {
                afterColumnChangedHandler();
            }

            @Override
            public void beforeColumnChanged() {

            }
        } );
        return header;
    }

    public void setColumnPickerButtonVisible( final boolean show ) {
        columnPickerButton.setVisible( show );
    }

    // Hook for subclasses; invoked after a column is moved or resized.
    protected void afterColumnChangedHandler() {

    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair;

import java.io.File;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.resource.ResourceType;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.Resources;

/**
 * Typed accessor over the Fair Scheduler's configuration knobs.  Most keys
 * live under the {@code yarn.scheduler.fair.} prefix (see {@code CONF_PREFIX});
 * the increment-allocation and reservation-threshold keys are under the
 * generic {@code yarn.} prefix and are looked up in yarn-site.xml.
 */
@Private
@Evolving
public class FairSchedulerConfiguration extends Configuration {
  public static final Log LOG = LogFactory.getLog(
      FairSchedulerConfiguration.class.getName());

  /** Increment request grant-able by the RM scheduler.
   * These properties are looked up in the yarn-site.xml. */
  public static final String RM_SCHEDULER_INCREMENT_ALLOCATION_MB =
      YarnConfiguration.YARN_PREFIX + "scheduler.increment-allocation-mb";
  public static final int DEFAULT_RM_SCHEDULER_INCREMENT_ALLOCATION_MB = 1024;
  public static final String RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES =
      YarnConfiguration.YARN_PREFIX + "scheduler.increment-allocation-vcores";
  public static final int DEFAULT_RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES = 1;

  /** Threshold for container size for making a container reservation as a
   * multiple of increment allocation. Only container sizes above this are
   * allowed to reserve a node. */
  public static final String
      RM_SCHEDULER_RESERVATION_THRESHOLD_INCREMENT_MULTIPLE =
      YarnConfiguration.YARN_PREFIX
          + "scheduler.reservation-threshold.increment-multiple";
  public static final float
      DEFAULT_RM_SCHEDULER_RESERVATION_THRESHOLD_INCREMENT_MULTIPLE = 2f;

  /** Common prefix for all Fair Scheduler-specific keys below. */
  private static final String CONF_PREFIX = "yarn.scheduler.fair.";

  /** Path of the allocation (queues) file; defaults to a classpath resource. */
  public static final String ALLOCATION_FILE = CONF_PREFIX + "allocation.file";
  protected static final String DEFAULT_ALLOCATION_FILE = "fair-scheduler.xml";

  /** Whether to enable the Fair Scheduler event log. */
  public static final String EVENT_LOG_ENABLED = CONF_PREFIX + "event-log-enabled";
  public static final boolean DEFAULT_EVENT_LOG_ENABLED = false;
  // NOTE: unlike the other keys, this one is NOT prefixed with CONF_PREFIX —
  // it is looked up verbatim as "eventlog.dir" (see getEventlogDir()).
  protected static final String EVENT_LOG_DIR = "eventlog.dir";

  /** Whether pools can be created that were not specified in the FS
   * configuration file. */
  protected static final String ALLOW_UNDECLARED_POOLS =
      CONF_PREFIX + "allow-undeclared-pools";
  protected static final boolean DEFAULT_ALLOW_UNDECLARED_POOLS = true;

  /** Whether to use the user name as the queue name (instead of "default") if
   * the request does not specify a queue. */
  protected static final String USER_AS_DEFAULT_QUEUE =
      CONF_PREFIX + "user-as-default-queue";
  protected static final boolean DEFAULT_USER_AS_DEFAULT_QUEUE = true;

  // -1 means "no threshold configured"; shared default for node and rack.
  protected static final float DEFAULT_LOCALITY_THRESHOLD = -1.0f;

  /** Cluster threshold for node locality. */
  protected static final String LOCALITY_THRESHOLD_NODE =
      CONF_PREFIX + "locality.threshold.node";
  protected static final float DEFAULT_LOCALITY_THRESHOLD_NODE =
      DEFAULT_LOCALITY_THRESHOLD;

  /** Cluster threshold for rack locality. */
  protected static final String LOCALITY_THRESHOLD_RACK =
      CONF_PREFIX + "locality.threshold.rack";
  protected static final float DEFAULT_LOCALITY_THRESHOLD_RACK =
      DEFAULT_LOCALITY_THRESHOLD;

  /** Delay for node locality (ms); -1 means not configured. */
  protected static final String LOCALITY_DELAY_NODE_MS =
      CONF_PREFIX + "locality-delay-node-ms";
  protected static final long DEFAULT_LOCALITY_DELAY_NODE_MS = -1L;

  /** Delay for rack locality (ms); -1 means not configured. */
  protected static final String LOCALITY_DELAY_RACK_MS =
      CONF_PREFIX + "locality-delay-rack-ms";
  protected static final long DEFAULT_LOCALITY_DELAY_RACK_MS = -1L;

  /** Enable continuous scheduling or not. */
  protected static final String CONTINUOUS_SCHEDULING_ENABLED =
      CONF_PREFIX + "continuous-scheduling-enabled";
  protected static final boolean DEFAULT_CONTINUOUS_SCHEDULING_ENABLED = false;

  /** Sleep time of each pass in continuous scheduling (5 ms by default). */
  protected static final String CONTINUOUS_SCHEDULING_SLEEP_MS =
      CONF_PREFIX + "continuous-scheduling-sleep-ms";
  protected static final int DEFAULT_CONTINUOUS_SCHEDULING_SLEEP_MS = 5;

  /** Whether preemption is enabled. */
  protected static final String PREEMPTION = CONF_PREFIX + "preemption";
  protected static final boolean DEFAULT_PREEMPTION = false;

  /** Cluster-utilization fraction above which preemption may kick in. */
  protected static final String PREEMPTION_THRESHOLD =
      CONF_PREFIX + "preemption.cluster-utilization-threshold";
  protected static final float DEFAULT_PREEMPTION_THRESHOLD = 0.8f;

  /** Grace period (ms) before a preempted container is actually killed. */
  protected static final String WAIT_TIME_BEFORE_KILL =
      CONF_PREFIX + "waitTimeBeforeKill";
  protected static final int DEFAULT_WAIT_TIME_BEFORE_KILL = 15000;

  /**
   * Configurable delay (ms) before an app's starvation is considered after
   * it is identified. This is to give the scheduler enough time to
   * allocate containers post preemption. This delay is added to the
   * {@link #WAIT_TIME_BEFORE_KILL} and enough heartbeats.
   *
   * This is intended to be a backdoor on production clusters, and hence
   * intentionally not documented.
   */
  protected static final String WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS =
      CONF_PREFIX + "waitTimeBeforeNextStarvationCheck";
  protected static final long
      DEFAULT_WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS = 10000;

  /** Whether to assign multiple containers in one check-in. */
  public static final String ASSIGN_MULTIPLE = CONF_PREFIX + "assignmultiple";
  protected static final boolean DEFAULT_ASSIGN_MULTIPLE = false;

  /** Whether to give more weight to apps requiring many resources. */
  protected static final String SIZE_BASED_WEIGHT =
      CONF_PREFIX + "sizebasedweight";
  protected static final boolean DEFAULT_SIZE_BASED_WEIGHT = false;

  /** Whether the per-heartbeat container-assignment cap should be computed
   * dynamically, rather than taken from {@link #MAX_ASSIGN}.  (This is a
   * boolean toggle — see {@link #isMaxAssignDynamic()} — not a count.) */
  public static final String DYNAMIC_MAX_ASSIGN =
      CONF_PREFIX + "dynamic.max.assign";
  private static final boolean DEFAULT_DYNAMIC_MAX_ASSIGN = true;

  /**
   * Specify exact number of containers to assign on each heartbeat, if dynamic
   * max assign is turned off.  -1 means unlimited.
   */
  protected static final String MAX_ASSIGN = CONF_PREFIX + "max.assign";
  protected static final int DEFAULT_MAX_ASSIGN = -1;

  /** The update interval for calculating resources in FairScheduler. */
  public static final String UPDATE_INTERVAL_MS =
      CONF_PREFIX + "update-interval-ms";
  public static final int DEFAULT_UPDATE_INTERVAL_MS = 500;

  /** Ratio of nodes available for an app to make an reservation on. */
  public static final String RESERVABLE_NODES =
      CONF_PREFIX + "reservable-nodes";
  public static final float RESERVABLE_NODES_DEFAULT = 0.05f;

  /** Creates an empty configuration (no resources loaded beyond defaults). */
  public FairSchedulerConfiguration() {
    super();
  }

  /** Creates a configuration backed by an existing {@link Configuration}. */
  public FairSchedulerConfiguration(Configuration conf) {
    super(conf);
  }

  /** @return the smallest allocatable container (memory MB, vcores),
   *          read from the generic RM scheduler minimum-allocation keys. */
  public Resource getMinimumAllocation() {
    int mem = getInt(
        YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
    int cpu = getInt(
        YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_VCORES);
    return Resources.createResource(mem, cpu);
  }

  /** @return the largest allocatable container (memory MB, vcores),
   *          read from the generic RM scheduler maximum-allocation keys. */
  public Resource getMaximumAllocation() {
    int mem = getInt(
        YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB);
    int cpu = getInt(
        YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_VCORES);
    return Resources.createResource(mem, cpu);
  }

  /** @return the allocation granularity: requests are rounded up to
   *          multiples of this resource. */
  public Resource getIncrementAllocation() {
    int incrementMemory = getInt(
        RM_SCHEDULER_INCREMENT_ALLOCATION_MB,
        DEFAULT_RM_SCHEDULER_INCREMENT_ALLOCATION_MB);
    int incrementCores = getInt(
        RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES,
        DEFAULT_RM_SCHEDULER_INCREMENT_ALLOCATION_VCORES);
    return Resources.createResource(incrementMemory, incrementCores);
  }

  /** @return the reservation threshold as a multiple of the increment. */
  public float getReservationThresholdIncrementMultiple() {
    return getFloat(
        RM_SCHEDULER_RESERVATION_THRESHOLD_INCREMENT_MULTIPLE,
        DEFAULT_RM_SCHEDULER_RESERVATION_THRESHOLD_INCREMENT_MULTIPLE);
  }

  /** @return the node-locality threshold; -1 when unconfigured. */
  public float getLocalityThresholdNode() {
    return getFloat(LOCALITY_THRESHOLD_NODE, DEFAULT_LOCALITY_THRESHOLD_NODE);
  }

  /** @return the rack-locality threshold; -1 when unconfigured. */
  public float getLocalityThresholdRack() {
    return getFloat(LOCALITY_THRESHOLD_RACK, DEFAULT_LOCALITY_THRESHOLD_RACK);
  }

  /** @return whether continuous scheduling is enabled. */
  public boolean isContinuousSchedulingEnabled() {
    return getBoolean(CONTINUOUS_SCHEDULING_ENABLED,
        DEFAULT_CONTINUOUS_SCHEDULING_ENABLED);
  }

  /** @return sleep time (ms) between continuous-scheduling passes. */
  public int getContinuousSchedulingSleepMs() {
    return getInt(CONTINUOUS_SCHEDULING_SLEEP_MS,
        DEFAULT_CONTINUOUS_SCHEDULING_SLEEP_MS);
  }

  /** @return node-locality delay (ms); -1 when unconfigured. */
  public long getLocalityDelayNodeMs() {
    return getLong(LOCALITY_DELAY_NODE_MS, DEFAULT_LOCALITY_DELAY_NODE_MS);
  }

  /** @return rack-locality delay (ms); -1 when unconfigured. */
  public long getLocalityDelayRackMs() {
    return getLong(LOCALITY_DELAY_RACK_MS, DEFAULT_LOCALITY_DELAY_RACK_MS);
  }

  /** @return whether preemption is enabled. */
  public boolean getPreemptionEnabled() {
    return getBoolean(PREEMPTION, DEFAULT_PREEMPTION);
  }

  /** @return cluster-utilization fraction above which preemption may occur. */
  public float getPreemptionUtilizationThreshold() {
    return getFloat(PREEMPTION_THRESHOLD, DEFAULT_PREEMPTION_THRESHOLD);
  }

  /** @return whether multiple containers may be assigned per check-in. */
  public boolean getAssignMultiple() {
    return getBoolean(ASSIGN_MULTIPLE, DEFAULT_ASSIGN_MULTIPLE);
  }

  /** @return whether the max-assign cap is computed dynamically. */
  public boolean isMaxAssignDynamic() {
    return getBoolean(DYNAMIC_MAX_ASSIGN, DEFAULT_DYNAMIC_MAX_ASSIGN);
  }

  /** @return static max containers assigned per heartbeat; -1 = unlimited. */
  public int getMaxAssign() {
    return getInt(MAX_ASSIGN, DEFAULT_MAX_ASSIGN);
  }

  /** @return whether app weight should scale with its resource demand. */
  public boolean getSizeBasedWeight() {
    return getBoolean(SIZE_BASED_WEIGHT, DEFAULT_SIZE_BASED_WEIGHT);
  }

  /** @return whether the Fair Scheduler event log is enabled. */
  public boolean isEventLogEnabled() {
    return getBoolean(EVENT_LOG_ENABLED, DEFAULT_EVENT_LOG_ENABLED);
  }

  /** @return event-log directory; defaults to
   *          ${hadoop.log.dir:-/tmp/}/fairscheduler. */
  public String getEventlogDir() {
    return get(EVENT_LOG_DIR, new File(System.getProperty("hadoop.log.dir",
        "/tmp/")).getAbsolutePath() + File.separator + "fairscheduler");
  }

  /** @return delay (ms) before re-checking an identified starved app. */
  public long getWaitTimeBeforeNextStarvationCheck() {
    return getLong(WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS,
        DEFAULT_WAIT_TIME_BEFORE_NEXT_STARVATION_CHECK_MS);
  }

  /** @return grace period (ms) before a preempted container is killed. */
  public int getWaitTimeBeforeKill() {
    return getInt(WAIT_TIME_BEFORE_KILL, DEFAULT_WAIT_TIME_BEFORE_KILL);
  }

  /** @return whether the NM port is included in node names (for scheduling
   *          against different ports of the same host in tests). */
  public boolean getUsePortForNodeName() {
    return getBoolean(YarnConfiguration.RM_SCHEDULER_INCLUDE_PORT_IN_NODE_NAME,
        YarnConfiguration.DEFAULT_RM_SCHEDULER_USE_PORT_FOR_NODE_NAME);
  }

  /** @return fraction of nodes an app may make reservations on. */
  public float getReservableNodes() {
    return getFloat(RESERVABLE_NODES, RESERVABLE_NODES_DEFAULT);
  }

  /**
   * Parses a resource config value in one of two forms:
   * <ul>
   *   <li>Percentages — "50%" (the same percentage is applied to every
   *       resource type) or "40% memory, 60% cpu";</li>
   *   <li>Absolute resources — "1024 mb, 3 vcores".</li>
   * </ul>
   * The value is lower-cased before matching, so units are case-insensitive.
   * NOTE: in the absolute form BOTH the "mb" and "vcores" units must be
   * present — a bare number such as "1024" does not match either unit in
   * {@link #findResource} and is rejected with an
   * AllocationConfigurationException.  Fractional absolute values (e.g.
   * "1024.5 mb") are accepted by the regex but the fraction is silently
   * truncated (only the integer digits are parsed).
   *
   * @throws AllocationConfigurationException if the value cannot be parsed
   */
  public static ConfigurableResource parseResourceConfigValue(String val)
      throws AllocationConfigurationException {
    ConfigurableResource configurableResource;
    try {
      val = StringUtils.toLowerCase(val);
      if (val.contains("%")) {
        configurableResource = new ConfigurableResource(
            getResourcePercentage(val));
      } else {
        int memory = findResource(val, "mb");
        int vcores = findResource(val, "vcores");
        configurableResource = new ConfigurableResource(
            BuilderUtils.newResource(memory, vcores));
      }
    } catch (AllocationConfigurationException ex) {
      throw ex;
    } catch (Exception ex) {
      // Wrap anything unexpected (e.g. NumberFormatException) with context,
      // preserving the original cause.
      throw new AllocationConfigurationException(
          "Error reading resource config", ex);
    }
    return configurableResource;
  }

  /**
   * Converts a percentage config value into per-resource-type fractions
   * (0..1).  A single "N%" entry is applied uniformly to all resource types;
   * otherwise "X% memory, Y% cpu" is expected.
   * NOTE(review): indices 0 and 1 below assume ResourceType.values() orders
   * memory before cpu — confirm against ResourceType.
   *
   * @throws AllocationConfigurationException on a malformed percentage
   */
  private static double[] getResourcePercentage(
      String val)
      throws AllocationConfigurationException {
    double[] resourcePercentage = new double[ResourceType.values().length];
    String[] strings = val.split(",");
    if (strings.length == 1) {
      double percentage = findPercentage(strings[0], "");
      for (int i = 0; i < ResourceType.values().length; i++) {
        resourcePercentage[i] = percentage/100;
      }
    } else {
      resourcePercentage[0] = findPercentage(val, "memory")/100;
      resourcePercentage[1] = findPercentage(val, "cpu")/100;
    }
    return resourcePercentage;
  }

  /**
   * Finds "N% units" inside {@code val} and returns N (0-100 scale, may have
   * a fractional part).  An empty {@code units} matches a bare percentage.
   *
   * @throws AllocationConfigurationException if no match is found; the
   *         message distinguishes bad-percentage from missing-resource
   */
  private static double findPercentage(String val, String units)
      throws AllocationConfigurationException {
    final Pattern pattern =
        Pattern.compile("((\\d+)(\\.\\d*)?)\\s*%\\s*" + units);
    Matcher matcher = pattern.matcher(val);
    if (!matcher.find()) {
      if (units.equals("")) {
        throw new AllocationConfigurationException("Invalid percentage: " +
            val);
      } else {
        throw new AllocationConfigurationException("Missing resource: " +
            units);
      }
    }
    return Double.parseDouble(matcher.group(1));
  }

  /** @return interval (ms) between FairScheduler resource recalculations. */
  public long getUpdateInterval() {
    return getLong(UPDATE_INTERVAL_MS,
        DEFAULT_UPDATE_INTERVAL_MS);
  }

  /**
   * Finds "N units" inside {@code val} and returns N as an int.  The regex
   * tolerates a decimal fraction after the digits, but only group(1) — the
   * integer digits — is parsed, so any fraction is silently dropped.
   *
   * @throws AllocationConfigurationException if {@code units} is not found
   */
  private static int findResource(String val, String units)
      throws AllocationConfigurationException {
    final Pattern pattern = Pattern.compile("(\\d+)(\\.\\d*)?\\s*" + units);
    Matcher matcher = pattern.matcher(val);
    if (!matcher.find()) {
      throw new AllocationConfigurationException("Missing resource: " + units);
    }
    return Integer.parseInt(matcher.group(1));
  }
}
/* * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package flex2.compiler.mxml.analyzer; import flash.css.StyleParser; import flash.css.StyleSheet; import flash.css.StyleParser.StyleSheetInvalidCharset; import flash.fonts.FontManager; import flash.util.FileUtils; import flex2.compiler.CompilationUnit; import flex2.compiler.io.VirtualFile; import flex2.compiler.mxml.*; import flex2.compiler.mxml.dom.*; import flex2.compiler.mxml.lang.StandardDefs; import flex2.compiler.mxml.lang.TextParser; import flex2.compiler.util.CompilerMessage; import flex2.compiler.util.QName; import flex2.compiler.util.ThreadLocalToolkit; import flex2.compiler.util.CompilerMessage.CompilerError; import flex2.compiler.mxml.InvalidStateSpecificValue; import java.io.*; import java.util.Iterator; import java.util.List; import java.util.Set; /** * This analyzer serves two purposes: * a) verify syntax tree, e.g. checking language tag attributes * b) register includes and dependencies */ public class SyntaxAnalyzer extends AnalyzerAdapter { /** * The root node of the document for this compilation unit. 
*/ private DocumentNode documentNode; public SyntaxAnalyzer(CompilationUnit unit, MxmlConfiguration mxmlConfiguration) { super(unit, mxmlConfiguration); } /** * At parse-time, we want to register dependent packages/classes... */ public void analyze(Node node) { if (node instanceof DocumentNode) documentNode = (DocumentNode)node; /** * NOTE: since this analyzer runs at parse time, the information that * would allow us to distinguish <mx:SomeComponent/> from * <mx:childPropertyAssignment/> is not yet (guaranteed to be) * available. As a result, both types of nodes will pass through this * method, so we can't yet raise errors when a tag name fails to resolve * to an implementing class. */ super.analyze(node); } public void analyze(LayeredNode node) { analyze((Node) node); } public void analyze(CDATANode node) { // do nothing } public void analyze(StateNode node) { String name = (String) node.getAttributeValue("name"); // Prior to Flex 4, name could be a binding expression. if (getDocumentVersion() >= 4 && name != null && TextParser.isBindingExpression(name)) { log(node, node.getLineNumber("name"), new CompileTimeAttributeBindingExpressionUnsupported()); } super.analyze(node); } public void analyze(StyleNode node) { checkForExtraAttributes(StyleNode.attributes, node); String source = (String) node.getAttributeValue("source"); CDATANode cdata = (CDATANode) node.getChildAt(0); if (source != null && cdata != null) { log(node, node.getLineNumber("source"), new IgnoreEmbeddedStylesheet()); } if (source != null) { if (TextParser.isBindingExpression(source)) { log(node, node.getLineNumber("source"), new CompileTimeAttributeBindingExpressionUnsupported()); return; } // C: Look at the problem this way, AS3 can have [Embed], MXML can have @embed, CSS can have @embed. // AS3 and MXML can "import" each others types. Does it make sense for AS3 or MXML to "import" CSS? // Currently, external CSS stylesheets are pulled in and codegen within MXML-generated classes. 
Can // CSS be generated in a separate class/factory and make MXML "import" it? // // Can CSS embedded within <mx:Style> be generated within the MXML-generated class as an inner class? VirtualFile file = unit.getSource().resolve(source); if (file == null) { VirtualFile[] sourcePath = mxmlConfiguration.getSourcePath(); if (sourcePath != null) { for (int i = 0; (i < sourcePath.length) && (file == null); i++) { file = sourcePath[i].resolve(source); } } } if (file == null) { log(node, node.getLineNumber("source"), new StylesheetNotFound(source)); } else { unit.getSource().addFileInclude(file); cdata = parseExternalFile(node, file); if (cdata != null) { // parseStyle(node, unit.getSource().getName(), cdata); parseStyle(node, file.getName(), file.getLastModified(), cdata); } } } else if (cdata != null) { parseStyle(node, unit.getSource().getName(), unit.getSource().getLastModified(), cdata.beginLine); } } public void analyze(ScriptNode node) { checkForExtraAttributes(ScriptNode.attributes, node); script(node); } public void analyze(MetaDataNode node) { checkForExtraAttributes(MetaDataNode.attributes, node); } public void analyze(ModelNode node) { checkForExtraAttributes(ModelNode.attributes, node); String source = (String) node.getAttributeValue("source"); int count = node.getChildCount(); if (source != null && count > 0) { log(node, node.getLineNumber("source"), new EmptyTagIfSourceSpecified()); } if (source != null) { if (TextParser.isBindingExpression(source)) { log(node, node.getLineNumber("source"), new CompileTimeAttributeBindingExpressionUnsupported()); return; } // parse external XML file... VirtualFile f = unit.getSource().resolve(source); if (f == null) { log(node, node.getLineNumber("source"), new ModelNotFound(source)); } else { unit.getSource().addFileInclude(f); Node root = parseExternalXML(node, f); // C: 2.0 behavior: don't remove the root tag for <mx:Model>. it should be similar to // <mx:XML> w.r.t. syntactical processing. 
if (root != null) { node.setSourceFile(new Node[] {root}); } /* C: 1.x behavior... int size = (root == null) ? 0 : root.getChildCount(); if (size > 0) { if (size == 1 && root.getChildAt(0) instanceof CDATANode) { log(node, node.getLineNumber("source"), new ScalarContentOnlyUnsupportedInExternalModel()); } else { // C: Keep the document structure intact. Add the source-based nodes to ModelNode separately // from the children... Node[] nodes = new Node[size]; for (int j = 0; j < size; j++) { nodes[j] = (Node) root.getChildAt(j); } node.setSourceFile(nodes); } } */ } } } public void analyze(XMLNode node) { checkForExtraAttributes(XMLNode.attributes, node); String source = (String) node.getAttributeValue("source"); // C: count = 0 or 1 CDATA or multiple child tags int count = node.getChildCount(); if (source != null && count > 0) { log(node, node.getLineNumber("source"), new IgnoreInlineXML()); } if (source != null) { if (TextParser.isBindingExpression(source)) { log(node, node.getLineNumber("source"), new CompileTimeAttributeBindingExpressionUnsupported()); return; } // parse external XML file... 
VirtualFile f = unit.getSource().resolve(source); if (f == null) { log(node, node.getLineNumber("source"), new XMLNotFound(source)); } else { unit.getSource().addFileInclude(f); Node root = parseExternalXML(node, f); if (root != null) { node.setSourceFile(new Node[] {root}); } } } } public void analyze(XMLListNode node) { checkForExtraAttributes(XMLListNode.attributes, node); } public void analyze(ArrayNode node) { checkForExtraAttributes(ArrayNode.attributes, node); super.analyze(node); } public void analyze(VectorNode node) { checkForExtraAttributes(VectorNode.attributes, node); super.analyze(node); } public void analyze(BindingNode node) { checkForExtraAttributes(BindingNode.attributes, node); String source = (String) node.getAttributeValue("source"); if (source == null || source.trim().length() == 0) { log(node, new BindingMustHaveSource()); } String destination = (String) node.getAttributeValue("destination"); if (destination == null || destination.trim().length() == 0) { log(node, new BindingMustHaveDestination()); } // source and destination attributes must be unique (whitespace counts) if (source != null && destination != null && source.equals(destination)) { log(node, new BindingMustHaveUniqueSourceDestination()); } String twoWay = (String) node.getAttributeValue("twoWay"); if (twoWay != null && TextParser.isBindingExpression(twoWay)) { log(node, node.getLineNumber("twoWay"), new CompileTimeAttributeBindingExpressionUnsupported()); } } public void analyze(ReparentNode node) { checkForExtraAttributes(ReparentNode.attributes, node); String target = (String) node.getAttributeValue("target"); if (target == null || target.trim().length() == 0) { log(node, new ReparentMustHaveTarget()); } else if (TextParser.isBindingExpression(target)) { log(node, node.getLineNumber("target"), new CompileTimeAttributeBindingExpressionUnsupported()); } String includeIn = (String) node.getAttributeValue(StandardDefs.PROP_INCLUDE_STATES); String excludeFrom = (String) 
node.getAttributeValue(StandardDefs.PROP_EXCLUDE_STATES); if ((includeIn == null || includeIn.trim().length() == 0) && (excludeFrom == null || excludeFrom.trim().length() == 0)) { log(node, new ReparentMustHaveStates()); } } public void analyze(LibraryNode node) { checkForExtraAttributes(LibraryNode.attributes, node); // If present, the Library tag must be the first child of a document // (which implies that there can be only one Library tag per // document), although an exception to this rule is that a special // <mask> element may precede it. int i = 0; while (i < documentNode.getChildCount()) { Node nextNode = (Node)documentNode.getChildAt(i++); if (nextNode.getLocalPart() == StandardDefs.GRAPHICS_MASK) continue; // If this node is not the particular Library node being analyzed, // log an error. if (nextNode != node) { log(node, node.beginLine, new LibraryMustBeFirstChildOfDocumentError()); } break; } // We call super here to traverse child DefinitionNodes for further // validation. super.analyze(node); } public void analyze(DeclarationsNode node) { checkForExtraAttributes(DeclarationsNode.attributes, node); super.analyze(node); } public void analyze(DefinitionNode node) { checkForExtraAttributes(DefinitionNode.attributes, node); String definitionName = (String) node.getAttributeValue(StandardDefs.GRAPHICS_DEFINITION_NAME); if (definitionName == null || definitionName.trim().length() == 0) { log(node, node.getLineNumber(StandardDefs.GRAPHICS_DEFINITION_NAME), new DefinitionMustHaveNameError()); } if (node.getChildCount() != 1) { log(node, node.beginLine, new DefinitionMustHaveOneChildError(definitionName)); } super.analyze(node); } public void analyze(PrivateNode node) { checkForExtraAttributes(PrivateNode.attributes, node); // If present, the Private tag must be the last child of a document // (which implies that there can be only one Private tag per // document). An exception to this rule is a special mask tag which // can appear anywhere in a document. 
int i = documentNode.getChildCount() - 1; while (i >= 0) { Node lastNode = (Node)documentNode.getChildAt(i--); if (StandardDefs.GRAPHICS_MASK.equals(lastNode.getLocalPart())) continue; // If this node is not the particular Private node being analyzed, // log an error. if (lastNode != node) { log(node, node.beginLine, new PrivateMustBeLastChildOfDocumentError()); } break; } } public void analyze(StringNode node) { checkForExtraAttributes(StringNode.attributes, node); primitive(node); } public void analyze(NumberNode node) { checkForExtraAttributes(NumberNode.attributes, node); primitive(node); } public void analyze(IntNode node) { checkForExtraAttributes(IntNode.attributes, node); primitive(node); } public void analyze(UIntNode node) { checkForExtraAttributes(UIntNode.attributes, node); primitive(node); } public void analyze(BooleanNode node) { checkForExtraAttributes(BooleanNode.attributes, node); primitive(node); } public void analyze(RequestNode node) { checkForExtraAttributes(RequestNode.attributes, node); super.analyze(node); } public void analyze(ArgumentsNode node) { checkForExtraAttributes(ArgumentsNode.attributes, node); super.analyze(node); } public void analyze(InlineComponentNode node) { checkForExtraAttributes(InlineComponentNode.attributes, node); if (node.getChildCount() == 0) { log(node, new InlineComponentMustHaveOneChild()); } super.analyze(node); } public void analyze(DesignLayerNode node) { checkForExtraAttributes(DesignLayerNode.attributes, node); super.analyze(node); } protected void traverse(Node node) { for (int i = 0; i < node.getChildCount(); i++) { Node child = (Node) node.getChildAt(i); child.analyze(this); if (child instanceof DesignLayerNode) { List<Token> designInfoChildren = child.getChildren(); // Replace the DesignLayerNode with it's children. node.replaceNode(i, designInfoChildren); // Update 'i' by adding the size of the DesignLayerNode // children and subtracting 1 for the DesignLayerNode. 
i += designInfoChildren.size() - 1; // Here we make sure to take note of any DesignLayer // declarations (those with ids) that aren't directly // associated with layer children. Otherwise we would // miss them when later generating our top level // declarations. if (child.getAttributeValue(StandardDefs.PROP_ID) != null && designInfoChildren.size() == 0) { documentNode.layerDeclarationNodes.add((DesignLayerNode)child); } } } } protected int getDocumentVersion() { return documentNode != null ? documentNode.getVersion() : 0; } protected String getLanguageNamespace() { return documentNode != null ? documentNode.getLanguageNamespace() : null; } private void checkForExtraAttributes(Set<QName> validAttributes, Node node) { for (Iterator<QName> attributes = node.getAttributeNames(); attributes != null && attributes.hasNext();) { QName qname = attributes.next(); String namespace = qname.getNamespace(); String localPart = qname.getLocalPart(); // If this attribute is state-specific we want to only validate against // the unqualified attribute identifier. Boolean isScoped = TextParser.isScopedName(localPart); if (isScoped) { String[] statefulName = TextParser.analyzeScopedName(localPart); qname = (statefulName != null) ? new QName(namespace, statefulName[0]) : qname; } if (!validAttributes.contains(qname)) { if (localPart.equals(StandardDefs.PROP_INCLUDE_STATES) || localPart.equals(StandardDefs.PROP_EXCLUDE_STATES)) { log(node, node.getLineNumber(qname), new InvalidStateAttributeUsage(node.getLocalPart())); } else { // Prior to Flex 4, qualified attributes were never allowed // so report anything as an unknown attribute. if (getDocumentVersion() < 4) { log(node, node.getLineNumber(qname), new UnknownAttribute(qname, node.image)); } else { // In Flex 4 (and later), qualified attributes are // allowed. If they are in the language namespace or // the component node's namespace they must be // understood. 
Any other namespace is simply ignored // as the attributes may serve as design time metadata // for tools. However, note that unqualified attributes // must be understood by the compiler. if (namespace == null || namespace.length() == 0 || namespace.equals(node.getNamespace()) || namespace.equals(documentNode.getLanguageNamespace())) { log(node, node.getLineNumber(qname), new UnknownAttribute(qname, node.image)); } } } } else if (isScoped && !(node instanceof DesignLayerNode) ) { // Language attributes may not be state-specific. log(node.getLineNumber(qname), new InvalidStateSpecificValue(qname.getLocalPart())); } } } private void script(ScriptNode node) { String source = (String) node.getAttributeValue("source"); CDATANode cdata = (CDATANode) node.getChildAt(0); if (source != null && cdata != null) { log(node, node.getLineNumber("source"), new IgnoreInlineScript()); } // C: Again, all source="..." must be registered to unit.includes. if (source != null) { if (TextParser.isBindingExpression(source)) { log(node, node.getLineNumber("source"), new CompileTimeAttributeBindingExpressionUnsupported()); return; } VirtualFile f = unit.getSource().resolve(source); if (f == null) { log(node, node.getLineNumber("source"), new ScriptNotFound(source)); } else { unit.getSource().addFileInclude(f); CDATANode n = parseExternalFile(node, f); // C: We want to keep the document structure intact and parse the external file up-front. Store // the source="..." content in ScriptNode. 
if (n != null) { cdata = n; node.setSourceFile(n); } } } } private void primitive(PrimitiveNode node) { String source = (String) node.getAttributeValue("source"); CDATANode cdata = (CDATANode) node.getChildAt(0); if (source != null && cdata != null) { log(node, node.getLineNumber("source"), new IgnoreEmbeddedString()); } if (source != null) { if (TextParser.isBindingExpression(source)) { log(node, node.getLineNumber("source"), new CompileTimeAttributeBindingExpressionUnsupported()); return; } // parse external plain text... VirtualFile f = unit.getSource().resolve(source); if (f == null) { log(node, node.getLineNumber("source"), new PrimitiveFileNotFound(source)); } else { unit.getSource().addFileInclude(f); CDATANode n = parseExternalFile(node, f); // C: We want to keep the document structure intact and parse the external file up-front. Store // the source="..." content in PrimitiveNode. if (n != null) { cdata = n; node.setSourceFile(n); } } } } private Node parseExternalXML(Node node, VirtualFile f) { BufferedInputStream in = null; Node anonymousObject = null; try { in = new BufferedInputStream(f.getInputStream()); MxmlScanner s = new MxmlScanner(in, mxmlConfiguration.enableRuntimeDesignLayers()); Parser p = new Parser(s); MxmlVisitor v = new SyntaxTreeBuilder(); p.setVisitor(v); anonymousObject = (Node) p.parseAnonymousObject(); } catch (ScannerError se) { log(node, new XMLParseProblem1(f.getName(), se.getLineNumber(), se.getReason())); } catch (ParseException ex) { log(node, new XMLParseProblem2(f.getName())); Token token = ex.currentToken.next; logError(node, token.beginLine, ex.getMessage()); } catch (IOException ex) { log(node, new XMLParseProblem3(f.getName(), ex.getMessage())); } finally { if (in != null) { try { in.close(); } catch (IOException ex) { } } } return anonymousObject; } private CDATANode parseExternalFile(Node node, VirtualFile f) { BufferedReader reader = null; CDATANode cdata = null; try { BufferedInputStream bufferedInputStream = new 
BufferedInputStream(f.getInputStream()); String charsetName = null; // special handling to get the charset for CSS files. if (f.getName().toLowerCase().endsWith(".css")) { try { charsetName = StyleParser.readCSSCharset(bufferedInputStream); } catch (StyleSheetInvalidCharset e) { // add filename to exception and log warning. log(node, new StyleSheetInvalidCharset(f.getName(), e.charsetName)); return null; } } String bomCharsetName = FileUtils.consumeBOM(bufferedInputStream, null, true); if (charsetName == null) { charsetName = bomCharsetName; } reader = new BufferedReader(new InputStreamReader(bufferedInputStream, charsetName)); StringWriter buffer = new StringWriter(); PrintWriter out = new PrintWriter(buffer); String str = null; while ((str = reader.readLine()) != null) { out.println(str); } out.flush(); cdata = new CDATANode(); cdata.image = buffer.toString().trim(); } catch (FileNotFoundException ex) { // f is not null. don't think this will happen. log(node, new ExternalFileNotFound(f.getName())); } catch (IOException ex) { log(node, new ParseFileProblem(f.getName(), ex.getMessage())); } finally { if (reader != null) { try { reader.close(); } catch (IOException ex) { } } } return cdata; } private void parseStyle(StyleNode node, String stylePath, long lastModified, CDATANode cdata) { FontManager fontManager = mxmlConfiguration.getFontsConfiguration().getTopLevelManager(); StyleSheet styleSheet = new StyleSheet(); styleSheet.checkDeprecation(mxmlConfiguration.showDeprecationWarnings()); styleSheet.parse(stylePath, new StringReader(cdata.image), ThreadLocalToolkit.getLogger(), fontManager); if (styleSheet.errorsExist()) { // Error log(node, new StyleSheetParseError(stylePath)); } node.setStyleSheet(styleSheet); } private void parseStyle(StyleNode node, String enclosingDocumentPath, long lastModified, int startLine) { FontManager fontManager = mxmlConfiguration.getFontsConfiguration().getTopLevelManager(); CDATANode cdata = (CDATANode) node.getChildAt(0); 
StyleSheet styleSheet = new StyleSheet(); styleSheet.checkDeprecation(mxmlConfiguration.showDeprecationWarnings()); styleSheet.parse(enclosingDocumentPath, startLine, new StringReader(cdata.image), ThreadLocalToolkit.getLogger(), fontManager); if (styleSheet.errorsExist()) { // Error log(node, new StyleSheetParseError(enclosingDocumentPath)); } node.setStyleSheet(styleSheet); } // error messages public static class IgnoreEmbeddedStylesheet extends CompilerMessage.CompilerWarning { private static final long serialVersionUID = -663088524822264581L; public IgnoreEmbeddedStylesheet() { super(); } } public static class CompileTimeAttributeBindingExpressionUnsupported extends CompilerMessage.CompilerError { private static final long serialVersionUID = -3787694300539037935L; public CompileTimeAttributeBindingExpressionUnsupported() { super(); } } public static class StylesheetNotFound extends CompilerMessage.CompilerError { private static final long serialVersionUID = 6265512596325307132L; public StylesheetNotFound(String source) { super(); this.source = source; } public final String source; } public static class EmptyTagIfSourceSpecified extends CompilerMessage.CompilerWarning { private static final long serialVersionUID = 6683414194026602697L; public EmptyTagIfSourceSpecified() { super(); } } public static class ModelNotFound extends CompilerMessage.CompilerError { private static final long serialVersionUID = 5004903591499990705L; public ModelNotFound(String source) { super(); this.source = source; } public final String source; } public static class ScalarContentOnlyUnsupportedInExternalModel extends CompilerMessage.CompilerError { private static final long serialVersionUID = 7778134403904275975L; public ScalarContentOnlyUnsupportedInExternalModel() { super(); } } public static class IgnoreInlineScript extends CompilerMessage.CompilerWarning { private static final long serialVersionUID = 8940525017916497366L; public IgnoreInlineScript() { super(); } } public static 
// ---------------------------------------------------------------------------
// Localizable compiler message payloads (continued from above).  Each nested
// class is a serializable warning/error; its public final fields are the
// parameters substituted into the localized message template by the
// CompilerMessage framework.  serialVersionUID values must not change.
// ---------------------------------------------------------------------------

// Warning: inline <mx:XML> content is ignored.
class IgnoreInlineXML extends CompilerMessage.CompilerWarning
{
    private static final long serialVersionUID = 4976631970422220456L;
    public IgnoreInlineXML() { super(); }
}

// Error: the external XML document named by 'source' could not be found.
public static class XMLNotFound extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 632658792662647542L;
    public XMLNotFound(String source) { super(); this.source = source; }
    public final String source;
}

// Error: a <mx:Binding> tag is missing its source attribute.
public static class BindingMustHaveSource extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -5367924918244642096L;
    public BindingMustHaveSource() { super(); }
}

// Error: a <mx:Binding> tag is missing its destination attribute.
public static class BindingMustHaveDestination extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 2060746809575116784L;
    public BindingMustHaveDestination() { super(); }
}

// Error: duplicate source/destination pair on a <mx:Binding> tag.
// NOTE(review): extends the imported CompilerError directly, unlike its
// siblings which qualify it as CompilerMessage.CompilerError — same type,
// kept as written.
public static class BindingMustHaveUniqueSourceDestination extends CompilerError
{
    private static final long serialVersionUID = -7116545090937761064L;
    public BindingMustHaveUniqueSourceDestination() { super(); }
}

// Error: attribute 'qname' is not recognized on tag 'tag'.
public static class UnknownAttribute extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 6364683156804532037L;
    public UnknownAttribute(QName qname, String tag) { super(); this.qname = qname; this.tag = tag; }
    public final QName qname;
    public final String tag;
}

// Error: the external script named by 'source' could not be found.
public static class ScriptNotFound extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 1688968001018529008L;
    public ScriptNotFound(String source) { super(); this.source = source; }
    public final String source;
}

// Warning: embedded string content is ignored.
public static class IgnoreEmbeddedString extends CompilerMessage.CompilerWarning
{
    private static final long serialVersionUID = -4800647048554425238L;
    public IgnoreEmbeddedString() { super(); }
}

// Error: the primitive's external file named by 'source' could not be found.
public static class PrimitiveFileNotFound extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 1097562596712781756L;
    public PrimitiveFileNotFound(String source) { super(); this.source = source; }
    public final String source;
}

// Error: XML parse failure in file 'name' at 'line', with parser 'reason'.
public static class XMLParseProblem1 extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 6245404102161978415L;
    public XMLParseProblem1(String name, int line, String reason)
    {
        super();
        this.name = name;
        this.line = line;
        this.reason = reason;
    }
    public final String name;
    public final int line;
    public final String reason;
}

// Error: XML parse failure in file 'name' (no further detail).
public static class XMLParseProblem2 extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -6202816852895893830L;
    public XMLParseProblem2(String name) { super(); this.name = name; }
    public final String name;
}

// Error: XML parse failure in file 'name' with detail 'message'.
public static class XMLParseProblem3 extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 8934855322492753302L;
    public XMLParseProblem3(String name, String message) { super(); this.name = name; this.message = message; }
    public final String name;
    public final String message;
}

// Error: a referenced external file 'name' does not exist.
public static class ExternalFileNotFound extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 5308420983986384234L;
    public ExternalFileNotFound(String name) { super(); this.name = name; }
    public final String name;
}

// Error: reading/parsing external file 'name' failed with 'message'.
public static class ParseFileProblem extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -2982048188724576242L;
    public ParseFileProblem(String name, String message) { super(); this.name = name; this.message = message; }
    public final String name;
    public final String message;
}

// Error: the CSS style sheet at 'stylePath' failed to parse.
public static class StyleSheetParseError extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -7734934094694932051L;
    public StyleSheetParseError(String stylePath) { super(); this.stylePath = stylePath; }
    public final String stylePath;
}

// Error: an inline component must declare exactly one child.
public static class InlineComponentMustHaveOneChild extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -8013015924130843086L;
    public InlineComponentMustHaveOneChild() { super(); }
}

// Error: a <fx:Definition> tag is missing its name attribute.
public static class
DefinitionMustHaveNameError extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 2473265116122447983L;
    public DefinitionMustHaveNameError() { super(); }
}

// Error: the definition 'name' must have exactly one child.
// NOTE(review): 'name' is a mutable public field here (declared before the
// constructor), unlike the final fields of the sibling classes — kept as
// written since serialization/readers may depend on it.
public static class DefinitionMustHaveOneChildError extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -4954341364049052865L;
    public String name;
    public DefinitionMustHaveOneChildError(String name) { super(); this.name = name; }
}

// Error: <fx:Library> must be the first child of the document.
public static class LibraryMustBeFirstChildOfDocumentError extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -8197039600346556673L;
    public LibraryMustBeFirstChildOfDocumentError() { super(); }
}

// Error: <fx:Private> must be the last child of the document.
public static class PrivateMustBeLastChildOfDocumentError extends CompilerError
{
    private static final long serialVersionUID = 2883815035659543585L;
    public PrivateMustBeLastChildOfDocumentError() { super(); }
}

// Error: <fx:Reparent> requires a target attribute.
public static class ReparentMustHaveTarget extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = 9187442166720946682L;
    public ReparentMustHaveTarget() { super(); }
}

// Error: <fx:Reparent> requires a states attribute.
public static class ReparentMustHaveStates extends CompilerMessage.CompilerError
{
    private static final long serialVersionUID = -9048719337863206820L;
    public ReparentMustHaveStates() { super(); }
}
}
/* * Copyright 2007 Yusuke Yamamoto * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package twitter4j.examples.stream; import twitter4j.*; /** * <p>This is a code example of Twitter4J Streaming API - Site Streams support.<br> * Usage: java twitter4j.examples.stream.PrintSiteStreams [follow(comma separated numerical user ids)]<br> * </p> * * @author Yusuke Yamamoto - yusuke at mac.com */ public final class PrintSiteStreams { /** * Main entry of this application. * * @param args follow(comma separated user ids) track(comma separated filter terms) * @throws twitter4j.TwitterException */ public static void main(String[] args) throws TwitterException { if (args.length < 1) { System.out.println("Usage: java twitter4j.examples.PrintSiteStreams [follow(comma separated numerical user ids)]"); System.exit(-1); } TwitterStream twitterStream = new TwitterStreamFactory().getInstance(); twitterStream.addListener(listener); String[] split = args[0].split(","); long[] followArray = new long[split.length]; for (int i = 0; i < followArray.length; i++) { followArray[i] = Long.parseLong(split[i]); } // site() method internally creates a thread which manipulates TwitterStream and calls these adequate listener methods continuously. 
twitterStream.site(true, followArray); } private static final SiteStreamsListener listener = new SiteStreamsListener() { @Override public void onStatus(long forUser, Status status) { System.out.println("onStatus for_user:" + forUser + " @" + status.getUser().getScreenName() + " - " + status.getText()); } @Override public void onDeletionNotice(long forUser, StatusDeletionNotice statusDeletionNotice) { System.out.println("Got a status deletion notice for_user:" + forUser + " id:" + statusDeletionNotice.getStatusId()); } @Override public void onFriendList(long forUser, long[] friendIds) { System.out.print("onFriendList for_user:" + forUser); for (long friendId : friendIds) { System.out.print(" " + friendId); } System.out.println(); } @Override public void onFavorite(long forUser, User source, User target, Status favoritedStatus) { System.out.println("onFavorite for_user:" + forUser + " source:@" + source.getScreenName() + " target:@" + target.getScreenName() + " @" + favoritedStatus.getUser().getScreenName() + " - " + favoritedStatus.getText()); } @Override public void onUnfavorite(long forUser, User source, User target, Status unfavoritedStatus) { System.out.println("onUnFavorite for_user:" + forUser + " source:@" + source.getScreenName() + " target:@" + target.getScreenName() + " @" + unfavoritedStatus.getUser().getScreenName() + " - " + unfavoritedStatus.getText()); } @Override public void onFollow(long forUser, User source, User followedUser) { System.out.println("onFollow for_user:" + forUser + " source:@" + source.getScreenName() + " target:@" + followedUser.getScreenName()); } @Override public void onUnfollow(long forUser, User source, User followedUser) { System.out.println("onUnfollow for_user:" + forUser + " source:@" + source.getScreenName() + " target:@" + followedUser.getScreenName()); } @Override public void onDirectMessage(long forUser, DirectMessage directMessage) { System.out.println("onDirectMessage for_user:" + forUser + " text:" + 
directMessage.getText()); } @Override public void onDeletionNotice(long forUser, long directMessageId, long userId) { System.out.println("Got a direct message deletion notice for_user:" + forUser + " id:" + directMessageId); } @Override public void onUserListMemberAddition(long forUser, User addedMember, User listOwner, UserList list) { System.out.println("onUserListMemberAddition for_user:" + forUser + " member:@" + addedMember.getScreenName() + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserListMemberDeletion(long forUser, User deletedMember, User listOwner, UserList list) { System.out.println("onUserListMemberDeletion for_user:" + forUser + " member:@" + deletedMember.getScreenName() + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserListSubscription(long forUser, User subscriber, User listOwner, UserList list) { System.out.println("onUserListSubscribed for_user:" + forUser + " subscriber:@" + subscriber.getScreenName() + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserListUnsubscription(long forUser, User subscriber, User listOwner, UserList list) { System.out.println("onUserListUnsubscribed for_user:" + forUser + " subscriber:@" + subscriber.getScreenName() + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserListCreation(long forUser, User listOwner, UserList list) { System.out.println("onUserListCreated for_user:" + forUser + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserListUpdate(long forUser, User listOwner, UserList list) { System.out.println("onUserListUpdated for_user:" + forUser + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserListDeletion(long forUser, User listOwner, UserList list) { System.out.println("onUserListDestroyed 
for_user:" + forUser + " listOwner:@" + listOwner.getScreenName() + " list:" + list.getName()); } @Override public void onUserProfileUpdate(long forUser, User updatedUser) { System.out.println("onUserProfileUpdated for_user:" + forUser + " user:@" + updatedUser.getScreenName()); } @Override public void onUserDeletion(long forUser, long deletedUser) { System.out.println("onUserDeletion for_user:" + forUser + " user:@"); } @Override public void onUserSuspension(long forUser, long suspendedUser) { System.out.println("onUserSuspension for_user:" + forUser + " user:@" + suspendedUser); } @Override public void onBlock(long forUser, User source, User blockedUser) { System.out.println("onBlock for_user:" + forUser + " source:@" + source.getScreenName() + " target:@" + blockedUser.getScreenName()); } @Override public void onUnblock(long forUser, User source, User unblockedUser) { System.out.println("onUnblock for_user:" + forUser + " source:@" + source.getScreenName() + " target:@" + unblockedUser.getScreenName()); } @Override public void onDisconnectionNotice(String line) { System.out.println("onDisconnectionNotice:" + line); } @Override public void onException(Exception ex) { ex.printStackTrace(); System.out.println("onException:" + ex.getMessage()); } }; }
/* ******************************************************************* * * Copyright 2015 Intel Corporation. * *-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * *-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-= */ package org.iotivity.base.examples; import android.app.Activity; import android.os.Bundle; import android.os.Message; import android.text.method.ScrollingMovementMethod; import android.util.Log; import android.view.View; import android.widget.Button; import android.widget.TextView; import org.iotivity.base.ModeType; import org.iotivity.base.OcConnectivityType; import org.iotivity.base.OcException; import org.iotivity.base.OcHeaderOption; import org.iotivity.base.OcPlatform; import org.iotivity.base.OcRepresentation; import org.iotivity.base.OcResource; import org.iotivity.base.PlatformConfig; import org.iotivity.base.QualityOfService; import org.iotivity.base.ServiceType; import java.util.EnumSet; import java.util.HashMap; import java.util.LinkedList; import java.util.List; /** * FridgeClient * <p/> * FridgeClient is a sample client app which should be started after the fridgeServer is started. * It creates DeviceResource, DoorResources, LightResource and performs a GET operation on them. 
*/ public class FridgeClient extends Activity implements OcPlatform.OnResourceFoundListener, OcResource.OnGetListener { public static final String DEVICE_URI = "/device"; public static final String LIGHT = "/light"; public static final String LEFT_DOOR = "/door/left"; public static final String RIGHT_DOOR = "/door/right"; public static final String RANDOM_DOOR = "/door/random"; public static final String API_VERSION = "v.1.0"; public static final String CLIENT_TOKEN = "21ae43gf"; public static final int API_VERSION_KEY = 2048; public static final int CLIENT_TOKEN_KEY = 3000; private final List<OcResource> mResourceList = new LinkedList<OcResource>(); private OcResource mFridgeResource; /** * configure OIC platform and call findResource */ private void startFridgeClient() { PlatformConfig cfg = new PlatformConfig( this, // context ServiceType.IN_PROC, ModeType.CLIENT, QualityOfService.LOW); logMessage("Configuring platform"); OcPlatform.Configure(cfg); logMessage("Initiating fridge discovery"); try { OcPlatform.findResource("", OcPlatform.WELL_KNOWN_QUERY + "?rt=" + "intel.fridge", EnumSet.of(OcConnectivityType.CT_DEFAULT), this); } catch (OcException e) { logMessage(" Failed to discover resource"); Log.e(TAG, e.getMessage()); } logMessage("-----------------------------------------------------"); } /** * An event handler to be executed whenever a "findResource" request completes successfully * * @param ocResource found resource */ @Override public synchronized void onResourceFound(OcResource ocResource) { if (null != mFridgeResource || !ocResource.getUri().equals(DEVICE_URI)) { logMessage("Didn't find the correct fridge resource. 
Exiting"); return; } mFridgeResource = ocResource; logMessage("Discovered a fridge with \nHost: " + mFridgeResource.getHost()); List<String> lightTypes = new LinkedList<>(); lightTypes.add("intel.fridge.light"); List<String> doorTypes = new LinkedList<>(); doorTypes.add("intel.fridge.door"); List<String> resourceInterfaces = new LinkedList<>(); resourceInterfaces.add(OcPlatform.DEFAULT_INTERFACE); logMessage("Creating child resource proxies for the previously known fridge components"); OcResource light = null; OcResource leftDoor = null; OcResource rightDoor = null; OcResource randomDoor = null; try { light = OcPlatform.constructResourceObject(mFridgeResource.getHost(), LIGHT, mFridgeResource.getConnectivityTypeSet(), false, //isObservable lightTypes, resourceInterfaces); mResourceList.add(light); leftDoor = OcPlatform.constructResourceObject(mFridgeResource.getHost(), LEFT_DOOR, mFridgeResource.getConnectivityTypeSet(), false, //isObservable doorTypes, resourceInterfaces); mResourceList.add(leftDoor); rightDoor = OcPlatform.constructResourceObject(mFridgeResource.getHost(), RIGHT_DOOR, mFridgeResource.getConnectivityTypeSet(), false, //isObservable doorTypes, resourceInterfaces); mResourceList.add(rightDoor); randomDoor = OcPlatform.constructResourceObject(mFridgeResource.getHost(), RANDOM_DOOR, mFridgeResource.getConnectivityTypeSet(), false, //isObservable doorTypes, resourceInterfaces); mResourceList.add(randomDoor); } catch (OcException e) { logMessage("Error in constructResourceObject"); Log.e(TAG, e.getMessage()); } List<OcHeaderOption> headerOptions = new LinkedList<>(); OcHeaderOption apiVersion = new OcHeaderOption(API_VERSION_KEY, API_VERSION); OcHeaderOption clientToken = new OcHeaderOption(CLIENT_TOKEN_KEY, CLIENT_TOKEN); headerOptions.add(apiVersion); headerOptions.add(clientToken); try { mFridgeResource.setHeaderOptions(headerOptions); } catch (OcException e) { logMessage("Error in setHeaderOptions"); Log.e(TAG, e.getMessage()); } logMessage("Calling 
GET api on mFridgeResource and other component resources"); try { mFridgeResource.get(new HashMap<String, String>(), this); if (null != light) light.get(new HashMap<String, String>(), this); if (null != leftDoor) leftDoor.get(new HashMap<String, String>(), this); if (null != rightDoor) rightDoor.get(new HashMap<String, String>(), this); if (null != randomDoor) randomDoor.get(new HashMap<String, String>(), this); } catch (OcException e) { logMessage("Error in GET calls"); Log.e(TAG, e.getMessage()); } } @Override public synchronized void onFindResourceFailed(Throwable throwable, String uri) { logMessage("findResource request has failed"); Log.e(TAG, throwable.toString()); } /** * An event handler to be executed whenever a "get" request completes successfully * * @param headerOptionList list of the header options * @param ocRepresentation representation of a resource */ @Override public synchronized void onGetCompleted(List<OcHeaderOption> headerOptionList, OcRepresentation ocRepresentation) { logMessage("Got a response from " + ocRepresentation.getUri()); } /** * An event handler to be executed whenever a "get" request fails * * @param throwable exception */ @Override public synchronized void onGetFailed(Throwable throwable) { logMessage("GET request has failed"); Log.e(TAG, throwable.toString()); } //****************************************************************************** // End of the OIC specific code //****************************************************************************** @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_fridge_client); mConsoleTextView = (TextView) findViewById(R.id.consoleTextView); mConsoleTextView.setMovementMethod(new ScrollingMovementMethod()); final Button button = (Button) findViewById(R.id.button); if (null == savedInstanceState) { button.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { 
button.setEnabled(false); new Thread(new Runnable() { public void run() { startFridgeClient(); } }).start(); } }); } else { String consoleOutput = savedInstanceState.getString("consoleOutputString"); mConsoleTextView.setText(consoleOutput); } } private void logMessage(final String text) { runOnUiThread(new Runnable() { public void run() { final Message msg = new Message(); msg.obj = text; mConsoleTextView.append("\n"); mConsoleTextView.append(text); } }); Log.i(TAG, text); } private static String TAG = "FridgeClient: "; private TextView mConsoleTextView; }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.streaming; import java.io.DataOutputStream; import java.io.IOException; import org.apache.carbondata.core.datastore.compression.Compressor; import org.apache.carbondata.core.datastore.compression.CompressorFactory; import org.apache.carbondata.core.datastore.page.statistics.ColumnPageStatsCollector; import org.apache.carbondata.core.datastore.page.statistics.KeyPageStatsCollector; import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector; import org.apache.carbondata.core.datastore.page.statistics.SimpleStatsResult; import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex; import org.apache.carbondata.core.metadata.datatype.DataType; import org.apache.carbondata.core.metadata.datatype.DataTypes; import org.apache.carbondata.core.util.CarbonMetadataUtil; import org.apache.carbondata.core.util.CarbonUtil; import org.apache.carbondata.format.BlockletHeader; import org.apache.carbondata.format.BlockletIndex; import org.apache.carbondata.format.BlockletInfo; import org.apache.carbondata.format.MutationType; import org.apache.carbondata.streaming.segment.StreamSegment; /** * stream blocklet writer */ public class 
StreamBlockletWriter {
  // Row data accumulated for the current blocklet (grown on demand).
  private byte[] buffer;
  // Soft capacity limit in bytes; a blocklet is "full" once count reaches this.
  private int maxSize;
  // Maximum number of rows per blocklet.
  private int maxRowNum;
  // Estimated size of one row; used as the minimum growth step for the buffer.
  private final int rowSize;
  // Number of bytes currently written into buffer.
  private int count = 0;
  // Index of the current row; -1 means no row started yet.
  private int rowIndex = -1;
  private final Compressor compressor;
  private final int dimCountWithoutComplex;
  private final int measureCount;
  private final DataType[] measureDataTypes;

  // blocklet level stats
  ColumnPageStatsCollector[] dimStatsCollectors;
  ColumnPageStatsCollector[] msrStatsCollectors;
  // blocklet level Min/Max (lazily built from the stats collectors)
  private BlockletMinMaxIndex blockletMinMaxIndex;

  /**
   * Creates a writer for one stream blocklet.
   *
   * @param maxSize                byte-size threshold for a blocklet (also initial buffer size)
   * @param maxRowNum              row-count threshold for a blocklet
   * @param rowSize                estimated per-row size, used when growing the buffer
   * @param dimCountWithoutComplex dimension columns to collect stats for (complex types excluded)
   * @param measureCount           measure columns to collect stats for
   * @param measureDataTypes       data type per measure column
   * @param compressorName         name of the compressor used in appendBlocklet
   */
  StreamBlockletWriter(int maxSize, int maxRowNum, int rowSize, int dimCountWithoutComplex,
      int measureCount, DataType[] measureDataTypes, String compressorName) {
    buffer = new byte[maxSize];
    this.maxSize = maxSize;
    this.maxRowNum = maxRowNum;
    this.rowSize = rowSize;
    this.dimCountWithoutComplex = dimCountWithoutComplex;
    this.measureCount = measureCount;
    this.measureDataTypes = measureDataTypes;
    this.compressor = CompressorFactory.getInstance().getCompressor(compressorName);
    initializeStatsCollector();
  }

  // (Re)creates fresh per-column stats collectors for a new blocklet.
  private void initializeStatsCollector() {
    // dimension stats collectors
    // not require to collector stats for complex type
    // so it only contains dictionary dimensions and no-dictionary dimensions
    dimStatsCollectors = new ColumnPageStatsCollector[dimCountWithoutComplex];
    // measure stats collectors
    msrStatsCollectors = new ColumnPageStatsCollector[measureCount];

    int dimCount = 0;
    for (; dimCount < dimCountWithoutComplex; dimCount++) {
      dimStatsCollectors[dimCount] =
          KeyPageStatsCollector.newInstance(DataTypes.BYTE_ARRAY);
    }

    for (int msrCount = 0; msrCount < measureCount; msrCount++) {
      msrStatsCollectors[msrCount] =
          PrimitivePageStatsCollector.newInstance(measureDataTypes[msrCount]);
    }
  }

  // Grows buffer so that `space` more bytes fit; grows by at least one rowSize
  // to avoid repeated reallocation on many small writes.
  private void ensureCapacity(int space) {
    int newCount = space + count;
    if (newCount > buffer.length) {
      byte[] newBuffer = new byte[Math.max(newCount, buffer.length + rowSize)];
      System.arraycopy(buffer, 0, newBuffer, 0, count);
      buffer = newBuffer;
    }
  }

  // Resets writer state (data, row counter, stats, min/max) for the next blocklet.
  // Note: buffer capacity is retained.
  void reset() {
    count = 0;
    rowIndex = -1;
    initializeStatsCollector();
    blockletMinMaxIndex = null;
  }

  // Returns the internal buffer; only the first getCount() bytes are valid.
  byte[] getBytes() {
    return buffer;
  }

  int getCount() {
    return count;
  }

  int getRowIndex() {
    return rowIndex;
  }

  void nextRow() {
    rowIndex++;
  }

  // NOTE(review): shrinks both the byte threshold and the row threshold when a
  // row is skipped; the maxSize-- in particular looks deliberate but its
  // rationale is not visible here — confirm against the writer's caller.
  void skipRow() {
    maxSize--;
    maxRowNum--;
  }

  // Full when either the row-count or the byte-size threshold is reached.
  boolean isFull() {
    return rowIndex == maxRowNum || count >= maxSize;
  }

  // Writes a boolean as a single byte (1 = true, 0 = false).
  void writeBoolean(boolean val) {
    ensureCapacity(1);
    buffer[count] = (byte) (val ? 1 : 0);
    count += 1;
  }

  // Writes the low 16 bits of val, big-endian.
  void writeShort(int val) {
    ensureCapacity(2);
    buffer[count + 1] = (byte) (val);
    buffer[count] = (byte) (val >>> 8);
    count += 2;
  }

  // Writes a 32-bit int, big-endian.
  void writeInt(int val) {
    ensureCapacity(4);
    buffer[count + 3] = (byte) (val);
    buffer[count + 2] = (byte) (val >>> 8);
    buffer[count + 1] = (byte) (val >>> 16);
    buffer[count] = (byte) (val >>> 24);
    count += 4;
  }

  // Writes a 64-bit long, big-endian.
  void writeLong(long val) {
    ensureCapacity(8);
    buffer[count + 7] = (byte) (val);
    buffer[count + 6] = (byte) (val >>> 8);
    buffer[count + 5] = (byte) (val >>> 16);
    buffer[count + 4] = (byte) (val >>> 24);
    buffer[count + 3] = (byte) (val >>> 32);
    buffer[count + 2] = (byte) (val >>> 40);
    buffer[count + 1] = (byte) (val >>> 48);
    buffer[count] = (byte) (val >>> 56);
    count += 8;
  }

  // Writes a double via its IEEE-754 long bit pattern.
  void writeDouble(double val) {
    writeLong(Double.doubleToLongBits(val));
  }

  void writeBytes(byte[] b) {
    writeBytes(b, 0, b.length);
  }

  void writeBytes(byte[] b, int off, int len) {
    ensureCapacity(len);
    System.arraycopy(b, off, buffer, count, len);
    count += len;
  }

  // Snapshot of per-dimension page stats (empty array if collectors absent).
  private SimpleStatsResult[] getDimStats() {
    if (dimStatsCollectors == null) {
      return new SimpleStatsResult[0];
    }
    SimpleStatsResult[] stats = new SimpleStatsResult[dimStatsCollectors.length];
    int dimCount = 0;
    for (; dimCount < dimStatsCollectors.length; dimCount++) {
      stats[dimCount] = dimStatsCollectors[dimCount].getPageStats();
    }
    return stats;
  }

  // Snapshot of per-measure page stats (empty array if collectors absent).
  private SimpleStatsResult[] getMsrStats() {
    if (msrStatsCollectors == null) {
      return new SimpleStatsResult[0];
    }
    SimpleStatsResult[] stats = new SimpleStatsResult[msrStatsCollectors.length];
    for (int mrsCount = 0; mrsCount < msrStatsCollectors.length; mrsCount++) {
      stats[mrsCount] = msrStatsCollectors[mrsCount].getPageStats();
    }
    return stats;
  }

  // Lazily builds (and caches) the blocklet-level min/max index from the stats.
  BlockletMinMaxIndex generateBlockletMinMax() {
    if (blockletMinMaxIndex == null) {
      blockletMinMaxIndex = StreamSegment.collectMinMaxIndex(getDimStats(), getMsrStats());
    }
    return blockletMinMaxIndex;
  }

  /**
   * Serializes the blocklet: sync marker, thrift-encoded header (length-prefixed),
   * then the compressed row data (length-prefixed).
   *
   * @param outputStream destination stream
   * @throws IOException if the underlying stream write fails
   */
  void appendBlocklet(DataOutputStream outputStream) throws IOException {
    outputStream.write(CarbonStreamOutputFormat.CARBON_SYNC_MARKER);

    BlockletInfo blockletInfo = new BlockletInfo();
    blockletInfo.setNum_rows(getRowIndex() + 1);
    BlockletHeader blockletHeader = new BlockletHeader();
    blockletHeader.setBlocklet_length(getCount());
    blockletHeader.setMutation(MutationType.INSERT);
    blockletHeader.setBlocklet_info(blockletInfo);
    // add blocklet level min/max
    blockletMinMaxIndex = generateBlockletMinMax();
    if (blockletInfo.getNum_rows() > 1) {
      BlockletIndex blockletIndex = new BlockletIndex();
      blockletIndex.setMin_max_index(CarbonMetadataUtil.convertMinMaxIndex(blockletMinMaxIndex));
      blockletHeader.setBlocklet_index(blockletIndex);
    }
    byte[] headerBytes = CarbonUtil.getByteArray(blockletHeader);
    outputStream.writeInt(headerBytes.length);
    outputStream.write(headerBytes);

    byte[] compressed = compressor.compressByte(getBytes(), getCount());
    outputStream.writeInt(compressed.length);
    outputStream.write(compressed);
  }

  void close() {
  }
}
package org.apache.maven.artifact.versioning; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ import java.util.StringTokenizer; import java.util.regex.Pattern; import java.util.NoSuchElementException; /** * Default implementation of artifact versioning. * * @author <a href="mailto:brett@apache.org">Brett Porter</a> */ public class DefaultArtifactVersion implements ArtifactVersion { private Integer majorVersion; private Integer minorVersion; private Integer incrementalVersion; private Integer buildNumber; private String qualifier; private ComparableVersion comparable; public DefaultArtifactVersion( String version ) { parseVersion( version ); } @Override public int hashCode() { return 11 + comparable.hashCode(); } @Override public boolean equals( Object other ) { if ( this == other ) { return true; } if ( !( other instanceof ArtifactVersion ) ) { return false; } return compareTo( (ArtifactVersion) other ) == 0; } public int compareTo( ArtifactVersion otherVersion ) { if ( otherVersion instanceof DefaultArtifactVersion ) { return this.comparable.compareTo( ( (DefaultArtifactVersion) otherVersion ).comparable ); } else { return compareTo( new DefaultArtifactVersion( otherVersion.toString() ) ); } } public int getMajorVersion() { return 
majorVersion != null ? majorVersion : 0; } public int getMinorVersion() { return minorVersion != null ? minorVersion : 0; } public int getIncrementalVersion() { return incrementalVersion != null ? incrementalVersion : 0; } public int getBuildNumber() { return buildNumber != null ? buildNumber : 0; } public String getQualifier() { return qualifier; } public final void parseVersion( String version ) { comparable = new ComparableVersion( version ); int index = version.indexOf( '-' ); String part1; String part2 = null; if ( index < 0 ) { part1 = version; } else { part1 = version.substring( 0, index ); part2 = version.substring( index + 1 ); } if ( part2 != null ) { try { if ( ( part2.length() == 1 ) || !part2.startsWith( "0" ) ) { buildNumber = Integer.valueOf( part2 ); } else { qualifier = part2; } } catch ( NumberFormatException e ) { qualifier = part2; } } if ( ( !part1.contains( "." ) ) && !part1.startsWith( "0" ) ) { try { majorVersion = Integer.valueOf( part1 ); } catch ( NumberFormatException e ) { // qualifier is the whole version, including "-" qualifier = version; buildNumber = null; } } else { boolean fallback = false; StringTokenizer tok = new StringTokenizer( part1, "." ); try { majorVersion = getNextIntegerToken( tok ); if ( tok.hasMoreTokens() ) { minorVersion = getNextIntegerToken( tok ); } if ( tok.hasMoreTokens() ) { incrementalVersion = getNextIntegerToken( tok ); } if ( tok.hasMoreTokens() ) { qualifier = tok.nextToken(); fallback = Pattern.compile( "\\d+" ).matcher( qualifier ).matches(); } // string tokenizer won't detect these and ignores them if ( part1.contains( ".." ) || part1.startsWith( "." ) || part1.endsWith( "." 
) ) { fallback = true; } } catch ( NumberFormatException e ) { fallback = true; } if ( fallback ) { // qualifier is the whole version, including "-" qualifier = version; majorVersion = null; minorVersion = null; incrementalVersion = null; buildNumber = null; } } } private static Integer getNextIntegerToken( StringTokenizer tok ) { try { String s = tok.nextToken(); if ( ( s.length() > 1 ) && s.startsWith( "0" ) ) { throw new NumberFormatException( "Number part has a leading 0: '" + s + "'" ); } return Integer.valueOf( s ); } catch ( NoSuchElementException e ) { throw new NumberFormatException( "Number is invalid" ); } } @Override public String toString() { return comparable.toString(); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.constraint; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.List; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException; import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.util.Bytes; import org.junit.After; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.categories.Category; /** * Do the complex testing of constraints against a minicluster */ @Category({MiscTests.class, MediumTests.class}) public class TestConstraint { 
private static final Log LOG = LogFactory .getLog(TestConstraint.class); private static HBaseTestingUtility util; private static final TableName tableName = TableName.valueOf("test"); private static final byte[] dummy = Bytes.toBytes("dummy"); private static final byte[] row1 = Bytes.toBytes("r1"); private static final byte[] test = Bytes.toBytes("test"); @BeforeClass public static void setUpBeforeClass() throws Exception { util = new HBaseTestingUtility(); util.getConfiguration().setBoolean(CoprocessorHost.ABORT_ON_ERROR_KEY, false); util.startMiniCluster(); } /** * Test that we run a passing constraint * @throws Exception */ @SuppressWarnings("unchecked") @Test public void testConstraintPasses() throws Exception { // create the table // it would be nice if this was also a method on the util HTableDescriptor desc = new HTableDescriptor(tableName); for (byte[] family : new byte[][] { dummy, test }) { desc.addFamily(new HColumnDescriptor(family)); } // add a constraint Constraints.add(desc, CheckWasRunConstraint.class); util.getAdmin().createTable(desc); Table table = util.getConnection().getTable(tableName); try { // test that we don't fail on a valid put Put put = new Put(row1); byte[] value = Integer.toString(10).getBytes(); byte[] qualifier = new byte[0]; put.addColumn(dummy, qualifier, value); table.put(put); } finally { table.close(); } assertTrue(CheckWasRunConstraint.wasRun); } /** * Test that constraints will fail properly * @throws Exception */ @SuppressWarnings("unchecked") @Test(timeout = 60000) public void testConstraintFails() throws Exception { // create the table // it would be nice if this was also a method on the util HTableDescriptor desc = new HTableDescriptor(tableName); for (byte[] family : new byte[][] { dummy, test }) { desc.addFamily(new HColumnDescriptor(family)); } // add a constraint that is sure to fail Constraints.add(desc, AllFailConstraint.class); util.getAdmin().createTable(desc); Table table = 
util.getConnection().getTable(tableName); // test that we do fail on violation Put put = new Put(row1); byte[] qualifier = new byte[0]; put.addColumn(dummy, qualifier, "fail".getBytes()); LOG.warn("Doing put in table"); try { table.put(put); fail("This put should not have suceeded - AllFailConstraint was not run!"); } catch (ConstraintException e) { // expected } table.close(); } /** * Check that if we just disable one constraint, then * @throws Throwable */ @SuppressWarnings("unchecked") @Test public void testDisableConstraint() throws Throwable { // create the table HTableDescriptor desc = new HTableDescriptor(tableName); // add a family to the table for (byte[] family : new byte[][] { dummy, test }) { desc.addFamily(new HColumnDescriptor(family)); } // add a constraint to make sure it others get run Constraints.add(desc, CheckWasRunConstraint.class); // Add Constraint to check Constraints.add(desc, AllFailConstraint.class); // and then disable the failing constraint Constraints.disableConstraint(desc, AllFailConstraint.class); util.getAdmin().createTable(desc); Table table = util.getConnection().getTable(tableName); try { // test that we don't fail because its disabled Put put = new Put(row1); byte[] qualifier = new byte[0]; put.addColumn(dummy, qualifier, "pass".getBytes()); table.put(put); } finally { table.close(); } assertTrue(CheckWasRunConstraint.wasRun); } /** * Test that if we disable all constraints, then nothing gets run * @throws Throwable */ @SuppressWarnings("unchecked") @Test public void testDisableConstraints() throws Throwable { // create the table HTableDescriptor desc = new HTableDescriptor(tableName); // add a family to the table for (byte[] family : new byte[][] { dummy, test }) { desc.addFamily(new HColumnDescriptor(family)); } // add a constraint to check to see if is run Constraints.add(desc, CheckWasRunConstraint.class); // then disable all the constraints Constraints.disable(desc); util.getAdmin().createTable(desc); Table table = 
util.getConnection().getTable(tableName); try { // test that we do fail on violation Put put = new Put(row1); byte[] qualifier = new byte[0]; put.addColumn(dummy, qualifier, "pass".getBytes()); LOG.warn("Doing put in table"); table.put(put); } finally { table.close(); } assertFalse(CheckWasRunConstraint.wasRun); } /** * Check to make sure a constraint is unloaded when it fails * @throws Exception */ @Test public void testIsUnloaded() throws Exception { // create the table HTableDescriptor desc = new HTableDescriptor(tableName); // add a family to the table for (byte[] family : new byte[][] { dummy, test }) { desc.addFamily(new HColumnDescriptor(family)); } // make sure that constraints are unloaded Constraints.add(desc, RuntimeFailConstraint.class); // add a constraint to check to see if is run Constraints.add(desc, CheckWasRunConstraint.class); CheckWasRunConstraint.wasRun = false; util.getAdmin().createTable(desc); Table table = util.getConnection().getTable(tableName); // test that we do fail on violation Put put = new Put(row1); byte[] qualifier = new byte[0]; put.addColumn(dummy, qualifier, "pass".getBytes()); try{ table.put(put); fail("RuntimeFailConstraint wasn't triggered - this put shouldn't work!"); } catch (Exception e) {// NOOP } // try the put again, this time constraints are not used, so it works table.put(put); // and we make sure that constraints were not run... assertFalse(CheckWasRunConstraint.wasRun); table.close(); } @After public void cleanup() throws Exception { // cleanup CheckWasRunConstraint.wasRun = false; util.getAdmin().disableTable(tableName); util.getAdmin().deleteTable(tableName); } @AfterClass public static void tearDownAfterClass() throws Exception { util.shutdownMiniCluster(); } /** * Constraint to check that it was actually run (or not) */ public static class CheckWasRunConstraint extends BaseConstraint { public static boolean wasRun = false; @Override public void check(Put p) { wasRun = true; } } }
package water.util;

import water.H2O;
import water.Iced;
import water.MRTask;

import java.util.ArrayList;
import java.util.Map;
import java.util.Map.Entry;
import java.util.TreeMap;

/**
 * Distributed task that collects a stack-trace snapshot from every node in the
 * cloud. Each node fills its own slot of {@link #_traces} in
 * {@link #setupLocal()}; {@link #reduce} merges the per-node results.
 */
public class JStackCollectorTask extends MRTask<JStackCollectorTask> {
  JStackCollectorTask() { super(H2O.MIN_HI_PRIORITY); }

  /** Stack traces collected on a single node, tagged with node name and time. */
  public static class DStackTrace extends Iced {
    public final String _node;              // Node name
    public final long _time;                // Unix epoch time
    public final String[] _thread_traces;   // One per thread
    DStackTrace( String[] traces ) {
      _node = H2O.getIpPortString();
      _time = System.currentTimeMillis();
      _thread_traces = traces;
    }
  }

  public DStackTrace _traces[];             // One per Node

  /** Merge: keep the first non-null per-node entry seen for each slot. */
  @Override public void reduce(JStackCollectorTask that) {
    for( int i=0; i<_traces.length; ++i )
      if( _traces[i] == null ) _traces[i] = that._traces[i];
  }

  /** Per-category thread-state counters (parked / active / blocked / unknown). */
  private static class ThreadInfo {
    int _parked;
    int _active;
    int _blocked;
    int _unknown;
    /** Accumulates {@code ti}'s counters into this instance and returns this. */
    public ThreadInfo add(ThreadInfo ti) {
      _parked += ti._parked;
      _active += ti._active;
      _blocked += ti._blocked;
      _unknown += ti._unknown;
      return this;
    }
    // Column layout matches the TwoDimTable below: active, blocked, idle, TOTAL.
    // Unknown states are folded into the "active" column.
    public double [] toDoubleArray(){
      return new double[]{_active + _unknown, _blocked, _parked, _active + _unknown + _blocked + _parked};
    }
    public boolean hasAny(){return _parked + _active + _blocked + _unknown > 0;}
  }

  enum ThreadType {HTTP_REQUEST, FJ, OTHER, TCP, JETTY, HADOOP}

  private static class ThreadKey implements Comparable<ThreadKey> {
    ThreadType _type;
    @Override public int compareTo(ThreadKey o) { return _type.ordinal() - o._type.ordinal(); }
    @Override public String toString() {return _type.toString();}
  }

  // bruteforce search for H2O Servlet, don't call until other obvious cases were filtered out
  // Returns the index of the servlet frame, or elms.length when not found — so
  // callers use "< elms.length" as the "is an HTTP request thread" check and the
  // returned index to truncate the printed trace.
  private int isH2OHTTPRequestThread(StackTraceElement [] elms){
    for(int i = 0; i < elms.length; ++i)
      if(elms[i].getClassName().equals("....JettyHTTPD$H2oDefaultServlet")) //TODO FIXME! No such class(H2oDefaultServlet) exists there now! Use class comparison if another one took the role.
        return i;
    return elms.length;
  }

  /**
   * Snapshot all live threads on this node, bucket them into categories
   * (FJ / TCP / H2O system / JVM system / HTTP / Jetty / other), build a
   * per-category summary table, and store the formatted traces in this node's
   * slot of {@link #_traces}.
   */
  @Override public void setupLocal() {
    _traces = new DStackTrace[H2O.CLOUD.size()];
    if( H2O.SELF.isClient() ) return; // Clients are not in the cloud, and do not get stack traces
    Map<Thread, StackTraceElement[]> allStackTraces = Thread.getAllStackTraces();
    // Known to be interesting
    ArrayList<String> http_traces = new ArrayList<>();
    http_traces.add("HttpReq traces");
    ArrayList<String> fj_traces = new ArrayList<>();
    fj_traces.add("FJ traces");
    // unknown - possibly interesting
    ArrayList<String> other_traces = new ArrayList<>();
    other_traces.add("'other' traces");
    // Most likely uninteresting
    ArrayList<String> tcp_traces = new ArrayList<>();
    tcp_traces.add("TCP traces");
    ArrayList<String> system_traces = new ArrayList<>();
    system_traces.add("system traces");
    ArrayList<String> jetty_traces = new ArrayList<>();
    jetty_traces.add("Jetty traces");
    ArrayList<String> h2o_sys_traces = new ArrayList<>();
    h2o_sys_traces.add("H2O System traces");
    // FJ threads are summarized per queue-priority (parsed from the thread name).
    Map<Integer,ThreadInfo> fjThreadSummary = new TreeMap<>();
    ThreadInfo threadSum = new ThreadInfo();
    ThreadInfo httpReqs = new ThreadInfo();
    ThreadInfo tcpThreads = new ThreadInfo();
    ThreadInfo otherThreads = new ThreadInfo();
    ThreadInfo jettythreads = new ThreadInfo();
    ThreadInfo h2oSysThreads = new ThreadInfo();
    ThreadInfo systemThreads = new ThreadInfo();
    for( Entry<Thread,StackTraceElement[]> el : allStackTraces.entrySet() ) {
      StackTraceElement [] elms = el.getValue();
      Thread t = el.getKey();
      int idx = elms.length; // how many frames to print; shortened for HTTP threads below
      ArrayList<String> trace = null;
      ThreadInfo tinfo = null;
      if(elms.length == 0) continue;
      // Category chain: order matters — each test assumes earlier ones failed.
      if(t.getName().startsWith("FJ-") && elms[elms.length-1].getClassName().contains("ForkJoinWorkerThread")) {
        // H2O specific FJ Thread
        trace = fj_traces;
        // NOTE(review): parses priority from name "FJ-<n>-..."; an unexpected
        // name format would throw NumberFormatException here — verify callers.
        Integer fjq = Integer.parseInt(t.getName().substring(3, t.getName().indexOf('-', 3)));
        if (!fjThreadSummary.containsKey(fjq))
          fjThreadSummary.put(fjq, new ThreadInfo());
        tinfo = fjThreadSummary.get(fjq);
      } else if(elms[elms.length-1].getClassName().equals("water.TCPReceiverThread$TCPReaderThread")) {
        // TCP reader blocked constructing an AutoBuffer counts as parked and is
        // not printed at all (skipped via continue).
        if (elms[elms.length - 2].getClassName().equals("water.AutoBuffer") && elms[elms.length - 2].getMethodName().equals("<init>")) {
          tcpThreads._parked++;
          continue;
        }
        trace = tcp_traces;
        tinfo = tcpThreads;
      } else if(elms[elms.length-1].getClassName().equals("water.MultiReceiverThread") || elms[elms.length-1].getClassName().equals("water.TCPReceiverThread") || elms[elms.length-1].getClassName().equals("water.HeartBeatThread")){
        trace = h2o_sys_traces;
        tinfo = h2oSysThreads;
      } else if(elms.length > 1 && elms[elms.length-2].getClassName().startsWith("java.util.concurrent.ThreadPoolExecutor") || elms[elms.length-1].getClassName().startsWith("java.lang.ref.Finalizer") || elms[elms.length-1].getClassName().startsWith("java.lang.ref.Reference")) {
        trace = system_traces;
        tinfo = systemThreads;
      } else if((idx = isH2OHTTPRequestThread(elms)) < elms.length) {
        // h2o HTTP request; idx now truncates the trace at the servlet frame
        trace = http_traces;
        tinfo = httpReqs;
      } else if(elms.length > 1 && elms[elms.length-2].getClassName().startsWith("org.eclipse.jetty")){
        trace = jetty_traces;
        tinfo = jettythreads;
      } else {
        trace = other_traces;
        tinfo = otherThreads;
      }
      if(elms[0].getClassName().equals("sun.misc.Unsafe") && elms[0].getMethodName().equals("park")) {
        ++tinfo._parked; // don't include parked stacktraces
        continue;
      }
      if(t.getState().toString().equals("RUNNABLE")) {
        ++tinfo._active;
      } else if(t.getState().toString().contains("WAITING")) { // covers WAITING and TIMED_WAITING
        ++tinfo._blocked;
      } else {
        ++tinfo._unknown;
        // NOTE(review): raw System.out here instead of Log — presumably debug
        // leftover; consider routing through Log like the traces below.
        System.out.println("UNKNOWN STATE: " + t.getState());
      }
      // Format one thread's header + frames in jstack-like style.
      SB sb = new SB().p('"').p(t.getName()).p('"');
      if (t.isDaemon()) sb.p(" daemon");
      sb.p(" prio=").p(t.getPriority());
      sb.p(" tid=").p(t.getId());
      sb.p(" java.lang.Thread.State: ").p(t.getState().toString());
      sb.nl();
      for( int j = 0; j < idx; ++j)
        sb.p("\tat ").p(elms[j].toString()).nl();
      trace.add(sb.toString());
    }
    // get the summary of idle threads
    // String tableHeader, String tableDescription, String[] rowHeaders, String[] colHeaders, String[] colTypes,
    // String[] colFormats, String colHeaderForRowHeaders, String[][] strCellValues, double[][] dblCellValues
    ArrayList<String> rowNames = new ArrayList<>();
    ArrayList<double[]> cellVals = new ArrayList<>();
    if(httpReqs.hasAny()) {
      rowNames.add("HttpReq");
      cellVals.add(httpReqs.toDoubleArray());
    }
    for(Entry<Integer,ThreadInfo> e:fjThreadSummary.entrySet()) {
      rowNames.add("FJ-" + e.getKey());
      ThreadInfo fjt = e.getValue();
      threadSum.add(fjt);
      cellVals.add(fjt.toDoubleArray());
    }
    if(otherThreads.hasAny()) {
      rowNames.add("other");
      cellVals.add(otherThreads.toDoubleArray());
    }
    if(tcpThreads.hasAny()) {
      rowNames.add("TCP");
      cellVals.add(tcpThreads.toDoubleArray());
    }
    if(h2oSysThreads.hasAny()) {
      rowNames.add("h2osys");
      cellVals.add(h2oSysThreads.toDoubleArray());
    }
    if(systemThreads.hasAny()) {
      rowNames.add("system");
      cellVals.add(systemThreads.toDoubleArray());
    }
    if(jettythreads.hasAny()) {
      rowNames.add("jetty");
      cellVals.add(jettythreads.toDoubleArray());
    }
    rowNames.add("TOTAL");
    // NOTE(review): h2oSysThreads is not folded into the TOTAL row here even
    // though it gets its own "h2osys" row above — possibly an oversight; verify.
    cellVals.add(threadSum.add(httpReqs).add(otherThreads).add(tcpThreads).add(systemThreads).add(jettythreads).toDoubleArray());
    TwoDimTable td = new TwoDimTable("Thread Summary", "Summary of running threads",
        rowNames.toArray(new String[0]),
        new String[] {"active","blocked","idle","TOTAL"},
        new String[]{"int","int","int","int"},
        new String[]{"%d","%d","%d","%d"},
        "Thread",new String[cellVals.size()][],
        cellVals.toArray(new double[0][0]));
    // todo - sort FJ traces?
    // Slot 0 holds the summary table; category traces follow in fixed order.
    String [] traces = new String[1+ http_traces.size() + fj_traces.size() + other_traces.size() + tcp_traces.size() + h2o_sys_traces.size() + system_traces.size() + jetty_traces.size()];
    int ii = 1;
    for(String t:http_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    for(String t:fj_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    for(String t:other_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    for(String t:tcp_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    for(String t:h2o_sys_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    for(String t:system_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    for(String t:jetty_traces) {
      traces[ii++] = t;
      Log.info(t);
    }
    traces[0] = td.toString();
    Log.info(traces[0]);
    _traces[H2O.SELF.index()] = new DStackTrace(traces);
  }
}
/*
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */
package org.apache.cassandra.db.lifecycle;

import java.io.File;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.zip.CRC32;

import org.apache.cassandra.io.sstable.SSTable;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.utils.FBUtilities;

/**
 * A decoded line in a transaction log file replica.
 *
 * @see LogReplica and LogFile.
 */
final class LogRecord
{
    public enum Type
    {
        UNKNOWN, // a record that cannot be parsed
        ADD,    // new files to be retained on commit
        REMOVE, // old files to be retained on abort
        COMMIT, // commit flag
        ABORT; // abort flag

        public static Type fromPrefix(String prefix)
        {
            return valueOf(prefix.toUpperCase());
        }

        /** True for the record types that carry a file path (ADD/REMOVE). */
        public boolean hasFile()
        {
            return this == Type.ADD || this == Type.REMOVE;
        }

        public boolean matches(LogRecord record)
        {
            return this == record.type;
        }

        /** True for the terminal record types that end a transaction. */
        public boolean isFinal() { return this == Type.COMMIT || this == Type.ABORT; }
    }

    /**
     * The status of a record after it has been verified, any parsing errors
     * are also store here.
     */
    public final static class Status
    {
        // if there are any errors, they end up here
        Optional<String> error = Optional.empty();

        // if the record was only partially matched across files this is true
        boolean partial = false;

        // if the status of this record on disk is required (e.g. existing files), it is
        // stored here for caching
        LogRecord onDiskRecord;

        // Only the first error is kept; later calls are no-ops.
        void setError(String error)
        {
            if (!this.error.isPresent())
                this.error = Optional.of(error);
        }

        boolean hasError()
        {
            return error.isPresent();
        }
    }

    // the type of record, see Type
    public final Type type;
    // for sstable records, the absolute path of the table desc
    public final Optional<String> absolutePath;
    // for sstable records, the last update time of all files (may not be available for NEW records)
    public final long updateTime;
    // for sstable records, the total number of files (may not be accurate for NEW records)
    public final int numFiles;
    // the raw string as written or read from a file
    public final String raw;
    // the checksum of this record, written at the end of the record string
    public final long checksum;
    // the status of this record, @see Status class
    public final Status status;

    // (add|remove|commit|abort):[*,*,*][checksum]
    static Pattern REGEX = Pattern.compile("^(add|remove|commit|abort):\\[([^,]*),?([^,]*),?([^,]*)\\]\\[(\\d*)\\]$", Pattern.CASE_INSENSITIVE);

    /**
     * Parses one raw log line into a record; lines that do not match the
     * expected format produce an UNKNOWN record with the error stored in status.
     */
    public static LogRecord make(String line)
    {
        try
        {
            Matcher matcher = REGEX.matcher(line);
            if (!matcher.matches())
                return new LogRecord(Type.UNKNOWN, null, 0, 0, 0, line)
                       .setError(String.format("Failed to parse [%s]", line));

            Type type = Type.fromPrefix(matcher.group(1));
            return new LogRecord(type, matcher.group(2), Long.parseLong(matcher.group(3)), Integer.parseInt(matcher.group(4)), Long.parseLong(matcher.group(5)), line);
        }
        catch (IllegalArgumentException e)
        {
            // NumberFormatException and Type.fromPrefix failures both land here
            return new LogRecord(Type.UNKNOWN, null, 0, 0, 0, line)
                   .setError(String.format("Failed to parse line: %s", e.getMessage()));
        }
    }

    public static LogRecord makeCommit(long updateTime)
    {
        return new LogRecord(Type.COMMIT, updateTime);
    }

    public static LogRecord makeAbort(long updateTime)
    {
        return new LogRecord(Type.ABORT, updateTime);
    }

    /** Builds an ADD/REMOVE record for all on-disk files of the given sstable. */
    public static LogRecord make(Type type, SSTable table)
    {
        String absoluteTablePath = FileUtils.getCanonicalPath(table.descriptor.baseFilename());
        return make(type, getExistingFiles(absoluteTablePath), table.getAllFilePaths().size(), absoluteTablePath);
    }

    /** Re-creates this record from the files currently existing on disk. */
    public LogRecord withExistingFiles()
    {
        return make(type, getExistingFiles(), 0, absolutePath.get());
    }

    public static LogRecord make(Type type, List<File> files, int minFiles, String absolutePath)
    {
        // CASSANDRA-11889: File.lastModified() returns a positive value only if the file exists, therefore
        // we filter by positive values to only consider the files that still exists right now, in case things
        // changed on disk since getExistingFiles() was called
        List<Long> positiveModifiedTimes = files.stream().map(File::lastModified).filter(lm -> lm > 0).collect(Collectors.toList());
        long lastModified = positiveModifiedTimes.stream().reduce(0L, Long::max);
        return new LogRecord(type, absolutePath, lastModified, Math.max(minFiles, positiveModifiedTimes.size()));
    }

    private LogRecord(Type type, long updateTime)
    {
        this(type, null, updateTime, 0, 0, null);
    }

    private LogRecord(Type type,
                      String absolutePath,
                      long updateTime,
                      int numFiles)
    {
        this(type, absolutePath, updateTime, numFiles, 0, null);
    }

    private LogRecord(Type type,
                      String absolutePath,
                      long updateTime,
                      int numFiles,
                      long checksum,
                      String raw)
    {
        assert !type.hasFile() || absolutePath != null : "Expected file path for file records";

        this.type = type;
        this.absolutePath = type.hasFile() ? Optional.of(absolutePath) : Optional.<String>empty();
        // updateTime is only meaningful (and only kept) for REMOVE records
        this.updateTime = type == Type.REMOVE ? updateTime : 0;
        this.numFiles = type.hasFile() ? numFiles : 0;
        this.status = new Status();
        if (raw == null)
        {
            // freshly constructed record: derive checksum and raw line
            assert checksum == 0;
            this.checksum = computeChecksum();
            this.raw = format();
        }
        else
        {
            // record read from disk: trust the parsed checksum and raw line
            this.checksum = checksum;
            this.raw = raw;
        }
    }

    LogRecord setError(String error)
    {
        status.setError(error);
        return this;
    }

    String error()
    {
        return status.error.orElse("");
    }

    void setPartial()
    {
        status.partial = true;
    }

    boolean partial()
    {
        return status.partial;
    }

    boolean isValid()
    {
        return !status.hasError() && type != Type.UNKNOWN;
    }

    boolean isInvalid()
    {
        return !isValid();
    }

    boolean isInvalidOrPartial()
    {
        return isInvalid() || partial();
    }

    private String format()
    {
        return String.format("%s:[%s,%d,%d][%d]", type.toString(), absolutePath(), updateTime, numFiles, checksum);
    }

    public List<File> getExistingFiles()
    {
        assert absolutePath.isPresent() : "Expected a path in order to get existing files";
        return getExistingFiles(absolutePath.get());
    }

    /** Lists the files in the parent directory whose names share this record's path prefix. */
    public static List<File> getExistingFiles(String absoluteFilePath)
    {
        Path path = Paths.get(absoluteFilePath);
        File[] files = path.getParent().toFile().listFiles((dir, name) -> name.startsWith(path.getFileName().toString()));
        // files may be null if the directory does not exist yet, e.g. when tracking new files
        return files == null ? Collections.emptyList() : Arrays.asList(files);
    }

    public boolean isFinal()
    {
        return type.isFinal();
    }

    String fileName()
    {
        return absolutePath.isPresent() ? Paths.get(absolutePath.get()).getFileName().toString() : "";
    }

    boolean isInFolder(Path folder)
    {
        return absolutePath.isPresent()
               ? FileUtils.isContained(folder.toFile(), Paths.get(absolutePath.get()).toFile())
               : false;
    }

    String absolutePath()
    {
        return absolutePath.isPresent() ? absolutePath.get() : "";
    }

    @Override
    public int hashCode()
    {
        // see comment in equals
        return Objects.hash(type, absolutePath, numFiles, updateTime);
    }

    @Override
    public boolean equals(Object obj)
    {
        if (!(obj instanceof LogRecord))
            return false;

        final LogRecord other = (LogRecord)obj;

        // we exclude on purpose checksum, error and full file path
        // since records must match across log file replicas on different disks
        return type == other.type &&
               absolutePath.equals(other.absolutePath) &&
               numFiles == other.numFiles &&
               updateTime == other.updateTime;
    }

    @Override
    public String toString()
    {
        return raw;
    }

    // CRC over path, type, update time and file count; masked to be non-negative
    // so it round-trips through the decimal text format.
    long computeChecksum()
    {
        CRC32 crc32 = new CRC32();
        crc32.update((absolutePath()).getBytes(FileUtils.CHARSET));
        crc32.update(type.toString().getBytes(FileUtils.CHARSET));
        FBUtilities.updateChecksumInt(crc32, (int) updateTime);
        FBUtilities.updateChecksumInt(crc32, (int) (updateTime >>> 32));
        FBUtilities.updateChecksumInt(crc32, numFiles);
        return crc32.getValue() & (Long.MAX_VALUE);
    }
}
/* * Copyright 2013 Cloudera Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kitesdk.morphline.stdlib; import java.util.ArrayList; import java.util.Collections; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.kitesdk.morphline.shaded.org.apache.hadoop.fs.GlobPattern; import com.google.common.base.Preconditions; /** * A Predicate that uses pattern matching with include/exclude specifications to determine if a * given string matches. * * A string matches the predicate if the string matches at least one include expression, but matches * none of the exclude expressions. An include/exclude expression can be a regex pattern (e.g. * "regex:foo.*") or glob pattern (e.g. "glob:foo*") or literal pattern (e.g. "literal:foo") or "*" * which is equivalent to "glob:*". 
*/ final class PatternNameMatcher { private final Expression[] includes; private final Expression[] excludes; private Set<String> includeLiterals = new HashSet<String>(16, 0.5f); private Set<String> excludeLiterals = new HashSet<String>(16, 0.5f); private final String[] literalsOnly; public PatternNameMatcher(List<String> includeExpressions, List<String> excludeExpressions, int cacheCapacity) { includes = parseExpressions(includeExpressions, includeLiterals, cacheCapacity); excludes = parseExpressions(excludeExpressions, excludeLiterals, cacheCapacity); includeLiterals.removeAll(excludeLiterals); includeLiterals = optimize(includeLiterals); excludeLiterals = optimize(excludeLiterals); if (includes.length == 0 && excludes.length == 0) { literalsOnly = includeLiterals.toArray(new String[includeLiterals.size()]); } else { literalsOnly = null; } } /** Expert mode; For optional performance optimizations */ public String[] getLiteralsOnly() { return literalsOnly; } private Set<String> optimize(Set<String> items) { if (items.size() == 1) { return Collections.singleton(items.iterator().next()); } else { return items; } } private Expression[] parseExpressions(List<String> expressions, Set<String> literals, int cacheCapacity) { List<Expression> parsedExpressions = new ArrayList<Expression>(); for (int i = 0; i < expressions.size(); i++) { Expression expr = parseExpression(expressions.get(i), literals, cacheCapacity); if (expr != null) { parsedExpressions.add(expr); } } return parsedExpressions.toArray(new Expression[parsedExpressions.size()]); } private Expression parseExpression(String expr, Set<String> literals, int cacheCapacity) { if (expr.equals("*")) { expr = "glob:*"; } int i = expr.indexOf(':'); if (i < 0) { throw new IllegalArgumentException("Illegal match expression: " + expr); } String type = expr.substring(0, i); String pattern = expr.substring(i + 1, expr.length()); if (type.equals("literal")) { //return new LiteralExpression(pattern); literals.add(pattern); 
return null; } else if (type.equals("regex")) { if (pattern.equals(".*")) { return new MatchAllExpression(); // optimization } return new RegexExpression(Pattern.compile(pattern), cacheCapacity); } else if (type.equals("glob")) { if (pattern.equals("*")) { return new MatchAllExpression(); // optimization } return new RegexExpression(GlobPattern.compile(pattern), cacheCapacity); } else { throw new IllegalArgumentException("Illegal match type: " + type); } } public boolean matches(String name) { Preconditions.checkNotNull(name); if (excludeLiterals.size() > 0 && excludeLiterals.contains(name)) { return false; } boolean isIncluded = includeLiterals.size() > 0 && includeLiterals.contains(name); for (int i = 0; !isIncluded && i < includes.length; i++) { isIncluded = includes[i].matches(name); } if (!isIncluded) { return false; } for (Expression exclude : excludes) { if (exclude.matches(name)) { return false; } } return true; } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// private static interface Expression { boolean matches(String str); } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// private static final class MatchAllExpression implements Expression { @Override public boolean matches(String str) { return true; } } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// private static final class RegexExpression implements Expression { private final Matcher regex; private final Map<String, Boolean> cache; public RegexExpression(Pattern pattern, int cacheCapacity) { this.regex = pattern.matcher(""); if (cacheCapacity < 0) { throw new IllegalArgumentException("Cache capacity must not be 
negative"); } this.cache = cacheCapacity > 0 ? new BoundedLRUHashMap<String, Boolean>(cacheCapacity) : null; } @Override public boolean matches(String str) { if (cache == null) { return regex.reset(str).matches(); } Boolean isMatch = cache.get(str); if (isMatch == null) { isMatch = regex.reset(str).matches(); cache.put(str, isMatch); // cache it for later fast reuse } return isMatch.booleanValue(); } } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// private static final class BoundedLRUHashMap<K,V> extends LinkedHashMap<K,V> { private final int capacity; private BoundedLRUHashMap(int capacity) { super(16, 0.5f, true); this.capacity = capacity; } @Override protected boolean removeEldestEntry(Map.Entry eldest) { return size() > capacity; } } /////////////////////////////////////////////////////////////////////////////// // Nested classes: /////////////////////////////////////////////////////////////////////////////// // private static final class LiteralExpression implements Expression { // // private final String pattern; // // public LiteralExpression(String pattern) { // this.pattern = pattern; // } // // @Override // public boolean matches(String str) { // return pattern.equals(str); // } // // } }
package psidev.psi.mi.jami.tab.io.parser;

import psidev.psi.mi.jami.datasource.DefaultFileSourceContext;
import psidev.psi.mi.jami.datasource.FileSourceContext;
import psidev.psi.mi.jami.factory.InteractorFactory;
import psidev.psi.mi.jami.model.*;
import psidev.psi.mi.jami.tab.extension.*;
import psidev.psi.mi.jami.tab.extension.factory.MitabInteractorFactory;
import psidev.psi.mi.jami.tab.listener.MitabParserListener;
import psidev.psi.mi.jami.tab.utils.MitabUtils;
import psidev.psi.mi.jami.utils.AliasUtils;
import psidev.psi.mi.jami.utils.CvTermUtils;
import psidev.psi.mi.jami.utils.XrefUtils;

import java.io.InputStream;
import java.io.Reader;
import java.util.Collection;
import java.util.Iterator;
import java.util.regex.Pattern;

/**
 * Abstract mitab line parser.
 *
 * <p>Holds the shared machinery for converting parsed MITAB column values
 * (xrefs, aliases, organisms, checksums, ...) into JAMI model objects, and
 * forwards syntax problems to an optional {@link MitabParserListener}.
 *
 * @author Marine Dumousseau (marine@ebi.ac.uk)
 * @version $Id$
 * @since <pre>20/06/13</pre>
 */
public abstract class AbstractInteractionLineParser<T extends Interaction, P extends Participant, F extends Feature> extends MitabLineParser<T,P,F> {

    private MitabParserListener listener;
    private InteractorFactory interactorFactory;
    private boolean hasFinished = false;
    // Reused across lines via resetStringBuilder() to avoid reallocations.
    private StringBuilder builder = new StringBuilder(82);
    // Pre-compiled once: processTextFor is called for every feature text, so
    // avoid recompiling the regex on each call (String-based Pattern.matches
    // would). Field is private, so changing String -> Pattern is invisible to callers.
    private static final Pattern INTERPRO_PATTERN = Pattern.compile("^IPR\\d{6}$");

    /**
     * <p>Constructor for AbstractInteractionLineParser.</p>
     *
     * @param stream a {@link java.io.InputStream} object.
     */
    public AbstractInteractionLineParser(InputStream stream) {
        super(stream);
    }

    /**
     * <p>Constructor for AbstractInteractionLineParser.</p>
     *
     * @param stream a {@link java.io.InputStream} object.
     * @param encoding a {@link java.lang.String} object.
     */
    public AbstractInteractionLineParser(InputStream stream, String encoding) {
        super(stream, encoding);
    }

    /**
     * <p>Constructor for AbstractInteractionLineParser.</p>
     *
     * @param stream a {@link java.io.Reader} object.
     */
    public AbstractInteractionLineParser(Reader stream) {
        super(stream);
    }

    /**
     * <p>Constructor for AbstractInteractionLineParser.</p>
     *
     * @param tm a {@link psidev.psi.mi.jami.tab.io.parser.MitabLineParserTokenManager} object.
     */
    public AbstractInteractionLineParser(MitabLineParserTokenManager tm) {
        super(tm);
    }

    /** {@inheritDoc} */
    @Override
    public MitabParserListener getParserListener() {
        return listener;
    }

    /** {@inheritDoc} */
    @Override
    public void setParserListener(MitabParserListener listener) {
        this.listener = listener;
    }

    /**
     * <p>Getter for the field <code>interactorFactory</code>.</p>
     * Lazily creates a {@link MitabInteractorFactory} when none was injected.
     *
     * @return a {@link psidev.psi.mi.jami.factory.InteractorFactory} object, never null.
     */
    public InteractorFactory getInteractorFactory() {
        if (interactorFactory == null){
            interactorFactory = new MitabInteractorFactory();
        }
        return interactorFactory;
    }

    /**
     * <p>Setter for the field <code>interactorFactory</code>.</p>
     *
     * @param interactorFactory a {@link psidev.psi.mi.jami.factory.InteractorFactory} object.
     */
    public void setInteractorFactory(InteractorFactory interactorFactory) {
        this.interactorFactory = interactorFactory;
    }

    @Override
    void fireOnInvalidSyntax(int numberLine, int numberColumn, int mitabColumn, Exception e) {
        if (this.listener != null){
            this.listener.onInvalidSyntax(new DefaultFileSourceContext(new MitabSourceLocator(numberLine, numberColumn, mitabColumn)), e);
        }
    }

    @Override
    void reachEndOfFile() {
        this.hasFinished = true;
    }

    /** {@inheritDoc} */
    @Override
    public void ReInit(InputStream stream) {
        hasFinished = false;
        super.ReInit(stream);
    }

    /** {@inheritDoc} */
    @Override
    public void ReInit(InputStream stream, String encoding) {
        hasFinished = false;
        super.ReInit(stream, encoding);
    }

    /** {@inheritDoc} */
    @Override
    public void ReInit(Reader stream) {
        hasFinished = false;
        super.ReInit(stream);
    }

    /** {@inheritDoc} */
    @Override
    public void ReInit(MitabLineParserTokenManager tm) {
        hasFinished = false;
        super.ReInit(tm);
    }

    /**
     * <p>hasFinished.</p>
     *
     * @return true once the end of the input has been reached.
     */
    public boolean hasFinished() {
        return hasFinished;
    }

    @Override
    StringBuilder resetStringBuilder() {
        builder.setLength(0);
        return builder;
    }

    /**
     * Dispatches the parsed interaction identifiers: imex-primary xrefs go to
     * the interaction xrefs, rigid/irigid become checksums, everything else is
     * a plain identifier.
     *
     * @param interactionIds a {@link java.util.Collection} object.
     * @param interaction a T object.
     */
    protected void initialiseInteractionIdentifiers(Collection<MitabXref> interactionIds, T interaction){

        Iterator<MitabXref> refsIterator = interactionIds.iterator();
        while (refsIterator.hasNext()){
            MitabXref ref = refsIterator.next();
            if (XrefUtils.isXrefFromDatabase(ref, Xref.IMEX_MI, Xref.IMEX) && XrefUtils.doesXrefHaveQualifier(ref, Xref.IMEX_PRIMARY_MI, Xref.IMEX_PRIMARY)){
                interaction.getXrefs().add(ref);
            }
            else if (XrefUtils.isXrefFromDatabase(ref, Checksum.RIGID_MI, Checksum.RIGID)
                    || XrefUtils.isXrefFromDatabase(ref, null, Checksum.IRIGID)){
                createChecksumFromId(interaction, ref);
            }
            else{
                interaction.getIdentifiers().add(ref);
            }
        }
    }

    /**
     * Builds an {@link Interactor} from the parsed interactor columns, firing
     * listener events for missing/ambiguous data.
     *
     * @param uniqueId a {@link java.util.Collection} object.
     * @param altid a {@link java.util.Collection} object.
     * @param aliases a {@link java.util.Collection} object.
     * @param taxid a {@link java.util.Collection} object.
     * @param type a {@link java.util.Collection} object.
     * @param xref a {@link java.util.Collection} object.
     * @param checksum a {@link java.util.Collection} object.
     * @param line a int.
     * @param column a int.
     * @param mitabColumn a int.
     * @return a {@link psidev.psi.mi.jami.model.Interactor} object, or null when every column is empty.
     */
    protected Interactor createInteractorFrom(Collection<MitabXref> uniqueId, Collection<MitabXref> altid, Collection<MitabAlias> aliases, Collection<MitabOrganism> taxid, Collection<MitabCvTerm> type, Collection<MitabXref> xref, Collection<MitabChecksum> checksum, int line, int column, int mitabColumn){
        boolean hasId = !uniqueId.isEmpty() || !altid.isEmpty();
        boolean hasAlias = !aliases.isEmpty();
        boolean hasOtherFields = !taxid.isEmpty() || !checksum.isEmpty() || !type.isEmpty() || !xref.isEmpty();
        Interactor interactor = null;
        String shortName;
        String fullName = null;

        // find shortName first
        // the interactor is empty
        if (!hasId && !hasAlias && !hasOtherFields){
            return null;
        }
        // the interactor name will be unknown but needs to be created
        else if (!hasId && !hasAlias){
            if (this.listener != null){
                listener.onMissingInteractorIdentifierColumns(line, column, mitabColumn);
            }
            shortName = MitabUtils.UNKNOWN_NAME;
        }
        else{
            // first retrieve what will be the name of the interactor.
            // BUGFIX: the helper may return null (no ids and no suitable alias);
            // guard against NPE on names.length.
            String [] names = findInteractorShortNameAndFullNameFrom(uniqueId, altid, aliases, line, column, mitabColumn);
            if (names == null){
                shortName = MitabUtils.UNKNOWN_NAME;
            }
            else if (names.length == 1){
                shortName = names[0];
            }
            else if (names.length == 2){
                shortName = names[0];
                fullName = names[1];
            }
            else{
                shortName = MitabUtils.UNKNOWN_NAME;
            }
        }

        // fire event if several uniqueIds
        if (uniqueId.size() > 1 && listener != null){
            listener.onSeveralUniqueIdentifiers(uniqueId);
        }

        // find interactor type
        if (getInteractorFactory() instanceof MitabInteractorFactory){
            interactor = ((MitabInteractorFactory)getInteractorFactory()).createInteractorFromInteractorTypes(type, shortName);
        }
        else if (!type.isEmpty()){
            interactor = getInteractorFactory().createInteractorFromInteractorType(type.iterator().next(), shortName);
        }

        // we don't have an interactor type, use identifiers
        if (interactor == null && !uniqueId.isEmpty()){
            interactor = getInteractorFactory().createInteractorFromIdentityXrefs(uniqueId, shortName);
            if (interactor == null && !altid.isEmpty()){
                interactor = getInteractorFactory().createInteractorFromIdentityXrefs(altid, shortName);
                // we still don't know which interactor it is
                if (interactor == null){
                    interactor = getInteractorFactory().createInteractor(shortName, null);
                }
            }
            // we still don't know which interactor it is
            else if (interactor == null){
                interactor = getInteractorFactory().createInteractor(shortName, null);
            }
        }
        else if (interactor == null && !altid.isEmpty()){
            interactor = getInteractorFactory().createInteractorFromIdentityXrefs(altid, shortName);
            // we still don't know which interactor it is
            if (interactor == null){
                interactor = getInteractorFactory().createInteractor(shortName, null);
            }
        }
        // we don't have an interactor type, and we don't have identifiers, create an unknown participant
        else if (interactor == null){
            interactor = getInteractorFactory().createInteractor(shortName, null);
        }

        // set fullName
        interactor.setFullName(fullName);
        if (hasId){
            // add unique ids first
            interactor.getIdentifiers().addAll(uniqueId);
            // add alternative identifiers
            fillInteractorWithAlternativeIdentifiers(altid, interactor);
        }
        if (hasAlias){
            fillInteractorWithAliases(aliases, interactor);
        }
        // add checksum
        interactor.getChecksums().addAll(checksum);
        // if we have an interactor set, we can retrieve the components from the xrefs
        if (interactor instanceof InteractorPool){
            processInteractorPool(xref, (InteractorPool) interactor);
        }
        // add all xrefs
        else{
            interactor.getXrefs().addAll(xref);
        }
        // set organism
        initialiseOrganism(taxid, interactor);
        // if several types fire event
        if (type.size() > 1 && listener != null){
            listener.onSeveralCvTermsFound(type, type.iterator().next(), type.size() + " interactor types were found and only the first one will be loaded.");
        }

        // set source locator
        ((FileSourceContext)interactor).setSourceLocator(new MitabSourceLocator(line, column, mitabColumn));

        // BUGFIX: guard against a null listener, consistently with every other
        // listener notification in this class.
        if (!hasAlias && listener != null){
            listener.onMissingInteractorName(interactor, (FileSourceContext)interactor);
        }

        return interactor;
    }

    /**
     * Expands interactor-set xrefs into sub-interactors of the pool; any other
     * xref is kept as a plain xref of the pool.
     *
     * @param xref a {@link java.util.Collection} object.
     * @param interactor a {@link psidev.psi.mi.jami.model.InteractorPool} object.
     */
    protected void processInteractorPool(Collection<MitabXref> xref, InteractorPool interactor) {
        for (Xref ref : xref){
            // we have a component of the interactor pool
            if (XrefUtils.doesXrefHaveQualifier(ref, Xref.INTERACTOR_SET_QUALIFIER_MI, Xref.INTERACTOR_SET_QUALIFIER)){
                // BUGFIX: go through getInteractorFactory() (lazily initialised)
                // instead of the raw field, which is null until the getter or
                // setter has been called.
                Interactor subInteractor = getInteractorFactory().createInteractorFromDatabase(ref.getDatabase(), ref.getId().toLowerCase());
                if (subInteractor != null){
                    subInteractor.getIdentifiers().add(new MitabXref(ref.getDatabase(), ref.getId(), ref.getVersion(), CvTermUtils.createIdentityQualifier()));
                    ((MitabInteractor)subInteractor).setSourceLocator(((MitabXref)ref).getSourceLocator());
                }
                // create a default interactor
                else{
                    subInteractor = getInteractorFactory().createInteractor(ref.getId().toLowerCase(), CvTermUtils.createUnknownInteractorType());
                    subInteractor.getIdentifiers().add(new MitabXref(ref.getDatabase(), ref.getId(), ref.getVersion(), CvTermUtils.createIdentityQualifier()));
                    ((MitabInteractor)subInteractor).setSourceLocator(((MitabXref)ref).getSourceLocator());
                }

                // add the component to the interactor pool
                interactor.add(subInteractor);
            }
            // we have a simple xref
            else{
                interactor.getXrefs().add(ref);
            }
        }
    }

    /**
     * Chooses the interactor short name (and optionally full name) from the
     * display aliases, falling back to alternative ids, then unique ids.
     *
     * @param uniqueId a {@link java.util.Collection} object.
     * @param altid a {@link java.util.Collection} object.
     * @param aliases a {@link java.util.Collection} object.
     * @param line a int.
     * @param column a int.
     * @param mitabColumn a int.
     * @return an array of one or two names, or null when nothing usable was found.
     */
    protected String[] findInteractorShortNameAndFullNameFrom(Collection<MitabXref> uniqueId, Collection<MitabXref> altid, Collection<MitabAlias> aliases, int line, int column, int mitabColumn){

        MitabAlias[] names = MitabUtils.findBestShortNameAndFullNameFromAliases(aliases);

        if (names != null){
            MitabAlias shortName = null;
            MitabAlias fullName = null;
            if (names.length == 1){
                shortName = names[0];
                // do not need to keep the shortname as it is loaded as a shortname
                if (shortName.getType() != null && MitabUtils.DISPLAY_SHORT.equals(shortName.getType().getShortName())){
                    aliases.remove(shortName);
                }
                return new String[]{shortName.getName()};
            }
            else if (names.length == 2){
                shortName = names[0];
                fullName = names[1];
                if (shortName.getType() != null && MitabUtils.DISPLAY_SHORT.equals(shortName.getType().getShortName())){
                    aliases.remove(shortName);
                }
                // BUGFIX: the original removed shortName again (a no-op),
                // leaving the display_long alias duplicated as both the
                // fullName and a plain alias. Remove fullName instead.
                if (fullName.getType() != null && MitabUtils.DISPLAY_LONG.equals(fullName.getType().getShortName())){
                    aliases.remove(fullName);
                }
                return new String[]{shortName.getName(), fullName.getName()};
            }
        }

        MitabXref shortNameFromAltid = MitabUtils.findBestShortNameFromAlternativeIdentifiers(altid);
        if (shortNameFromAltid != null){
            return new String[]{shortNameFromAltid.getId()};
        }
        else if (!uniqueId.isEmpty()){
            return new String[]{uniqueId.iterator().next().getId()};
        }
        else if (!altid.isEmpty()){
            if (this.listener != null){
                listener.onEmptyUniqueIdentifiers(line, column, mitabColumn);
            }

            return new String[]{altid.iterator().next().getId()};
        }
        else if (this.listener != null){
            listener.onEmptyUniqueIdentifiers(line, column, mitabColumn);
        }

        return null;
    }

    /**
     * Routes each alternative identifier to the right collection: name-like
     * qualifiers become aliases, rogid/irogid become checksums, the rest stay
     * identifiers.
     *
     * @param altid a {@link java.util.Collection} object.
     * @param interactor a {@link psidev.psi.mi.jami.model.Interactor} object.
     */
    protected void fillInteractorWithAlternativeIdentifiers(Collection<MitabXref> altid, Interactor interactor){

        Iterator<MitabXref> refsIterator = altid.iterator();
        while (refsIterator.hasNext()){
            MitabXref ref = refsIterator.next();

            // gene name is alias
            if (XrefUtils.doesXrefHaveQualifier(ref, Alias.GENE_NAME_MI, Alias.GENE_NAME)){
                createAliasFromAltId(interactor, ref);
            }
            // gene name synonym is alias
            // NOTE(review): pairs GENE_NAME_SYNONYM_MI with the GENE_NAME label —
            // looks like it should be Alias.GENE_NAME_SYNONYM; confirm against
            // the Alias constants before changing.
            else if (XrefUtils.doesXrefHaveQualifier(ref, Alias.GENE_NAME_SYNONYM_MI, Alias.GENE_NAME)){
                createAliasFromAltId(interactor, ref);
            }
            // short label is alias
            else if (XrefUtils.doesXrefHaveQualifier(ref, null, MitabUtils.SHORTLABEL)){
                createAliasFromAltId(interactor, ref);
            }
            // display short is alias
            else if (XrefUtils.doesXrefHaveQualifier(ref, null, MitabUtils.DISPLAY_SHORT)){
                createAliasFromAltId(interactor, ref);
            }
            // display long is alias
            else if (XrefUtils.doesXrefHaveQualifier(ref, null, MitabUtils.DISPLAY_LONG)){
                createAliasFromAltId(interactor, ref);
            }
            // database is rogid so we have a checksum
            else if (XrefUtils.isXrefFromDatabase(ref, Checksum.ROGID_MI, Checksum.ROGID)){
                createChecksumFromAltId(interactor, ref);
            }
            // database is irogid so we have a checksum
            else if (XrefUtils.isXrefFromDatabase(ref, null, Checksum.IROGID)){
                createChecksumFromAltId(interactor, ref);
            }
            // we have a simple xref
            else {
                interactor.getIdentifiers().add(ref);
            }
        }
    }

    /**
     * Routes each alias: checksum-like alias types (smile, inchi, inchi key,
     * rogid/irogid) become checksums, the rest stay aliases.
     *
     * @param aliases a {@link java.util.Collection} object.
     * @param interactor a {@link psidev.psi.mi.jami.model.Interactor} object.
     */
    protected void fillInteractorWithAliases(Collection<MitabAlias> aliases, Interactor interactor){

        Iterator<MitabAlias> aliasIterator = aliases.iterator();
        while (aliasIterator.hasNext()){
            MitabAlias alias = aliasIterator.next();

            // we have a smile
            if (AliasUtils.doesAliasHaveType(alias, Checksum.SMILE_MI, Checksum.SMILE)
                    || AliasUtils.doesAliasHaveType(alias, Checksum.SMILE_MI, Checksum.SMILE_SHORT)){
                createChecksumFromAlias(interactor, alias);
            }
            // we have inchi key
            else if (AliasUtils.doesAliasHaveType(alias, Checksum.INCHI_KEY_MI, Checksum.INCHI_KEY)){
                createChecksumFromAlias(interactor, alias);
            }
            // we have standard inchi
            else if (AliasUtils.doesAliasHaveType(alias, Checksum.STANDARD_INCHI_KEY_MI, Checksum.STANDARD_INCHI_KEY)){
                createChecksumFromAlias(interactor, alias);
            }
            // we have inchi
            else if (AliasUtils.doesAliasHaveType(alias, Checksum.INCHI_MI, Checksum.INCHI)
                    || AliasUtils.doesAliasHaveType(alias, Checksum.INCHI_MI, Checksum.INCHI_SHORT)){
                createChecksumFromAlias(interactor, alias);
            }
            // we have rogid
            else if (AliasUtils.doesAliasHaveType(alias, Checksum.ROGID_MI, Checksum.ROGID)
                    || AliasUtils.doesAliasHaveType(alias, null, Checksum.IROGID)){
                createChecksumFromAlias(interactor, alias);
            }
            // we have a simple alias
            else {
                interactor.getAliases().add(alias);
            }
        }
    }

    /**
     * Merges several organism entries sharing the same taxid into one organism
     * (shortest name wins as common name, next as scientific name, the rest as
     * synonyms); fires an event when genuinely different taxids are found.
     *
     * @param organisms a {@link java.util.Collection} object.
     * @param interactor a {@link psidev.psi.mi.jami.model.Interactor} object.
     */
    protected void initialiseOrganism(Collection<MitabOrganism> organisms, Interactor interactor){

        if (organisms.size() > 1){
            Iterator<MitabOrganism> organismsIterator = organisms.iterator();
            int taxid=0;
            String commonName=null;
            int commonNameLength = 0;
            int fullNameLength = 0;
            MitabOrganism currentOrganism=null;
            boolean hasSeveralOrganisms = false;

            do {
                MitabOrganism organism = organismsIterator.next();
                if (currentOrganism == null){
                    currentOrganism = organism;
                    // NOTE(review): assumes the first organism has a non-null
                    // common name — commonName.length() would NPE otherwise; confirm upstream guarantees.
                    commonName = organism.getCommonName();
                    commonNameLength = commonName.length();
                    fullNameLength = commonName.length();
                    taxid = organism.getTaxId();
                }
                // we have same organism
                else if (organism.getTaxId() == taxid){
                    // we have a new common name (shorter names are preferred)
                    if (organism.getCommonName() != null && organism.getCommonName().length() < commonNameLength){
                        if (currentOrganism.getScientificName() == null){
                            currentOrganism.setScientificName(currentOrganism.getCommonName());
                        }
                        // we have a synonym for the organism
                        else {
                            currentOrganism.getAliases().add(AliasUtils.createAlias(Alias.SYNONYM, Alias.SYNONYM_MI, currentOrganism.getCommonName()));
                        }
                        currentOrganism.setCommonName(organism.getCommonName());
                        commonNameLength = organism.getCommonName().length();
                    }
                    // we have a full name
                    else if (currentOrganism.getScientificName() == null){
                        currentOrganism.setScientificName(organism.getCommonName());
                        fullNameLength = organism.getCommonName().length();
                    }
                    // we have a new fullname
                    else if (organism.getCommonName().length() < fullNameLength) {
                        currentOrganism.getAliases().add(AliasUtils.createAlias(Alias.SYNONYM, Alias.SYNONYM_MI, currentOrganism.getScientificName()));
                        currentOrganism.setScientificName(organism.getCommonName());
                        fullNameLength = organism.getCommonName().length();
                    }
                    // we have a synonym for the organism
                    else {
                        currentOrganism.getAliases().add(AliasUtils.createAlias(Alias.SYNONYM, Alias.SYNONYM_MI, organism.getCommonName()));
                    }
                }
                else{
                    hasSeveralOrganisms = true;
                }
            } while(organismsIterator.hasNext());

            if (listener != null && hasSeveralOrganisms){
                listener.onSeveralOrganismFound(organisms);
            }

            interactor.setOrganism(currentOrganism);
        }
        else if (!organisms.isEmpty()){
            interactor.setOrganism(organisms.iterator().next());
        }
    }

    /**
     * <p>createChecksumFromId.</p>
     *
     * @param interaction a {@link psidev.psi.mi.jami.model.Interaction} object.
     * @param ref a {@link psidev.psi.mi.jami.tab.extension.MitabXref} object.
     */
    protected void createChecksumFromId(Interaction interaction, MitabXref ref) {
        // create checksum from xref
        MitabChecksum checksum = new MitabChecksum(ref.getDatabase(), ref.getId(), ref.getSourceLocator());
        interaction.getChecksums().add(checksum);
        if (listener != null){
            listener.onSyntaxWarning(ref, "Found a Checksum in the interaction identifiers column. Will load it as a checksum.");
        }
    }

    /**
     * <p>createChecksumFromAltId.</p>
     *
     * @param interactor a {@link psidev.psi.mi.jami.model.Interactor} object.
     * @param ref a {@link psidev.psi.mi.jami.tab.extension.MitabXref} object.
     */
    protected void createChecksumFromAltId(Interactor interactor, MitabXref ref) {
        // create checksum from xref
        MitabChecksum checksum = new MitabChecksum(ref.getDatabase(), ref.getId(), ref.getSourceLocator());
        interactor.getChecksums().add(checksum);
        if (listener != null){
            listener.onSyntaxWarning(ref, "Found a Checksum in the alternative identifiers column. Will load it as a checksum.");
        }
    }

    /**
     * <p>createAliasFromAltId.</p>
     *
     * @param interactor a {@link psidev.psi.mi.jami.model.Interactor} object.
     * @param ref a {@link psidev.psi.mi.jami.tab.extension.MitabXref} object.
     */
    protected void createAliasFromAltId(Interactor interactor, MitabXref ref) {
        // create alias from xref
        MitabAlias alias = new MitabAlias(ref.getDatabase().getShortName(), ref.getQualifier(), ref.getId(), ref.getSourceLocator());
        interactor.getAliases().add(alias);
        if (listener != null){
            // BUGFIX: the warning previously claimed the value would be loaded
            // as a checksum, but this method loads it as an alias.
            listener.onSyntaxWarning(ref, "Found an Alias in the alternative identifiers column. Will load it as an alias.");
        }
    }

    /**
     * <p>createChecksumFromAlias.</p>
     *
     * @param interactor a {@link psidev.psi.mi.jami.model.Interactor} object.
     * @param alias a {@link psidev.psi.mi.jami.tab.extension.MitabAlias} object.
     */
    protected void createChecksumFromAlias(Interactor interactor, MitabAlias alias) {
        // create checksum from alias
        MitabChecksum checksum = new MitabChecksum(alias.getType(), alias.getName(), alias.getSourceLocator());
        interactor.getChecksums().add(checksum);
        if (listener != null){
            listener.onSyntaxWarning(alias, "Found a Checksum in the aliases column. Will load it as a checksum.");
        }
    }

    /**
     * Loads the (first) complex expansion method as an annotation; warns when
     * several were found.
     *
     * @param expansion a {@link java.util.Collection} object.
     * @param interaction a T object.
     */
    protected void initialiseExpansionMethod(Collection<MitabCvTerm> expansion, T interaction){
        if (expansion.size() > 1){
            if (getParserListener() != null){
                getParserListener().onSeveralCvTermsFound(expansion, expansion.iterator().next(), expansion.size()+" complex expansions found. Only the first one will be loaded.");
            }
            interaction.getAnnotations().add(new MitabAnnotation(expansion.iterator().next()));
        }
        else if (!expansion.isEmpty()){
            interaction.getAnnotations().add(new MitabAnnotation(expansion.iterator().next()));
        }
    }

    /**
     * <p>createInteraction.</p>
     *
     * @return a T object.
     */
    protected abstract T createInteraction();

    /**
     * Stores the feature text either as an InterPro accession (IPR followed by
     * six digits) or as free text.
     *
     * @param feature a {@link psidev.psi.mi.jami.tab.extension.MitabFeature} object.
     * @param text a {@link java.lang.String} object, may be null.
     */
    protected void processTextFor(MitabFeature feature, String text){
        if (text != null){
            if (INTERPRO_PATTERN.matcher(text).matches()){
                feature.setInterpro(text);
            }
            else{
                feature.setText(text);
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.api.operators;

import org.apache.flink.annotation.Internal;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.MetricOptions;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.streaming.api.TimeCharacteristic;
import org.apache.flink.streaming.api.functions.source.SourceFunction;
import org.apache.flink.streaming.api.watermark.Watermark;
import org.apache.flink.streaming.runtime.streamrecord.LatencyMarker;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.streamstatus.StreamStatusMaintainer;
import org.apache.flink.streaming.runtime.tasks.OperatorChain;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeCallback;
import org.apache.flink.streaming.runtime.tasks.ProcessingTimeService;

import java.util.concurrent.ScheduledFuture;

/**
 * {@link StreamOperator} for streaming sources.
 *
 * <p>Drives a user {@link SourceFunction}: builds the source context, runs the
 * function, emits periodic latency markers when configured, and signals
 * end-of-input for finite sources.
 *
 * @param <OUT> Type of the output elements
 * @param <SRC> Type of the source function of this stream source operator
 */
@Internal
public class StreamSource<OUT, SRC extends SourceFunction<OUT>>
        extends AbstractUdfStreamOperator<OUT, SRC> {

    private static final long serialVersionUID = 1L;

    // Source context handed to the user function; created lazily in run().
    private transient SourceFunction.SourceContext<OUT> ctx;

    // volatile: written by cancel() from another thread, read by the task thread
    // to distinguish "finite source finished" from "canceled/stopped".
    private transient volatile boolean canceledOrStopped = false;

    // Ensures Watermark.MAX_WATERMARK is emitted at most once.
    private transient volatile boolean hasSentMaxWatermark = false;

    public StreamSource(SRC sourceFunction) {
        super(sourceFunction);

        this.chainingStrategy = ChainingStrategy.HEAD;
    }

    // Convenience overload: uses the operator's own output as the collector.
    public void run(
            final Object lockingObject,
            final StreamStatusMaintainer streamStatusMaintainer,
            final OperatorChain<?, ?> operatorChain)
            throws Exception {

        run(lockingObject, streamStatusMaintainer, output, operatorChain);
    }

    public void run(
            final Object lockingObject,
            final StreamStatusMaintainer streamStatusMaintainer,
            final Output<StreamRecord<OUT>> collector,
            final OperatorChain<?, ?> operatorChain)
            throws Exception {

        final TimeCharacteristic timeCharacteristic = getOperatorConfig().getTimeCharacteristic();

        final Configuration configuration =
                this.getContainingTask().getEnvironment().getTaskManagerInfo().getConfiguration();
        // Execution-config setting wins; otherwise fall back to the TaskManager default.
        final long latencyTrackingInterval =
                getExecutionConfig().isLatencyTrackingConfigured()
                        ? getExecutionConfig().getLatencyTrackingInterval()
                        : configuration.getLong(MetricOptions.LATENCY_INTERVAL);

        LatencyMarksEmitter<OUT> latencyEmitter = null;
        if (latencyTrackingInterval > 0) {
            latencyEmitter =
                    new LatencyMarksEmitter<>(
                            getProcessingTimeService(),
                            collector,
                            latencyTrackingInterval,
                            this.getOperatorID(),
                            getRuntimeContext().getIndexOfThisSubtask());
        }

        final long watermarkInterval =
                getRuntimeContext().getExecutionConfig().getAutoWatermarkInterval();

        this.ctx =
                StreamSourceContexts.getSourceContext(
                        timeCharacteristic,
                        getProcessingTimeService(),
                        lockingObject,
                        streamStatusMaintainer,
                        collector,
                        watermarkInterval,
                        -1);

        try {
            userFunction.run(ctx);

            // if we get here, then the user function either exited after being done (finite source)
            // or the function was canceled or stopped. For the finite source case, we should emit
            // a final watermark that indicates that we reached the end of event-time, and end
            // inputs
            // of the operator chain
            if (!isCanceledOrStopped()) {
                // in theory, the subclasses of StreamSource may implement the BoundedOneInput
                // interface,
                // so we still need the following call to end the input
                synchronized (lockingObject) {
                    operatorChain.setIgnoreEndOfInput(false);
                    operatorChain.endInput(1);
                }
            }
        } finally {
            if (latencyEmitter != null) {
                latencyEmitter.close();
            }
        }
    }

    // Emits the final MAX_WATERMARK exactly once (guarded by hasSentMaxWatermark).
    public void advanceToEndOfEventTime() {
        if (!hasSentMaxWatermark) {
            ctx.emitWatermark(Watermark.MAX_WATERMARK);
            hasSentMaxWatermark = true;
        }
    }

    @Override
    public void close() throws Exception {
        try {
            super.close();
            // A finite source that ran to completion still advances event time to the end.
            if (!isCanceledOrStopped() && ctx != null) {
                advanceToEndOfEventTime();
            }
        } finally {
            // make sure that the context is closed in any case
            if (ctx != null) {
                ctx.close();
            }
        }
    }

    public void cancel() {
        // important: marking the source as stopped has to happen before the function is stopped.
        // the flag that tracks this status is volatile, so the memory model also guarantees
        // the happens-before relationship
        markCanceledOrStopped();
        userFunction.cancel();

        // the context may not be initialized if the source was never running.
        if (ctx != null) {
            ctx.close();
        }
    }

    /**
     * Marks this source as canceled or stopped.
     *
     * <p>This indicates that any exit of the
     * {@link #run(Object, StreamStatusMaintainer, Output, OperatorChain)} method cannot be
     * interpreted as the result of a finite source.
     */
    protected void markCanceledOrStopped() {
        this.canceledOrStopped = true;
    }

    /**
     * Checks whether the source has been canceled or stopped.
     *
     * @return True, if the source is canceled or stopped, false if not.
     */
    protected boolean isCanceledOrStopped() {
        return canceledOrStopped;
    }

    /**
     * Periodically emits {@link LatencyMarker}s on a processing-time timer; the
     * timer is canceled by {@link #close()}.
     */
    private static class LatencyMarksEmitter<OUT> {
        private final ScheduledFuture<?> latencyMarkTimer;

        public LatencyMarksEmitter(
                final ProcessingTimeService processingTimeService,
                final Output<StreamRecord<OUT>> output,
                long latencyTrackingInterval,
                final OperatorID operatorId,
                final int subtaskIndex) {

            latencyMarkTimer =
                    processingTimeService.scheduleWithFixedDelay(
                            new ProcessingTimeCallback() {
                                @Override
                                public void onProcessingTime(long timestamp) throws Exception {
                                    try {
                                        // ProcessingTimeService callbacks are executed under the
                                        // checkpointing lock
                                        output.emitLatencyMarker(
                                                new LatencyMarker(
                                                        processingTimeService
                                                                .getCurrentProcessingTime(),
                                                        operatorId,
                                                        subtaskIndex));
                                    } catch (Throwable t) {
                                        // we catch the Throwables here so that we don't trigger the
                                        // processing
                                        // timer services async exception handler
                                        LOG.warn("Error while emitting latency marker.", t);
                                    }
                                }
                            },
                            0L,
                            latencyTrackingInterval);
        }

        public void close() {
            latencyMarkTimer.cancel(true);
        }
    }
}
package ca.allanwang.swiperecyclerview.library.wasabeef.animators;

/*
 * Copyright (C) 2015 Wasabeef
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

import android.support.v4.view.ViewCompat;
import android.support.v4.view.ViewPropertyAnimatorCompat;
import android.support.v4.view.ViewPropertyAnimatorListener;
import android.support.v7.widget.RecyclerView.ViewHolder;
import android.support.v7.widget.SimpleItemAnimator;
import android.view.View;
import android.view.animation.Interpolator;
import android.view.animation.LinearInterpolator;

import java.util.ArrayList;
import java.util.List;

import ca.allanwang.swiperecyclerview.library.wasabeef.animators.holder.AnimateViewHolder;
import ca.allanwang.swiperecyclerview.library.wasabeef.internal.ViewHelper;

/**
 * Base {@link SimpleItemAnimator} that batches pending add/remove/move/change animations and
 * runs them in the standard order: removals first, then moves and changes in parallel, then
 * additions. Subclasses supply the concrete remove/add animations via
 * {@link #animateRemoveImpl(ViewHolder)} and {@link #animateAddImpl(ViewHolder)}.
 *
 * <p>Bookkeeping invariant: each animation lives in exactly one place at a time — a
 * {@code mPending*} list before {@link #runPendingAnimations()}, a batched {@code m*List}
 * while scheduled, and a {@code m*Animations} list while actually running.
 */
public abstract class BaseItemAnimator extends SimpleItemAnimator {

    private static final boolean DEBUG = false;

    // Animations queued by animate*() and not yet started.
    private ArrayList<ViewHolder> mPendingRemovals = new ArrayList<>();
    private ArrayList<ViewHolder> mPendingAdditions = new ArrayList<>();
    private ArrayList<MoveInfo> mPendingMoves = new ArrayList<>();
    private ArrayList<ChangeInfo> mPendingChanges = new ArrayList<>();
    // Batches scheduled by runPendingAnimations() but possibly not yet running.
    private ArrayList<ArrayList<ViewHolder>> mAdditionsList = new ArrayList<>();
    private ArrayList<ArrayList<MoveInfo>> mMovesList = new ArrayList<>();
    private ArrayList<ArrayList<ChangeInfo>> mChangesList = new ArrayList<>();
    // Animations currently in flight.
    protected ArrayList<ViewHolder> mAddAnimations = new ArrayList<>();
    private ArrayList<ViewHolder> mMoveAnimations = new ArrayList<>();
    protected ArrayList<ViewHolder> mRemoveAnimations = new ArrayList<>();
    private ArrayList<ViewHolder> mChangeAnimations = new ArrayList<>();

    protected Interpolator mInterpolator = new LinearInterpolator();

    /** Snapshot of a pending move: the holder plus its from/to coordinates. */
    private static class MoveInfo {
        public ViewHolder holder;
        public int fromX, fromY, toX, toY;

        private MoveInfo(ViewHolder holder, int fromX, int fromY, int toX, int toY) {
            this.holder = holder;
            this.fromX = fromX;
            this.fromY = fromY;
            this.toX = toX;
            this.toY = toY;
        }
    }

    /** Snapshot of a pending change: old/new holders plus from/to coordinates. */
    private static class ChangeInfo {
        public ViewHolder oldHolder, newHolder;
        public int fromX, fromY, toX, toY;

        private ChangeInfo(ViewHolder oldHolder, ViewHolder newHolder) {
            this.oldHolder = oldHolder;
            this.newHolder = newHolder;
        }

        private ChangeInfo(ViewHolder oldHolder, ViewHolder newHolder, int fromX, int fromY,
                           int toX, int toY) {
            this(oldHolder, newHolder);
            this.fromX = fromX;
            this.fromY = fromY;
            this.toX = toX;
            this.toY = toY;
        }

        @Override
        public String toString() {
            return "ChangeInfo{"
                    + "oldHolder=" + oldHolder
                    + ", newHolder=" + newHolder
                    + ", fromX=" + fromX
                    + ", fromY=" + fromY
                    + ", toX=" + toX
                    + ", toY=" + toY
                    + '}';
        }
    }

    public BaseItemAnimator() {
        super();
        // change animations are handled manually via animateChange/animateChangeImpl
        setSupportsChangeAnimations(false);
    }

    /** Sets the interpolator used by subclass add/remove animations. */
    public void setInterpolator(Interpolator mInterpolator) {
        this.mInterpolator = mInterpolator;
    }

    /**
     * Drains all pending animations: removals run immediately; moves and changes are delayed
     * until removals finish; additions wait for the longer of moves/changes on top of removals.
     */
    @Override
    public void runPendingAnimations() {
        boolean removalsPending = !mPendingRemovals.isEmpty();
        boolean movesPending = !mPendingMoves.isEmpty();
        boolean changesPending = !mPendingChanges.isEmpty();
        boolean additionsPending = !mPendingAdditions.isEmpty();
        if (!removalsPending && !movesPending && !additionsPending && !changesPending) {
            // nothing to animate
            return;
        }
        // First, remove stuff
        for (ViewHolder holder : mPendingRemovals) {
            doAnimateRemove(holder);
        }
        mPendingRemovals.clear();
        // Next, move stuff
        if (movesPending) {
            final ArrayList<MoveInfo> moves = new ArrayList<MoveInfo>();
            moves.addAll(mPendingMoves);
            mMovesList.add(moves);
            mPendingMoves.clear();
            Runnable mover = new Runnable() {
                @Override
                public void run() {
                    for (MoveInfo moveInfo : moves) {
                        animateMoveImpl(moveInfo.holder, moveInfo.fromX, moveInfo.fromY,
                                moveInfo.toX, moveInfo.toY);
                    }
                    moves.clear();
                    mMovesList.remove(moves);
                }
            };
            if (removalsPending) {
                View view = moves.get(0).holder.itemView;
                ViewCompat.postOnAnimationDelayed(view, mover, getRemoveDuration());
            } else {
                mover.run();
            }
        }
        // Next, change stuff, to run in parallel with move animations
        if (changesPending) {
            final ArrayList<ChangeInfo> changes = new ArrayList<ChangeInfo>();
            changes.addAll(mPendingChanges);
            mChangesList.add(changes);
            mPendingChanges.clear();
            Runnable changer = new Runnable() {
                @Override
                public void run() {
                    for (ChangeInfo change : changes) {
                        animateChangeImpl(change);
                    }
                    changes.clear();
                    mChangesList.remove(changes);
                }
            };
            if (removalsPending) {
                ViewHolder holder = changes.get(0).oldHolder;
                ViewCompat.postOnAnimationDelayed(holder.itemView, changer, getRemoveDuration());
            } else {
                changer.run();
            }
        }
        // Next, add stuff
        if (additionsPending) {
            final ArrayList<ViewHolder> additions = new ArrayList<ViewHolder>();
            additions.addAll(mPendingAdditions);
            mAdditionsList.add(additions);
            mPendingAdditions.clear();
            Runnable adder = new Runnable() {
                public void run() {
                    for (ViewHolder holder : additions) {
                        doAnimateAdd(holder);
                    }
                    additions.clear();
                    mAdditionsList.remove(additions);
                }
            };
            if (removalsPending || movesPending || changesPending) {
                long removeDuration = removalsPending ? getRemoveDuration() : 0;
                long moveDuration = movesPending ? getMoveDuration() : 0;
                long changeDuration = changesPending ? getChangeDuration() : 0;
                long totalDelay = removeDuration + Math.max(moveDuration, changeDuration);
                View view = additions.get(0).itemView;
                ViewCompat.postOnAnimationDelayed(view, adder, totalDelay);
            } else {
                adder.run();
            }
        }
    }

    /** Hook invoked before a remove animation starts; default does nothing. */
    protected void preAnimateRemoveImpl(final ViewHolder holder) {
    }

    /** Hook invoked before an add animation starts; default does nothing. */
    protected void preAnimateAddImpl(final ViewHolder holder) {
    }

    /** Subclasses perform the actual remove animation here. */
    protected abstract void animateRemoveImpl(final ViewHolder holder);

    /** Subclasses perform the actual add animation here. */
    protected abstract void animateAddImpl(final ViewHolder holder);

    private void preAnimateRemove(final ViewHolder holder) {
        ViewHelper.clear(holder.itemView);

        if (holder instanceof AnimateViewHolder) {
            // holders implementing AnimateViewHolder take over their own animation prep
            ((AnimateViewHolder) holder).preAnimateRemoveImpl(holder);
        } else {
            preAnimateRemoveImpl(holder);
        }
    }

    private void preAnimateAdd(final ViewHolder holder) {
        ViewHelper.clear(holder.itemView);

        if (holder instanceof AnimateViewHolder) {
            ((AnimateViewHolder) holder).preAnimateAddImpl(holder);
        } else {
            preAnimateAddImpl(holder);
        }
    }

    private void doAnimateRemove(final ViewHolder holder) {
        if (holder instanceof AnimateViewHolder) {
            ((AnimateViewHolder) holder).animateRemoveImpl(holder, new DefaultRemoveVpaListener(holder));
        } else {
            animateRemoveImpl(holder);
        }

        mRemoveAnimations.add(holder);
    }

    private void doAnimateAdd(final ViewHolder holder) {
        if (holder instanceof AnimateViewHolder) {
            ((AnimateViewHolder) holder).animateAddImpl(holder, new DefaultAddVpaListener(holder));
        } else {
            animateAddImpl(holder);
        }

        mAddAnimations.add(holder);
    }

    /** Queues a remove animation; actual work happens in {@link #runPendingAnimations()}. */
    @Override
    public boolean animateRemove(final ViewHolder holder) {
        endAnimation(holder);
        preAnimateRemove(holder);
        mPendingRemovals.add(holder);
        return true;
    }

    /** Staggered delay for removals, proportional to the holder's old adapter position. */
    protected long getRemoveDelay(final ViewHolder holder) {
        return Math.abs(holder.getOldPosition() * getRemoveDuration() / 4);
    }

    /** Queues an add animation; actual work happens in {@link #runPendingAnimations()}. */
    @Override
    public boolean animateAdd(final ViewHolder holder) {
        endAnimation(holder);
        preAnimateAdd(holder);
        mPendingAdditions.add(holder);
        return true;
    }

    /** Staggered delay for additions, proportional to the holder's adapter position. */
    protected long getAddDelay(final ViewHolder holder) {
        return Math.abs(holder.getAdapterPosition() * getAddDuration() / 4);
    }

    /**
     * Queues a move animation. The view is pre-translated by -delta so the later animation
     * to translation 0 visually moves it from the old position to the new one.
     */
    @Override
    public boolean animateMove(final ViewHolder holder, int fromX, int fromY, int toX, int toY) {
        final View view = holder.itemView;
        // account for any translation left over from a previous (canceled) animation
        fromX += ViewCompat.getTranslationX(holder.itemView);
        fromY += ViewCompat.getTranslationY(holder.itemView);
        endAnimation(holder);
        int deltaX = toX - fromX;
        int deltaY = toY - fromY;
        if (deltaX == 0 && deltaY == 0) {
            dispatchMoveFinished(holder);
            return false;
        }
        if (deltaX != 0) {
            ViewCompat.setTranslationX(view, -deltaX);
        }
        if (deltaY != 0) {
            ViewCompat.setTranslationY(view, -deltaY);
        }
        mPendingMoves.add(new MoveInfo(holder, fromX, fromY, toX, toY));
        return true;
    }

    private void animateMoveImpl(final ViewHolder holder, int fromX, int fromY, int toX, int toY) {
        final View view = holder.itemView;
        final int deltaX = toX - fromX;
        final int deltaY = toY - fromY;
        if (deltaX != 0) {
            ViewCompat.animate(view).translationX(0);
        }
        if (deltaY != 0) {
            ViewCompat.animate(view).translationY(0);
        }
        // TODO: make EndActions end listeners instead, since end actions aren't called when
        // vpas are canceled (and can't end them. why?)
        // need listener functionality in VPACompat for this. Ick.
        mMoveAnimations.add(holder);
        final ViewPropertyAnimatorCompat animation = ViewCompat.animate(view);
        animation.setDuration(getMoveDuration()).setListener(new VpaListenerAdapter() {
            @Override
            public void onAnimationStart(View view) {
                dispatchMoveStarting(holder);
            }

            @Override
            public void onAnimationCancel(View view) {
                // snap back to the final position if the animation is interrupted
                if (deltaX != 0) {
                    ViewCompat.setTranslationX(view, 0);
                }
                if (deltaY != 0) {
                    ViewCompat.setTranslationY(view, 0);
                }
            }

            @Override
            public void onAnimationEnd(View view) {
                animation.setListener(null);
                dispatchMoveFinished(holder);
                mMoveAnimations.remove(holder);
                dispatchFinishedWhenDone();
            }
        }).start();
    }

    /**
     * Queues a cross-fade/translate change between the old and new holder. Translation and
     * alpha are captured before endAnimation() and restored afterwards so an in-flight
     * animation's visual state carries over.
     */
    @Override
    public boolean animateChange(ViewHolder oldHolder, ViewHolder newHolder, int fromX, int fromY,
                                 int toX, int toY) {
        final float prevTranslationX = ViewCompat.getTranslationX(oldHolder.itemView);
        final float prevTranslationY = ViewCompat.getTranslationY(oldHolder.itemView);
        final float prevAlpha = ViewCompat.getAlpha(oldHolder.itemView);
        endAnimation(oldHolder);
        int deltaX = (int) (toX - fromX - prevTranslationX);
        int deltaY = (int) (toY - fromY - prevTranslationY);
        // recover prev translation state after ending animation
        ViewCompat.setTranslationX(oldHolder.itemView, prevTranslationX);
        ViewCompat.setTranslationY(oldHolder.itemView, prevTranslationY);
        ViewCompat.setAlpha(oldHolder.itemView, prevAlpha);
        if (newHolder != null && newHolder.itemView != null) {
            // carry over translation values
            endAnimation(newHolder);
            ViewCompat.setTranslationX(newHolder.itemView, -deltaX);
            ViewCompat.setTranslationY(newHolder.itemView, -deltaY);
            ViewCompat.setAlpha(newHolder.itemView, 0);
        }
        mPendingChanges.add(new ChangeInfo(oldHolder, newHolder, fromX, fromY, toX, toY));
        return true;
    }

    private void animateChangeImpl(final ChangeInfo changeInfo) {
        final ViewHolder holder = changeInfo.oldHolder;
        final View view = holder == null ? null : holder.itemView;
        final ViewHolder newHolder = changeInfo.newHolder;
        final View newView = newHolder != null ? newHolder.itemView : null;
        if (view != null) {
            // fade/translate the old view out
            mChangeAnimations.add(changeInfo.oldHolder);
            final ViewPropertyAnimatorCompat oldViewAnim =
                    ViewCompat.animate(view).setDuration(getChangeDuration());
            oldViewAnim.translationX(changeInfo.toX - changeInfo.fromX);
            oldViewAnim.translationY(changeInfo.toY - changeInfo.fromY);
            oldViewAnim.alpha(0).setListener(new VpaListenerAdapter() {
                @Override
                public void onAnimationStart(View view) {
                    dispatchChangeStarting(changeInfo.oldHolder, true);
                }

                @Override
                public void onAnimationEnd(View view) {
                    oldViewAnim.setListener(null);
                    ViewCompat.setAlpha(view, 1);
                    ViewCompat.setTranslationX(view, 0);
                    ViewCompat.setTranslationY(view, 0);
                    dispatchChangeFinished(changeInfo.oldHolder, true);
                    mChangeAnimations.remove(changeInfo.oldHolder);
                    dispatchFinishedWhenDone();
                }
            }).start();
        }
        if (newView != null) {
            // fade/translate the new view in
            mChangeAnimations.add(changeInfo.newHolder);
            final ViewPropertyAnimatorCompat newViewAnimation = ViewCompat.animate(newView);
            newViewAnimation.translationX(0).translationY(0).setDuration(getChangeDuration()).
                    alpha(1).setListener(new VpaListenerAdapter() {
                @Override
                public void onAnimationStart(View view) {
                    dispatchChangeStarting(changeInfo.newHolder, false);
                }

                @Override
                public void onAnimationEnd(View view) {
                    newViewAnimation.setListener(null);
                    ViewCompat.setAlpha(newView, 1);
                    ViewCompat.setTranslationX(newView, 0);
                    ViewCompat.setTranslationY(newView, 0);
                    dispatchChangeFinished(changeInfo.newHolder, false);
                    mChangeAnimations.remove(changeInfo.newHolder);
                    dispatchFinishedWhenDone();
                }
            }).start();
        }
    }

    // Ends any change animation involving `item`; drops the ChangeInfo once both holders are done.
    private void endChangeAnimation(List<ChangeInfo> infoList, ViewHolder item) {
        for (int i = infoList.size() - 1; i >= 0; i--) {
            ChangeInfo changeInfo = infoList.get(i);
            if (endChangeAnimationIfNecessary(changeInfo, item)) {
                if (changeInfo.oldHolder == null && changeInfo.newHolder == null) {
                    infoList.remove(changeInfo);
                }
            }
        }
    }

    private void endChangeAnimationIfNecessary(ChangeInfo changeInfo) {
        if (changeInfo.oldHolder != null) {
            endChangeAnimationIfNecessary(changeInfo, changeInfo.oldHolder);
        }
        if (changeInfo.newHolder != null) {
            endChangeAnimationIfNecessary(changeInfo, changeInfo.newHolder);
        }
    }

    // Returns true if `item` participated in the change; resets its view state and notifies.
    private boolean endChangeAnimationIfNecessary(ChangeInfo changeInfo, ViewHolder item) {
        boolean oldItem = false;
        if (changeInfo.newHolder == item) {
            changeInfo.newHolder = null;
        } else if (changeInfo.oldHolder == item) {
            changeInfo.oldHolder = null;
            oldItem = true;
        } else {
            return false;
        }
        ViewCompat.setAlpha(item.itemView, 1);
        ViewCompat.setTranslationX(item.itemView, 0);
        ViewCompat.setTranslationY(item.itemView, 0);
        dispatchChangeFinished(item, oldItem);
        return true;
    }

    /** Cancels/finishes every pending or running animation that involves {@code item}. */
    @Override
    public void endAnimation(ViewHolder item) {
        final View view = item.itemView;
        // this will trigger end callback which should set properties to their target values.
        ViewCompat.animate(view).cancel();
        // TODO if some other animations are chained to end, how do we cancel them as well?
        for (int i = mPendingMoves.size() - 1; i >= 0; i--) {
            MoveInfo moveInfo = mPendingMoves.get(i);
            if (moveInfo.holder == item) {
                ViewCompat.setTranslationY(view, 0);
                ViewCompat.setTranslationX(view, 0);
                dispatchMoveFinished(item);
                mPendingMoves.remove(i);
            }
        }
        endChangeAnimation(mPendingChanges, item);
        if (mPendingRemovals.remove(item)) {
            ViewHelper.clear(item.itemView);
            dispatchRemoveFinished(item);
        }
        if (mPendingAdditions.remove(item)) {
            ViewHelper.clear(item.itemView);
            dispatchAddFinished(item);
        }

        for (int i = mChangesList.size() - 1; i >= 0; i--) {
            ArrayList<ChangeInfo> changes = mChangesList.get(i);
            endChangeAnimation(changes, item);
            if (changes.isEmpty()) {
                mChangesList.remove(i);
            }
        }
        for (int i = mMovesList.size() - 1; i >= 0; i--) {
            ArrayList<MoveInfo> moves = mMovesList.get(i);
            for (int j = moves.size() - 1; j >= 0; j--) {
                MoveInfo moveInfo = moves.get(j);
                if (moveInfo.holder == item) {
                    ViewCompat.setTranslationY(view, 0);
                    ViewCompat.setTranslationX(view, 0);
                    dispatchMoveFinished(item);
                    moves.remove(j);
                    if (moves.isEmpty()) {
                        mMovesList.remove(i);
                    }
                    break;
                }
            }
        }
        for (int i = mAdditionsList.size() - 1; i >= 0; i--) {
            ArrayList<ViewHolder> additions = mAdditionsList.get(i);
            if (additions.remove(item)) {
                ViewHelper.clear(item.itemView);
                dispatchAddFinished(item);
                if (additions.isEmpty()) {
                    mAdditionsList.remove(i);
                }
            }
        }

        // animations should be ended by the cancel above.
        if (mRemoveAnimations.remove(item) && DEBUG) {
            throw new IllegalStateException(
                    "after animation is cancelled, item should not be in " + "mRemoveAnimations list");
        }

        if (mAddAnimations.remove(item) && DEBUG) {
            throw new IllegalStateException(
                    "after animation is cancelled, item should not be in " + "mAddAnimations list");
        }

        if (mChangeAnimations.remove(item) && DEBUG) {
            throw new IllegalStateException(
                    "after animation is cancelled, item should not be in " + "mChangeAnimations list");
        }

        if (mMoveAnimations.remove(item) && DEBUG) {
            throw new IllegalStateException(
                    "after animation is cancelled, item should not be in " + "mMoveAnimations list");
        }
        dispatchFinishedWhenDone();
    }

    /** True while any animation is pending, batched, or running. */
    @Override
    public boolean isRunning() {
        return (!mPendingAdditions.isEmpty()
                || !mPendingChanges.isEmpty()
                || !mPendingMoves.isEmpty()
                || !mPendingRemovals.isEmpty()
                || !mMoveAnimations.isEmpty()
                || !mRemoveAnimations.isEmpty()
                || !mAddAnimations.isEmpty()
                || !mChangeAnimations.isEmpty()
                || !mMovesList.isEmpty()
                || !mAdditionsList.isEmpty()
                || !mChangesList.isEmpty());
    }

    /**
     * Check the state of currently pending and running animations. If there are none
     * pending/running, call #dispatchAnimationsFinished() to notify any
     * listeners.
     */
    private void dispatchFinishedWhenDone() {
        if (!isRunning()) {
            dispatchAnimationsFinished();
        }
    }

    /** Immediately finishes every pending/batched animation and cancels running ones. */
    @Override
    public void endAnimations() {
        int count = mPendingMoves.size();
        for (int i = count - 1; i >= 0; i--) {
            MoveInfo item = mPendingMoves.get(i);
            View view = item.holder.itemView;
            ViewCompat.setTranslationY(view, 0);
            ViewCompat.setTranslationX(view, 0);
            dispatchMoveFinished(item.holder);
            mPendingMoves.remove(i);
        }
        count = mPendingRemovals.size();
        for (int i = count - 1; i >= 0; i--) {
            ViewHolder item = mPendingRemovals.get(i);
            dispatchRemoveFinished(item);
            mPendingRemovals.remove(i);
        }
        count = mPendingAdditions.size();
        for (int i = count - 1; i >= 0; i--) {
            ViewHolder item = mPendingAdditions.get(i);
            ViewHelper.clear(item.itemView);
            dispatchAddFinished(item);
            mPendingAdditions.remove(i);
        }
        count = mPendingChanges.size();
        for (int i = count - 1; i >= 0; i--) {
            endChangeAnimationIfNecessary(mPendingChanges.get(i));
        }
        mPendingChanges.clear();
        if (!isRunning()) {
            return;
        }
        int listCount = mMovesList.size();
        for (int i = listCount - 1; i >= 0; i--) {
            ArrayList<MoveInfo> moves = mMovesList.get(i);
            count = moves.size();
            for (int j = count - 1; j >= 0; j--) {
                MoveInfo moveInfo = moves.get(j);
                ViewHolder item = moveInfo.holder;
                View view = item.itemView;
                ViewCompat.setTranslationY(view, 0);
                ViewCompat.setTranslationX(view, 0);
                dispatchMoveFinished(moveInfo.holder);
                moves.remove(j);
                if (moves.isEmpty()) {
                    mMovesList.remove(moves);
                }
            }
        }
        listCount = mAdditionsList.size();
        for (int i = listCount - 1; i >= 0; i--) {
            ArrayList<ViewHolder> additions = mAdditionsList.get(i);
            count = additions.size();
            for (int j = count - 1; j >= 0; j--) {
                ViewHolder item = additions.get(j);
                View view = item.itemView;
                ViewCompat.setAlpha(view, 1);
                dispatchAddFinished(item);
                //this check prevent exception when removal already happened during finishing animation
                if (j < additions.size()) {
                    additions.remove(j);
                }
                if (additions.isEmpty()) {
                    mAdditionsList.remove(additions);
                }
            }
        }
        listCount = mChangesList.size();
        for (int i = listCount - 1; i >= 0; i--) {
            ArrayList<ChangeInfo> changes = mChangesList.get(i);
            count = changes.size();
            for (int j = count - 1; j >= 0; j--) {
                endChangeAnimationIfNecessary(changes.get(j));
                if (changes.isEmpty()) {
                    mChangesList.remove(changes);
                }
            }
        }

        cancelAll(mRemoveAnimations);
        cancelAll(mMoveAnimations);
        cancelAll(mAddAnimations);
        cancelAll(mChangeAnimations);

        dispatchAnimationsFinished();
    }

    void cancelAll(List<ViewHolder> viewHolders) {
        for (int i = viewHolders.size() - 1; i >= 0; i--) {
            ViewCompat.animate(viewHolders.get(i).itemView).cancel();
        }
    }

    /** No-op adapter so listeners only override the callbacks they need. */
    private static class VpaListenerAdapter implements ViewPropertyAnimatorListener {
        @Override
        public void onAnimationStart(View view) {
        }

        @Override
        public void onAnimationEnd(View view) {
        }

        @Override
        public void onAnimationCancel(View view) {
        }
    }

    /** Default listener for add animations: clears view state and dispatches add callbacks. */
    protected class DefaultAddVpaListener extends VpaListenerAdapter {

        ViewHolder mViewHolder;

        public DefaultAddVpaListener(final ViewHolder holder) {
            mViewHolder = holder;
        }

        @Override
        public void onAnimationStart(View view) {
            dispatchAddStarting(mViewHolder);
        }

        @Override
        public void onAnimationCancel(View view) {
            ViewHelper.clear(view);
        }

        @Override
        public void onAnimationEnd(View view) {
            ViewHelper.clear(view);
            dispatchAddFinished(mViewHolder);
            mAddAnimations.remove(mViewHolder);
            dispatchFinishedWhenDone();
        }
    }

    /** Default listener for remove animations: clears view state and dispatches remove callbacks. */
    protected class DefaultRemoveVpaListener extends VpaListenerAdapter {

        ViewHolder mViewHolder;

        public DefaultRemoveVpaListener(final ViewHolder holder) {
            mViewHolder = holder;
        }

        @Override
        public void onAnimationStart(View view) {
            dispatchRemoveStarting(mViewHolder);
        }

        @Override
        public void onAnimationCancel(View view) {
            ViewHelper.clear(view);
        }

        @Override
        public void onAnimationEnd(View view) {
            ViewHelper.clear(view);
            dispatchRemoveFinished(mViewHolder);
            mRemoveAnimations.remove(mViewHolder);
            dispatchFinishedWhenDone();
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.kylin.sdk.datasource.adaptor;

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Types;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Map;

import javax.sql.rowset.CachedRowSet;

import org.apache.kylin.shaded.com.google.common.base.Joiner;
import org.apache.commons.lang.StringUtils;

/**
 * A default implementation for <C>AbstractJdbcAdaptor</C>. By default, this adaptor supposed to support most cases.
 * Developers can just extends this class and modify some methods if found somewhere unsupported.
 */
public class DefaultAdaptor extends AbstractJdbcAdaptor {

    // joins cache-key parts with '_'; shared, stateless, safe as a static
    private static Joiner joiner = Joiner.on('_');

    public DefaultAdaptor(AdaptorConfig config) throws Exception {
        super(config);
    }

    /**
     * By default, the typeId from JDBC source will be returned.
     * @param type The column type name from JDBC source.
     * @param typeId The column type id from JDBC source.
     * @return The type id Kylin should use.
     */
    @Override
    public int toKylinTypeId(String type, int typeId) {
        return typeId;
    }

    /**
     * By default, we accord to Hive's type system for this converting.
     * @param sourceTypeId Column type id from Source (a {@link java.sql.Types} constant).
     * @return The column type name supported by Kylin; "any" for unmapped ids.
     */
    @Override
    public String toKylinTypeName(int sourceTypeId) {
        String result = "any";
        switch (sourceTypeId) {
        case Types.CHAR:
            result = "char";
            break;
        case Types.VARCHAR:
        case Types.NVARCHAR:
        case Types.LONGVARCHAR:
            result = "varchar";
            break;
        case Types.NUMERIC:
        case Types.DECIMAL:
            result = "decimal";
            break;
        case Types.BIT:
        case Types.BOOLEAN:
            result = "boolean";
            break;
        case Types.TINYINT:
            result = "tinyint";
            break;
        case Types.SMALLINT:
            result = "smallint";
            break;
        case Types.INTEGER:
            result = "integer";
            break;
        case Types.BIGINT:
            result = "bigint";
            break;
        case Types.REAL:
        case Types.FLOAT:
        case Types.DOUBLE:
            result = "double";
            break;
        case Types.BINARY:
        case Types.VARBINARY:
        case Types.LONGVARBINARY:
            result = "byte";
            break;
        case Types.DATE:
            result = "date";
            break;
        case Types.TIME:
            result = "time";
            break;
        case Types.TIMESTAMP:
            result = "timestamp";
            break;
        default:
            //do nothing
            break;
        }
        return result;
    }

    /**
     * By default, the column type name from kylin will be returned.
     * @param kylinTypeName A column type name which is defined in Kylin.
     * @return The source-side type name (identity mapping here).
     */
    @Override
    public String toSourceTypeName(String kylinTypeName) {
        return kylinTypeName;
    }

    /**
     * By default, nothing happens when fix a sql.
     * @param sql The SQL statement to be fixed.
     * @return The fixed SQL statement.
     */
    @Override
    public String fixSql(String sql) {
        return sql;
    }

    /**
     * By default, use schema as database of kylin.
     * @return All non-blank schema names from JDBC metadata.
     * @throws SQLException on metadata access failure.
     */
    @Override
    public List<String> listDatabases() throws SQLException {
        List<String> ret = new LinkedList<>();
        try (Connection con = getConnection(); ResultSet rs = con.getMetaData().getSchemas()) {
            while (rs.next()) {
                String schema = rs.getString("TABLE_SCHEM");
                if (StringUtils.isNotBlank(schema)) {
                    ret.add(schema);
                }
            }
        }
        return ret;
    }

    /**
     * By default, use schema to list tables.
     * @param schema The schema to list tables from; a null/blank schema lists across all schemas.
     * @return All non-blank table names found in the schema.
     * @throws SQLException on metadata access failure.
     */
    @Override
    public List<String> listTables(String schema) throws SQLException {
        List<String> ret = new ArrayList<>();
        try (Connection conn = getConnection();
                ResultSet rs = conn.getMetaData().getTables(null, schema, null, null)) {
            while (rs.next()) {
                String name = rs.getString("TABLE_NAME");
                // BUGFIX: filter on the fetched table name, not the schema parameter.
                // The old check (isNotBlank(schema)) dropped every table when the caller
                // passed a blank schema and never filtered out blank table names.
                if (StringUtils.isNotBlank(name)) {
                    ret.add(name);
                }
            }
        }
        return ret;
    }

    /**
     * Lists all non-blank column names of a table via JDBC metadata.
     * @param database The schema/database of the table.
     * @param tableName The table to inspect.
     * @return Column names in metadata order.
     * @throws SQLException on metadata access failure.
     */
    @Override
    public List<String> listColumns(String database, String tableName) throws SQLException {
        List<String> ret = new ArrayList<>();
        CachedRowSet columnsRs = getTableColumns(database, tableName);

        while (columnsRs.next()) {
            String name = columnsRs.getString("COLUMN_NAME");
            if (StringUtils.isNotBlank(name)) {
                ret.add(name);
            }
        }
        return ret;
    }

    /** Returns the raw metadata row set for a single table (cached off the live connection). */
    @Override
    public CachedRowSet getTable(String schema, String table) throws SQLException {
        try (Connection conn = getConnection();
                ResultSet rs = conn.getMetaData().getTables(null, schema, table, null)) {
            return cacheResultSet(rs);
        }
    }

    /** Returns the raw column-metadata row set for a single table (cached off the live connection). */
    @Override
    public CachedRowSet getTableColumns(String schema, String table) throws SQLException {
        try (Connection conn = getConnection();
                ResultSet rs = conn.getMetaData().getColumns(null, schema, table, null)) {
            return cacheResultSet(rs);
        }
    }

    /** Builds the DDL to create a schema if it does not exist. */
    @Override
    public String[] buildSqlToCreateSchema(String schemaName) {
        return new String[] { String.format(Locale.ROOT, "CREATE schema IF NOT EXISTS %s", schemaName) };
    }

    /** Builds the statement to bulk-load a CSV file into a table. */
    @Override
    public String[] buildSqlToLoadDataFromLocal(String tableName, String tableFileDir) {
        return new String[] { String.format(Locale.ROOT, "LOAD DATA INFILE '%s/%s.csv' INTO %s FIELDS TERMINATED BY ',';",
                tableFileDir, tableName, tableName) };
    }

    /**
     * Builds drop + create statements for a table.
     * @param tableIdentity Fully qualified table name.
     * @param columnInfo Column name -> Kylin type name.
     * @return DROP TABLE, DROP VIEW and CREATE TABLE statements, in that order.
     */
    @Override
    public String[] buildSqlToCreateTable(String tableIdentity, Map<String, String> columnInfo) {
        String dropsql = "DROP TABLE IF EXISTS " + tableIdentity;
        String dropsql2 = "DROP VIEW IF EXISTS " + tableIdentity;

        StringBuilder ddl = new StringBuilder();
        ddl.append("CREATE TABLE " + tableIdentity + "\n");
        ddl.append("(" + "\n");

        for (Map.Entry<String, String> col : columnInfo.entrySet()) {
            ddl.append(col.getKey() + " " + toSourceTypeName(col.getValue()) + ",\n");
        }
        // strip the trailing comma left by the last column; guard against an empty
        // column map, where the unconditional delete used to eat the '(' instead
        if (!columnInfo.isEmpty()) {
            ddl.deleteCharAt(ddl.length() - 2);
        }
        ddl.append(")");

        return new String[] { dropsql, dropsql2, ddl.toString() };
    }

    /** Builds drop + create statements for a view backed by the given SQL. */
    @Override
    public String[] buildSqlToCreateView(String viewName, String sql) {
        String dropView = "DROP VIEW IF EXISTS " + viewName;
        String dropTable = "DROP TABLE IF EXISTS " + viewName;
        String createSql = ("CREATE VIEW " + viewName + " AS " + sql);

        return new String[] { dropView, dropTable, createSql };
    }

    /**
     * defects:
     * identifier can not tell column or table or database, here follow the order database->table->column, once matched and returns
     * so once having a database name Test and table name TEst, will always find Test.
     * (Method name keeps its historical spelling — it is part of the public API.)
     * @param identifier The case-insensitive identifier to resolve.
     * @return identifier with case sensitive
     */
    public String fixIdentifierCaseSensitve(String identifier) {
        try {
            List<String> databases = listDatabasesWithCache();
            for (String database : databases) {
                if (identifier.equalsIgnoreCase(database)) {
                    return database;
                }
            }
            List<String> tables = listTables();
            for (String table : tables) {
                if (identifier.equalsIgnoreCase(table)) {
                    return table;
                }
            }
            List<String> columns = listColumns();
            for (String column : columns) {
                if (identifier.equalsIgnoreCase(column)) {
                    return column;
                }
            }
        } catch (Exception e) {
            throw new IllegalStateException(e);
        }
        return identifier;
    }

    /**
     * Get All tables for sql case sensitive
     * @return All table names, populating the per-database table cache on a cold start.
     * @throws SQLException on metadata access failure.
     */
    public List<String> listTables() throws SQLException {
        List<String> ret = new ArrayList<>();
        if (tablesCache == null || tablesCache.size() == 0) {
            try (Connection conn = getConnection();
                    ResultSet rs = conn.getMetaData().getTables(null, null, null, null)) {
                while (rs.next()) {
                    String name = rs.getString("TABLE_NAME");
                    // fall back to TABLE_CAT for drivers that report catalogs instead of schemas
                    String database = rs.getString("TABLE_SCHEM") != null ? rs.getString("TABLE_SCHEM")
                            : rs.getString("TABLE_CAT");
                    String cacheKey = joiner.join(config.datasourceId, config.url, database, "tables");
                    List<String> cachedTables = tablesCache.getIfPresent(cacheKey);
                    if (cachedTables == null) {
                        cachedTables = new ArrayList<>();
                        tablesCache.put(cacheKey, cachedTables);
                        logger.debug("Add table cache for database {}", database);
                    }
                    if (!cachedTables.contains(name)) {
                        cachedTables.add(name);
                    }
                    ret.add(name);
                }
            }
        } else {
            for (Map.Entry<String, List<String>> entry : tablesCache.asMap().entrySet()) {
                ret.addAll(entry.getValue());
            }
        }
        return ret;
    }

    /**
     * Get All columns for sql case sensitive
     * @return All column names, populating the per-table column cache on a cold start.
     * @throws SQLException on metadata access failure.
     */
    public List<String> listColumns() throws SQLException {
        List<String> ret = new ArrayList<>();
        if (columnsCache == null || columnsCache.size() == 0) {
            CachedRowSet columnsRs = null;
            try (Connection conn = getConnection();
                    ResultSet rs = conn.getMetaData().getColumns(null, null, null, null)) {
                columnsRs = cacheResultSet(rs);
            }
            while (columnsRs.next()) {
                String database = columnsRs.getString("TABLE_SCHEM") != null ? columnsRs.getString("TABLE_SCHEM")
                        : columnsRs.getString("TABLE_CAT");
                String table = columnsRs.getString("TABLE_NAME");
                String column = columnsRs.getString("COLUMN_NAME");
                String cacheKey = joiner.join(config.datasourceId, config.url, database, table, "columns");
                List<String> cachedColumns = columnsCache.getIfPresent(cacheKey);
                if (cachedColumns == null) {
                    cachedColumns = new ArrayList<>();
                    columnsCache.put(cacheKey, cachedColumns);
                    logger.debug("Add column cache for table {}.{}", database, table);
                }
                if (!cachedColumns.contains(column)) {
                    cachedColumns.add(column);
                }
                ret.add(column);
            }
        } else {
            for (Map.Entry<String, List<String>> entry : columnsCache.asMap().entrySet()) {
                ret.addAll(entry.getValue());
            }
        }
        return ret;
    }
}
/*

   Derby - Class org.apache.derby.iapi.types.SQLDouble

   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

      http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.

 */

package org.apache.derby.iapi.types;

import org.apache.derby.iapi.reference.SQLState;
import org.apache.derby.iapi.services.io.ArrayInputStream;
import org.apache.derby.iapi.services.io.StoredFormatIds;
import org.apache.derby.iapi.services.io.Storable;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.types.BooleanDataValue;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.types.NumberDataValue;
import org.apache.derby.iapi.types.TypeId;
import org.apache.derby.iapi.services.cache.ClassSize;
import org.apache.derby.iapi.types.NumberDataType;
import org.apache.derby.iapi.types.SQLBoolean;

import java.io.ObjectOutput;
import java.io.ObjectInput;
import java.io.IOException;

import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;

/**
 * SQLDouble satisfies the DataValueDescriptor
 * interfaces (i.e., OrderableDataType). It implements a double column,
 * e.g. for * storing a column value; it can be specified
 * when constructed to not allow nulls. Nullability cannot be changed
 * after construction, as it affects the storage size and mechanism.
 * <p>
 * Because OrderableDataType is a subtype of DataType,
 * SQLDouble can play a role in either a DataType/Row
 * or a OrderableDataType/Row, interchangeably.
 * <p>
 * We assume the store has a flag for nullness of the value,
 * and simply return a 0-length array for the stored form
 * when the value is null.
 * <p>
 * PERFORMANCE: There are likely alot of performance improvements
 * possible for this implementation -- it new's Double
 * more than it probably wants to.
 * <p>
 * This is modeled after SQLInteger.
 * <p>
 * We don't let doubles get constructed with NaN or Infinity values, and
 * check for those values where they can occur on operations, so the
 * set* operations do not check for them coming in.
 *
 */
public final class SQLDouble extends NumberDataType
{
	/*
	 * DataValueDescriptor interface
	 * (mostly implemented in DataType)
	 */

    // JDBC is lax in what it permits and what it
	// returns, so we are similarly lax
	// @see DataValueDescriptor

	/**
	 * @exception StandardException thrown on failure to convert
	 */
	public int	getInt() throws StandardException
	{
	    // REMIND: do we want to check for truncation?
	    // Range check uses MAX+1.0/MIN-1.0 because double cannot represent every
	    // int exactly near the boundaries; values outside raise a range error.
		if ((value > (((double) Integer.MAX_VALUE) + 1.0d)) || (value < (((double) Integer.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "INTEGER");
		return (int)value;
	}

	/**
	 * @exception StandardException thrown on failure to convert
	 */
	public byte	getByte() throws StandardException
	{
		if ((value > (((double) Byte.MAX_VALUE) + 1.0d)) || (value < (((double) Byte.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "TINYINT");
		return (byte) value;
	}

	/**
	 * @exception StandardException thrown on failure to convert
	 */
	public short	getShort() throws StandardException
	{
		if ((value > (((double) Short.MAX_VALUE) + 1.0d)) || (value < (((double) Short.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "SMALLINT");
		return (short) value;
	}

	/**
	 * @exception StandardException thrown on failure to convert
	 */
	public long	getLong() throws StandardException
	{
		if ((value > (((double) Long.MAX_VALUE) + 1.0d)) || (value < (((double) Long.MIN_VALUE) - 1.0d)))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "BIGINT");
		return (long) value;
	}

	/**
	 * @exception StandardException thrown on failure to convert
	 */
	public float	getFloat() throws StandardException
	{
		// a finite double can still overflow to float infinity on narrowing
		if (Float.isInfinite((float)value))
			throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.REAL_NAME);
		return (float) value;
	}

	public double	getDouble()
	{
		/* This value is bogus if the SQLDouble is null */
		return value;
	}

	/**
	 * DOUBLE implementation. Convert to a BigDecimal using getString.
	 */
	public int typeToBigDecimal()
	{
		return java.sql.Types.CHAR;
	}

    // for lack of a specification: getDouble()==0 gives true
    // independent of the NULL flag
	public boolean	getBoolean()
	{
		return (value != 0);
	}

	public String	getString()
	{
		if (isNull())
			return null;
		else
			return Double.toString(value);
	}

	public Object	getObject()
	{
		// REMIND: could create one Double and reuse it?
		if (isNull())
			return null;
		else
			return new Double(value);
	}

	/**
	 * Set the value from a correctly typed Double object.
	 * @throws StandardException
	 */
	void setObject(Object theValue) throws StandardException
	{
		setValue(((Double) theValue).doubleValue());
	}

	// copies the double value out of another descriptor
	protected void setFrom(DataValueDescriptor theValue) throws StandardException {

		setValue(theValue.getDouble());
	}

	public int	getLength()
	{
		return DOUBLE_LENGTH;
	}

	// this is for DataType's error generator
	public String getTypeName()
	{
		return TypeId.DOUBLE_NAME;
	}

	/*
	 * Storable interface, implies Externalizable, TypedFormat
	 */

	/**
		Return my format identifier.

		@see org.apache.derby.iapi.services.io.TypedFormat#getTypeFormatId
	*/
	public int getTypeFormatId() {
		return StoredFormatIds.SQL_DOUBLE_ID;
	}

	/*
	 * see if the double value is null.
	 */
	/** @see Storable#isNull */
	public boolean isNull()
	{
		return isnull;
	}

	public void writeExternal(ObjectOutput out) throws IOException {

		// never called when value is null
		if (SanityManager.DEBUG)
			SanityManager.ASSERT(!
isNull()); out.writeDouble(value); } /** @see java.io.Externalizable#readExternal */ public void readExternal(ObjectInput in) throws IOException { value = in.readDouble(); isnull = false; } /** @see java.io.Externalizable#readExternal */ public void readExternalFromArray(ArrayInputStream in) throws IOException { value = in.readDouble(); isnull = false; } /** * @see Storable#restoreToNull * */ public void restoreToNull() { value = 0; isnull = true; } /** @exception StandardException Thrown on error */ protected int typeCompare(DataValueDescriptor arg) throws StandardException { /* neither are null, get the value */ double thisValue = this.getDouble(); double otherValue = arg.getDouble(); if (thisValue == otherValue) return 0; else if (thisValue > otherValue) return 1; else return -1; } /* * DataValueDescriptor interface */ /** @see DataValueDescriptor#cloneValue */ public DataValueDescriptor cloneValue(boolean forceMaterialization) { try { return new SQLDouble(value, isnull); } catch (StandardException se) { if (SanityManager.DEBUG) SanityManager.THROWASSERT( "error on clone, " + " value = " + value + " isnull = " + isnull, se); return null; } } /** * @see DataValueDescriptor#getNewNull */ public DataValueDescriptor getNewNull() { return new SQLDouble(); } /** * @see DataValueDescriptor#setValueFromResultSet * * @exception StandardException Thrown on error * @exception SQLException Thrown on error */ public void setValueFromResultSet(ResultSet resultSet, int colNumber, boolean isNullable) throws StandardException, SQLException { double dv = resultSet.getDouble(colNumber); isnull = (isNullable && resultSet.wasNull()); if (isnull) value = 0; else value = NumberDataType.normalizeDOUBLE(dv); } /** Set the value into a PreparedStatement. 
@exception SQLException Error setting value in PreparedStatement */ public final void setInto(PreparedStatement ps, int position) throws SQLException { if (isNull()) { ps.setNull(position, java.sql.Types.DOUBLE); return; } ps.setDouble(position, value); } /** Set this value into a ResultSet for a subsequent ResultSet.insertRow or ResultSet.updateRow. This method will only be called for non-null values. @exception SQLException thrown by the ResultSet object @exception StandardException thrown by me accessing my value. */ public final void setInto(ResultSet rs, int position) throws SQLException, StandardException { rs.updateDouble(position, value); } /* * class interface */ /* * constructors */ /** no-arg constructor, required by Formattable */ // This constructor also gets used when we are // allocating space for a double. public SQLDouble() { isnull = true; } public SQLDouble(double val) throws StandardException { value = NumberDataType.normalizeDOUBLE(val); } public SQLDouble(Double obj) throws StandardException { if (isnull = (obj == null)) ; else value = NumberDataType.normalizeDOUBLE(obj.doubleValue()); } private SQLDouble(double val, boolean startsnull) throws StandardException { value = NumberDataType.normalizeDOUBLE(val); // maybe only do if !startsnull isnull = startsnull; } /** @exception StandardException throws NumberFormatException when the String format is not recognized. */ public void setValue(String theValue) throws StandardException { if (theValue == null) { value = 0; isnull = true; } else { double doubleValue = 0; try { // ??? jsk: rounding??? 
doubleValue = Double.parseDouble(theValue.trim()); } catch (NumberFormatException nfe) { throw invalidFormat(); } value = NumberDataType.normalizeDOUBLE(doubleValue); isnull = false; } } /** * @exception StandardException on NaN or Infinite double */ public void setValue(double theValue) throws StandardException { value = NumberDataType.normalizeDOUBLE(theValue); isnull = false; } /** * @exception StandardException on NaN or Infinite float */ public void setValue(float theValue) throws StandardException { value = NumberDataType.normalizeDOUBLE(theValue); isnull = false; } public void setValue(long theValue) { value = theValue; // no check needed isnull = false; } public void setValue(int theValue) { value = theValue; // no check needed isnull = false; } public void setValue(Number theValue) throws StandardException { if (objectNull(theValue)) return; if (SanityManager.ASSERT) { if (!(theValue instanceof java.lang.Double)) SanityManager.THROWASSERT("SQLDouble.setValue(Number) passed a " + theValue.getClass()); } setValue(theValue.doubleValue()); } /** Called for an application setting this value using a BigDecimal */ public void setBigDecimal(Number bigDecimal) throws StandardException { if (objectNull(bigDecimal)) return; // Note BigDecimal.doubleValue() handles the case where // its value is outside the range of a double. It returns // infinity values which should throw an exception in setValue(double). setValue(bigDecimal.doubleValue()); } /** * @see NumberDataValue#setValue * */ public void setValue(boolean theValue) { value = theValue?1:0; isnull = false; } /* * DataValueDescriptor interface */ /** @see DataValueDescriptor#typePrecedence */ public int typePrecedence() { return TypeId.DOUBLE_PRECEDENCE; } /* ** SQL Operators */ /** * The = operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the = * @param right The value on the right side of the = * is not. 
* * @return A SQL boolean value telling whether the two parameters are equal * * @exception StandardException Thrown on error */ public BooleanDataValue equals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { return SQLBoolean.truthValue(left, right, left.getDouble() == right.getDouble()); } /** * The <> operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the <> * @param right The value on the right side of the <> * is not. * * @return A SQL boolean value telling whether the two parameters * are not equal * * @exception StandardException Thrown on error */ public BooleanDataValue notEquals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { return SQLBoolean.truthValue(left, right, left.getDouble() != right.getDouble()); } /** * The < operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the < * @param right The value on the right side of the < * * @return A SQL boolean value telling whether the first operand is less * than the second operand * * @exception StandardException Thrown on error */ public BooleanDataValue lessThan(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { return SQLBoolean.truthValue(left, right, left.getDouble() < right.getDouble()); } /** * The > operator as called from the language module, as opposed to * the storage module. 
* * @param left The value on the left side of the > * @param right The value on the right side of the > * * @return A SQL boolean value telling whether the first operand is greater * than the second operand * * @exception StandardException Thrown on error */ public BooleanDataValue greaterThan(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { return SQLBoolean.truthValue(left, right, left.getDouble() > right.getDouble()); } /** * The <= operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the <= * @param right The value on the right side of the <= * * @return A SQL boolean value telling whether the first operand is less * than or equal to the second operand * * @exception StandardException Thrown on error */ public BooleanDataValue lessOrEquals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { return SQLBoolean.truthValue(left, right, left.getDouble() <= right.getDouble()); } /** * The >= operator as called from the language module, as opposed to * the storage module. * * @param left The value on the left side of the >= * @param right The value on the right side of the >= * * @return A SQL boolean value telling whether the first operand is greater * than or equal to the second operand * * @exception StandardException Thrown on error */ public BooleanDataValue greaterOrEquals(DataValueDescriptor left, DataValueDescriptor right) throws StandardException { return SQLBoolean.truthValue(left, right, left.getDouble() >= right.getDouble()); } /** * This method implements the + operator for "double + double". 
* * @param addend1 One of the addends * @param addend2 The other addend * @param result The result of a previous call to this method, null * if not called yet * * @return A SQLDouble containing the result of the addition * * @exception StandardException Thrown on error */ public NumberDataValue plus(NumberDataValue addend1, NumberDataValue addend2, NumberDataValue result) throws StandardException { if (result == null) { result = new SQLDouble(); } if (addend1.isNull() || addend2.isNull()) { result.setToNull(); return result; } double tmpresult = addend1.getDouble() + addend2.getDouble(); // No need to check underflow (result rounded to 0.0), // since the difference between two non-equal valid DB2 DOUBLE values is always non-zero in java.lang.Double precision. result.setValue(tmpresult); return result; } /** * This method implements the - operator for "double - double". * * @param left The value to be subtracted from * @param right The value to be subtracted * @param result The result of a previous call to this method, null * if not called yet * * @return A SQLDouble containing the result of the subtraction * * @exception StandardException Thrown on error */ public NumberDataValue minus(NumberDataValue left, NumberDataValue right, NumberDataValue result) throws StandardException { if (result == null) { result = new SQLDouble(); } if (left.isNull() || right.isNull()) { result.setToNull(); return result; } double tmpresult = left.getDouble() - right.getDouble(); // No need to check underflow (result rounded to 0.0), // since no difference between two valid DB2 DOUBLE values can be rounded off to 0.0 in java.lang.Double result.setValue(tmpresult); return result; } /** * This method implements the * operator for "double * double". 
* * @param left The first value to be multiplied * @param right The second value to be multiplied * @param result The result of a previous call to this method, null * if not called yet * * @return A SQLDouble containing the result of the multiplication * * @exception StandardException Thrown on error */ public NumberDataValue times(NumberDataValue left, NumberDataValue right, NumberDataValue result) throws StandardException { if (result == null) { result = new SQLDouble(); } if (left.isNull() || right.isNull()) { result.setToNull(); return result; } double leftValue = left.getDouble(); double rightValue = right.getDouble(); double tempResult = leftValue * rightValue; // check underflow (result rounded to 0.0) if ( (tempResult == 0.0) && ( (leftValue != 0.0) && (rightValue != 0.0) ) ) { throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME); } result.setValue(tempResult); return result; } /** * This method implements the / operator for "double / double". * * @param dividend The numerator * @param divisor The denominator * @param result The result of a previous call to this method, null * if not called yet * * @return A SQLDouble containing the result of the division * * @exception StandardException Thrown on error */ public NumberDataValue divide(NumberDataValue dividend, NumberDataValue divisor, NumberDataValue result) throws StandardException { if (result == null) { result = new SQLDouble(); } if (dividend.isNull() || divisor.isNull()) { result.setToNull(); return result; } /* ** For double division, we can't catch divide by zero with Double.NaN; ** So we check the divisor before the division. 
*/ double divisorValue = divisor.getDouble(); if (divisorValue == 0.0e0D) { throw StandardException.newException(SQLState.LANG_DIVIDE_BY_ZERO); } double dividendValue = dividend.getDouble(); double divideResult = dividendValue / divisorValue; if (Double.isNaN(divideResult)) { throw StandardException.newException(SQLState.LANG_DIVIDE_BY_ZERO); } // check underflow (result rounded to 0.0d) if ((divideResult == 0.0d) && (dividendValue != 0.0d)) { throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME); } result.setValue(divideResult); return result; } /** * This method implements the unary minus operator for double. * * @param result The result of a previous call to this method, null * if not called yet * * @return A SQLDouble containing the result of the division * * @exception StandardException Thrown on error */ public NumberDataValue minus(NumberDataValue result) throws StandardException { double minusResult; if (result == null) { result = new SQLDouble(); } if (this.isNull()) { result.setToNull(); return result; } /* ** Doubles are assumed to be symmetric -- that is, their ** smallest negative value is representable as a positive ** value, and vice-versa. */ minusResult = -(this.getDouble()); result.setValue(minusResult); return result; } /** * This method implements the isNegative method. * * @return A boolean. If this.value is negative, return true. * For positive values or null, return false. */ protected boolean isNegative() { return !isNull() && (value < 0.0d); } /* * String display of value */ public String toString() { if (isNull()) return "NULL"; else return Double.toString(value); } /* * Hash code */ public int hashCode() { long longVal = (long) value; double doubleLongVal = (double) longVal; /* ** NOTE: This is coded to work around a bug in Visual Cafe 3.0. 
** If longVal is compared directly to value on that platform ** with the JIT enabled, the values will not always compare ** as equal even when they should be equal. This happens with ** the value Long.MAX_VALUE, for example. ** ** Assigning the long value back to a double and then doing ** the comparison works around the bug. ** ** This fixes Cloudscape bug number 1757. ** ** - Jeff Lichtman */ if (doubleLongVal != value) { longVal = Double.doubleToLongBits(value); } return (int) (longVal ^ (longVal >> 32)); } /* * useful constants... */ static final int DOUBLE_LENGTH = 32; // must match the number of bytes written by DataOutput.writeDouble() private static final int BASE_MEMORY_USAGE = ClassSize.estimateBaseFromCatalog( SQLDouble.class); public int estimateMemoryUsage() { return BASE_MEMORY_USAGE; } /* * object state */ private double value; private boolean isnull; }
package com.koch.ambeth.util.collections;

/*-
 * #%L
 * jambeth-util
 * %%
 * Copyright (C) 2017 Koch Softwaredevelopment
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */

import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.Array;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;

import com.koch.ambeth.util.IPrintable;
import com.koch.ambeth.util.StringBuilderUtil;
import com.koch.ambeth.util.exception.RuntimeExceptionUtil;

/**
 * Abstract hash set used as the base class for various specialized use cases.
 * Buckets are singly-linked chains of {@link ISetEntry} nodes; entry creation,
 * linkage and size tracking are delegated to subclasses.
 *
 * @author kochd
 *
 * @param <K>
 *          Type of the keys
 */
public abstract class AbstractHashSet<K> implements ISet<K>, IPrintable, Cloneable {
	public static final int DEFAULT_INITIAL_CAPACITY = 16;

	public static final int MAXIMUM_CAPACITY = 1 << 30;

	public static final float DEFAULT_LOAD_FACTOR = 0.75f;

	protected final float loadFactor;

	// resize trigger: capacity * loadFactor
	protected int threshold;

	// bucket array; length is always a power of two
	protected ISetEntry<K>[] table;

	/**
	 * Creates the set with the given sizing parameters.
	 *
	 * @param initialCapacity requested capacity; rounded up to a power of two and
	 *          capped at {@link #MAXIMUM_CAPACITY}
	 * @param loadFactor fill ratio at which the table is resized
	 * @param entryClass concrete entry type used to allocate the bucket array
	 */
	@SuppressWarnings("rawtypes")
	public AbstractHashSet(int initialCapacity, final float loadFactor,
			final Class<? extends ISetEntry> entryClass) {
		this.loadFactor = loadFactor;
		if (initialCapacity < 0) {
			throw new IllegalArgumentException("Illegal initial capacity: " + initialCapacity);
		}
		if (initialCapacity > MAXIMUM_CAPACITY) {
			initialCapacity = MAXIMUM_CAPACITY;
		}
		if (loadFactor <= 0 || Float.isNaN(loadFactor)) {
			throw new IllegalArgumentException("Illegal load factor: " + loadFactor);
		}
		// Find a power of 2 >= initialCapacity
		int capacity = 1;
		while (capacity < initialCapacity) {
			capacity <<= 1;
		}
		threshold = (int) (capacity * loadFactor);
		table = createTable(entryClass, capacity);
		init();
	}

	/** Allocates a bucket array of the same component type as the current table. */
	@SuppressWarnings("unchecked")
	protected ISetEntry<K>[] createTable(final int capacity) {
		return (ISetEntry<K>[]) Array.newInstance(table.getClass().getComponentType(), capacity);
	}

	/** Allocates a bucket array whose component type is the given entry class. */
	@SuppressWarnings("unchecked")
	protected ISetEntry<K>[] createTable(final Class<?> entryClass, final int capacity) {
		return (ISetEntry<K>[]) Array.newInstance(entryClass, capacity);
	}

	/** Hook for subclass initialization; runs at the end of the constructor. */
	protected void init() {
		// Intended blank
	}

	/** Raw hash of a key; subclasses may override for custom key semantics. */
	protected int extractHash(final K key) {
		return key.hashCode();
	}

	/** Supplemental bit-mixing to spread poor hashCode distributions. */
	protected int hash(int hash) {
		hash += ~(hash << 9);
		hash ^= hash >>> 14;
		hash += hash << 4;
		hash ^= hash >>> 10;
		return hash;
	}

	/** Prepends a new entry to its bucket and resizes if the threshold is reached. */
	protected void addEntry(final int hash, final K key, final int bucketIndex) {
		ISetEntry<K>[] table = this.table;
		ISetEntry<K> e = table[bucketIndex];
		e = createEntry(hash, key, e);
		table[bucketIndex] = e;
		entryAdded(e);
		if (isResizeNeeded()) {
			resize(2 * table.length);
		}
	}

	protected boolean isResizeNeeded() {
		return size() >= threshold;
	}

	/**
	 * Returns the stored key equal to the given key, or null if absent. Useful for
	 * canonicalizing/interning keys.
	 */
	@Override
	public K get(K key) {
		final int hash = hash(extractHash(key));
		ISetEntry<K>[] table = this.table;
		final int i = hash & table.length - 1;
		ISetEntry<K> entry = table[i];
		while (entry != null) {
			if (equalKeys(key, entry)) {
				return entry.getKey();
			}
			entry = entry.getNextEntry();
		}
		return null;
	}

	/** Hook invoked after an entry has been linked into the table. */
	protected void entryAdded(final ISetEntry<K> entry) {
		// Intended blank
	}

	/** Hook invoked after an entry has been unlinked from the table. */
	protected void entryRemoved(final ISetEntry<K> entry) {
		// Intended blank
	}

	@SuppressWarnings("unchecked")
	public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
		int size = in.readInt();
		Object[] tempArray = new Object[size];
		for (int a = 0; a < size; a++) {
			tempArray[a] = in.readObject();
		}
		addAll((K[]) tempArray);
	}

	public void writeExternal(ObjectOutput out) throws IOException {
		out.writeInt(size());
		ISetEntry<K>[] table = this.table;
		for (int a = table.length; a-- > 0;) {
			ISetEntry<K> entry = table[a];
			while (entry != null) {
				out.writeObject(entry.getKey());
				entry = entry.getNextEntry();
			}
		}
	}

	/**
	 * Rehashes the contents of this map into a new array with a larger capacity. This method is
	 * called automatically when the number of keys in this map reaches its threshold.
	 *
	 * If current capacity is MAXIMUM_CAPACITY, this method does not resize the map, but sets
	 * threshold to Integer.MAX_VALUE. This has the effect of preventing future calls.
	 *
	 * @param newCapacity
	 *          the new capacity, MUST be a power of two; must be greater than current capacity unless
	 *          current capacity is MAXIMUM_CAPACITY (in which case value is irrelevant).
	 */
	protected void resize(final int newCapacity) {
		final ISetEntry<K>[] oldTable = table;
		final int oldCapacity = oldTable.length;
		if (oldCapacity == MAXIMUM_CAPACITY) {
			threshold = Integer.MAX_VALUE;
			return;
		}
		final ISetEntry<K>[] newTable = createTable(newCapacity);
		transfer(newTable);
		table = newTable;
		threshold = (int) (newCapacity * loadFactor);
	}

	/**
	 * Moves all valid entries into newTable. When newTable is the current table,
	 * it instead sweeps the chains, unlinking entries for which
	 * {@link #isEntryValid(ISetEntry)} is false.
	 */
	protected void transfer(final ISetEntry<K>[] newTable) {
		final int newCapacityMinus1 = newTable.length - 1;
		ISetEntry<K>[] table = this.table;
		if (table == newTable) {
			// re-check entries on existing table
			for (int a = table.length; a-- > 0;) {
				ISetEntry<K> entry = table[a], previous = null, next;
				while (entry != null) {
					next = entry.getNextEntry();
					if (isEntryValid(entry)) {
						previous = entry;
					}
					else {
						if (entry == table[a]) {
							// first entry in bucket
							table[a] = next;
						}
						else {
							setNextEntry(previous, next);
						}
						entryRemoved(entry);
					}
					entry = next;
				}
			}
			return;
		}
		for (int a = table.length; a-- > 0;) {
			ISetEntry<K> entry = table[a], next;
			while (entry != null) {
				next = entry.getNextEntry();
				if (isEntryValid(entry)) {
					int i = entry.getHash() & newCapacityMinus1;
					setNextEntry(entry, newTable[i]);
					newTable[i] = entry;
				}
				else {
					entryRemoved(entry);
				}
				entry = next;
			}
		}
	}

	/** Validity filter used during transfer; always true here, subclasses may narrow. */
	protected boolean isEntryValid(ISetEntry<K> entry) {
		return true;
	}

	/**
	 * @see java.util.Set#clear()
	 */
	@Override
	public void clear() {
		if (isEmpty()) {
			return;
		}
		ISetEntry<K>[] table = this.table;
		for (int a = table.length; a-- > 0;) {
			ISetEntry<K> entry = table[a];
			if (entry != null) {
				table[a] = null;
				while (entry != null) {
					ISetEntry<K> nextEntry = entry.getNextEntry();
					entryRemoved(entry);
					entry = nextEntry;
				}
			}
		}
	}

	/**
	 * Returns a shallow copy of this <tt>HashSet</tt> instance: the keys themselves are not cloned.
	 *
	 * NOTE(review): super.clone() copies the table reference, so the re-add loop
	 * below finds every key already present and adds nothing — original and clone
	 * appear to share the same bucket array. Verify subclass clone() overrides
	 * before relying on independent mutation of the clone.
	 *
	 * @return a shallow copy of this set
	 */
	@SuppressWarnings("unchecked")
	@Override
	public Object clone() {
		AbstractHashSet<K> result = null;
		try {
			result = (AbstractHashSet<K>) super.clone();
		}
		catch (CloneNotSupportedException e) {
			throw RuntimeExceptionUtil.mask(e);
		}
		for (K key : this) {
			result.add(key);
		}
		return result;
	}

	/**
	 * @see java.util.Set#contains(java.lang.Object)
	 */
	@Override
	@SuppressWarnings("unchecked")
	public boolean contains(final Object key) {
		final K realKey = (K) key;
		final int hash = hash(extractHash(realKey));
		ISetEntry<K>[] table = this.table;
		final int i = hash & table.length - 1;
		ISetEntry<K> entry = table[i];
		while (entry != null) {
			if (equalKeys(realKey, entry)) {
				return true;
			}
			entry = entry.getNextEntry();
		}
		return false;
	}

	/** Key equality; subclasses may override (e.g. identity comparison). */
	protected boolean equalKeys(final K key, final ISetEntry<K> entry) {
		return key.equals(entry.getKey());
	}

	/**
	 * @see java.util.Set#add(java.lang.Object)
	 */
	@Override
	public boolean add(K key) {
		return addIntern(key);
	}

	/** Adds a non-null key if absent; null keys are silently rejected. */
	protected boolean addIntern(K key) {
		if (key == null) {
			return false;
		}
		final int hash = hash(extractHash(key));
		ISetEntry<K>[] table = this.table;
		final int i = hash & table.length - 1;
		ISetEntry<K> entry = table[i];
		while (entry != null) {
			if (equalKeys(key, entry)) {
				return false;
			}
			entry = entry.getNextEntry();
		}
		addEntry(hash, key, i);
		return true;
	}

	/**
	 * @see java.util.Set#remove(java.lang.Object)
	 */
	@Override
	@SuppressWarnings("unchecked")
	public boolean remove(final Object key) {
		return removeEntryForKey((K) key) != null;
	}

	@Override
	public K removeAndGet(final K key) {
		return removeEntryForKey(key);
	}

	/** Unlinks the entry for the key and returns the stored key, or null. */
	protected K removeEntryForKey(final K key) {
		final int hash = hash(extractHash(key));
		ISetEntry<K>[] table = this.table;
		final int i = hash & table.length - 1;
		ISetEntry<K> entry = table[i];
		if (entry != null) {
			if (equalKeys(key, entry)) {
				table[i] = entry.getNextEntry();
				K keyOfEntry = entry.getKey();
				entryRemoved(entry);
				return keyOfEntry;
			}
			ISetEntry<K> prevEntry = entry;
			entry = entry.getNextEntry();
			while (entry != null) {
				if (equalKeys(key, entry)) {
					setNextEntry(prevEntry, entry.getNextEntry());
					K keyOfEntry = entry.getKey();
					entryRemoved(entry);
					return keyOfEntry;
				}
				prevEntry = entry;
				entry = entry.getNextEntry();
			}
		}
		return null;
	}

	protected abstract void setNextEntry(final ISetEntry<K> entry, final ISetEntry<K> nextEntry);

	protected abstract ISetEntry<K> createEntry(final int hash, final K key,
			final ISetEntry<K> nextEntry);

	/**
	 * @see java.util.Set#size()
	 */
	@Override
	public abstract int size();

	/**
	 * @see java.util.Set#isEmpty()
	 */
	@Override
	public boolean isEmpty() {
		return size() == 0;
	}

	/**
	 * @see java.util.Set#addAll(java.util.Collection)
	 */
	@Override
	public boolean addAll(Collection<? extends K> c) {
		boolean changed = false;
		if (c instanceof List) {
			List<? extends K> list = (List<? extends K>) c;
			for (int a = 0, size = list.size(); a < size; a++) {
				changed |= addIntern(list.get(a));
			}
			// FIX: previously returned !list.isEmpty(), which reported a change
			// even when every element was already present (or null). The Set
			// contract requires true only if the set was actually modified.
			return changed;
		}
		Iterator<? extends K> iter = c.iterator();
		while (iter.hasNext()) {
			K key = iter.next();
			changed |= addIntern(key);
		}
		return changed;
	}

	@Override
	public boolean addAll(Iterable<? extends K> c) {
		boolean changed = false;
		if (c instanceof List) {
			List<? extends K> list = (List<? extends K>) c;
			for (int a = 0, size = list.size(); a < size; a++) {
				changed |= addIntern(list.get(a));
			}
			// FIX: same contract fix as addAll(Collection) — report actual change.
			return changed;
		}
		Iterator<? extends K> iter = c.iterator();
		while (iter.hasNext()) {
			K key = iter.next();
			changed |= addIntern(key);
		}
		return changed;
	}

	@Override
	public <S extends K> boolean addAll(S[] array) {
		boolean changed = false;
		for (int a = 0, size = array.length; a < size; a++) {
			changed |= addIntern(array[a]);
		}
		return changed;
	}

	/**
	 * @see java.util.Set#containsAll(java.util.Collection)
	 */
	@Override
	public boolean containsAll(Collection<?> c) {
		Iterator<?> iter = c.iterator();
		while (iter.hasNext()) {
			Object key = iter.next();
			if (!contains(key)) {
				return false;
			}
		}
		return true;
	}

	@Override
	public boolean containsAny(Collection<?> coll) {
		Iterator<?> iter = coll.iterator();
		while (iter.hasNext()) {
			Object key = iter.next();
			if (contains(key)) {
				return true;
			}
		}
		return false;
	}

	@Override
	public <S extends K> boolean containsAny(S[] array) {
		for (S key : array) {
			if (contains(key)) {
				return true;
			}
		}
		return false;
	}

	/**
	 * @see java.util.Set#removeAll(java.util.Collection)
	 */
	@Override
	public boolean removeAll(Collection<?> c) {
		boolean changed = false;
		if (c instanceof List) {
			List<?> list = (List<?>) c;
			for (int a = list.size(); a-- > 0;) {
				changed |= remove(list.get(a));
			}
		}
		else {
			Iterator<?> iter = c.iterator();
			while (iter.hasNext()) {
				Object key = iter.next();
				changed |= remove(key);
			}
		}
		return changed;
	}

	@Override
	public <S extends K> boolean removeAll(S[] array) {
		boolean changed = false;
		for (int a = 0, size = array.length; a < size; a++) {
			changed |= remove(array[a]);
		}
		return changed;
	}

	/**
	 * @see java.util.Set#retainAll(java.util.Collection)
	 */
	@Override
	public boolean retainAll(Collection<?> c) {
		boolean changed = false;
		Iterator<K> iter = iterator();
		while (iter.hasNext()) {
			Object key = iter.next();
			if (!c.contains(key)) {
				iter.remove();
				changed = true;
			}
		}
		return changed;
	}

	/**
	 * @see java.util.Set#toArray()
	 */
	@Override
	public Object[] toArray() {
		return toArray(new Object[size()]);
	}

	@SuppressWarnings("unchecked")
	@Override
	public <T> T[] toArray(Class<T> componentType) {
		T[] array = (T[]) Array.newInstance(componentType, size());
		return toArray(array);
	}

	/**
	 * @see java.util.Set#toArray(T[])
	 */
	@Override
	@SuppressWarnings("unchecked")
	public <T> T[] toArray(T[] array) {
		int index = 0;
		int length = array.length;
		ISetEntry<K>[] table = this.table;
		for (int a = table.length; a-- > 0;) {
			ISetEntry<K> entry = table[a];
			while (entry != null) {
				if (entry.isValid()) {
					if (length <= index) {
						// given array too small: grow to current size, keeping copied prefix
						length = size();
						T[] newArray = (T[]) Array.newInstance(array.getClass().getComponentType(), length);
						System.arraycopy(array, 0, newArray, 0, index);
						array = newArray;
					}
					array[index++] = (T) entry.getKey();
				}
				entry = entry.getNextEntry();
			}
		}
		// null-terminate any surplus slots, mirroring java.util.Set#toArray(T[])
		for (int a = array.length; a-- > index;) {
			array[a] = null;
		}
		return array;
	}

	@Override
	public IList<K> toList() {
		ArrayList<K> list = new ArrayList<>(size());
		toList(list);
		return list;
	}

	@Override
	public void toList(Collection<K> list) {
		ISetEntry<K>[] table = this.table;
		for (int a = table.length; a-- > 0;) {
			ISetEntry<K> entry = table[a];
			while (entry != null) {
				if (entry.isValid()) {
					list.add(entry.getKey());
				}
				entry = entry.getNextEntry();
			}
		}
	}

	@Override
	public String toString() {
		StringBuilder sb = new StringBuilder();
		toString(sb);
		return sb.toString();
	}

	@Override
	public void toString(StringBuilder sb) {
		sb.append(size()).append(" items: [");
		boolean first = true;

		Iterator<K> iter = iterator();
		while (iter.hasNext()) {
			K item = iter.next();
			if (first) {
				first = false;
			}
			else {
				sb.append(',');
			}
			StringBuilderUtil.appendPrintable(sb, item);
		}
		sb.append(']');
	}

	@Override
	public Iterator<K> iterator() {
		return new SetIterator<>(this, true);
	}

	@Override
	public Iterator<K> iterator(boolean removeAllowed) {
		return new SetIterator<>(this, removeAllowed);
	}
}
package com.winsun.fruitmix.model;

import android.os.Parcel;
import android.os.Parcelable;

import java.util.ArrayList;
import java.util.List;

/**
 * Parcelable value object describing a media share (an album or a plain
 * share) together with its member image digests, viewer list and
 * maintainer list.
 * <p>
 * Created by Administrator on 2016/7/16.
 */
public class MediaShare implements Parcelable {

    // Local row id; note cloneMyself() deliberately does not copy it.
    private int id;
    private String uuid;
    private String creatorUUID;
    private String time;
    private String title;
    private String desc;
    private List<String> imageDigests;
    private List<String> viewer;
    private List<String> maintainer;
    private boolean isAlbum;
    private boolean isArchived;
    private String date;
    private String coverImageDigest;
    private boolean isLocked;

    /** Creates an empty share with non-null (empty) member lists. */
    public MediaShare() {
        imageDigests = new ArrayList<>();
        viewer = new ArrayList<>();
        maintainer = new ArrayList<>();
    }

    /** Reconstructs a share from a Parcel; field order must match writeToParcel. */
    protected MediaShare(Parcel in) {
        id = in.readInt();
        uuid = in.readString();
        creatorUUID = in.readString();
        time = in.readString();
        title = in.readString();
        desc = in.readString();
        imageDigests = in.createStringArrayList();
        viewer = in.createStringArrayList();
        maintainer = in.createStringArrayList();
        isAlbum = in.readByte() != 0;
        isArchived = in.readByte() != 0;
        date = in.readString();
        coverImageDigest = in.readString();
        isLocked = in.readByte() != 0;
    }

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(id);
        dest.writeString(uuid);
        dest.writeString(creatorUUID);
        dest.writeString(time);
        dest.writeString(title);
        dest.writeString(desc);
        dest.writeStringList(imageDigests);
        dest.writeStringList(viewer);
        dest.writeStringList(maintainer);
        dest.writeByte((byte) (isAlbum ? 1 : 0));
        dest.writeByte((byte) (isArchived ? 1 : 0));
        dest.writeString(date);
        dest.writeString(coverImageDigest);
        dest.writeByte((byte) (isLocked ? 1 : 0));
    }

    @Override
    public int describeContents() {
        return 0;
    }

    public static final Creator<MediaShare> CREATOR = new Creator<MediaShare>() {
        @Override
        public MediaShare createFromParcel(Parcel in) {
            return new MediaShare(in);
        }

        @Override
        public MediaShare[] newArray(int size) {
            return new MediaShare[size];
        }
    };

    public int getId() {
        return id;
    }

    public void setId(int id) {
        this.id = id;
    }

    public String getUuid() {
        return uuid;
    }

    public void setUuid(String uuid) {
        this.uuid = uuid;
    }

    public String getCreatorUUID() {
        return creatorUUID;
    }

    public void setCreatorUUID(String creatorUUID) {
        this.creatorUUID = creatorUUID;
    }

    public String getTime() {
        return time;
    }

    public void setTime(String time) {
        this.time = time;
    }

    public String getTitle() {
        return title;
    }

    public void setTitle(String title) {
        this.title = title;
    }

    public String getDesc() {
        return desc;
    }

    public void setDesc(String desc) {
        this.desc = desc;
    }

    public List<String> getViewer() {
        return viewer;
    }

    public void setViewer(List<String> viewer) {
        this.viewer = viewer;
    }

    public List<String> getMaintainer() {
        return maintainer;
    }

    public void setMaintainer(List<String> maintainer) {
        this.maintainer = maintainer;
    }

    public List<String> getImageDigests() {
        return imageDigests;
    }

    public void setImageDigests(List<String> imageDigests) {
        this.imageDigests = imageDigests;
    }

    public String getCoverImageDigest() {
        return coverImageDigest;
    }

    public void setCoverImageDigest(String coverImageDigest) {
        this.coverImageDigest = coverImageDigest;
    }

    public boolean isLocked() {
        return isLocked;
    }

    public void setLocked(boolean locked) {
        isLocked = locked;
    }

    public boolean isAlbum() {
        return isAlbum;
    }

    public void setAlbum(boolean album) {
        isAlbum = album;
    }

    public boolean isArchived() {
        return isArchived;
    }

    public void setArchived(boolean archived) {
        isArchived = archived;
    }

    public String getDate() {
        return date;
    }

    public void setDate(String date) {
        this.date = date;
    }

    /**
     * Returns a field-by-field copy of this share.
     * <p>
     * The list fields are copied defensively so the clone and the original do
     * not share mutable state; previously the same {@code List} instances were
     * handed to the clone, so editing the clone's lists silently mutated this
     * object too.
     *
     * @return a new MediaShare with the same field values (id excluded)
     */
    public MediaShare cloneMyself() {
        MediaShare cloneMediaShare = new MediaShare();
        // NOTE(review): id is intentionally not copied (matches the original
        // behaviour) — the clone starts with the default id of 0.
        cloneMediaShare.setUuid(getUuid());
        cloneMediaShare.setCreatorUUID(getCreatorUUID());
        cloneMediaShare.setTime(getTime());
        cloneMediaShare.setTitle(getTitle());
        cloneMediaShare.setDesc(getDesc());
        cloneMediaShare.setImageDigests(copyOf(getImageDigests()));
        cloneMediaShare.setViewer(copyOf(getViewer()));
        cloneMediaShare.setMaintainer(copyOf(getMaintainer()));
        cloneMediaShare.setAlbum(isAlbum());
        cloneMediaShare.setArchived(isArchived());
        cloneMediaShare.setDate(getDate());
        cloneMediaShare.setCoverImageDigest(getCoverImageDigest());
        cloneMediaShare.setLocked(isLocked());
        return cloneMediaShare;
    }

    // Null-tolerant defensive copy (a setter may have injected a null list).
    private static List<String> copyOf(List<String> src) {
        return src == null ? null : new ArrayList<>(src);
    }
}
package few.common.users.service;

import few.common.BaseTest;
import few.common.users.dao.UserService;
import few.common.users.dto.CustomField;
import few.common.users.dto.SimpleUser;
import few.utils.Utils;

import java.util.*;

/**
 * Integration tests for {@link UserService}: create/select/update/delete of
 * users plus the lookup methods (by login, e-mail, role, simple auth).
 * <p>
 * User: gerbylev
 * Date: 01.11.11
 */
public class UserServiceTest extends BaseTest {

    // Ids of users created by the tests; currently informational only —
    // cleanup is done by removeAllUsers() in tearDown().
    private List<Integer> createdUsersId = new ArrayList<Integer>();
    private UserService users = UserService.get();

    // Canonical field values used by createAndSelectUser().
    private final String displayName = "displayName";
    private final String email = "my.name@some.org";
    private final String role = "user";
    private final String login = "my.login";
    private final String pwd = "pwd";
    private final boolean active = true;

    public void setUp() throws Exception {
        users = UserService.get();
        super.setUp();
    }

    public void tearDown() throws Exception {
        removeAllUsers();
        // Fix: JUnit 3 expects tearDown() to chain to super.tearDown() so the
        // base class can release its own fixtures.
        super.tearDown();
    }

    // Deletes every user so each test starts from a clean table.
    private void removeAllUsers() {
        List<SimpleUser> usersForDelete = users.selectUsers();
        for (SimpleUser user : usersForDelete) {
            users.deleteUser(user.getUser_id());
        }
    }

    public void test1() {
        System.out.println(Utils.produceSHA1fromPassword("admin", "admin"));
    }

    public String getTestLogin() {
        return "test_user" + Utils.generateUID();
    }

    public String getTestPassword() {
        return "password";
    }

    public void test_created_user_has_fields_passed_to_create_method() {
        SimpleUser user = createAndSelectUser();
        assertEquals(displayName, user.getDisplay_name());
        assertEquals(email, user.getEmail());
        assertTrue(user.getRoles().contains(role));
        assertEquals(active ? 1 : 0, user.getStatus_id());
    }

    public void test_deleted_user_cannot_be_selected() {
        SimpleUser user = createAndSelectUser();
        users.deleteUser(user.user_id);
        assertNull(users.selectUser(user.user_id));
    }

    public void test_user_with_updated_login_has_same_fields() {
        Integer userId = createUser(displayName, email, role, login, pwd, active);
        users.updateLogin(userId, "new_login");
        SimpleUser user_selected_by_login = users.selectUserByLogin("new_login");
        SimpleUser user_selected_by_id = users.selectUser(userId);
        assertTrue(users_has_same_fields(user_selected_by_login, user_selected_by_id));
    }

    public void test_update_last_login() {
        Integer userId = createUser(displayName, email, role, login, pwd, active);
        users.updateLastLogin(userId);
        assertNotNull(users.selectUser(userId));
    }

    public void test_user_with_updated_password_has_same_fields() {
        Integer userId = createUser(displayName, email, role, login, pwd, active);
        users.updateUserPassword(login, "new_pwd");
        SimpleUser user = users.selectUser(userId);
        SimpleUser same_user_with_new_pwd = users.selectUserBySimpleAuth(login, "new_pwd");
        assertTrue(users_has_same_fields(user, same_user_with_new_pwd));
    }

    public void test_activated_user_has_active_status() {
        Integer userId = createUser(displayName, email, role, login, pwd, false);
        users.activateUser(userId);
        SimpleUser user = users.selectUser(userId);
        assertEquals(SimpleUser.ACTIVE, user.getStatus_id());
    }

    public void test_select_user_by_simple_authorization() {
        SimpleUser user_selected_by_id = createAndSelectUser();
        SimpleUser user_selected_by_simple_auth = users.selectUserBySimpleAuth(login, pwd);
        assertTrue(users_has_same_fields(user_selected_by_id, user_selected_by_simple_auth));
    }

    public void test_select_user_by_login() {
        SimpleUser user_selected_by_id = createAndSelectUser();
        SimpleUser user_selected_by_login = users.selectUserByLogin(login);
        assertTrue(users_has_same_fields(user_selected_by_id, user_selected_by_login));
    }

    public void test_select_user_by_email() {
        SimpleUser user_selected_by_id = createAndSelectUser();
        SimpleUser user_selected_by_email = users.selectUserByEMail(email);
        assertTrue(users_has_same_fields(user_selected_by_id, user_selected_by_email));
    }

    public void test_select_users() {
        SimpleUser user = createAndSelectUser();
        List<SimpleUser> simpleUsersList = users.selectUsers();
        assertTrue(simpleUsersList.contains(user));
    }

    public void test_select_users_resets_cache() {
        int number_of_users_before_insert = users.selectUsers().size();
        createUser(displayName, email, role, login, pwd, active);
        int number_of_users_after_insert = users.selectUsers().size();
        assertEquals(number_of_users_before_insert + 1, number_of_users_after_insert);
    }

    public void test_select_users_by_role() {
        SimpleUser user = createAndSelectUser();
        assertTrue(users.selectUsersByRole(role).contains(user));
    }

    public void test_select_display_roles() {
        assertNotNull(users.selectDisplayRoles());
    }

    public void test_update_simple_user() {
        SimpleUser user = createAndSelectUser();
        user.display_name = "new_name";
        user.email = "new_email";
        users.updateSimpleUser(user);
        SimpleUser user_after_update = users.selectUser(user.user_id);
        assertEquals("new_name", user_after_update.getDisplay_name());
        assertEquals("new_email", user_after_update.getEmail());
    }

    public void test_select_login_by_user_id() {
        SimpleUser user = createAndSelectUser();
        SimpleUser user_selected_by_login = users.selectUserByLogin(users.selectLoginByUserID(user.user_id));
        assertTrue(users_has_same_fields(user, user_selected_by_login));
    }

    public void test_select_login_by_email() {
        SimpleUser user = createAndSelectUser();
        SimpleUser user_selected_by_login = users.selectUserByLogin(users.selectLoginByEMail(email));
        assertTrue(users_has_same_fields(user, user_selected_by_login));
    }

    // Creates a user with the canonical field values and reads it back.
    private SimpleUser createAndSelectUser() {
        Integer userId = createUser(displayName, email, role, login, pwd, active);
        return users.selectUser(userId);
    }

    // Creates a user (no custom fields) and records its id for bookkeeping.
    private Integer createUser(String displayName, String email, String role, String login, String pwd, boolean active) {
        Integer userId = users.createNewUser(displayName, email, role, login, pwd, active, Collections.<CustomField>emptyList());
        createdUsersId.add(userId);
        return userId;
    }

    public void testSimpleUserCRD() {
        String login = getTestLogin();
        String password = getTestPassword();
        try {
            Integer uid = users.createNewUser("TEST USER", "email", "user", login, password, true, Collections.<CustomField>emptyList());
            assertNotNull(uid);
            uid = users.selectUserBySimpleAuth(login, password).user_id;
            assertNotNull(uid);
            SimpleUser user = users.selectUser(uid);
            assertEquals((int) uid, user.user_id);
            assertEquals("TEST USER", user.display_name);
            assertFalse(user.roles.isEmpty());
            assertTrue(user.roles.contains("user"));
            users.deleteUser(uid);
            SimpleUser u = users.selectUserBySimpleAuth(login, password);
            assertNull(u);
            user = users.selectUser(uid);
            assertNull(user);
        } finally {
            // Best-effort cleanup even when an assertion above fails.
            SimpleUser u = users.selectUserByLogin(login);
            if (u != null) users.deleteUser(u.user_id);
        }
    }

    /**
     * Field-wise equality of two SimpleUser instances (display name, display
     * role, e-mail, roles, status id and user id).
     *
     * @return false when either argument is not a SimpleUser
     */
    public boolean users_has_same_fields(Object o1, Object o2) {
        if (!((o1 instanceof SimpleUser) && (o2 instanceof SimpleUser))) {
            return false;
        }
        SimpleUser user1 = (SimpleUser) o1;
        SimpleUser user2 = (SimpleUser) o2;
        return user1.getDisplay_name().equals(user2.getDisplay_name())
                && user1.getDisplay_role().equals(user2.getDisplay_role())
                && user1.getEmail().equals(user2.getEmail())
                && user1.getRoles().equals(user2.getRoles())
                && user1.getStatus_id() == user2.getStatus_id()
                && user1.getUser_id() == user2.getUser_id();
    }
}
package com.mapswithme.maps.downloader;

import android.location.Location;
import android.text.TextUtils;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.TextView;

import java.util.List;
import java.util.Locale;

import com.mapswithme.maps.MwmActivity;
import com.mapswithme.maps.R;
import com.mapswithme.maps.background.Notifier;
import com.mapswithme.maps.location.LocationHelper;
import com.mapswithme.maps.routing.RoutingController;
import com.mapswithme.maps.widget.WheelProgressView;
import com.mapswithme.util.Config;
import com.mapswithme.util.ConnectionState;
import com.mapswithme.util.StringUtils;
import com.mapswithme.util.UiUtils;
import com.mapswithme.util.statistics.Statistics;

/**
 * On-map banner that offers downloading the map region currently shown.
 * Subscribes to storage/download callbacks while the hosting activity is
 * resumed and keeps the banner's title, size text, progress wheel and
 * download/retry button in sync with the downloader state.
 */
public class OnmapDownloader implements MwmActivity.LeftAnimationTrackListener
{
  // Set after the user cancels a download; suppresses auto-download for the
  // rest of the process lifetime (static — shared across instances).
  private static boolean sAutodownloadLocked;

  private final MwmActivity mActivity;
  private final View mFrame;
  private final TextView mParent;
  private final TextView mTitle;
  private final TextView mSize;
  private final WheelProgressView mProgress;
  private final Button mButton;

  // Non-zero while subscribed to storage callbacks (see onResume/onPause).
  private int mStorageSubscriptionSlot;

  // Country under the current viewport; null when none is selected.
  private CountryItem mCurrentCountry;

  private final MapManager.StorageCallback mStorageCallback = new MapManager.StorageCallback()
  {
    @Override
    public void onStatusChanged(List<MapManager.StorageCallbackData> data)
    {
      if (mCurrentCountry == null)
        return;

      for (MapManager.StorageCallbackData item : data)
      {
        if (!item.isLeafNode)
          continue;

        // Failures are surfaced even for countries other than the current one.
        if (item.newStatus == CountryItem.STATUS_FAILED)
          MapManager.showError(mActivity, item, null);

        if (mCurrentCountry.id.equals(item.countryId))
        {
          mCurrentCountry.update();
          updateState(false);
          return;
        }
      }
    }

    @Override
    public void onProgress(String countryId, long localSize, long remoteSize)
    {
      // Sizes are ignored here; update() refreshes progress from native side.
      if (mCurrentCountry != null && mCurrentCountry.id.equals(countryId))
      {
        mCurrentCountry.update();
        updateState(false);
      }
    }
  };

  private final MapManager.CurrentCountryChangedListener mCountryChangedListener = new MapManager.CurrentCountryChangedListener()
  {
    @Override
    public void onCurrentCountryChanged(String countryId)
    {
      mCurrentCountry = (TextUtils.isEmpty(countryId) ? null : CountryItem.fill(countryId));
      updateState(true);
    }
  };

  /**
   * Recomputes the banner's visibility and contents from mCurrentCountry.
   *
   * @param shouldAutoDownload when true, a downloadable current country may be
   *        auto-downloaded (subject to the config flag, Wi-Fi, free space and
   *        the sAutodownloadLocked latch)
   */
  public void updateState(boolean shouldAutoDownload)
  {
    boolean showFrame = (mCurrentCountry != null &&
                         !mCurrentCountry.present &&
                         !RoutingController.get().isNavigating());
    if (showFrame)
    {
      boolean enqueued = (mCurrentCountry.status == CountryItem.STATUS_ENQUEUED);
      boolean progress = (mCurrentCountry.status == CountryItem.STATUS_PROGRESS);
      boolean failed = (mCurrentCountry.status == CountryItem.STATUS_FAILED);

      // Only these four states render the banner at all.
      showFrame = (enqueued || progress || failed ||
                   mCurrentCountry.status == CountryItem.STATUS_DOWNLOADABLE);
      if (showFrame)
      {
        boolean hasParent = !CountryItem.isRoot(mCurrentCountry.topmostParentId);

        // Progress wheel and button are mutually exclusive.
        UiUtils.showIf(progress || enqueued, mProgress);
        UiUtils.showIf(!progress && !enqueued, mButton);
        UiUtils.showIf(hasParent, mParent);

        if (hasParent)
          mParent.setText(mCurrentCountry.topmostParentName);

        mTitle.setText(mCurrentCountry.name);

        String sizeText;
        if (progress)
        {
          mProgress.setPending(false);
          mProgress.setProgress(mCurrentCountry.progress);
          sizeText = String.format(Locale.US, "%1$s %2$d%%", mActivity.getString(R.string.downloader_downloading), mCurrentCountry.progress);
        }
        else
        {
          if (enqueued)
          {
            sizeText = mActivity.getString(R.string.downloader_queued);
            mProgress.setPending(true);
          }
          else
          {
            sizeText = (MapManager.nativeIsLegacyMode() ? "" : StringUtils.getFileSizeString(mCurrentCountry.totalSize));

            // Auto-download: only for the country the user is physically in,
            // on Wi-Fi, with enough space, and only if not explicitly locked.
            if (shouldAutoDownload &&
                Config.isAutodownloadEnabled() &&
                !sAutodownloadLocked &&
                !failed &&
                !MapManager.nativeIsLegacyMode() &&
                ConnectionState.isWifiConnected())
            {
              Location loc = LocationHelper.INSTANCE.getSavedLocation();
              if (loc != null)
              {
                String country = MapManager.nativeFindCountry(loc.getLatitude(), loc.getLongitude());
                if (TextUtils.equals(mCurrentCountry.id, country) &&
                    MapManager.nativeHasSpaceToDownloadCountry(country))
                {
                  MapManager.nativeDownload(mCurrentCountry.id);

                  Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOADER_ACTION,
                                                 Statistics.params().add(Statistics.EventParam.ACTION, "download")
                                                                    .add(Statistics.EventParam.FROM, "map")
                                                                    .add("is_auto", "true")
                                                                    .add("scenario", "download"));
                }
              }
            }

            mButton.setText(failed ? R.string.downloader_retry : R.string.download);
          }
        }

        mSize.setText(sizeText);
      }
    }

    UiUtils.showIf(showFrame, mFrame);
  }

  /**
   * Wires the banner views and click handlers.
   *
   * @param activity hosting map activity; its layout must contain
   *        R.id.onmap_downloader and the child downloader views
   */
  public OnmapDownloader(MwmActivity activity)
  {
    mActivity = activity;
    mFrame = activity.findViewById(R.id.onmap_downloader);
    mParent = (TextView)mFrame.findViewById(R.id.downloader_parent);
    mTitle = (TextView)mFrame.findViewById(R.id.downloader_title);
    mSize = (TextView)mFrame.findViewById(R.id.downloader_size);

    View controls = mFrame.findViewById(R.id.downloader_controls_frame);
    mProgress = (WheelProgressView) controls.findViewById(R.id.wheel_downloader_progress);
    mButton = (Button) controls.findViewById(R.id.downloader_button);

    mProgress.setOnClickListener(new View.OnClickListener()
    {
      @Override
      public void onClick(View v)
      {
        // NOTE(review): mCurrentCountry could in principle be null here —
        // presumably the wheel is only visible while a country is selected;
        // confirm before relying on it.
        MapManager.nativeCancel(mCurrentCountry.id);
        Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOADER_CANCEL,
                                       Statistics.params().add(Statistics.EventParam.FROM, "map"));
        // A manual cancel disables auto-download until the lock is cleared.
        setAutodownloadLocked(true);
      }
    });
    mButton.setOnClickListener(new View.OnClickListener()
    {
      @Override
      public void onClick(View v)
      {
        if (MapManager.nativeIsLegacyMode())
        {
          mActivity.showDownloader(false);
          return;
        }

        // warnOn3g may show a confirmation dialog; the Runnable fires only
        // after the user accepts (or immediately when on Wi-Fi).
        MapManager.warnOn3g(mActivity, mCurrentCountry.id, new Runnable()
        {
          @Override
          public void run()
          {
            if (mCurrentCountry == null)
              return;

            boolean retry = (mCurrentCountry.status == CountryItem.STATUS_FAILED);
            if (retry)
            {
              Notifier.cancelDownloadFailed();
              MapManager.nativeRetry(mCurrentCountry.id);
            }
            else
              MapManager.nativeDownload(mCurrentCountry.id);

            Statistics.INSTANCE.trackEvent(Statistics.EventName.DOWNLOADER_ACTION,
                                           Statistics.params().add(Statistics.EventParam.ACTION, (retry ? "retry" : "download"))
                                                              .add(Statistics.EventParam.FROM, "map")
                                                              .add("is_auto", "false")
                                                              .add("scenario", "download"));
          }
        });
      }
    });

    UiUtils.updateAccentButton(mButton);
  }

  @Override
  public void onTrackStarted(boolean collapsed) {}

  @Override
  public void onTrackFinished(boolean collapsed) {}

  // Slides the banner horizontally in sync with the side panel animation.
  @Override
  public void onTrackLeftAnimation(float offset)
  {
    ViewGroup.MarginLayoutParams lp = (ViewGroup.MarginLayoutParams)mFrame.getLayoutParams();
    lp.leftMargin = (int)offset;
    mFrame.setLayoutParams(lp);
  }

  // Unsubscribes from both native callbacks; mirror image of onResume().
  public void onPause()
  {
    if (mStorageSubscriptionSlot > 0)
    {
      MapManager.nativeUnsubscribe(mStorageSubscriptionSlot);
      mStorageSubscriptionSlot = 0;

      MapManager.nativeUnsubscribeOnCountryChanged();
    }
  }

  public void onResume()
  {
    if (mStorageSubscriptionSlot == 0)
    {
      mStorageSubscriptionSlot = MapManager.nativeSubscribe(mStorageCallback);

      MapManager.nativeSubscribeOnCountryChanged(mCountryChangedListener);
    }
  }

  public static void setAutodownloadLocked(boolean locked)
  {
    sAutodownloadLocked = locked;
  }
}
/*
 * IzPack - Copyright 2001-2008 Julien Ponge, All Rights Reserved.
 *
 * http://izpack.org/
 * http://izpack.codehaus.org/
 *
 * Copyright 2004 Klaus Bartz
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.izforge.izpack.panels.userpath;

import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.logging.Level;
import java.util.logging.Logger;

import com.izforge.izpack.api.data.Panel;
import com.izforge.izpack.api.exception.ResourceNotFoundException;
import com.izforge.izpack.api.handler.AbstractUIHandler;
import com.izforge.izpack.api.resource.Resources;
import com.izforge.izpack.gui.IzPanelLayout;
import com.izforge.izpack.gui.log.Log;
import com.izforge.izpack.installer.data.GUIInstallData;
import com.izforge.izpack.installer.gui.InstallerFrame;
import com.izforge.izpack.installer.gui.IzPanel;
import com.izforge.izpack.util.IoHelper;
import com.izforge.izpack.util.OsVersion;
import com.izforge.izpack.util.Platform;

/**
 * Base class for panels which asks for paths.
 *
 * @author Klaus Bartz
 * @author Jeff Gordon
 */
public class UserPathInputPanel extends IzPanel implements ActionListener
{
    private static final long serialVersionUID = 3257566217698292531L;

    private static final transient Logger logger = Logger.getLogger(UserPathInputPanel.class.getName());

    /**
     * Flag whether the chosen path must exist or not
     */
    protected boolean _mustExist = false;

    // Guards loadDefaultDir() so the resource lookup runs at most once.
    protected boolean _loadedDefaultDir = false;

    /**
     * Files which should exist under the chosen path (null = no check)
     */
    protected String[] _existFiles = null;

    /** The path which was chosen */
    // protected String chosenPath;
    /**
     * The path selection sub panel
     */
    protected UserPathSelectionPanel _pathSelectionPanel;

    // Localized message strings, filled once by loadMessages().
    protected String _error;

    protected String _warn;

    protected String _emptyTargetMsg;

    protected String _warnMsg;

    protected String _reqMsg;

    protected String _notValidMsg;

    protected String _notWritableMsg;

    protected String _createDirMsg;

    protected String _defaultDir = null;

    // Resource-key prefixes; _targetPanel is overwritten in the constructor.
    protected String _thisPanel = "UserPathInputPanel";

    protected String _defaultPanelName = "TargetPanel";

    protected String _targetPanel = "UserPathPanel";

    protected String _variableName = "pathVariable";

    /**
     * Constructs an <tt>UserPathInputPanel</tt>.
     *
     * @param panel the panel meta-data
     * @param parent the parent window
     * @param installData the installation data
     * @param targetPanel the target panel
     * @param resources the resources
     * @param log the log
     */
    public UserPathInputPanel(Panel panel, InstallerFrame parent, GUIInstallData installData, String targetPanel,
                              Resources resources, Log log)
    {
        super(panel, parent, installData, new IzPanelLayout(log), resources);
        _targetPanel = targetPanel;
        _variableName = getString(targetPanel + ".variableName");
        // Optional per-panel configuration flag overriding the default.
        String mustExist;
        if ((mustExist = panel.getConfiguration("mustExist")) != null)
        {
            this._mustExist = Boolean.parseBoolean(mustExist);
        }
        // Set default values
        loadMessages();
        // Prefer the "extendedIntro" text; fall back to "intro", then empty.
        // A '$' in the text indicates an unresolved variable — treated as absent.
        String introText = getI18nStringForClass("extendedIntro", _thisPanel);
        if (introText == null || introText.endsWith("extendedIntro") || introText.indexOf('$') > -1)
        {
            introText = getI18nStringForClass("intro", _thisPanel);
            if (introText == null || introText.endsWith("intro"))
            {
                introText = "";
            }
        }
        // Intro
        // row 0 column 0
        add(createMultiLineLabel(introText));
        add(IzPanelLayout.createParagraphGap());
        // Label for input
        // row 1 column 0.
        add(createLabel("info", _targetPanel, "open", LEFT, true), NEXT_LINE);
        // Create path selection components and add them to this panel.
        _pathSelectionPanel = new UserPathSelectionPanel(this, installData, _targetPanel, _variableName, log);
        add(_pathSelectionPanel, NEXT_LINE);
        createLayoutBottom();
        getLayoutHelper().completeLayout();
    }

    /**
     * This method does nothing. It is called from ctor of UserPathInputPanel, to give in a derived
     * class the possibility to add more components under the path input components.
     */
    public void createLayoutBottom()
    {
        // Derived classes implements additional elements.
    }

    /**
     * Actions-handling method.
     *
     * @param e The event.
     */
    @Override
    public void actionPerformed(ActionEvent e)
    {
        Object source = e.getSource();
        // Pressing Enter in the path field advances to the next panel.
        if (source == _pathSelectionPanel.getPathInputField())
        {
            parent.navigateNext();
        }
    }

    // Resolves all localized messages once; per-panel keys win over the
    // TargetPanel defaults (see getMessage).
    private void loadMessages()
    {
        _error = getString("installer.error");
        _warn = getString("installer.warning");
        _reqMsg = getMessage("required");
        _emptyTargetMsg = getMessage("empty_target");
        _warnMsg = getMessage("exists_warn");
        _notValidMsg = getMessage("notValid");
        _notWritableMsg = getMessage("notwritable");
        _createDirMsg = getMessage("createdir");
    }

    // Looks up an i18n string for this panel, falling back to the default
    // panel name ("TargetPanel") when the specific key is missing.
    private String getMessage(String type)
    {
        String msg = null;
        msg = getI18nStringForClass(type, _targetPanel);
        if (msg == null)
        {
            msg = getI18nStringForClass(type, _defaultPanelName);
        }
        return msg;
    }

    /**
     * Indicates whether the panel has been validated or not.
     *
     * @return Whether the panel has been validated or not.
     */
    @Override
    public boolean isValidated()
    {
        String chosenPath = _pathSelectionPanel.getPath();
        boolean ok = true;
        // We put a warning if the specified target is nameless
        if (chosenPath.length() == 0)
        {
            if (isMustExist())
            {
                emitError(_error, _reqMsg);
                return false;
            }
            ok = emitWarning(_warn, _emptyTargetMsg);
        }
        if (!ok)
        {
            return ok;
        }
        // Normalize the path
        File path = new File(chosenPath).getAbsoluteFile();
        chosenPath = path.toString();
        _pathSelectionPanel.setPath(chosenPath);
        if (isMustExist())
        {
            if (!path.exists())
            {
                emitError(_error, _reqMsg);
                return false;
            }
            if (!pathIsValid())
            {
                emitError(_error, _notValidMsg);
                return false;
            }
        }
        else
        {
            // We assume, that we would install something into this dir
            if (!isWriteable())
            {
                emitError(_error, _notWritableMsg);
                return false;
            }
            // We put a warning if the directory exists else we warn
            // that it will be created
            if (path.exists())
            {
                int res = askQuestion(_warn, _warnMsg, AbstractUIHandler.CHOICES_YES_NO,
                                      AbstractUIHandler.ANSWER_YES);
                ok = res == AbstractUIHandler.ANSWER_YES;
            }
            else
            {
                ok = this.emitNotificationFeedback(_createDirMsg + "\n" + chosenPath);
            }
        }
        return ok;
    }

    /**
     * Returns whether the chosen path is true or not. If existFiles are not null, the existence of
     * it under the chosen path are detected. This method can be also implemented in derived
     * classes to handle special verification of the path.
     *
     * @return true if existFiles are exist or not defined, else false
     */
    protected boolean pathIsValid()
    {
        if (_existFiles == null)
        {
            return true;
        }
        for (String _existFile : _existFiles)
        {
            File path = new File(_pathSelectionPanel.getPath(), _existFile).getAbsoluteFile();
            if (!path.exists())
            {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns the must exist state.
     *
     * @return the must exist state
     */
    public boolean isMustExist()
    {
        return _mustExist;
    }

    /**
     * Sets the must exist state. If it is true, the path must exist.
     *
     * @param mustExist must exist state
     */
    public void setMustExist(boolean mustExist)
    {
        _mustExist = mustExist;
    }

    /**
     * Returns the array of strings which are described the files which must exist.
     *
     * @return paths of files which must exist
     */
    public String[] getExistFiles()
    {
        return _existFiles;
    }

    /**
     * Sets the paths of files which must exist under the chosen path.
     *
     * @param strings paths of files which must exist under the chosen path
     */
    public void setExistFiles(String[] strings)
    {
        _existFiles = strings;
    }

    /**
     * "targetPanel" is typically the class name of the implementing panel, such as
     * "UserPathPanel" or "TargetPanel" set when the class is created, but can be set
     * with setDefaultDir().
     * Loads up the "dir" resource associated with targetPanel. Acceptable dir resource names:
     * <code>
     * targetPanel.dir.macosx
     * targetPanel.dir.mac
     * targetPanel.dir.windows
     * targetPanel.dir.unix
     * targetPanel.dir.xxx,
     * where xxx is the lower case version of System.getProperty("os.name"),
     * with any spaces replace with underscores
     * targetPanel.dir (generic that will be applied if none of above is found)
     * </code>
     * As with all IzPack resources, each the above ids should be associated with a separate
     * filename, which is set in the install.xml file at compile time.
     */
    private void loadDefaultDir()
    {
        // Load only once ...
        if (!(_loadedDefaultDir))
        {
            Resources resources = getResources();
            BufferedReader reader = null;
            try
            {
                InputStream in = null;
                String os = System.getProperty("os.name");
                // first try to look up by specific os name
                os = os.replace(' ', '_'); // avoid spaces in file names
                os = os.toLowerCase(); // for consistency among targetPanel res files
                try
                {
                    in = resources.getInputStream(_targetPanel + ".dir.".concat(os));
                }
                catch (ResourceNotFoundException rnfe)
                {
                }
                if (in == null)
                {
                    Platform platform = installData.getPlatform();
                    if (platform.isA(Platform.Name.WINDOWS))
                    {
                        try
                        {
                            in = resources.getInputStream(_targetPanel + ".dir.windows");
                        }
                        catch (ResourceNotFoundException rnfe)
                        {
                        }//it's usual, that the resource does not exist
                    }
                    else if (platform.isA(Platform.Name.MAC_OSX))
                    {
                        try
                        {
                            in = resources.getInputStream(_targetPanel + ".dir.mac");
                        }
                        catch (ResourceNotFoundException rnfe)
                        {
                        }//it's usual, that the resource does not exist
                    }
                    else
                    {
                        try
                        {
                            in = resources.getInputStream(_targetPanel + ".dir.unix");
                        }
                        catch (ResourceNotFoundException eee)
                        {
                        }//it's usual, that the resource does not exist
                    }
                }
                // if all above tests failed, there is no resource file,
                // so use system default
                if (in == null)
                {
                    try
                    {
                        in = resources.getInputStream(_targetPanel + ".dir");
                    }
                    catch (ResourceNotFoundException eee)
                    {
                    }
                }
                if (in != null)
                {
                    // now read the file, once we've identified which one to read
                    // NOTE(review): if an exception is thrown before 'reader'
                    // wraps 'in', the stream is never closed — consider also
                    // closing 'in' in the finally block.
                    InputStreamReader isr = new InputStreamReader(in);
                    reader = new BufferedReader(isr);
                    String line;
                    while ((line = reader.readLine()) != null)
                    {
                        line = line.trim();
                        // use the first non-blank line
                        if (!"".equals(line))
                        {
                            break;
                        }
                    }
                    _defaultDir = line;
                    // Substitute installer variables (e.g. $USER_HOME).
                    _defaultDir = installData.getVariables().replace(_defaultDir);
                }
            }
            catch (Exception e)
            {
                logger.log(Level.WARNING, e.getMessage(), e);
                _defaultDir = null; // leave unset to take the system default set by Installer class
            }
            finally
            {
                try
                {
                    if (reader != null)
                    {
                        reader.close();
                    }
                }
                catch (IOException ignored)
                {
                }
            }
        }
        _loadedDefaultDir = true;
    }

    /**
     * This method determines whether the chosen dir is writeable or not.
     *
     * @return whether the chosen dir is writeable or not
     */
    public boolean isWriteable()
    {
        File existParent = IoHelper.existingParent(new File(_pathSelectionPanel.getPath()));
        if (existParent == null)
        {
            return false;
        }
        // On windows we cannot use canWrite because
        // it looks to the dos flags which are not valid
        // on NT or 2k XP or ...
        if (OsVersion.IS_WINDOWS)
        {
            File tmpFile;
            try
            {
                // Probe writability by actually creating a temp file.
                tmpFile = File.createTempFile("izWrTe", ".tmp", existParent);
                tmpFile.deleteOnExit();
            }
            catch (IOException e)
            {
                logger.log(Level.WARNING, e.toString(), e);
                return false;
            }
            return true;
        }
        return existParent.canWrite();
    }

    /**
     * Returns the default for the directory.
     *
     * @return the default for the directory
     */
    public String getDefaultDir()
    {
        // Lazily load the resource-defined default on first access.
        if (_defaultDir == null && (!(_loadedDefaultDir)))
        {
            loadDefaultDir();
        }
        return _defaultDir;
    }

    /**
     * Sets the default for the directory to the given string.
     *
     * @param defaultDir path for default directory
     */
    public void setDefaultDir(String defaultDir)
    {
        _defaultDir = defaultDir;
    }

    /**
     * Returns the panel name extending this class.
     * Used for looking up localized text and resources.
     *
     * @return the default for the directory
     */
    public String getTargetPanel()
    {
        return _targetPanel;
    }

    /**
     * Sets the panel name extending this class.
     * Used for looking up localized text and resources.
     *
     * @param targetPanel path for default directory
     */
    public void setTargetPanel(String targetPanel)
    {
        _targetPanel = targetPanel;
    }
}
/* * Copyright 2013 MovingBlocks * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.terasology.world.internal; import com.google.common.base.Objects; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.terasology.engine.ComponentSystemManager; import org.terasology.engine.GameThread; import org.terasology.entitySystem.Component; import org.terasology.entitySystem.ComponentContainer; import org.terasology.entitySystem.entity.EntityBuilder; import org.terasology.entitySystem.entity.EntityManager; import org.terasology.entitySystem.entity.EntityRef; import org.terasology.entitySystem.entity.internal.EngineEntityManager; import org.terasology.entitySystem.entity.internal.EntityChangeSubscriber; import org.terasology.entitySystem.entity.lifecycleEvents.BeforeDeactivateComponent; import org.terasology.entitySystem.entity.lifecycleEvents.BeforeEntityCreated; import org.terasology.entitySystem.entity.lifecycleEvents.OnActivatedComponent; import org.terasology.entitySystem.entity.lifecycleEvents.OnChangedComponent; import org.terasology.entitySystem.event.ReceiveEvent; import org.terasology.entitySystem.metadata.ComponentMetadata; import org.terasology.entitySystem.prefab.Prefab; import org.terasology.entitySystem.systems.UpdateSubscriberSystem; import org.terasology.logic.health.HealthComponent; 
import org.terasology.logic.location.LocationComponent;
import org.terasology.math.Region3i;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.monitoring.PerformanceMonitor;
import org.terasology.network.NetworkComponent;
import org.terasology.reflection.metadata.FieldMetadata;
import org.terasology.registry.CoreRegistry;
import org.terasology.world.BlockEntityRegistry;
import org.terasology.world.OnChangedBlock;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockComponent;
import org.terasology.world.block.regions.BlockRegionComponent;

import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

/**
 * World provider decorator that keeps block positions and entities in sync: it maintains the
 * position-to-entity lookup maps, creates block entities on demand, transforms entity components
 * when a block changes type, and garbage-collects "temporary" block entities each update tick.
 *
 * <p>All public entity accessors are game-thread only (guarded by {@link GameThread#isCurrentThread()});
 * off-thread calls log an error and return {@link EntityRef#NULL} / {@code null} / {@code false}.
 *
 * @author Immortius
 */
public class EntityAwareWorldProvider extends AbstractWorldProviderDecorator implements BlockEntityRegistry, UpdateSubscriberSystem, EntityChangeSubscriber {
    private static final Logger logger = LoggerFactory.getLogger(EntityAwareWorldProvider.class);

    // Components every block entity shares; these are never stripped when a block changes type
    // and never removed during temporary-entity cleanup.
    private static final Set<Class<? extends Component>> COMMON_BLOCK_COMPONENTS =
            ImmutableSet.of(NetworkComponent.class, BlockComponent.class, LocationComponent.class, HealthComponent.class);
    // Time (seconds) for a destructible block to regenerate from zero to full health;
    // regen rate is hardness / BLOCK_REGEN_SECONDS so the duration is hardness-independent.
    private static final float BLOCK_REGEN_SECONDS = 4.0f;

    private EngineEntityManager entityManager;

    // TODO: Perhaps a better datastructure for spatial lookups
    // TODO: Or perhaps a build in indexing system for entities
    // Block position -> block entity (one entity per position).
    private Map<Vector3i, EntityRef> blockEntityLookup = Maps.newHashMap();
    // Block position -> region entity covering that position (filled from BlockRegionComponent).
    private Map<Vector3i, EntityRef> blockRegionLookup = Maps.newHashMap();
    // Region entity -> region it currently covers (needed to clear stale lookups on change/deactivate).
    private Map<EntityRef, Region3i> blockRegions = Maps.newHashMap();
    // Entities created lazily for plain blocks; destroyed in update() unless something made them permanent.
    private Set<EntityRef> temporaryBlockEntities = Sets.newLinkedHashSet();

    /**
     * Creates the provider, pulling the entity manager from the {@link CoreRegistry}
     * and registering the world time object with the component system manager.
     *
     * @param base the world provider being decorated
     */
    public EntityAwareWorldProvider(WorldProviderCore base) {
        super(base);
        entityManager = (EngineEntityManager) CoreRegistry.get(EntityManager.class);
        CoreRegistry.get(ComponentSystemManager.class).register(getTime());
    }

    /**
     * Creates the provider with an explicitly supplied entity manager (overrides the one
     * the delegating constructor obtained from the registry).
     *
     * @param base          the world provider being decorated
     * @param entityManager the entity manager to use
     */
    public EntityAwareWorldProvider(WorldProviderCore base, EngineEntityManager entityManager) {
        this(base);
        this.entityManager = entityManager;
    }

    /** Subscribes this provider to entity component change notifications. */
    @Override
    public void initialise() {
        entityManager.subscribeForChanges(this);
    }

    // Lifecycle hooks with nothing to do for this system.
    @Override
    public void preBegin() {
    }

    @Override
    public void postBegin() {
    }

    @Override
    public void preSave() {
    }

    @Override
    public void postSave() {
    }

    /** Unsubscribes from entity change notifications. */
    @Override
    public void shutdown() {
        entityManager.unsubscribe(this);
    }

    /**
     * Sets a block and updates its entity accordingly.
     *
     * @return the previous block type, or {@code null} if the change failed or this is
     *         not the game thread
     */
    @Override
    public Block setBlock(Vector3i pos, Block type) {
        if (GameThread.isCurrentThread()) {
            // Fetch (or lazily create) the entity BEFORE the block changes, so the entity
            // still reflects the old block when updateBlockEntity diffs old vs new.
            EntityRef blockEntity = getBlockEntityAt(pos);
            Block oldType = super.setBlock(pos, type);
            if (oldType != null) {
                updateBlockEntity(blockEntity, pos, oldType, type, false, Collections.<Class<? extends Component>>emptySet());
            }
            return oldType;
        }
        return null;
    }

    /**
     * Sets a block like {@link #setBlock}, but the listed component types survive the
     * entity transformation even if the new block's prefab does not include them.
     *
     * @param components component types to retain on the block entity
     * @return the previous block type, or {@code null} on failure / off-thread call
     */
    @Override
    @SafeVarargs
    public final Block setBlockRetainComponent(Vector3i pos, Block type, Class<? extends Component>... components) {
        if (GameThread.isCurrentThread()) {
            EntityRef blockEntity = getBlockEntityAt(pos);
            Block oldType = super.setBlock(pos, type);
            if (oldType != null) {
                updateBlockEntity(blockEntity, pos, oldType, type, false, Sets.newHashSet(components));
            }
            return oldType;
        }
        return null;
    }

    /**
     * Reconciles a block entity with a block type change: adjusts temporary status,
     * transforms components when the family/prefab changed (or when forced), and
     * notifies both the region entity and the block entity via {@link OnChangedBlock}.
     */
    private void updateBlockEntity(EntityRef blockEntity, Vector3i pos, Block oldType, Block type,
                                   boolean forceEntityUpdate, Set<Class<? extends Component>> retainComponents) {
        if (type.isKeepActive()) {
            temporaryBlockEntities.remove(blockEntity);
        } else if (oldType.isKeepActive() && isTemporaryBlock(blockEntity, type)) {
            // Block went from keep-active to not; entity may now be eligible for cleanup.
            temporaryBlockEntities.add(blockEntity);
        }
        // Only rebuild components when the block family or prefab actually differs.
        if (forceEntityUpdate
                || !(Objects.equal(oldType.getBlockFamily(), type.getBlockFamily()) && Objects.equal(oldType.getPrefab(), type.getPrefab()))) {
            updateBlockEntityComponents(blockEntity, oldType, type, retainComponents);
        }

        EntityRef regionEntity = blockRegionLookup.get(pos);
        if (regionEntity != null) {
            regionEntity.send(new OnChangedBlock(pos, type, oldType));
        }
        // Defensive copy of pos for the event sent to the block entity itself.
        blockEntity.send(new OnChangedBlock(new Vector3i(pos), type, oldType));
    }

    /**
     * Returns the already-existing block entity at a position, without creating one.
     *
     * @return the entity, or {@link EntityRef#NULL} if none exists or called off-thread
     */
    @Override
    public EntityRef getExistingBlockEntityAt(Vector3i blockPosition) {
        if (GameThread.isCurrentThread()) {
            EntityRef result = blockEntityLookup.get(blockPosition);
            return (result == null) ? EntityRef.NULL : result;
        }
        logger.error("Attempted to get block entity off-thread");
        return EntityRef.NULL;
    }

    /**
     * Sets a block and forces the entity component transformation even if the block
     * family/prefab did not change.
     *
     * @return the previous block type, or {@code null} on failure / off-thread call
     */
    @Override
    public Block setBlockForceUpdateEntity(Vector3i pos, Block type) {
        if (GameThread.isCurrentThread()) {
            EntityRef blockEntity = getBlockEntityAt(pos);
            Block oldType = super.setBlock(pos, type);
            if (oldType != null) {
                updateBlockEntity(blockEntity, pos, oldType, type, true, Collections.<Class<? extends Component>>emptySet());
            }
            return oldType;
        }
        return null;
    }

    /** Float-position overload: rounds to the containing block position (offset 0.5f). */
    @Override
    public EntityRef getBlockEntityAt(Vector3f position) {
        Vector3i pos = new Vector3i(position, 0.5f);
        return getBlockEntityAt(pos);
    }

    /**
     * Returns the block entity at a position, lazily creating one if the block is
     * relevant (loaded) and no entity exists yet.
     *
     * @return the entity, or {@link EntityRef#NULL} off-thread / when not creatable
     */
    @Override
    public EntityRef getBlockEntityAt(Vector3i blockPosition) {
        if (GameThread.isCurrentThread()) {
            EntityRef blockEntity = getExistingBlockEntityAt(blockPosition);
            if (!blockEntity.exists() && isBlockRelevant(blockPosition.x, blockPosition.y, blockPosition.z)) {
                Block block = getBlock(blockPosition.x, blockPosition.y, blockPosition.z);
                blockEntity = createBlockEntity(blockPosition, block);
            }
            return blockEntity;
        }
        logger.error("Attempted to get block entity off-thread");
        return EntityRef.NULL;
    }

    /** Convenience overload of {@link #isTemporaryBlock(ComponentContainer, Block, Class)} with no ignored component. */
    private boolean isTemporaryBlock(ComponentContainer entity, Block block) {
        return isTemporaryBlock(entity, block, null);
    }

    /**
     * A block entity is "temporary" (eligible for cleanup) unless the block is keep-active
     * or the entity carries any force-block-active component (other than {@code ignoreComponent},
     * used when that component is in the process of being removed).
     */
    private boolean isTemporaryBlock(ComponentContainer entity, Block block, Class<? extends Component> ignoreComponent) {
        if (block.isKeepActive()) {
            return false;
        }

        for (ComponentMetadata<?> metadata : entityManager.getComponentLibrary().iterateComponentMetadata()) {
            if (metadata.isForceBlockActive() && ignoreComponent != metadata.getType()) {
                if (entity.hasComponent(metadata.getType())) {
                    return false;
                }
            }
        }
        return true;
    }

    /**
     * Transforms a block entity with the change of block type. This is driven from the delta between the old and new
     * block type prefabs, but takes into account changes made to the block entity.
     *
     * @param blockEntity The entity to update
     * @param oldType The previous type of the block
     * @param type The new type of the block
     * @param retainComponents component types that must survive the transformation
     */
    private void updateBlockEntityComponents(EntityRef blockEntity, Block oldType, Block type, Set<Class<? extends Component>> retainComponents) {
        BlockComponent blockComponent = blockEntity.getComponent(BlockComponent.class);

        // Build a "reference" entity for the OLD block type, letting systems adjust it via
        // BeforeEntityCreated, so we can tell which components belong to the old prefab.
        Optional<Prefab> oldPrefab = oldType.getPrefab();
        EntityBuilder oldEntityBuilder = entityManager.newBuilder(oldPrefab.orElse(null));
        oldEntityBuilder.addComponent(new BlockComponent(oldType, new Vector3i(blockComponent.getPosition())));
        BeforeEntityCreated oldEntityEvent = new BeforeEntityCreated(oldPrefab.orElse(null), oldEntityBuilder.iterateComponents());
        blockEntity.send(oldEntityEvent);
        for (Component comp : oldEntityEvent.getResultComponents()) {
            oldEntityBuilder.addComponent(comp);
        }

        // Same for the NEW block type.
        Optional<Prefab> newPrefab = type.getPrefab();
        EntityBuilder newEntityBuilder = entityManager.newBuilder(newPrefab.orElse(null));
        newEntityBuilder.addComponent(new BlockComponent(type, new Vector3i(blockComponent.getPosition())));
        BeforeEntityCreated newEntityEvent = new BeforeEntityCreated(newPrefab.orElse(null), newEntityBuilder.iterateComponents());
        blockEntity.send(newEntityEvent);
        for (Component comp : newEntityEvent.getResultComponents()) {
            newEntityBuilder.addComponent(comp);
        }

        // Drop components that are not common, not retain-on-change, not in the new prefab,
        // and not explicitly retained by the caller.
        for (Component component : blockEntity.iterateComponents()) {
            if (!COMMON_BLOCK_COMPONENTS.contains(component.getClass())
                    && !entityManager.getComponentLibrary().getMetadata(component.getClass()).isRetainUnalteredOnBlockChange()
                    && !newEntityBuilder.hasComponent(component.getClass())
                    && !retainComponents.contains(component.getClass())) {
                blockEntity.removeComponent(component.getClass());
            }
        }

        blockComponent.setBlock(type);
        blockEntity.saveComponent(blockComponent);

        // Reconcile health with the new block's destructibility/hardness.
        HealthComponent health = blockEntity.getComponent(HealthComponent.class);
        if (health == null && type.isDestructible()) {
            blockEntity.addComponent(new HealthComponent(type.getHardness(), type.getHardness() / BLOCK_REGEN_SECONDS, 1.0f));
        } else if (health != null && !type.isDestructible()) {
            blockEntity.removeComponent(HealthComponent.class);
        } else if (health != null && type.isDestructible()) {
            health.maxHealth = type.getHardness();
            health.currentHealth = Math.min(health.currentHealth, health.maxHealth);
            blockEntity.saveComponent(health);
        }

        // Copy/merge the new prefab's components into the live entity.
        for (Component comp : newEntityBuilder.iterateComponents()) {
            copyIntoPrefab(blockEntity, comp, retainComponents);
        }
    }

    /**
     * Copies a single prefab component onto the block entity: added if absent, otherwise
     * field-merged unless the component type is retain-unaltered or explicitly retained.
     */
    @SuppressWarnings("unchecked")
    private <T extends Component> void copyIntoPrefab(EntityRef blockEntity, T comp, Set<Class<? extends Component>> retainComponents) {
        ComponentMetadata<T> metadata = entityManager.getComponentLibrary().getMetadata((Class<T>) comp.getClass());
        if (!blockEntity.hasComponent(comp.getClass())) {
            blockEntity.addComponent(metadata.copyRaw(comp));
        } else if (!metadata.isRetainUnalteredOnBlockChange() && !retainComponents.contains(metadata.getType())) {
            updateComponent(blockEntity, metadata, comp);
        }
    }

    /**
     * Field-by-field merge of {@code targetComponent} into the entity's existing component
     * of the same type; saves the component only if at least one field actually changed.
     */
    private <T extends Component> void updateComponent(EntityRef blockEntity, ComponentMetadata<T> metadata, T targetComponent) {
        T currentComp = blockEntity.getComponent(metadata.getType());
        if (currentComp != null) {
            boolean changed = false;
            for (FieldMetadata<T, ?> field : metadata.getFields()) {
                Object newVal = field.getValue(targetComponent);
                if (!Objects.equal(field.getValue(currentComp), newVal)) {
                    field.setValue(currentComp, newVal);
                    changed = true;
                }
            }
            if (changed) {
                blockEntity.saveComponent(currentComp);
            }
        }
    }

    /**
     * Creates the entity backing a block: prefab components plus location/block components,
     * health if destructible, and a NetworkComponent when the entity is permanent. Temporary
     * entities skip lifecycle events and are registered for later cleanup.
     */
    private EntityRef createBlockEntity(Vector3i blockPosition, Block block) {
        EntityBuilder builder = entityManager.newBuilder(block.getPrefab().orElse(null));
        builder.addComponent(new LocationComponent(blockPosition.toVector3f()));
        builder.addComponent(new BlockComponent(block, blockPosition));
        if (block.isDestructible() && !builder.hasComponent(HealthComponent.class)) {
            // Block regen should always take the same amount of time, regardless of its hardness
            builder.addComponent(new HealthComponent(block.getHardness(), block.getHardness() / BLOCK_REGEN_SECONDS, 1.0f));
        }
        boolean isTemporary = isTemporaryBlock(builder, block);
        if (!isTemporary && !builder.hasComponent(NetworkComponent.class)) {
            builder.addComponent(new NetworkComponent());
        }

        EntityRef blockEntity;
        if (isTemporary) {
            blockEntity = builder.buildWithoutLifecycleEvents();
            temporaryBlockEntities.add(blockEntity);
        } else {
            blockEntity = builder.build();
        }
        blockEntityLookup.put(new Vector3i(blockPosition), blockEntity);
        return blockEntity;
    }

    /**
     * Returns the existing entity covering a position: the region entity if one covers it,
     * otherwise the existing block entity (never creates).
     */
    @Override
    public EntityRef getExistingEntityAt(Vector3i blockPosition) {
        if (GameThread.isCurrentThread()) {
            EntityRef result = blockRegionLookup.get(blockPosition);
            if (result == null) {
                return getExistingBlockEntityAt(blockPosition);
            }
            return result;
        }
        logger.error("Attempted to get block entity off-thread");
        return EntityRef.NULL;
    }

    /**
     * Returns the entity covering a position, creating a block entity if nothing exists yet.
     */
    @Override
    public EntityRef getEntityAt(Vector3i blockPosition) {
        if (GameThread.isCurrentThread()) {
            EntityRef entity = getExistingEntityAt(blockPosition);
            if (!entity.exists()) {
                return getBlockEntityAt(blockPosition);
            }
            return entity;
        }
        logger.error("Attempted to get block entity off-thread");
        return EntityRef.NULL;
    }

    /** Returns whether the position has a block entity that is NOT scheduled as temporary. */
    @Override
    public boolean hasPermanentBlockEntity(Vector3i blockPos) {
        if (GameThread.isCurrentThread()) {
            EntityRef blockEntity = blockEntityLookup.get(blockPos);
            return blockEntity != null && !temporaryBlockEntities.contains(blockEntity);
        }
        logger.error("Attempted check whether a block entity is permanent, off thread");
        return false;
    }

    /** Registers an activated block entity in the lookup, destroying any stale entity at that position. */
    @ReceiveEvent(components = {BlockComponent.class})
    public void onActivateBlock(OnActivatedComponent event, EntityRef entity) {
        BlockComponent block = entity.getComponent(BlockComponent.class);
        EntityRef oldEntity = blockEntityLookup.put(new Vector3i(block.getPosition()), entity);
        // If this is a client, then an existing block entity may exist. Destroy it.
        if (oldEntity != null && !Objects.equal(oldEntity, entity)) {
            oldEntity.destroy();
        }
    }

    /** Removes a deactivating block entity from the lookup (only if it is still the registered one). */
    @ReceiveEvent(components = {BlockComponent.class})
    public void onDeactivateBlock(BeforeDeactivateComponent event, EntityRef entity) {
        BlockComponent block = entity.getComponent(BlockComponent.class);
        Vector3i pos = new Vector3i(block.getPosition());
        if (blockEntityLookup.get(pos) == entity) {
            blockEntityLookup.remove(pos);
        }
    }

    /** Indexes every position of an activated block region to its region entity. */
    @ReceiveEvent(components = {BlockRegionComponent.class})
    public void onBlockRegionActivated(OnActivatedComponent event, EntityRef entity) {
        BlockRegionComponent regionComp = entity.getComponent(BlockRegionComponent.class);
        blockRegions.put(entity, regionComp.region);
        for (Vector3i pos : regionComp.region) {
            blockRegionLookup.put(pos, entity);
        }
    }

    /** Re-indexes a region entity after its region changed: clears the old extent, indexes the new one. */
    @ReceiveEvent(components = {BlockRegionComponent.class})
    public void onBlockRegionChanged(OnChangedComponent event, EntityRef entity) {
        Region3i oldRegion = blockRegions.get(entity);
        for (Vector3i pos : oldRegion) {
            blockRegionLookup.remove(pos);
        }
        BlockRegionComponent regionComp = entity.getComponent(BlockRegionComponent.class);
        blockRegions.put(entity, regionComp.region);
        for (Vector3i pos : regionComp.region) {
            blockRegionLookup.put(pos, entity);
        }
    }

    /** Removes a deactivating region entity from both region indexes. */
    @ReceiveEvent(components = {BlockRegionComponent.class})
    public void onBlockRegionDeactivated(BeforeDeactivateComponent event, EntityRef entity) {
        Region3i oldRegion = blockRegions.get(entity);
        for (Vector3i pos : oldRegion) {
            blockRegionLookup.remove(pos);
        }
        blockRegions.remove(entity);
    }

    /**
     * Per-frame update: destroys all block entities that were marked temporary this frame.
     * The set is snapshotted and cleared first so cleanup cannot re-enter it mid-iteration.
     */
    @Override
    public void update(float delta) {
        PerformanceMonitor.startActivity("Temp Blocks Cleanup");
        List<EntityRef> toRemove = Lists.newArrayList(temporaryBlockEntities);
        temporaryBlockEntities.clear();
        for (EntityRef entity : toRemove) {
            cleanUpTemporaryEntity(entity);
        }
        PerformanceMonitor.endActivity();
    }

    /**
     * Resets a temporary block entity back to its prefab state (strips non-prefab components,
     * restores prefab field values) and then destroys it without firing lifecycle events.
     */
    private void cleanUpTemporaryEntity(EntityRef entity) {
        Prefab prefab = entity.getParentPrefab();

        for (Component comp : entity.iterateComponents()) {
            if (!COMMON_BLOCK_COMPONENTS.contains(comp.getClass()) && (prefab == null || !prefab.hasComponent(comp.getClass()))) {
                entity.removeComponent(comp.getClass());
            }
        }
        entity.removeComponent(NetworkComponent.class);

        if (prefab != null) {
            for (Component comp : prefab.iterateComponents()) {
                Component currentComp = entity.getComponent(comp.getClass());
                if (currentComp == null) {
                    entity.addComponent(entityManager.getComponentLibrary().copy(comp));
                } else {
                    ComponentMetadata<?> metadata = entityManager.getComponentLibrary().getMetadata(comp.getClass());
                    boolean changed = false;
                    for (FieldMetadata field : metadata.getFields()) {
                        Object expected = field.getValue(comp);
                        if (!Objects.equal(expected, field.getValue(currentComp))) {
                            field.setValue(currentComp, expected);
                            changed = true;
                        }
                    }
                    if (changed) {
                        entity.saveComponent(currentComp);
                    }
                }
            }
        }
        entityManager.destroyEntityWithoutEvents(entity);
    }

    /**
     * Promotes a temporary block entity to permanent when a force-block-active component
     * is added to it (and ensures it becomes network-visible).
     */
    @Override
    public void onEntityComponentAdded(EntityRef entity, Class<? extends Component> component) {
        if (temporaryBlockEntities.contains(entity) && entityManager.getComponentLibrary().getMetadata(component).isForceBlockActive()) {
            temporaryBlockEntities.remove(entity);
            if (!entity.hasComponent(NetworkComponent.class)) {
                entity.addComponent(new NetworkComponent());
            }
        }
    }

    /** No action needed on component change. */
    @Override
    public void onEntityComponentChange(EntityRef entity, Class<? extends Component> component) {
    }

    /**
     * When a force-block-active component is removed from a block entity, re-evaluates
     * whether the entity is now temporary (ignoring the component being removed).
     */
    @Override
    public void onEntityComponentRemoved(EntityRef entity, Class<? extends Component> component) {
        if (entityManager.getComponentLibrary().getMetadata(component).isForceBlockActive()) {
            BlockComponent blockComp = entity.getComponent(BlockComponent.class);
            if (blockComp != null) {
                Block block = getBlock(blockComp.getPosition().x, blockComp.getPosition().y, blockComp.getPosition().z);
                if (isTemporaryBlock(entity, block, component)) {
                    temporaryBlockEntities.add(entity);
                }
            }
        }
    }

    @Override
    public void onReactivation(EntityRef entity, Collection<Component> components) {
        // TODO check if implementation makes sense
    }

    @Override
    public void onBeforeDeactivation(EntityRef entity, Collection<Component> components) {
        // TODO check if implementation makes sense
    }
}
package net.glowstone;

import net.glowstone.block.GlowBlock;
import net.glowstone.block.blocktype.BlockTNT;
import net.glowstone.entity.GlowEntity;
import net.glowstone.entity.GlowHumanEntity;
import net.glowstone.entity.GlowLivingEntity;
import net.glowstone.entity.GlowPlayer;
import net.glowstone.net.message.play.game.ExplosionMessage;
import org.bukkit.Effect;
import org.bukkit.Location;
import org.bukkit.Material;
import org.bukkit.Sound;
import org.bukkit.block.Block;
import org.bukkit.block.BlockFace;
import org.bukkit.entity.Entity;
import org.bukkit.entity.EntityType;
import org.bukkit.entity.LivingEntity;
import org.bukkit.event.block.BlockIgniteEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.EntityExplodeEvent;
import org.bukkit.util.BlockVector;
import org.bukkit.util.Vector;

import java.util.*;

/**
 * A single in-world explosion: calculates the destroyed blocks by ray casting,
 * fires the {@link EntityExplodeEvent}, breaks/ignites blocks, damages and knocks
 * back nearby living entities, and sends explosion packets to affected players.
 *
 * <p>Not thread-safe: uses a shared static {@link Random} and mutates instance
 * state ({@code power}, {@code yield}) during {@link #explodeWithEvent()}.
 */
public final class Explosion {

    // Explosion strengths for the various vanilla causes.
    // NOTE(review): presumably mirror vanilla values — confirm against the protocol/mechanics docs.
    public static final int POWER_TNT = 4;
    public static final int POWER_BED = 5;
    public static final int POWER_CREEPER = 3;
    public static final int POWER_CHARGED_CREEPER = 6;
    public static final int POWER_GHAST = 1;
    public static final int POWER_WITHER_SKULL = 1;
    public static final int POWER_WITHER_CREATION = 7;
    public static final int POWER_ENDER_CRYSTAL = 6;

    private float power;                  // explosion strength; temporarily doubled inside damageEntities()
    private final Entity source;          // causing entity, may be null (e.g. bed explosion)
    private final Location location;      // center of the explosion (defensive clone of the input)
    private final boolean incendiary;     // whether surviving block positions may catch fire
    private final boolean breakBlocks;    // whether blocks are destroyed at all
    private final GlowWorld world;
    private float yield = 0.3f;           // fraction of broken blocks that drop items; plugins can override via the event

    private static final Random random = new Random();

    /**
     * Creates a new explosion
     * @param source The entity causing this explosion
     * @param world The world this explosion is in
     * @param x The X location of the explosion
     * @param y The Y location of the explosion
     * @param z The Z location of the explosion
     * @param power The power of the explosion
     * @param incendiary Whether or not blocks should be set on fire
     * @param breakBlocks Whether blocks should break through this explosion
     */
    public Explosion(Entity source, GlowWorld world, double x, double y, double z, float power, boolean incendiary, boolean breakBlocks) {
        this(source, new Location(world, x, y, z), power, incendiary, breakBlocks);
    }

    /**
     * Creates a new explosion
     * @param source The entity causing this explosion
     * @param location The location this explosion is occurring at. Must contain a GlowWorld
     * @param power The power of the explosion
     * @param incendiary Whether or not blocks should be set on fire
     * @param breakBlocks Whether blocks should break through this explosion
     * @throws IllegalArgumentException if the location's world is not a GlowWorld
     */
    public Explosion(Entity source, Location location, float power, boolean incendiary, boolean breakBlocks) {
        if (!(location.getWorld() instanceof GlowWorld)) {
            throw new IllegalArgumentException("Supplied location does not have a valid GlowWorld");
        }

        this.source = source;
        this.location = location.clone();
        this.power = power;
        this.incendiary = incendiary;
        this.breakBlocks = breakBlocks;

        this.world = (GlowWorld) location.getWorld();
    }

    /**
     * Performs the explosion, going through the cancellable {@link EntityExplodeEvent}.
     *
     * @return {@code true} if the explosion happened (or was a no-op due to tiny power),
     *         {@code false} if a plugin cancelled the event
     */
    public boolean explodeWithEvent() {
        if (power < 0.1f) return true;  // too weak to have any effect

        Set<BlockVector> droppedBlocks = calculateBlocks();

        EntityExplodeEvent event = EventFactory.callEvent(new EntityExplodeEvent(source, location, toBlockList(droppedBlocks), yield));
        if (event.isCancelled()) return false;

        this.yield = event.getYield();  // plugins may have adjusted the drop rate

        playOutSoundAndParticles();

        List<Block> blocks = toBlockList(droppedBlocks);
        for (Block block : blocks) {
            handleBlockExplosion((GlowBlock) block);
        }

        if (incendiary) {
            for (Block block : blocks) {
                setBlockOnFire((GlowBlock) block);
            }
        }

        Collection<GlowPlayer> affectedPlayers = damageEntities();
        for (GlowPlayer player : affectedPlayers) {
            playOutExplosion(player, droppedBlocks);
        }

        return true;
    }

    ///////////////////////////////////////////////////
    // Calculate all the dropping blocks

    /**
     * Collects the set of block positions destroyed by this explosion by casting rays
     * from the center through every cell on the surface of a 16x16x16 lattice.
     */
    private Set<BlockVector> calculateBlocks() {
        if (!breakBlocks) return new HashSet<>();

        Set<BlockVector> blocks = new HashSet<>();

        final int value = 16;

        for (int x = 0; x < value; x++) {
            for (int y = 0; y < value; y++) {
                for (int z = 0; z < value; z++) {
                    // Only cast from the outer shell of the lattice; interior cells are skipped.
                    if (!(x == 0 || x == value - 1 || y == 0 || y == value - 1 || z == 0 || z == value - 1)) {
                        continue;
                    }
                    calculateRay(x, y, z, blocks);
                }
            }
        }

        return blocks;
    }

    /**
     * Casts a single ray from the explosion center in the direction given by the lattice
     * cell (ox, oy, oz), consuming power per step and per block's blast resistance;
     * every block the ray still has power to pass through is added to {@code result}.
     */
    private void calculateRay(int ox, int oy, int oz, Collection<BlockVector> result) {
        // Map lattice indices 0..15 onto roughly [-1, 1] to form the ray direction.
        double x = ox / 7.5 - 1;
        double y = oy / 7.5 - 1;
        double z = oz / 7.5 - 1;
        Vector direction = new Vector(x, y, z);
        direction.normalize();
        direction.multiply(0.3f); // 0.3 blocks away with each step

        Location current = location.clone();

        float currentPower = calculateStartPower();

        while (currentPower > 0) {
            GlowBlock block = world.getBlockAt(current);

            if (block.getType() != Material.AIR) {
                double blastDurability = getBlastDurability(block) / 5d;
                blastDurability += 0.3F;
                blastDurability *= 0.3F;
                currentPower -= blastDurability;

                if (currentPower > 0) {
                    result.add(new BlockVector(block.getX(), block.getY(), block.getZ()));
                }
            }

            current.add(direction);
            currentPower -= 0.225f;  // flat attenuation per step, even through air
        }
    }

    /** Breaks one block: air is skipped, TNT is chain-ignited, everything else drops per {@code yield}. */
    private void handleBlockExplosion(GlowBlock block) {
        if (block.getType() == Material.AIR) {
            return;
        } else if (block.getType() == Material.TNT) {
            BlockTNT.igniteBlock(block, true);
            return;
        }

        block.breakNaturally(yield);
    }

    /** Randomized initial ray power: power scaled by a factor in [0.7, 1.3). */
    private float calculateStartPower() {
        float rand = random.nextFloat();
        rand *= 0.6F; // (max - 0.7)
        rand += 0.7; // min
        return rand * power;
    }

    /** Blast resistance of the block's material. */
    private double getBlastDurability(GlowBlock block) {
        return block.getMaterialValues().getBlastResistance();
    }

    /** Resolves a collection of block positions to the corresponding Block objects in this world. */
    private List<Block> toBlockList(Collection<BlockVector> locs) {
        List<Block> blocks = new ArrayList<>(locs.size());
        for (BlockVector location : locs)
            blocks.add(world.getBlockAt(location.getBlockX(), location.getBlockY(), location.getBlockZ()));
        return blocks;
    }

    /**
     * With 1-in-3 probability, ignites the given (now empty) position if it sits on a
     * solid, non-burning block; goes through the cancellable {@link BlockIgniteEvent}.
     */
    private void setBlockOnFire(GlowBlock block) {
        if (random.nextInt(3) != 0)
            return;

        Block below = block.getRelative(BlockFace.DOWN);
        // TODO: check for flammable blocks
        Material belowType = below.getType();
        if (belowType == Material.AIR || belowType == Material.FIRE)
            return;

        BlockIgniteEvent event = EventFactory.callEvent(new BlockIgniteEvent(block, BlockIgniteEvent.IgniteCause.EXPLOSION, source));
        if (event.isCancelled())
            return;

        block.setType(Material.FIRE);
    }

    /////////////////////////////////////////
    // Damage entities

    /**
     * Damages and knocks back nearby living entities.
     *
     * <p>Note the deliberate dance with {@code power}: the original value is saved in a
     * local, {@code this.power} is doubled for the range/falloff checks, the ORIGINAL
     * value is used for the damage formula, and {@code this.power} is restored at the end.
     * NOTE(review): presumably mirrors vanilla's "2 * power" damage radius — confirm.
     *
     * @return the players that were affected (they need the explosion packet)
     */
    private Collection<GlowPlayer> damageEntities() {
        float power = this.power;
        this.power *= 2f;

        Collection<GlowPlayer> affectedPlayers = new ArrayList<>();

        Collection<GlowLivingEntity> entities = getNearbyEntities();
        for (GlowLivingEntity entity : entities) {
            double disDivPower = distanceTo(entity) / (double) this.power;
            if (disDivPower > 1.0D) continue;

            Vector vecDistance = distanceToHead(entity);
            if (vecDistance.length() == 0.0) continue;

            vecDistance.normalize();

            double basicDamage = calculateDamage(entity, disDivPower);
            int explosionDamage = (int) ((basicDamage * basicDamage + basicDamage) * 4 * (double) power + 1.0D);

            // Humans get no direct damage here; only knockback below.
            if (!(entity instanceof GlowHumanEntity)) {
                EntityDamageEvent.DamageCause damageCause;
                if (source == null || source.getType() == EntityType.PRIMED_TNT) {
                    damageCause = EntityDamageEvent.DamageCause.BLOCK_EXPLOSION;
                } else {
                    damageCause = EntityDamageEvent.DamageCause.ENTITY_EXPLOSION;
                }
                entity.damage(explosionDamage, source, damageCause);
            }

            // Knockback, reduced by blast-protection enchantments.
            double enchantedDamage = calculateEnchantedDamage(basicDamage, entity);
            vecDistance.multiply(enchantedDamage);

            Vector currentVelocity = entity.getVelocity();
            currentVelocity.add(vecDistance);
            entity.setVelocity(currentVelocity);

            if (entity instanceof GlowPlayer) {
                affectedPlayers.add((GlowPlayer) entity);
            }
        }
        this.power = power;  // restore the un-doubled value

        return affectedPlayers;
    }

    /**
     * Reduces knockback by 15% per level of explosion protection.
     * Currently a pass-through: the protection level is hardcoded to 0 (see TODO).
     */
    private double calculateEnchantedDamage(double basicDamage, GlowLivingEntity entity) {
        int level = 0; // TODO: calculate explosion protection level of entity's equipment
        if (level > 0) {
            float sub = level * 0.15f;
            double damage = basicDamage * sub;
            damage = Math.floor(damage);
            return basicDamage - damage;
        }

        return basicDamage;
    }

    /** Base damage: exposure (ray trace through blocks) scaled by distance falloff. */
    private double calculateDamage(GlowEntity entity, double disDivPower) {
        double damage = world.rayTrace(location, entity);
        return (damage * (1D - disDivPower));
    }

    /** Living entities within the (doubled) power radius of the explosion center. */
    private Collection<GlowLivingEntity> getNearbyEntities() {
        // TODO: fetch only necessary entities
        List<LivingEntity> entities = world.getLivingEntities();
        List<GlowLivingEntity> nearbyEntities = new ArrayList<>();
        for (LivingEntity entity : entities) {
            if (distanceTo(entity) / (double) power < 1.) {
                nearbyEntities.add((GlowLivingEntity) entity);
            }
        }
        return nearbyEntities;
    }

    /** Distance from the explosion center to the entity's feet location. */
    private double distanceTo(LivingEntity entity) {
        return location.clone().subtract(entity.getLocation()).length();
    }

    /** Vector from a point eye-height below the center to the entity's location (knockback direction). */
    private Vector distanceToHead(LivingEntity entity) {
        return entity.getLocation().clone().subtract(location.clone().subtract(0, entity.getEyeHeight(), 0)).toVector();
    }

    ///////////////////////////////////////
    // Visualize

    /** Plays the explosion sound (random pitch) and the huge/large explosion particle effect. */
    private void playOutSoundAndParticles() {
        world.playSound(location, Sound.EXPLODE, 4, (1.0F + (random.nextFloat() - random.nextFloat()) * 0.2F) * 0.7F);

        if (this.power >= 2.0F && this.breakBlocks) {
            // send huge explosion
            world.spigot().playEffect(location, Effect.EXPLOSION_HUGE);
        } else {
            // send large explosion
            world.spigot().playEffect(location, Effect.EXPLOSION_LARGE);
        }
    }

    /**
     * Sends the explosion packet to one player: broken-block offsets are encoded as
     * signed bytes relative to the truncated explosion center, plus the player's own
     * velocity so the client applies the knockback.
     * NOTE(review): radius is hardcoded to 5 in the message rather than derived from power — verify intent.
     */
    private void playOutExplosion(GlowPlayer player, Iterable<BlockVector> blocks) {
        Collection<ExplosionMessage.Record> records = new ArrayList<>();

        // Truncate (not round) the center so byte offsets match the client's integer grid.
        Location clientLoc = location.clone();
        clientLoc.setX((int) clientLoc.getX());
        clientLoc.setY((int) clientLoc.getY());
        clientLoc.setZ((int) clientLoc.getZ());

        for (BlockVector block : blocks) {
            byte x = (byte) (block.getBlockX() - clientLoc.getBlockX());
            byte y = (byte) (block.getBlockY() - clientLoc.getBlockY());
            byte z = (byte) (block.getBlockZ() - clientLoc.getBlockZ());
            records.add(new ExplosionMessage.Record(x, y, z));
        }

        Vector velocity = player.getVelocity();
        ExplosionMessage message = new ExplosionMessage((float) location.getX(), (float) location.getY(), (float) location.getZ(), 5, (float) velocity.getX(), (float) velocity.getY(), (float) velocity.getZ(), records);

        player.getSession().send(message);
    }
}
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespa.config.proxy; import com.yahoo.jrt.Acceptor; import com.yahoo.jrt.Int32Value; import com.yahoo.jrt.ListenFailedException; import com.yahoo.jrt.Method; import com.yahoo.jrt.Request; import com.yahoo.jrt.Spec; import com.yahoo.jrt.StringArray; import com.yahoo.jrt.StringValue; import com.yahoo.jrt.Supervisor; import com.yahoo.jrt.Target; import com.yahoo.jrt.TargetWatcher; import com.yahoo.vespa.config.JRTMethods; import com.yahoo.vespa.config.RawConfig; import com.yahoo.vespa.config.protocol.JRTServerConfigRequest; import com.yahoo.vespa.config.protocol.JRTServerConfigRequestV3; import java.util.Arrays; import java.util.Iterator; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.logging.Level; import java.util.logging.Logger; /** * An RPC server that handles config and file distribution requests. 
* * @author hmusum */ public class ConfigProxyRpcServer implements Runnable, TargetWatcher { private final static Logger log = Logger.getLogger(ConfigProxyRpcServer.class.getName()); static final int TRACELEVEL = 6; private final Spec spec; private final Supervisor supervisor; private final ProxyServer proxyServer; private final ExecutorService rpcExecutor = Executors.newFixedThreadPool(8); ConfigProxyRpcServer(ProxyServer proxyServer, Supervisor supervisor, Spec spec) { this.proxyServer = proxyServer; this.spec = spec; this.supervisor = supervisor; declareConfigMethods(); } public void run() { try { Acceptor acceptor = supervisor.listen(spec); log.log(Level.FINE, () -> "Ready for requests on " + spec); supervisor.transport().join(); acceptor.shutdown().join(); } catch (ListenFailedException e) { proxyServer.stop(); throw new RuntimeException("Could not listen on " + spec, e); } } void shutdown() { try { rpcExecutor.shutdownNow(); rpcExecutor.awaitTermination(10, TimeUnit.SECONDS); } catch (InterruptedException e) { throw new RuntimeException(e); } supervisor.transport().shutdown().join(); } Spec getSpec() { return spec; } private void declareConfigMethods() { supervisor.addMethod(JRTMethods.createConfigV3GetConfigMethod(this::getConfigV3)); supervisor.addMethod(new Method("ping", "", "i", this::ping) .methodDesc("ping") .returnDesc(0, "ret code", "return code, 0 is OK")); supervisor.addMethod(new Method("listCachedConfig", "", "S", this::listCachedConfig) .methodDesc("list cached configs)") .returnDesc(0, "data", "string array of configs")); supervisor.addMethod(new Method("listCachedConfigFull", "", "S", this::listCachedConfigFull) .methodDesc("list cached configs with cache content)") .returnDesc(0, "data", "string array of configs")); supervisor.addMethod(new Method("listSourceConnections", "", "S", this::listSourceConnections) .methodDesc("list config source connections)") .returnDesc(0, "data", "string array of source connections")); supervisor.addMethod(new 
Method("invalidateCache", "", "S", this::invalidateCache) .methodDesc("list config source connections)") .returnDesc(0, "data", "0 if success, 1 otherwise")); supervisor.addMethod(new Method("updateSources", "s", "s", this::updateSources) .methodDesc("update list of config sources") .returnDesc(0, "ret", "list of updated config sources")); supervisor.addMethod(new Method("setMode", "s", "S", this::setMode) .methodDesc("Set config proxy mode { default | memorycache }") .returnDesc(0, "ret", "0 if success, 1 otherwise as first element, description as second element")); supervisor.addMethod(new Method("getMode", "", "s", this::getMode) .methodDesc("What serving mode the config proxy is in (default, memorycache)") .returnDesc(0, "ret", "mode as a string")); supervisor.addMethod(new Method("dumpCache", "s", "s", this::dumpCache) .methodDesc("Dump cache to disk") .paramDesc(0, "path", "path to write cache contents to") .returnDesc(0, "ret", "Empty string or error message")); } //---------------- RPC methods ------------------------------------ /** * Handles RPC method "config.v3.getConfig" requests. * * @param req a Request */ private void getConfigV3(Request req) { dispatchRpcRequest(req, () -> { JRTServerConfigRequest request = JRTServerConfigRequestV3.createFromRequest(req); req.target().addWatcher(this); getConfigImpl(request); }); } /** * Returns 0 if server is alive. 
* * @param req a Request */ private void ping(Request req) { dispatchRpcRequest(req, () -> { req.returnValues().add(new Int32Value(0)); req.returnRequest(); }); } private void listCachedConfig(Request req) { dispatchRpcRequest(req, () -> listCachedConfig(req, false)); } private void listCachedConfigFull(Request req) { dispatchRpcRequest(req, () -> listCachedConfig(req, true)); } private void listSourceConnections(Request req) { dispatchRpcRequest(req, () -> { String[] ret = new String[2]; ret[0] = "Current source: " + proxyServer.getActiveSourceConnection(); ret[1] = "All sources:\n" + printSourceConnections(); req.returnValues().add(new StringArray(ret)); req.returnRequest(); }); } private void updateSources(Request req) { dispatchRpcRequest(req, () -> { String sources = req.parameters().get(0).asString(); String ret; System.out.println(proxyServer.getMode()); if (proxyServer.getMode().requiresConfigSource()) { proxyServer.updateSourceConnections(Arrays.asList(sources.split(","))); ret = "Updated config sources to: " + sources; } else { ret = "Cannot update sources when in '" + proxyServer.getMode().name() + "' mode"; } req.returnValues().add(new StringValue(ret)); req.returnRequest(); }); } private void invalidateCache(Request req) { dispatchRpcRequest(req, () -> { proxyServer.memoryCache().clear(); String[] s = new String[2]; s[0] = "0"; s[1] = "success"; req.returnValues().add(new StringArray(s)); req.returnRequest(); }); } private void setMode(Request req) { dispatchRpcRequest(req, () -> { String suppliedMode = req.parameters().get(0).asString(); String[] s = new String[2]; try { proxyServer.setMode(suppliedMode); s[0] = "0"; s[1] = "success"; } catch (Exception e) { s[0] = "1"; s[1] = e.getMessage(); } req.returnValues().add(new StringArray(s)); req.returnRequest(); }); } private void getMode(Request req) { dispatchRpcRequest(req, () -> { req.returnValues().add(new StringValue(proxyServer.getMode().name())); req.returnRequest(); }); } private void 
dumpCache(Request req) { dispatchRpcRequest(req, () -> { final MemoryCache memoryCache = proxyServer.memoryCache(); req.returnValues().add(new StringValue(memoryCache.dumpCacheToDisk(req.parameters().get(0).asString(), memoryCache))); req.returnRequest(); }); } //---------------------------------------------------- private void dispatchRpcRequest(Request request, Runnable handler) { request.detach(); log.log(Level.FINEST, () -> String.format("Dispatching RPC request %s", requestLogId(request))); rpcExecutor.execute(() -> { try { log.log(Level.FINEST, () -> String.format("Executing RPC request %s.", requestLogId(request))); handler.run(); } catch (Exception e) { log.log(Level.WARNING, String.format("Exception thrown during execution of RPC request %s: %s", requestLogId(request), e.getMessage()), e); } }); } private String requestLogId(Request request) { return String.format("%s/%08X", request.methodName(), request.hashCode()); } /** * Handles all versions of "getConfig" requests. * * @param request a Request */ private void getConfigImpl(JRTServerConfigRequest request) { ResponseHandler responseHandler = new ResponseHandler(); request.getRequestTrace().trace(TRACELEVEL, "Config proxy getConfig()"); log.log(Level.FINE, () ->"getConfig: " + request.getShortDescription() + ",config checksums=" + request.getRequestConfigChecksums()); if (!request.validateParameters()) { // Error code is set in verifyParameters if parameters are not OK. 
log.log(Level.WARNING, "Parameters for request " + request + " did not validate: " + request.errorCode() + " : " + request.errorMessage()); responseHandler.returnErrorResponse(request, request.errorCode(), "Parameters for request " + request.getShortDescription() + " did not validate: " + request.errorMessage()); return; } try { RawConfig config = proxyServer.resolveConfig(request); if (config == null) { log.log(Level.FINEST, () -> "No config received yet for " + request.getShortDescription() + ", not sending response"); } else if (ProxyServer.configOrGenerationHasChanged(config, request)) { responseHandler.returnOkResponse(request, config); } else { log.log(Level.FINEST, () -> "No new config for " + request.getShortDescription() + ", not sending response"); } } catch (Exception e) { e.printStackTrace(); responseHandler.returnErrorResponse(request, com.yahoo.vespa.config.ErrorCode.INTERNAL_ERROR, e.getMessage()); } } private String printSourceConnections() { StringBuilder sb = new StringBuilder(); for (String s : proxyServer.getSourceConnections()) { sb.append(s).append("\n"); } return sb.toString(); } private void listCachedConfig(Request req, boolean full) { String[] ret; MemoryCache cache = proxyServer.memoryCache(); ret = new String[cache.size()]; int i = 0; for (RawConfig config : cache.values()) { StringBuilder sb = new StringBuilder(); sb.append(config.getNamespace()); sb.append("."); sb.append(config.getName()); sb.append(","); sb.append(config.getConfigId()); sb.append(","); sb.append(config.getGeneration()); sb.append(","); sb.append(config.getPayloadChecksums()); if (full) { sb.append(","); sb.append(config.getPayload()); } ret[i] = sb.toString(); i++; } Arrays.sort(ret); req.returnValues().add(new StringArray(ret)); req.returnRequest(); } /** * Removes invalid targets (closed client connections) from delayedResponsesQueue. 
* * @param target a Target that has become invalid (i.e, client has closed connection) */ @Override public void notifyTargetInvalid(Target target) { log.log(Level.FINE, () -> "Target invalid " + target); for (Iterator<DelayedResponse> it = proxyServer.delayedResponses().responses().iterator(); it.hasNext(); ) { DelayedResponse delayed = it.next(); JRTServerConfigRequest request = delayed.getRequest(); if (request.getRequest().target().equals(target)) { log.log(Level.FINE, () -> "Removing " + request.getShortDescription()); it.remove(); } } // TODO: Could we also cancel active getConfig requests upstream if the client was the only one // requesting this config? } }
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.trino.sql.planner;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import io.trino.Session;
import io.trino.SystemSessionProperties;
import io.trino.cost.StatsAndCosts;
import io.trino.execution.QueryManagerConfig;
import io.trino.execution.scheduler.BucketNodeMap;
import io.trino.execution.warnings.WarningCollector;
import io.trino.metadata.Metadata;
import io.trino.metadata.TableHandle;
import io.trino.metadata.TableProperties.TablePartitioning;
import io.trino.spi.TrinoException;
import io.trino.spi.TrinoWarning;
import io.trino.spi.connector.ConnectorPartitionHandle;
import io.trino.spi.connector.ConnectorPartitioningHandle;
import io.trino.spi.type.Type;
import io.trino.sql.planner.plan.AggregationNode;
import io.trino.sql.planner.plan.ExchangeNode;
import io.trino.sql.planner.plan.ExplainAnalyzeNode;
import io.trino.sql.planner.plan.JoinNode;
import io.trino.sql.planner.plan.OutputNode;
import io.trino.sql.planner.plan.PlanFragmentId;
import io.trino.sql.planner.plan.PlanNode;
import io.trino.sql.planner.plan.PlanNodeId;
import io.trino.sql.planner.plan.PlanVisitor;
import io.trino.sql.planner.plan.RemoteSourceNode;
import io.trino.sql.planner.plan.RowNumberNode;
import io.trino.sql.planner.plan.SimplePlanRewriter;
import io.trino.sql.planner.plan.StatisticsWriterNode;
import io.trino.sql.planner.plan.TableDeleteNode;
import io.trino.sql.planner.plan.TableFinishNode;
import io.trino.sql.planner.plan.TableScanNode;
import io.trino.sql.planner.plan.TableWriterNode;
import io.trino.sql.planner.plan.TopNRankingNode;
import io.trino.sql.planner.plan.ValuesNode;
import io.trino.sql.planner.plan.WindowNode;

import javax.inject.Inject;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Predicates.in;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.Iterables.getOnlyElement;
import static io.trino.SystemSessionProperties.getQueryMaxStageCount;
import static io.trino.SystemSessionProperties.isDynamicScheduleForGroupedExecution;
import static io.trino.SystemSessionProperties.isForceSingleNodeOutput;
import static io.trino.operator.StageExecutionDescriptor.ungroupedExecution;
import static io.trino.spi.StandardErrorCode.QUERY_HAS_TOO_MANY_STAGES;
import static io.trino.spi.connector.NotPartitionedPartitionHandle.NOT_PARTITIONED;
import static io.trino.spi.connector.StandardWarningCode.TOO_MANY_STAGES;
import static io.trino.sql.planner.SchedulingOrderVisitor.scheduleOrder;
import static io.trino.sql.planner.SystemPartitioningHandle.COORDINATOR_DISTRIBUTION;
import static io.trino.sql.planner.SystemPartitioningHandle.SINGLE_DISTRIBUTION;
import static io.trino.sql.planner.SystemPartitioningHandle.SOURCE_DISTRIBUTION;
import static io.trino.sql.planner.plan.ExchangeNode.Scope.REMOTE;
import static io.trino.sql.planner.plan.ExchangeNode.Type.REPLICATE;
import static io.trino.sql.planner.planprinter.PlanPrinter.jsonFragmentPlan;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;

/**
 * Splits a logical plan into fragments that can be shipped and executed on distributed nodes
 */
public class PlanFragmenter
{
    // Appended to both the hard-limit error and the soft-limit warning below.
    private static final String TOO_MANY_STAGES_MESSAGE = "" +
            "If the query contains multiple aggregates with DISTINCT over different columns, please set the 'use_mark_distinct' session property to false. " +
            "If the query contains WITH clauses that are referenced more than once, please create temporary table(s) for the queries in those clauses.";

    private final Metadata metadata;
    private final NodePartitioningManager nodePartitioningManager;
    private final QueryManagerConfig config;

    @Inject
    public PlanFragmenter(Metadata metadata, NodePartitioningManager nodePartitioningManager, QueryManagerConfig queryManagerConfig)
    {
        this.metadata = requireNonNull(metadata, "metadata is null");
        this.nodePartitioningManager = requireNonNull(nodePartitioningManager, "nodePartitioningManager is null");
        this.config = requireNonNull(queryManagerConfig, "queryManagerConfig is null");
    }

    /**
     * Fragments the plan, then post-processes the fragment tree: reassigns connector
     * partitioning handles where fragments were merged, tags fragments for grouped
     * execution, and sanity-checks the stage count against session limits.
     */
    public SubPlan createSubPlans(Session session, Plan plan, boolean forceSingleNode, WarningCollector warningCollector)
    {
        Fragmenter fragmenter = new Fragmenter(session, metadata, plan.getTypes(), plan.getStatsAndCosts());

        FragmentProperties properties = new FragmentProperties(new PartitioningScheme(
                Partitioning.create(SINGLE_DISTRIBUTION, ImmutableList.of()),
                plan.getRoot().getOutputSymbols()));
        if (forceSingleNode || isForceSingleNodeOutput(session)) {
            properties = properties.setSingleNodeDistribution();
        }
        PlanNode root = SimplePlanRewriter.rewriteWith(fragmenter, plan.getRoot(), properties);

        SubPlan subPlan = fragmenter.buildRootFragment(root, properties);
        subPlan = reassignPartitioningHandleIfNecessary(session, subPlan);
        subPlan = analyzeGroupedExecution(session, subPlan);

        checkState(!isForceSingleNodeOutput(session) || subPlan.getFragment().getPartitioning().isSingleNode(), "Root of PlanFragment is not single node");

        // TODO: Remove query_max_stage_count session property and use queryManagerConfig.getMaxStageCount() here
        sanityCheckFragmentedPlan(subPlan, warningCollector, getQueryMaxStageCount(session), config.getStageCountWarningThreshold());

        return subPlan;
    }

    /**
     * Fails the query when the fragment count exceeds {@code maxStageCount}; emits a
     * warning when it only exceeds {@code stageCountSoftLimit}.
     */
    private void sanityCheckFragmentedPlan(SubPlan subPlan, WarningCollector warningCollector, int maxStageCount, int stageCountSoftLimit)
    {
        subPlan.sanityCheck();

        int fragmentCount = subPlan.getAllFragments().size();
        if (fragmentCount > maxStageCount) {
            throw new TrinoException(QUERY_HAS_TOO_MANY_STAGES, format(
                    "Number of stages in the query (%s) exceeds the allowed maximum (%s). %s",
                    fragmentCount, maxStageCount, TOO_MANY_STAGES_MESSAGE));
        }
        if (fragmentCount > stageCountSoftLimit) {
            warningCollector.add(new TrinoWarning(TOO_MANY_STAGES, format(
                    "Number of stages in the query (%s) exceeds the soft limit (%s). %s",
                    fragmentCount, stageCountSoftLimit, TOO_MANY_STAGES_MESSAGE)));
        }
    }

    /**
     * Recursively tags fragments whose subtree would benefit from grouped execution
     * with either fixed- or dynamic-lifespan scheduling, depending on the bucket-to-node map.
     */
    private SubPlan analyzeGroupedExecution(Session session, SubPlan subPlan)
    {
        PlanFragment fragment = subPlan.getFragment();
        GroupedExecutionProperties properties = fragment.getRoot().accept(new GroupedExecutionTagger(session, metadata, nodePartitioningManager), null);
        if (properties.isSubTreeUseful()) {
            // Dynamic lifespan scheduling is only preferred when every remote source is a broadcast
            // (REPLICATE) exchange and the session enables it.
            boolean preferDynamic = fragment.getRemoteSourceNodes().stream().allMatch(node -> node.getExchangeType() == REPLICATE)
                    && isDynamicScheduleForGroupedExecution(session);
            BucketNodeMap bucketNodeMap = nodePartitioningManager.getBucketNodeMap(session, fragment.getPartitioning(), preferDynamic);
            if (bucketNodeMap.isDynamic()) {
                fragment = fragment.withDynamicLifespanScheduleGroupedExecution(properties.getCapableTableScanNodes());
            }
            else {
                fragment = fragment.withFixedLifespanScheduleGroupedExecution(properties.getCapableTableScanNodes());
            }
        }
        ImmutableList.Builder<SubPlan> result = ImmutableList.builder();
        for (SubPlan child : subPlan.getChildren()) {
            result.add(analyzeGroupedExecution(session, child));
        }
        return new SubPlan(fragment, result.build());
    }

    private SubPlan reassignPartitioningHandleIfNecessary(Session session, SubPlan subPlan)
    {
        return reassignPartitioningHandleIfNecessaryHelper(session, subPlan, subPlan.getFragment().getPartitioning());
    }

    /**
     * Rewrites table scans in this fragment to use a partitioning compatible with the
     * fragment's partitioning handle, and aligns the fragment's output partitioning
     * with its parent's expectation ({@code newOutputPartitioningHandle}).
     */
    private SubPlan reassignPartitioningHandleIfNecessaryHelper(Session session, SubPlan subPlan, PartitioningHandle newOutputPartitioningHandle)
    {
        PlanFragment fragment = subPlan.getFragment();

        PlanNode newRoot = fragment.getRoot();
        // If the fragment's partitioning is SINGLE or COORDINATOR_ONLY, leave the sources as is (this is for single-node execution)
        if (!fragment.getPartitioning().isSingleNode()) {
            PartitioningHandleReassigner partitioningHandleReassigner = new PartitioningHandleReassigner(fragment.getPartitioning(), metadata, session);
            newRoot = SimplePlanRewriter.rewriteWith(partitioningHandleReassigner, newRoot);
        }
        PartitioningScheme outputPartitioningScheme = fragment.getPartitioningScheme();
        Partitioning newOutputPartitioning = outputPartitioningScheme.getPartitioning();
        if (outputPartitioningScheme.getPartitioning().getHandle().getConnectorId().isPresent()) {
            // Do not replace the handle if the source's output handle is a system one, e.g. broadcast.
            newOutputPartitioning = newOutputPartitioning.withAlternativePartitiongingHandle(newOutputPartitioningHandle);
        }
        PlanFragment newFragment = new PlanFragment(
                fragment.getId(),
                newRoot,
                fragment.getSymbols(),
                fragment.getPartitioning(),
                fragment.getPartitionedSources(),
                new PartitioningScheme(
                        newOutputPartitioning,
                        outputPartitioningScheme.getOutputLayout(),
                        outputPartitioningScheme.getHashColumn(),
                        outputPartitioningScheme.isReplicateNullsAndAny(),
                        outputPartitioningScheme.getBucketToPartition()),
                fragment.getStageExecutionDescriptor(),
                fragment.getStatsAndCosts(),
                fragment.getJsonRepresentation());

        ImmutableList.Builder<SubPlan> childrenBuilder = ImmutableList.builder();
        for (SubPlan child : subPlan.getChildren()) {
            // Each child must produce output compatible with this fragment's partitioning
            childrenBuilder.add(reassignPartitioningHandleIfNecessaryHelper(session, child, fragment.getPartitioning()));
        }
        return new SubPlan(newFragment, childrenBuilder.build());
    }

    /**
     * Plan rewriter that cuts the plan at remote exchanges, producing one
     * {@link PlanFragment} per cut and replacing each exchange with a
     * {@link RemoteSourceNode} referencing the child fragments.
     */
    private static class Fragmenter
            extends SimplePlanRewriter<FragmentProperties>
    {
        private static final int ROOT_FRAGMENT_ID = 0;

        private final Session session;
        private final Metadata metadata;
        private final TypeProvider types;
        private final StatsAndCosts statsAndCosts;
        // Fragment ids are handed out sequentially; 0 is reserved for the root fragment.
        private int nextFragmentId = ROOT_FRAGMENT_ID + 1;

        public Fragmenter(Session session, Metadata metadata, TypeProvider types, StatsAndCosts statsAndCosts)
        {
            this.session = requireNonNull(session, "session is null");
            this.metadata = requireNonNull(metadata, "metadata is null");
            this.types = requireNonNull(types, "types is null");
            this.statsAndCosts = requireNonNull(statsAndCosts, "statsAndCosts is null");
        }

        public SubPlan buildRootFragment(PlanNode root, FragmentProperties properties)
        {
            return buildFragment(root, properties, new PlanFragmentId(String.valueOf(ROOT_FRAGMENT_ID)));
        }

        private PlanFragmentId nextFragmentId()
        {
            return new PlanFragmentId(String.valueOf(nextFragmentId++));
        }

        /** Materializes one fragment from the (already rewritten) subtree rooted at {@code root}. */
        private SubPlan buildFragment(PlanNode root, FragmentProperties properties, PlanFragmentId fragmentId)
        {
            Set<Symbol> dependencies = SymbolsExtractor.extractOutputSymbols(root);
            List<PlanNodeId> schedulingOrder = scheduleOrder(root);
            boolean equals = properties.getPartitionedSources().equals(ImmutableSet.copyOf(schedulingOrder));
            checkArgument(equals, "Expected scheduling order (%s) to contain an entry for all partitioned sources (%s)", schedulingOrder, properties.getPartitionedSources());

            // Only keep types for symbols actually produced inside this fragment
            Map<Symbol, Type> symbols = Maps.filterKeys(types.allTypes(), in(dependencies));

            PlanFragment fragment = new PlanFragment(
                    fragmentId,
                    root,
                    symbols,
                    properties.getPartitioningHandle(),
                    schedulingOrder,
                    properties.getPartitioningScheme(),
                    ungroupedExecution(),
                    statsAndCosts.getForSubplan(root),
                    Optional.of(jsonFragmentPlan(root, symbols, metadata, session)));

            return new SubPlan(fragment, properties.getChildren());
        }

        @Override
        public PlanNode visitOutput(OutputNode node, RewriteContext<FragmentProperties> context)
        {
            if (isForceSingleNodeOutput(session)) {
                context.get().setSingleNodeDistribution();
            }
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitExplainAnalyze(ExplainAnalyzeNode node, RewriteContext<FragmentProperties> context)
        {
            context.get().setCoordinatorOnlyDistribution();
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitStatisticsWriterNode(StatisticsWriterNode node, RewriteContext<FragmentProperties> context)
        {
            context.get().setCoordinatorOnlyDistribution();
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitTableFinish(TableFinishNode node, RewriteContext<FragmentProperties> context)
        {
            context.get().setCoordinatorOnlyDistribution();
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitTableDelete(TableDeleteNode node, RewriteContext<FragmentProperties> context)
        {
            context.get().setCoordinatorOnlyDistribution();
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitTableScan(TableScanNode node, RewriteContext<FragmentProperties> context)
        {
            // Use the connector's partitioning when the scan opts in to node partitioning,
            // otherwise fall back to generic source distribution.
            PartitioningHandle partitioning = metadata.getTableProperties(session, node.getTable())
                    .getTablePartitioning()
                    .filter(value -> node.isUseConnectorNodePartitioning())
                    .map(TablePartitioning::getPartitioningHandle)
                    .orElse(SOURCE_DISTRIBUTION);

            context.get().addSourceDistribution(node.getId(), partitioning, metadata, session);
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitTableWriter(TableWriterNode node, RewriteContext<FragmentProperties> context)
        {
            if (node.getPartitioningScheme().isPresent()) {
                context.get().setDistribution(node.getPartitioningScheme().get().getPartitioning().getHandle(), metadata, session);
            }
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitValues(ValuesNode node, RewriteContext<FragmentProperties> context)
        {
            context.get().setSingleNodeDistribution();
            return context.defaultRewrite(node, context.get());
        }

        @Override
        public PlanNode visitExchange(ExchangeNode exchange, RewriteContext<FragmentProperties> context)
        {
            // Local exchanges stay inside the current fragment; only REMOTE exchanges cut the plan.
            if (exchange.getScope() != REMOTE) {
                return context.defaultRewrite(exchange, context.get());
            }

            PartitioningScheme partitioningScheme = exchange.getPartitioningScheme();

            if (exchange.getType() == ExchangeNode.Type.GATHER) {
                context.get().setSingleNodeDistribution();
            }
            else if (exchange.getType() == ExchangeNode.Type.REPARTITION) {
                context.get().setDistribution(partitioningScheme.getPartitioning().getHandle(), metadata, session);
            }

            // Each exchange source becomes its own child fragment
            ImmutableList.Builder<SubPlan> builder = ImmutableList.builder();
            for (int sourceIndex = 0; sourceIndex < exchange.getSources().size(); sourceIndex++) {
                FragmentProperties childProperties = new FragmentProperties(partitioningScheme.translateOutputLayout(exchange.getInputs().get(sourceIndex)));
                builder.add(buildSubPlan(exchange.getSources().get(sourceIndex), childProperties, context));
            }

            List<SubPlan> children = builder.build();
            context.get().addChildren(children);

            List<PlanFragmentId> childrenIds = children.stream()
                    .map(SubPlan::getFragment)
                    .map(PlanFragment::getId)
                    .collect(toImmutableList());

            return new RemoteSourceNode(exchange.getId(), childrenIds, exchange.getOutputSymbols(), exchange.getOrderingScheme(), exchange.getType());
        }

        private SubPlan buildSubPlan(PlanNode node, FragmentProperties properties, RewriteContext<FragmentProperties> context)
        {
            PlanFragmentId planFragmentId = nextFragmentId();
            PlanNode child = context.rewrite(node, properties);
            return buildFragment(child, properties, planFragmentId);
        }
    }

    /**
     * Mutable per-fragment state accumulated while rewriting: the fragment's
     * partitioning handle (merged from its leaf nodes), its output partitioning
     * scheme, its partitioned source node ids, and its child sub-plans.
     */
    private static class FragmentProperties
    {
        private final List<SubPlan> children = new ArrayList<>();

        private final PartitioningScheme partitioningScheme;

        private Optional<PartitioningHandle> partitioningHandle = Optional.empty();
        private final Set<PlanNodeId> partitionedSources = new HashSet<>();

        public FragmentProperties(PartitioningScheme partitioningScheme)
        {
            this.partitioningScheme = partitioningScheme;
        }

        public List<SubPlan> getChildren()
        {
            return children;
        }

        public FragmentProperties setSingleNodeDistribution()
        {
            if (partitioningHandle.isPresent() && partitioningHandle.get().isSingleNode()) {
                // already single node distribution
                return this;
            }

            checkState(partitioningHandle.isEmpty(),
                    "Cannot overwrite partitioning with %s (currently set to %s)",
                    SINGLE_DISTRIBUTION,
                    partitioningHandle);

            partitioningHandle = Optional.of(SINGLE_DISTRIBUTION);

            return this;
        }

        /**
         * Merges {@code distribution} into the current handle. Precedence: unset and
         * SOURCE_DISTRIBUTION are replaced; single-node and identical handles are kept;
         * otherwise a connector-provided common partitioning is required.
         */
        public FragmentProperties setDistribution(PartitioningHandle distribution, Metadata metadata, Session session)
        {
            if (partitioningHandle.isEmpty()) {
                partitioningHandle = Optional.of(distribution);
                return this;
            }

            PartitioningHandle currentPartitioning = this.partitioningHandle.get();

            if (isCompatibleSystemPartitioning(distribution)) {
                return this;
            }

            if (currentPartitioning.equals(SOURCE_DISTRIBUTION)) {
                this.partitioningHandle = Optional.of(distribution);
                return this;
            }

            // If already system SINGLE or COORDINATOR_ONLY, leave it as is (this is for single-node execution)
            if (currentPartitioning.isSingleNode()) {
                return this;
            }

            if (currentPartitioning.equals(distribution)) {
                return this;
            }

            Optional<PartitioningHandle> commonPartitioning = metadata.getCommonPartitioning(session, currentPartitioning, distribution);
            if (commonPartitioning.isPresent()) {
                partitioningHandle = commonPartitioning;
                return this;
            }

            throw new IllegalStateException(format(
                    "Cannot set distribution to %s. Already set to %s",
                    distribution,
                    this.partitioningHandle));
        }

        /** True when both current and proposed handles are the same system partitioning. */
        private boolean isCompatibleSystemPartitioning(PartitioningHandle distribution)
        {
            ConnectorPartitioningHandle currentHandle = partitioningHandle.get().getConnectorHandle();
            ConnectorPartitioningHandle distributionHandle = distribution.getConnectorHandle();
            if ((currentHandle instanceof SystemPartitioningHandle) &&
                    (distributionHandle instanceof SystemPartitioningHandle)) {
                return ((SystemPartitioningHandle) currentHandle).getPartitioning() ==
                        ((SystemPartitioningHandle) distributionHandle).getPartitioning();
            }
            return false;
        }

        public FragmentProperties setCoordinatorOnlyDistribution()
        {
            if (partitioningHandle.isPresent() && partitioningHandle.get().isCoordinatorOnly()) {
                // already single node distribution
                return this;
            }

            // only system SINGLE can be upgraded to COORDINATOR_ONLY
            checkState(partitioningHandle.isEmpty() || partitioningHandle.get().equals(SINGLE_DISTRIBUTION),
                    "Cannot overwrite partitioning with %s (currently set to %s)",
                    COORDINATOR_DISTRIBUTION,
                    partitioningHandle);

            partitioningHandle = Optional.of(COORDINATOR_DISTRIBUTION);

            return this;
        }

        /**
         * Registers {@code source} as a partitioned source and merges its
         * {@code distribution} into the fragment's handle (same precedence as
         * {@link #setDistribution}, except SOURCE_DISTRIBUTION is not replaced here).
         */
        public FragmentProperties addSourceDistribution(PlanNodeId source, PartitioningHandle distribution, Metadata metadata, Session session)
        {
            requireNonNull(source, "source is null");
            requireNonNull(distribution, "distribution is null");

            partitionedSources.add(source);

            if (partitioningHandle.isEmpty()) {
                partitioningHandle = Optional.of(distribution);
                return this;
            }

            PartitioningHandle currentPartitioning = partitioningHandle.get();

            // If already system SINGLE or COORDINATOR_ONLY, leave it as is (this is for single-node execution)
            if (currentPartitioning.equals(SINGLE_DISTRIBUTION) || currentPartitioning.equals(COORDINATOR_DISTRIBUTION)) {
                return this;
            }

            if (currentPartitioning.equals(distribution)) {
                return this;
            }

            Optional<PartitioningHandle> commonPartitioning = metadata.getCommonPartitioning(session, currentPartitioning, distribution);
            if (commonPartitioning.isPresent()) {
                partitioningHandle = commonPartitioning;
                return this;
            }

            throw new IllegalStateException(format("Cannot overwrite distribution with %s (currently set to %s)", distribution, currentPartitioning));
        }

        public FragmentProperties addChildren(List<SubPlan> children)
        {
            this.children.addAll(children);

            return this;
        }

        public PartitioningScheme getPartitioningScheme()
        {
            return partitioningScheme;
        }

        public PartitioningHandle getPartitioningHandle()
        {
            return partitioningHandle.get();
        }

        public Set<PlanNodeId> getPartitionedSources()
        {
            return partitionedSources;
        }
    }

    /**
     * Bottom-up visitor that decides, per node, whether grouped execution is
     * possible (capable) and whether it would actually help (useful).
     * See {@link GroupedExecutionProperties} for the meaning of the two flags.
     */
    private static class GroupedExecutionTagger
            extends PlanVisitor<GroupedExecutionProperties, Void>
    {
        private final Session session;
        private final Metadata metadata;
        private final NodePartitioningManager nodePartitioningManager;
        private final boolean groupedExecutionEnabled;

        public GroupedExecutionTagger(Session session, Metadata metadata, NodePartitioningManager nodePartitioningManager)
        {
            this.session = requireNonNull(session, "session is null");
            this.metadata = requireNonNull(metadata, "metadata is null");
            this.nodePartitioningManager = requireNonNull(nodePartitioningManager, "nodePartitioningManager is null");
            this.groupedExecutionEnabled = SystemSessionProperties.isGroupedExecutionEnabled(session);
        }

        @Override
        protected GroupedExecutionProperties visitPlan(PlanNode node, Void context)
        {
            if (node.getSources().isEmpty()) {
                return GroupedExecutionProperties.notCapable();
            }
            return processChildren(node);
        }

        @Override
        public GroupedExecutionProperties visitJoin(JoinNode node, Void context)
        {
            GroupedExecutionProperties left = node.getLeft().accept(this, null);
            GroupedExecutionProperties right = node.getRight().accept(this, null);

            if (node.getDistributionType().isEmpty()) {
                // This is possible when the optimizers is invoked with `forceSingleNode` set to true.
                return GroupedExecutionProperties.notCapable();
            }

            if ((node.getType() == JoinNode.Type.RIGHT || node.getType() == JoinNode.Type.FULL) && !right.currentNodeCapable) {
                // For a plan like this, if the fragment participates in grouped execution,
                // the LookupOuterOperator corresponding to the RJoin will not execute properly.
                //
                //   * The operator has to execute as not-grouped because it can only look at the "used" flags in
                //     join build after all probe has finished.
                //   * The operator has to execute as grouped because the subsequent LJoin expects that incoming
                //     operators are grouped. Otherwise, the LJoin won't be able to throw out the build side
                //     for each group as soon as the group completes.
                //
                //         LJoin
                //        /     \
                //   RJoin       Scan
                //  /     \
                // Scan   Remote
                //
                // TODO:
                // The RJoin can still execute as grouped if there is no subsequent operator that depends
                // on the RJoin being executed in a grouped manner. However, this is not currently implemented.
                // Support for this scenario is already implemented in the execution side.
                return GroupedExecutionProperties.notCapable();
            }

            switch (node.getDistributionType().get()) {
                case REPLICATED:
                    // Broadcast join maintains partitioning for the left side.
                    // Right side of a broadcast is not capable of grouped execution because it always comes from a remote exchange.
                    checkState(!right.currentNodeCapable);
                    return left;
                case PARTITIONED:
                    if (left.currentNodeCapable && right.currentNodeCapable) {
                        return new GroupedExecutionProperties(
                                true,
                                true,
                                ImmutableList.<PlanNodeId>builder()
                                        .addAll(left.capableTableScanNodes)
                                        .addAll(right.capableTableScanNodes)
                                        .build());
                    }
                    // right.subTreeUseful && !left.currentNodeCapable:
                    //   It's not particularly helpful to do grouped execution on the right side
                    //   because the benefit is likely cancelled out due to required buffering for hash build.
                    //   In theory, it could still be helpful (e.g. when the underlying aggregation's intermediate group state maybe larger than aggregation output).
                    //   However, this is not currently implemented. JoinBridgeManager need to support such a lifecycle.
                    // !right.currentNodeCapable:
                    //   The build/right side needs to buffer fully for this JOIN, but the probe/left side will still stream through.
                    //   As a result, there is no reason to change currentNodeCapable or subTreeUseful to false.
                    //
                    return left;
            }
            throw new UnsupportedOperationException("Unknown distribution type: " + node.getDistributionType());
        }

        @Override
        public GroupedExecutionProperties visitAggregation(AggregationNode node, Void context)
        {
            GroupedExecutionProperties properties = node.getSource().accept(this, null);
            if (groupedExecutionEnabled && properties.isCurrentNodeCapable()) {
                switch (node.getStep()) {
                    case SINGLE:
                    case FINAL:
                        // A complete aggregation benefits: each group's state can be flushed independently
                        return new GroupedExecutionProperties(true, true, properties.capableTableScanNodes);
                    case PARTIAL:
                    case INTERMEDIATE:
                        return properties;
                }
            }
            return GroupedExecutionProperties.notCapable();
        }

        @Override
        public GroupedExecutionProperties visitWindow(WindowNode node, Void context)
        {
            return processWindowFunction(node);
        }

        @Override
        public GroupedExecutionProperties visitRowNumber(RowNumberNode node, Void context)
        {
            return processWindowFunction(node);
        }

        @Override
        public GroupedExecutionProperties visitTopNRanking(TopNRankingNode node, Void context)
        {
            return processWindowFunction(node);
        }

        private GroupedExecutionProperties processWindowFunction(PlanNode node)
        {
            GroupedExecutionProperties properties = getOnlyElement(node.getSources()).accept(this, null);
            if (groupedExecutionEnabled && properties.isCurrentNodeCapable()) {
                return new GroupedExecutionProperties(true, true, properties.capableTableScanNodes);
            }
            return GroupedExecutionProperties.notCapable();
        }

        @Override
        public GroupedExecutionProperties visitTableScan(TableScanNode node, Void context)
        {
            Optional<TablePartitioning> tablePartitioning = metadata.getTableProperties(session, node.getTable()).getTablePartitioning();
            if (tablePartitioning.isEmpty() || !node.isUseConnectorNodePartitioning()) {
                return GroupedExecutionProperties.notCapable();
            }
            List<ConnectorPartitionHandle> partitionHandles = nodePartitioningManager.listPartitionHandles(session, tablePartitioning.get().getPartitioningHandle());
            if (ImmutableList.of(NOT_PARTITIONED).equals(partitionHandles)) {
                // Connector does not expose addressable partitions, so splits cannot be grouped
                return new GroupedExecutionProperties(false, false, ImmutableList.of());
            }
            return new GroupedExecutionProperties(true, false, ImmutableList.of(node.getId()));
        }

        private GroupedExecutionProperties processChildren(PlanNode node)
        {
            // Each fragment has a partitioning handle, which is derived from leaf nodes in the fragment.
            // Leaf nodes with different partitioning handle are not allowed to share a single fragment
            // (except for special cases as detailed in addSourceDistribution).
            // As a result, it is not necessary to check the compatibility between node.getSources because
            // they are guaranteed to be compatible.

            // * If any child is "not capable", return "not capable"
            // * When all children are capable ("capable and useful" or "capable but not useful")
            //   * if any child is "capable and useful", return "capable and useful"
            //   * if no children is "capable and useful", return "capable but not useful"
            boolean anyUseful = false;
            ImmutableList.Builder<PlanNodeId> capableTableScanNodes = ImmutableList.builder();
            for (PlanNode source : node.getSources()) {
                GroupedExecutionProperties properties = source.accept(this, null);
                if (!properties.isCurrentNodeCapable()) {
                    return GroupedExecutionProperties.notCapable();
                }
                anyUseful |= properties.isSubTreeUseful();
                capableTableScanNodes.addAll(properties.capableTableScanNodes);
            }
            return new GroupedExecutionProperties(true, anyUseful, capableTableScanNodes.build());
        }
    }

    /** Immutable result of {@link GroupedExecutionTagger} for one plan subtree. */
    private static class GroupedExecutionProperties
    {
        // currentNodeCapable:
        //   Whether grouped execution is possible with the current node.
        //   For example, a table scan is capable iff it supports addressable split discovery.
        // subTreeUseful:
        //   Whether grouped execution is beneficial in the current node, or any node below it.
        //   For example, a JOIN can benefit from grouped execution because build can be flushed early, reducing peak memory requirement.
        //
        // In the current implementation, subTreeUseful implies currentNodeCapable.
        // In theory, this doesn't have to be the case. Take an example where a GROUP BY feeds into the build side of a JOIN.
        // Even if JOIN cannot take advantage of grouped execution, it could still be beneficial to execute the GROUP BY with grouped execution
        // (e.g. when the underlying aggregation's intermediate group state may be larger than aggregation output).

        private final boolean currentNodeCapable;
        private final boolean subTreeUseful;
        private final List<PlanNodeId> capableTableScanNodes;

        public GroupedExecutionProperties(boolean currentNodeCapable, boolean subTreeUseful, List<PlanNodeId> capableTableScanNodes)
        {
            this.currentNodeCapable = currentNodeCapable;
            this.subTreeUseful = subTreeUseful;
            this.capableTableScanNodes = ImmutableList.copyOf(requireNonNull(capableTableScanNodes, "capableTableScanNodes is null"));
            // Verify that `subTreeUseful` implies `currentNodeCapable`
            checkArgument(!subTreeUseful || currentNodeCapable);
            checkArgument(currentNodeCapable == !capableTableScanNodes.isEmpty());
        }

        public static GroupedExecutionProperties notCapable()
        {
            return new GroupedExecutionProperties(false, false, ImmutableList.of());
        }

        public boolean isCurrentNodeCapable()
        {
            return currentNodeCapable;
        }

        public boolean isSubTreeUseful()
        {
            return subTreeUseful;
        }

        public List<PlanNodeId> getCapableTableScanNodes()
        {
            return capableTableScanNodes;
        }
    }

    /**
     * Rewrites table scans whose connector partitioning differs from the enclosing
     * fragment's partitioning to an equivalent table handle with a compatible partitioning.
     */
    private static final class PartitioningHandleReassigner
            extends SimplePlanRewriter<Void>
    {
        private final PartitioningHandle fragmentPartitioningHandle;
        private final Metadata metadata;
        private final Session session;

        public PartitioningHandleReassigner(PartitioningHandle fragmentPartitioningHandle, Metadata metadata, Session session)
        {
            this.fragmentPartitioningHandle = fragmentPartitioningHandle;
            this.metadata = metadata;
            this.session = session;
        }

        @Override
        public PlanNode visitTableScan(TableScanNode node, RewriteContext<Void> context)
        {
            PartitioningHandle partitioning = metadata.getTableProperties(session, node.getTable())
                    .getTablePartitioning()
                    .filter(value -> node.isUseConnectorNodePartitioning())
                    .map(TablePartitioning::getPartitioningHandle)
                    .orElse(SOURCE_DISTRIBUTION);

            if (partitioning.equals(fragmentPartitioningHandle)) {
                // do nothing if the current scan node's partitioning matches the fragment's
                return node;
            }

            TableHandle newTable = metadata.makeCompatiblePartitioning(session, node.getTable(), fragmentPartitioningHandle);
            return new TableScanNode(
                    node.getId(),
                    newTable,
                    node.getOutputSymbols(),
                    node.getAssignments(),
                    node.getEnforcedConstraint(),
                    node.isUpdateTarget(),
                    // plan was already fragmented with scan node's partitioning
                    // and new partitioning is compatible with previous one
                    node.getUseConnectorNodePartitioning());
        }
    }
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: http://lwjgl.org/license.php
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.vulkan;

import java.nio.*;

import org.lwjgl.*;
import org.lwjgl.system.*;

import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;

/**
 * <a href="https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkDisplayModePropertiesKHR.html">Khronos Reference Page</a><br>
 * <a href="https://www.khronos.org/registry/vulkan/specs/1.0-wsi_extensions/xhtml/vkspec.html#VkDisplayModePropertiesKHR">Vulkan Specification</a>
 *
 * <p>Describes properties of a display mode.</p>
 *
 * <h5>Valid Usage</h5>
 *
 * <ul>
 * <li>{@code displayMode} <b>must</b> be a valid {@code VkDisplayModeKHR} handle</li>
 * </ul>
 *
 * <h3>Member documentation</h3>
 *
 * <ul>
 * <li>{@code displayMode} &ndash; a handle to the display mode described in this structure. This handle will be valid for the lifetime of the Vulkan instance.</li>
 * <li>{@code parameters} &ndash; a {@link VkDisplayModeParametersKHR} structure describing the display parameters associated with {@code displayMode}</li>
 * </ul>
 *
 * <h3>Layout</h3>
 *
 * <pre><code>struct VkDisplayModePropertiesKHR {
    VkDisplayModeKHR displayMode;
    {@link VkDisplayModeParametersKHR VkDisplayModeParametersKHR} parameters;
}</code></pre>
 */
public class VkDisplayModePropertiesKHR extends Struct {

    /** The struct size in bytes. */
    public static final int SIZEOF;

    /** The struct alignment in bytes. */
    public static final int ALIGNOF;

    /** The struct member offsets. */
    public static final int
        DISPLAYMODE,
        PARAMETERS;

    static {
        // Layout: an 8-byte VkDisplayModeKHR handle followed by the nested
        // VkDisplayModeParametersKHR struct; offsets are computed by index.
        Layout layout = __struct(
            __member(8),
            __member(VkDisplayModeParametersKHR.SIZEOF, VkDisplayModeParametersKHR.ALIGNOF)
        );

        SIZEOF = layout.getSize();
        ALIGNOF = layout.getAlignment();

        DISPLAYMODE = layout.offsetof(0);
        PARAMETERS = layout.offsetof(1);
    }

    // Package-private: wraps an existing memory address; container (if any) keeps the backing buffer reachable.
    VkDisplayModePropertiesKHR(long address, ByteBuffer container) {
        super(address, container);
    }

    /**
     * Creates a {@link VkDisplayModePropertiesKHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkDisplayModePropertiesKHR(ByteBuffer container) {
        this(memAddress(container), checkContainer(container, SIZEOF));
    }

    @Override
    public int sizeof() { return SIZEOF; }

    /** Returns the value of the {@code displayMode} field. */
    public long displayMode() { return ndisplayMode(address()); }
    /** Returns a {@link VkDisplayModeParametersKHR} view of the {@code parameters} field. */
    public VkDisplayModeParametersKHR parameters() { return nparameters(address()); }

    // -----------------------------------

    /** Returns a new {@link VkDisplayModePropertiesKHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkDisplayModePropertiesKHR malloc() {
        return create(nmemAlloc(SIZEOF));
    }

    /** Returns a new {@link VkDisplayModePropertiesKHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkDisplayModePropertiesKHR calloc() {
        return create(nmemCalloc(1, SIZEOF));
    }

    /** Returns a new {@link VkDisplayModePropertiesKHR} instance allocated with {@link BufferUtils}. */
    public static VkDisplayModePropertiesKHR create() {
        return new VkDisplayModePropertiesKHR(BufferUtils.createByteBuffer(SIZEOF));
    }

    /** Returns a new {@link VkDisplayModePropertiesKHR} instance for the specified memory address or {@code null} if the address is {@code NULL}. */
    public static VkDisplayModePropertiesKHR create(long address) {
        return address == NULL ? null : new VkDisplayModePropertiesKHR(address, null);
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer malloc(int capacity) {
        return create(nmemAlloc(capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer calloc(int capacity) {
        return create(nmemCalloc(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer create(int capacity) {
        return new Buffer(BufferUtils.createByteBuffer(capacity * SIZEOF));
    }

    /**
     * Create a {@link VkDisplayModePropertiesKHR.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static Buffer create(long address, int capacity) {
        return address == NULL ? null : new Buffer(address, null, -1, 0, capacity, capacity);
    }

    // -----------------------------------

    /** Returns a new {@link VkDisplayModePropertiesKHR} instance allocated on the thread-local {@link MemoryStack}. */
    public static VkDisplayModePropertiesKHR mallocStack() {
        return mallocStack(stackGet());
    }

    /** Returns a new {@link VkDisplayModePropertiesKHR} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero. */
    public static VkDisplayModePropertiesKHR callocStack() {
        return callocStack(stackGet());
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkDisplayModePropertiesKHR mallocStack(MemoryStack stack) {
        return create(stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkDisplayModePropertiesKHR callocStack(MemoryStack stack) {
        return create(stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated on the thread-local {@link MemoryStack}.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer mallocStack(int capacity) {
        return mallocStack(capacity, stackGet());
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated on the thread-local {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param capacity the buffer capacity
     */
    public static Buffer callocStack(int capacity) {
        return callocStack(capacity, stackGet());
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static Buffer mallocStack(int capacity, MemoryStack stack) {
        return create(stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkDisplayModePropertiesKHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static Buffer callocStack(int capacity, MemoryStack stack) {
        return create(stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** Unsafe version of {@link #displayMode}. */
    public static long ndisplayMode(long struct) { return memGetLong(struct + VkDisplayModePropertiesKHR.DISPLAYMODE); }
    /** Unsafe version of {@link #parameters}. */
    public static VkDisplayModeParametersKHR nparameters(long struct) { return VkDisplayModeParametersKHR.create(struct + VkDisplayModePropertiesKHR.PARAMETERS); }

    // -----------------------------------

    /** An array of {@link VkDisplayModePropertiesKHR} structs. */
    public static final class Buffer extends StructBuffer<VkDisplayModePropertiesKHR, Buffer> {

        /**
         * Creates a new {@link VkDisplayModePropertiesKHR.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkDisplayModePropertiesKHR#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container, container.remaining() / SIZEOF);
        }

        // Package-private: raw view over existing memory with explicit buffer state.
        Buffer(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected Buffer newBufferInstance(long address, ByteBuffer container, int mark, int pos, int lim, int cap) {
            return new Buffer(address, container, mark, pos, lim, cap);
        }

        @Override
        protected VkDisplayModePropertiesKHR newInstance(long address) {
            return new VkDisplayModePropertiesKHR(address, getContainer());
        }

        @Override
        protected int sizeof() {
            return SIZEOF;
        }

        /** Returns the value of the {@code displayMode} field. */
        public long displayMode() { return VkDisplayModePropertiesKHR.ndisplayMode(address()); }
        /** Returns a {@link VkDisplayModeParametersKHR} view of the {@code parameters} field. */
        public VkDisplayModeParametersKHR parameters() { return VkDisplayModePropertiesKHR.nparameters(address()); }

    }

}
/**
 * Copyright (C) 2011-2020 Red Hat, Inc. (https://github.com/Commonjava/indy)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.commonjava.indy.httprox.util;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.math.BigInteger;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.Principal;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateFactory;
import java.security.cert.X509Certificate;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.Date;

import javax.security.auth.x500.X500Principal;

import org.bouncycastle.asn1.x500.X500Name;
import org.bouncycastle.asn1.x509.BasicConstraints;
import org.bouncycastle.asn1.x509.Extension;
import org.bouncycastle.asn1.x509.SubjectPublicKeyInfo;
import org.bouncycastle.cert.X509CertificateHolder;
import org.bouncycastle.cert.X509v3CertificateBuilder;
import org.bouncycastle.cert.jcajce.JcaX509CertificateConverter;
import org.bouncycastle.cert.jcajce.JcaX509v3CertificateBuilder;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.operator.ContentSigner;
import org.bouncycastle.operator.OperatorCreationException;
import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Certificate/keystore helpers for the HTTProx MITM proxy support.
 *
 * Created by ruhan on 9/18/18.
 */
public class CertUtils
{
    public static final String DEFAULT_SIGN_ALGORITHM = "SHA256withRSA";

    public static final String KEY_TYPE_RSA = "RSA";

    public static final String CERT_TYPE_X509 = "X.509";

    public static final int DEFAULT_CERT_EXPIRATION_DAYS = 365;

    public static final long MILLIS_IN_DAY = 1000L * 60 * 60 * 24;

    // Current serial number; handed out once and immediately replaced by a fresh
    // random value (see allocateSerialNumber). Kept public for compatibility.
    public static BigInteger serialNumber = new BigInteger( 64, new SecureRandom() );

    // Dedicated monitor for serial-number rotation. The previous code synchronized
    // on the mutable `serialNumber` field itself while reassigning it inside the
    // synchronized block, so two threads could lock *different* BigInteger
    // instances and be handed the same serial number.
    private static final Object SERIAL_NUMBER_LOCK = new Object();

    private static Logger logger = LoggerFactory.getLogger( CertUtils.class );

    static
    {
        java.security.Security.addProvider( new BouncyCastleProvider() );
    }

    /**
     * Create a self-signed X.509 cert
     *
     * @param pair KeyPair generated for this request
     * @param dn the X.509 Distinguished Name, eg "CN=Test, L=London, C=GB"
     * @param days how many days from now the cert is valid for
     * @param algorithm the signing algorithm, eg "SHA256withRSA"
     * @return X509Certificate newly generated certificate
     */
    public static X509Certificate generateX509Certificate( KeyPair pair, String dn, int days, String algorithm )
            throws GeneralSecurityException, OperatorCreationException, IOException
    {
        JcaX509CertificateConverter converter = new JcaX509CertificateConverter();

        PrivateKey subPrivKey = pair.getPrivate();
        PublicKey subPubKey = pair.getPublic();

        // Sign with the subject's own private key => self-signed.
        ContentSigner contentSigner =
                new JcaContentSignerBuilder( algorithm ).setProvider( BouncyCastleProvider.PROVIDER_NAME )
                                                        .build( subPrivKey );

        X500Name name = new X500Name( dn );
        Date expires = new Date( System.currentTimeMillis() + ( MILLIS_IN_DAY * days ) );
        // Issuer == subject for a self-signed certificate.
        X509CertificateHolder holder = new X509v3CertificateBuilder( name, allocateSerialNumber(), new Date(), expires,
                                                                     name, SubjectPublicKeyInfo.getInstance(
                        subPubKey.getEncoded() ) ).build( contentSigner );

        X509Certificate cert = converter.getCertificate( holder );
        logger.debug( "Created cert using CA private key:\n{}", cert );

        return cert;
    }

    /**
     * Load a certificate from a file
     * @param file location of file
     * @return certificate generated from the encoded file bytes
     * @throws CertificateException
     * @throws IOException
     */
    public static X509Certificate loadX509Certificate( File file )
            throws CertificateException, IOException
    {
        CertificateFactory cf = CertificateFactory.getInstance( CERT_TYPE_X509 );
        Certificate ca;
        try (InputStream caInput = new BufferedInputStream( new FileInputStream( file ) ))
        {
            ca = cf.generateCertificate( caInput );
        }
        return (X509Certificate) ca;
    }

    /**
     * Create a keystore object
     * @return empty keystore
     * @throws KeyStoreException
     * @throws CertificateException
     * @throws NoSuchAlgorithmException
     * @throws IOException
     */
    public static KeyStore createKeyStore()
            throws KeyStoreException, CertificateException, NoSuchAlgorithmException, IOException
    {
        String keyStoreType = KeyStore.getDefaultType();
        KeyStore keyStore = KeyStore.getInstance( keyStoreType );
        // load(null, null) initializes an empty in-memory store.
        keyStore.load( null, null );
        return keyStore;
    }

    /**
     * Load a keystore using the contents of a file to populate the store
     * @param file keystore file
     * @param passwd keystore password
     * @return populated keystore
     * @throws IOException
     * @throws KeyStoreException
     * @throws CertificateException
     * @throws NoSuchAlgorithmException
     */
    public static KeyStore loadKeyStore( File file, String passwd )
            throws IOException, KeyStoreException, CertificateException, NoSuchAlgorithmException
    {
        String keyStoreType = KeyStore.getDefaultType(); // jks
        KeyStore keyStore = KeyStore.getInstance( keyStoreType );
        // try-with-resources: the previous code leaked this stream.
        try (InputStream in = new FileInputStream( file ))
        {
            keyStore.load( in, passwd.toCharArray() );
        }
        return keyStore;
    }

    /**
     * Load a PrivateKey using the encoded bytes of a file
     * @param filename file containing PKCS#8-encoded PrivateKey bytes
     * @return created PrivateKey
     * @throws Exception
     */
    public static PrivateKey getPrivateKey( String filename )
            throws Exception
    {
        byte[] keyBytes = Files.readAllBytes( Paths.get( filename ) );
        PKCS8EncodedKeySpec spec = new PKCS8EncodedKeySpec( keyBytes );
        KeyFactory kf = KeyFactory.getInstance( KEY_TYPE_RSA );
        return kf.generatePrivate( spec );
    }

    /**
     * Load a PublicKey using the contents of a file
     * @param filename file containing X.509-encoded PublicKey bytes
     * @return created PublicKey
     * @throws Exception
     */
    public static PublicKey getPublicKey( String filename )
            throws Exception
    {
        byte[] keyBytes = Files.readAllBytes( Paths.get( filename ) );
        X509EncodedKeySpec spec = new X509EncodedKeySpec( keyBytes );
        KeyFactory kf = KeyFactory.getInstance( KEY_TYPE_RSA );
        return kf.generatePublic( spec );
    }

    /**
     * Generate X509Certificate using objects from existing issuer and subject certificates.
     * The generated certificate is signed by issuer PrivateKey.
     * @param certificate subject certificate providing serial, validity, subject and public key
     * @param issuerCertificate certificate of the signing authority
     * @param issuerPrivateKey key used to sign the result
     * @param isIntermediate when true, mark the result as a CA via basicConstraints
     * @return signed certificate
     * @throws Exception
     */
    public static X509Certificate createSignedCertificate( X509Certificate certificate,
                                                           X509Certificate issuerCertificate,
                                                           PrivateKey issuerPrivateKey, boolean isIntermediate )
            throws Exception
    {
        String issuerSigAlg = issuerCertificate.getSigAlgName();
        X500Principal principal = issuerCertificate.getIssuerX500Principal();

        JcaX509CertificateConverter converter = new JcaX509CertificateConverter();
        JcaContentSignerBuilder contentSignerBuilder =
                new JcaContentSignerBuilder( issuerSigAlg ).setProvider( BouncyCastleProvider.PROVIDER_NAME );

        JcaX509v3CertificateBuilder v3CertGen =
                new JcaX509v3CertificateBuilder( principal, certificate.getSerialNumber(),
                                                 certificate.getNotBefore(), certificate.getNotAfter(),
                                                 certificate.getSubjectX500Principal(), certificate.getPublicKey() );

        if ( isIntermediate )
        {
            v3CertGen.addExtension( Extension.basicConstraints, true, new BasicConstraints( -1 ) );
        }

        return converter.getCertificate( v3CertGen.build( contentSignerBuilder.build( issuerPrivateKey ) ) );
    }

    /**
     * Generate a fresh RSA key pair, self-sign it for the given DN, then re-sign it
     * with the issuer's key, returning the signed certificate plus both keys.
     * @param dn the X.509 Distinguished Name for the new certificate
     * @param issuerCertificate certificate of the signing authority
     * @param issuerPrivateKey key used to sign the result
     * @param isIntermediate when true, mark the result as a CA
     * @return signed certificate with the generated private/public keys
     */
    public static CertificateAndKeys createSignedCertificateAndKey( String dn, X509Certificate issuerCertificate,
                                                                    PrivateKey issuerPrivateKey, boolean isIntermediate )
            throws OperatorCreationException, Exception
    {
        KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance( KEY_TYPE_RSA );
        KeyPair pair = keyPairGenerator.generateKeyPair();

        X509Certificate cert =
                generateX509Certificate( pair, dn, DEFAULT_CERT_EXPIRATION_DAYS, DEFAULT_SIGN_ALGORITHM );
        X509Certificate signedCertificate =
                createSignedCertificate( cert, issuerCertificate, issuerPrivateKey, isIntermediate );

        PublicKey publicKey = signedCertificate.getPublicKey();
        PrivateKey privateKey = pair.getPrivate();
        return new CertificateAndKeys( signedCertificate, privateKey, publicKey );
    }

    /**
     * Atomically hand out the current serial number and replace it with a fresh
     * random 64-bit value, so no two certificates share a serial.
     */
    private static BigInteger allocateSerialNumber()
    {
        synchronized ( SERIAL_NUMBER_LOCK )
        {
            BigInteger sn = serialNumber;
            serialNumber = new BigInteger( 64, new SecureRandom() );
            return sn;
        }
    }
}
/*
 * Copyright 2016 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.keycloak.models.cache.infinispan.authorization;

import org.keycloak.authorization.model.CachedModel;
import org.keycloak.authorization.model.Policy;
import org.keycloak.authorization.model.Resource;
import org.keycloak.authorization.model.ResourceServer;
import org.keycloak.authorization.model.Scope;
import org.keycloak.authorization.store.PolicyStore;
import org.keycloak.authorization.store.ResourceStore;
import org.keycloak.authorization.store.ScopeStore;
import org.keycloak.models.cache.infinispan.authorization.entities.CachedPolicy;
import org.keycloak.representations.idm.authorization.DecisionStrategy;
import org.keycloak.representations.idm.authorization.Logic;

import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import java.util.stream.Collectors;

/**
 * Cache-backed {@link Policy} adapter: reads are served from the {@link CachedPolicy}
 * snapshot until the policy is updated or invalidated, at which point all calls are
 * delegated to the freshly-loaded store model ({@code updated}).
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class PolicyAdapter implements Policy, CachedModel<Policy> {

    private final Supplier<Policy> modelSupplier;
    protected final CachedPolicy cached;
    protected final StoreFactoryCacheSession cacheSession;
    // Non-null once the adapter has switched to delegate mode (see isUpdated()).
    protected Policy updated;

    public PolicyAdapter(CachedPolicy cached, StoreFactoryCacheSession cacheSession) {
        this.cached = cached;
        this.cacheSession = cacheSession;
        this.modelSupplier = this::getPolicyModel;
    }

    /**
     * Loads the underlying store model (once), registers a cache invalidation for
     * this policy, and returns the delegate that mutating calls should operate on.
     */
    @Override
    public Policy getDelegateForUpdate() {
        if (updated == null) {
            updated = modelSupplier.get();
            // Fail fast *before* dereferencing: the previous version called
            // updated.getConfig() first, so a missing policy surfaced as an
            // NPE instead of this explicit error.
            if (updated == null) throw new IllegalStateException("Not found in database");
            String defaultResourceType = updated.getConfig().get("defaultResourceType");
            cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), defaultResourceType, cached.getResourceServerId());
        }
        return updated;
    }

    protected boolean invalidated;

    // Marks the adapter invalid without forcing an immediate reload.
    protected void invalidateFlag() {
        invalidated = true;
    }

    @Override
    public void invalidate() {
        invalidated = true;
        getDelegateForUpdate();
    }

    @Override
    public long getCacheTimestamp() {
        return cached.getCacheTimestamp();
    }

    /**
     * Returns true when reads must go to the delegate instead of the cached
     * snapshot; lazily reloads the delegate after an invalidation.
     */
    protected boolean isUpdated() {
        if (updated != null) return true;
        if (!invalidated) return false;
        updated = cacheSession.getPolicyStoreDelegate().findById(cached.getId(), cached.getResourceServerId());
        if (updated == null) throw new IllegalStateException("Not found in database");
        return true;
    }

    @Override
    public String getId() {
        if (isUpdated()) return updated.getId();
        return cached.getId();
    }

    @Override
    public String getName() {
        if (isUpdated()) return updated.getName();
        return cached.getName();
    }

    @Override
    public void setName(String name) {
        getDelegateForUpdate();
        // Invalidate under the *new* name so lookups by that name are refreshed.
        cacheSession.registerPolicyInvalidation(cached.getId(), name, cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        updated.setName(name);
    }

    @Override
    public ResourceServer getResourceServer() {
        return cacheSession.getResourceServerStore().findById(cached.getResourceServerId());
    }

    @Override
    public String getType() {
        if (isUpdated()) return updated.getType();
        return cached.getType();
    }

    @Override
    public DecisionStrategy getDecisionStrategy() {
        if (isUpdated()) return updated.getDecisionStrategy();
        return cached.getDecisionStrategy();
    }

    @Override
    public void setDecisionStrategy(DecisionStrategy decisionStrategy) {
        getDelegateForUpdate();
        updated.setDecisionStrategy(decisionStrategy);
    }

    @Override
    public Logic getLogic() {
        if (isUpdated()) return updated.getLogic();
        return cached.getLogic();
    }

    @Override
    public void setLogic(Logic logic) {
        getDelegateForUpdate();
        updated.setLogic(logic);
    }

    @Override
    public Map<String, String> getConfig() {
        if (isUpdated()) return updated.getConfig();
        return cached.getConfig(modelSupplier);
    }

    @Override
    public void setConfig(Map<String, String> config) {
        getDelegateForUpdate();
        // defaultResourceType participates in cache keys, so invalidate under
        // both the old and the new value when either defines it.
        if (config.containsKey("defaultResourceType") || cached.getConfig(modelSupplier).containsKey("defaultResourceType")) {
            cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
            cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), config.get("defaultResourceType"), cached.getResourceServerId());
        }
        updated.setConfig(config);
    }

    @Override
    public void removeConfig(String name) {
        getDelegateForUpdate();
        if (name.equals("defaultResourceType")) {
            cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        }
        updated.removeConfig(name);
    }

    @Override
    public void putConfig(String name, String value) {
        getDelegateForUpdate();
        if (name.equals("defaultResourceType")) {
            // Invalidate under both the old and the new defaultResourceType.
            cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
            cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), value, cached.getResourceServerId());
        }
        updated.putConfig(name, value);
    }

    @Override
    public String getDescription() {
        if (isUpdated()) return updated.getDescription();
        return cached.getDescription();
    }

    @Override
    public void setDescription(String description) {
        getDelegateForUpdate();
        updated.setDescription(description);
    }

    // Lazily-built, unmodifiable view of the associated policies (cached path only).
    protected Set<Policy> associatedPolicies;

    @Override
    public Set<Policy> getAssociatedPolicies() {
        if (isUpdated()) {
            return updated.getAssociatedPolicies().stream().map(policy -> new PolicyAdapter(cacheSession.createCachedPolicy(policy, policy.getId()), cacheSession)).collect(Collectors.toSet());
        }
        if (associatedPolicies != null) return associatedPolicies;
        associatedPolicies = new HashSet<>();
        PolicyStore policyStore = cacheSession.getPolicyStore();
        String resourceServerId = cached.getResourceServerId();
        for (String id : cached.getAssociatedPoliciesIds(modelSupplier)) {
            Policy policy = policyStore.findById(id, resourceServerId);
            cacheSession.cachePolicy(policy);
            associatedPolicies.add(policy);
        }
        return associatedPolicies = Collections.unmodifiableSet(associatedPolicies);
    }

    // Lazily-built, unmodifiable view of the policy's resources (cached path only).
    protected Set<Resource> resources;

    @Override
    public Set<Resource> getResources() {
        if (isUpdated()) return updated.getResources();
        if (resources != null) return resources;
        resources = new HashSet<>();
        ResourceStore resourceStore = cacheSession.getResourceStore();
        for (String resourceId : cached.getResourcesIds(modelSupplier)) {
            String resourceServerId = cached.getResourceServerId();
            Resource resource = resourceStore.findById(resourceId, resourceServerId);
            cacheSession.cacheResource(resource);
            resources.add(resource);
        }
        return resources = Collections.unmodifiableSet(resources);
    }

    @Override
    public void addScope(Scope scope) {
        getDelegateForUpdate();
        cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), new HashSet<>(Arrays.asList(scope.getId())), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        updated.addScope(scope);
    }

    @Override
    public void removeScope(Scope scope) {
        getDelegateForUpdate();
        cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), new HashSet<>(Arrays.asList(scope.getId())), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        updated.removeScope(scope);
    }

    @Override
    public void addAssociatedPolicy(Policy associatedPolicy) {
        getDelegateForUpdate();
        updated.addAssociatedPolicy(associatedPolicy);
    }

    @Override
    public void removeAssociatedPolicy(Policy associatedPolicy) {
        getDelegateForUpdate();
        updated.removeAssociatedPolicy(associatedPolicy);
    }

    @Override
    public void addResource(Resource resource) {
        getDelegateForUpdate();
        HashSet<String> resources = new HashSet<>();
        resources.add(resource.getId());
        cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), resources, cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        updated.addResource(resource);
    }

    @Override
    public void removeResource(Resource resource) {
        getDelegateForUpdate();
        HashSet<String> resources = new HashSet<>();
        resources.add(resource.getId());
        cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), resources, cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        updated.removeResource(resource);
    }

    // Lazily-built, unmodifiable view of the policy's scopes (cached path only).
    protected Set<Scope> scopes;

    @Override
    public Set<Scope> getScopes() {
        if (isUpdated()) return updated.getScopes();
        if (scopes != null) return scopes;
        scopes = new HashSet<>();
        ScopeStore scopeStore = cacheSession.getScopeStore();
        String resourceServerId = cached.getResourceServerId();
        for (String scopeId : cached.getScopesIds(modelSupplier)) {
            Scope scope = scopeStore.findById(scopeId, resourceServerId);
            cacheSession.cacheScope(scope);
            scopes.add(scope);
        }
        return scopes = Collections.unmodifiableSet(scopes);
    }

    @Override
    public String getOwner() {
        if (isUpdated()) return updated.getOwner();
        return cached.getOwner();
    }

    @Override
    public void setOwner(String owner) {
        getDelegateForUpdate();
        cacheSession.registerPolicyInvalidation(cached.getId(), cached.getName(), cached.getResourcesIds(modelSupplier), cached.getScopesIds(modelSupplier), cached.getConfig(modelSupplier).get("defaultResourceType"), cached.getResourceServerId());
        updated.setOwner(owner);
    }

    // Identity is by policy id, matching other Policy implementations.
    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Policy)) return false;

        Policy that = (Policy) o;

        return that.getId().equals(getId());
    }

    @Override
    public int hashCode() {
        return getId().hashCode();
    }

    private Policy getPolicyModel() {
        return cacheSession.getPolicyStoreDelegate().findById(cached.getId(), cached.getResourceServerId());
    }
}
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.schemaorg.core;

import com.google.common.collect.ImmutableList;
import com.google.schemaorg.JsonLdContext;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.core.datatype.Date;
import com.google.schemaorg.core.datatype.DateTime;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.PopularityScoreSpecification;
import javax.annotation.Nullable;

/**
 * Interface of <a href="http://schema.org/Event">http://schema.org/Event</a>.
 *
 * <p>Generated-style schema.org type: each property has typed {@code add*} overloads on the
 * {@link Builder} plus a String overload, and a {@code get*List()} accessor returning every value
 * added for that property.
 */
public interface Event extends Thing {

  /** Builder interface of <a href="http://schema.org/Event">http://schema.org/Event</a>. */
  public interface Builder extends Thing.Builder {

    // JSON-LD plumbing, narrowed from Thing.Builder so chained calls keep the Event.Builder type.
    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext context);

    @Override
    Builder addJsonLdContext(@Nullable JsonLdContext.Builder context);

    @Override
    Builder setJsonLdId(@Nullable String value);

    @Override
    Builder setJsonLdReverse(String property, Thing obj);

    @Override
    Builder setJsonLdReverse(String property, Thing.Builder builder);

    /** Add a value to property additionalType (URL or its String form). */
    Builder addAdditionalType(URL value);

    Builder addAdditionalType(String value);

    /** Add a value to property aggregateRating (typed value, its builder, or a String). */
    Builder addAggregateRating(AggregateRating value);

    Builder addAggregateRating(AggregateRating.Builder value);

    Builder addAggregateRating(String value);

    /** Add a value to property alternateName. */
    Builder addAlternateName(Text value);

    Builder addAlternateName(String value);

    /** Add a value to property attendee (Organization, Person, their builders, or a String). */
    Builder addAttendee(Organization value);

    Builder addAttendee(Organization.Builder value);

    Builder addAttendee(Person value);

    Builder addAttendee(Person.Builder value);

    Builder addAttendee(String value);

    /** Add a value to property attendees (legacy plural of attendee; same accepted types). */
    Builder addAttendees(Organization value);

    Builder addAttendees(Organization.Builder value);

    Builder addAttendees(Person value);

    Builder addAttendees(Person.Builder value);

    Builder addAttendees(String value);

    /** Add a value to property description. */
    Builder addDescription(Text value);

    Builder addDescription(String value);

    /** Add a value to property doorTime. */
    Builder addDoorTime(DateTime value);

    Builder addDoorTime(String value);

    /** Add a value to property duration. */
    Builder addDuration(Duration value);

    Builder addDuration(Duration.Builder value);

    Builder addDuration(String value);

    /** Add a value to property endDate. */
    Builder addEndDate(Date value);

    Builder addEndDate(String value);

    /** Add a value to property eventStatus. */
    Builder addEventStatus(EventStatusType value);

    Builder addEventStatus(String value);

    /** Add a value to property image (ImageObject, its builder, URL, or a String). */
    Builder addImage(ImageObject value);

    Builder addImage(ImageObject.Builder value);

    Builder addImage(URL value);

    Builder addImage(String value);

    /** Add a value to property inLanguage (Language, its builder, Text, or a String). */
    Builder addInLanguage(Language value);

    Builder addInLanguage(Language.Builder value);

    Builder addInLanguage(Text value);

    Builder addInLanguage(String value);

    /** Add a value to property location (Place, PostalAddress, their builders, Text, or a String). */
    Builder addLocation(Place value);

    Builder addLocation(Place.Builder value);

    Builder addLocation(PostalAddress value);

    Builder addLocation(PostalAddress.Builder value);

    Builder addLocation(Text value);

    Builder addLocation(String value);

    /** Add a value to property mainEntityOfPage (CreativeWork, its builder, URL, or a String). */
    Builder addMainEntityOfPage(CreativeWork value);

    Builder addMainEntityOfPage(CreativeWork.Builder value);

    Builder addMainEntityOfPage(URL value);

    Builder addMainEntityOfPage(String value);

    /** Add a value to property name. */
    Builder addName(Text value);

    Builder addName(String value);

    /** Add a value to property offers. */
    Builder addOffers(Offer value);

    Builder addOffers(Offer.Builder value);

    Builder addOffers(String value);

    /** Add a value to property organizer (Organization, Person, their builders, or a String). */
    Builder addOrganizer(Organization value);

    Builder addOrganizer(Organization.Builder value);

    Builder addOrganizer(Person value);

    Builder addOrganizer(Person.Builder value);

    Builder addOrganizer(String value);

    /** Add a value to property performer (Organization, Person, their builders, or a String). */
    Builder addPerformer(Organization value);

    Builder addPerformer(Organization.Builder value);

    Builder addPerformer(Person value);

    Builder addPerformer(Person.Builder value);

    Builder addPerformer(String value);

    /** Add a value to property performers (legacy plural of performer; same accepted types). */
    Builder addPerformers(Organization value);

    Builder addPerformers(Organization.Builder value);

    Builder addPerformers(Person value);

    Builder addPerformers(Person.Builder value);

    Builder addPerformers(String value);

    /** Add a value to property potentialAction. */
    Builder addPotentialAction(Action value);

    Builder addPotentialAction(Action.Builder value);

    Builder addPotentialAction(String value);

    /** Add a value to property previousStartDate. */
    Builder addPreviousStartDate(Date value);

    Builder addPreviousStartDate(String value);

    /** Add a value to property recordedIn. */
    Builder addRecordedIn(CreativeWork value);

    Builder addRecordedIn(CreativeWork.Builder value);

    Builder addRecordedIn(String value);

    /** Add a value to property review. */
    Builder addReview(Review value);

    Builder addReview(Review.Builder value);

    Builder addReview(String value);

    /** Add a value to property sameAs. */
    Builder addSameAs(URL value);

    Builder addSameAs(String value);

    /** Add a value to property startDate. */
    Builder addStartDate(Date value);

    Builder addStartDate(String value);

    /** Add a value to property subEvent. */
    Builder addSubEvent(Event value);

    Builder addSubEvent(Event.Builder value);

    Builder addSubEvent(String value);

    /** Add a value to property subEvents (legacy plural of subEvent). */
    Builder addSubEvents(Event value);

    Builder addSubEvents(Event.Builder value);

    Builder addSubEvents(String value);

    /** Add a value to property superEvent. */
    Builder addSuperEvent(Event value);

    Builder addSuperEvent(Event.Builder value);

    Builder addSuperEvent(String value);

    /** Add a value to property typicalAgeRange. */
    Builder addTypicalAgeRange(Text value);

    Builder addTypicalAgeRange(String value);

    /** Add a value to property url. */
    Builder addUrl(URL value);

    Builder addUrl(String value);

    /** Add a value to property workFeatured. */
    Builder addWorkFeatured(CreativeWork value);

    Builder addWorkFeatured(CreativeWork.Builder value);

    Builder addWorkFeatured(String value);

    /** Add a value to property workPerformed. */
    Builder addWorkPerformed(CreativeWork value);

    Builder addWorkPerformed(CreativeWork.Builder value);

    Builder addWorkPerformed(String value);

    /** Add a value to property detailedDescription (Google extension). */
    Builder addDetailedDescription(Article value);

    Builder addDetailedDescription(Article.Builder value);

    Builder addDetailedDescription(String value);

    /** Add a value to property popularityScore (Google extension). */
    Builder addPopularityScore(PopularityScoreSpecification value);

    Builder addPopularityScore(PopularityScoreSpecification.Builder value);

    Builder addPopularityScore(String value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The value of the property.
     */
    Builder addProperty(String name, SchemaOrgType value);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param builder The schema.org object builder for the property value.
     */
    Builder addProperty(String name, Thing.Builder builder);

    /**
     * Add a value to property.
     *
     * @param name The property name.
     * @param value The string value of the property.
     */
    Builder addProperty(String name, String value);

    /** Build a {@link Event} object. */
    Event build();
  }

  /** Returns the value list of property aggregateRating; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getAggregateRatingList();

  /** Returns the value list of property attendee; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getAttendeeList();

  /** Returns the value list of property attendees; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getAttendeesList();

  /** Returns the value list of property doorTime; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getDoorTimeList();

  /** Returns the value list of property duration; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getDurationList();

  /** Returns the value list of property endDate; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getEndDateList();

  /** Returns the value list of property eventStatus; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getEventStatusList();

  /** Returns the value list of property inLanguage; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getInLanguageList();

  /** Returns the value list of property location; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getLocationList();

  /** Returns the value list of property offers; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getOffersList();

  /** Returns the value list of property organizer; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getOrganizerList();

  /** Returns the value list of property performer; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getPerformerList();

  /** Returns the value list of property performers; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getPerformersList();

  /** Returns the value list of property previousStartDate; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getPreviousStartDateList();

  /** Returns the value list of property recordedIn; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getRecordedInList();

  /** Returns the value list of property review; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getReviewList();

  /** Returns the value list of property startDate; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getStartDateList();

  /** Returns the value list of property subEvent; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getSubEventList();

  /** Returns the value list of property subEvents; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getSubEventsList();

  /** Returns the value list of property superEvent; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getSuperEventList();

  /** Returns the value list of property typicalAgeRange; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getTypicalAgeRangeList();

  /** Returns the value list of property workFeatured; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getWorkFeaturedList();

  /** Returns the value list of property workPerformed; empty if the property is not set. */
  ImmutableList<SchemaOrgType> getWorkPerformedList();
}
/* Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.activiti.engine.impl.bpmn.helper; import java.util.List; import org.activiti.engine.ActivitiActivityExecutionException; import org.activiti.engine.ActivitiException; import org.activiti.engine.delegate.BpmnError; import org.activiti.engine.delegate.event.impl.ActivitiEventBuilder; import org.activiti.engine.impl.bpmn.behavior.EventSubProcessStartEventActivityBehavior; import org.activiti.engine.impl.bpmn.parser.BpmnParse; import org.activiti.engine.impl.bpmn.parser.ErrorEventDefinition; import org.activiti.engine.impl.context.Context; import org.activiti.engine.impl.persistence.entity.ExecutionEntity; import org.activiti.engine.impl.pvm.PvmActivity; import org.activiti.engine.impl.pvm.PvmProcessDefinition; import org.activiti.engine.impl.pvm.PvmScope; import org.activiti.engine.impl.pvm.delegate.ActivityExecution; import org.activiti.engine.impl.pvm.process.ActivityImpl; import org.activiti.engine.impl.pvm.process.ProcessDefinitionImpl; import org.activiti.engine.impl.pvm.process.ScopeImpl; import org.activiti.engine.impl.pvm.runtime.AtomicOperation; import org.activiti.engine.impl.pvm.runtime.InterpretableExecution; import org.activiti.engine.impl.util.ReflectUtil; import org.apache.commons.lang3.StringUtils; import org.flowable.bpmn.model.MapExceptionEntry; import org.flowable.engine.delegate.event.FlowableEngineEventType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * This class is responsible for 
 * finding and executing error handlers for BPMN Errors.
 *
 * Possible error handlers include Error Intermediate Events and Error Event Sub-Processes.
 *
 * @author Falko Menge
 * @author Daniel Meyer
 * @author Saeid Mirzaei
 */
public class ErrorPropagation {

  private static final Logger LOG = LoggerFactory.getLogger(ErrorPropagation.class);

  /** Propagates the given {@link BpmnError}'s error code up from {@code execution}. */
  public static void propagateError(BpmnError error, ActivityExecution execution) {
    propagateError(error.getErrorCode(), execution);
  }

  /**
   * Walks up the execution hierarchy (current process, then super/calling processes) looking for
   * an error handler that catches {@code errorCode}. Executes the first handler found; if none
   * exists anywhere in the hierarchy, rethrows the error as an uncaught {@link BpmnError}.
   */
  public static void propagateError(String errorCode, ActivityExecution execution) {
    while (execution != null) {
      String eventHandlerId = findLocalErrorEventHandler(execution, errorCode);
      if (eventHandlerId != null) {
        executeCatch(eventHandlerId, execution, errorCode);
        break;
      }
      if (execution.isProcessInstanceType()) {
        // The error escaped this process instance: dispatch a "process completed with error" event
        // before moving on to the super execution (the calling process, if any).
        if (Context.getProcessEngineConfiguration() != null && Context.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) {
          Context.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(
              ActivitiEventBuilder.createEntityEvent(FlowableEngineEventType.PROCESS_COMPLETED_WITH_ERROR_END_EVENT, execution));
        }
      }
      execution = getSuperExecution(execution);
    }
    if (execution == null) {
      // No handler found in this process or any ancestor call hierarchy.
      throw new BpmnError(errorCode, "No catching boundary event found for error with errorCode '" + errorCode + "', neither in same process nor in parent process");
    }
  }

  /**
   * Searches the activity scope chain of {@code execution} for an {@link ErrorEventDefinition}
   * catching {@code errorCode} and returns the id of its handler activity, or {@code null} if no
   * scope up to the process definition declares a matching handler.
   */
  private static String findLocalErrorEventHandler(ActivityExecution execution, String errorCode) {
    PvmScope scope = execution.getActivity();
    while (scope != null) {
      @SuppressWarnings("unchecked")
      List<ErrorEventDefinition> definitions = (List<ErrorEventDefinition>) scope.getProperty(BpmnParse.PROPERTYNAME_ERROR_EVENT_DEFINITIONS);
      if (definitions != null) {
        // definitions are sorted by precedence, ie. event subprocesses first.
        for (ErrorEventDefinition errorEventDefinition : definitions) {
          if (errorEventDefinition.catches(errorCode)) {
            return scope.findActivity(errorEventDefinition.getHandlerActivityId()).getId();
          }
        }
      }
      // search for error handlers in parent scopes
      if (scope instanceof PvmActivity) {
        scope = ((PvmActivity) scope).getParent();
      } else {
        // scope was the process definition itself: nothing above it.
        scope = null;
      }
    }
    return null;
  }

  /**
   * Returns the execution in the calling (super) process to continue propagation from, or
   * {@code null} when this process instance was not started via a call activity. Non-scope super
   * executions are skipped up to their parent.
   */
  private static ActivityExecution getSuperExecution(ActivityExecution execution) {
    ExecutionEntity executionEntity = (ExecutionEntity) execution;
    ExecutionEntity superExecution = executionEntity.getProcessInstance().getSuperExecution();
    if (superExecution != null && !superExecution.isScope()) {
      return superExecution.getParent();
    }
    return superExecution;
  }

  /**
   * Locates the execution sitting at the scope that catches the error and hands it to the error
   * handler activity {@code errorHandlerId}. The traversal of the activity/execution trees here is
   * strictly order-dependent (concurrency and multi-instance cases); do not reorder.
   *
   * @throws ActivitiException if the handler activity or a matching parent execution cannot be found
   */
  private static void executeCatch(String errorHandlerId, ActivityExecution execution, String errorCode) {
    ProcessDefinitionImpl processDefinition = ((ExecutionEntity) execution).getProcessDefinition();
    ActivityImpl errorHandler = processDefinition.findActivity(errorHandlerId);
    if (errorHandler == null) {
      throw new ActivitiException(errorHandlerId + " not found in process definition");
    }
    boolean matchingParentFound = false;
    ActivityExecution leavingExecution = execution;
    ActivityImpl currentActivity = (ActivityImpl) execution.getActivity();
    ScopeImpl catchingScope = errorHandler.getParent();
    if (catchingScope instanceof ActivityImpl) {
      ActivityImpl catchingScopeActivity = (ActivityImpl) catchingScope;
      if (!catchingScopeActivity.isScope()) { // event subprocesses
        catchingScope = catchingScopeActivity.getParent();
      }
    }
    if (catchingScope instanceof PvmProcessDefinition) {
      // Handler is at process level: execute it on the process instance execution directly.
      executeEventHandler(errorHandler, ((ExecutionEntity) execution).getProcessInstance(), errorCode);
    } else {
      if (currentActivity.getId().equals(catchingScope.getId())) {
        matchingParentFound = true;
      } else {
        currentActivity = (ActivityImpl) currentActivity.getParent();
        // Traverse parents until one is found that is a scope
        // and matches the activity the boundary event is defined on.
        // Concurrent executions move up the execution tree without consuming an activity level.
        while (!matchingParentFound && leavingExecution != null && currentActivity != null) {
          if (!leavingExecution.isConcurrent() && currentActivity.getId().equals(catchingScope.getId())) {
            matchingParentFound = true;
          } else if (leavingExecution.isConcurrent()) {
            leavingExecution = leavingExecution.getParent();
          } else {
            currentActivity = currentActivity.getParentActivity();
            leavingExecution = leavingExecution.getParent();
          }
        }
        // Follow parents up until matching scope can't be found anymore (needed to support for multi-instance)
        while (leavingExecution != null && leavingExecution.getParent() != null && leavingExecution.getParent().getActivity() != null && leavingExecution.getParent().getActivity().getId().equals(catchingScope.getId())) {
          leavingExecution = leavingExecution.getParent();
        }
      }
      if (matchingParentFound && leavingExecution != null) {
        executeEventHandler(errorHandler, leavingExecution, errorCode);
      } else {
        throw new ActivitiException("No matching parent execution for activity " + errorHandlerId + " found");
      }
    }
  }

  /**
   * Ends the current activity of {@code leavingExecution} in history, dispatches the
   * ACTIVITY_ERROR_RECEIVED event, and then either starts the event sub-process (when the handler
   * is an event sub-process start event) or executes the boundary event activity directly.
   */
  private static void executeEventHandler(ActivityImpl borderEventActivity, ActivityExecution leavingExecution, String errorCode) {
    if (Context.getProcessEngineConfiguration() != null && Context.getProcessEngineConfiguration().getEventDispatcher().isEnabled()) {
      Context.getProcessEngineConfiguration().getEventDispatcher().dispatchEvent(
          ActivitiEventBuilder.createErrorEvent(FlowableEngineEventType.ACTIVITY_ERROR_RECEIVED, borderEventActivity.getId(), errorCode, leavingExecution.getId(), leavingExecution.getProcessInstanceId(), leavingExecution.getProcessDefinitionId()));
    }
    // The current activity of the execution will be changed in the next lines.
    // So we must make sure the activity is ended correctly here
    // The other executions (for example when doing something parallel in a subprocess, will
    // be destroyed by the destroy scope operation (but this execution will be used to do it and
    // will have list the original activity by then)
    Context.getCommandContext().getHistoryManager().recordActivityEnd((ExecutionEntity) leavingExecution);
    if (borderEventActivity.getActivityBehavior() instanceof EventSubProcessStartEventActivityBehavior) {
      InterpretableExecution execution = (InterpretableExecution) leavingExecution;
      execution.setActivity(borderEventActivity.getParentActivity());
      execution.performOperation(AtomicOperation.ACTIVITY_START); // make sure the listeners are invoked!
    } else {
      leavingExecution.executeActivity(borderEventActivity);
    }
  }

  /** Convenience overload of {@link #mapException(Exception, ActivityExecution, List, boolean)} with {@code wrapped = false}. */
  public static boolean mapException(Exception e, ActivityExecution execution, List<MapExceptionEntry> exceptionMap) {
    return mapException(e, execution, exceptionMap, false);
  }

  /**
   * Maps a Java exception to a BPMN error code via {@code exceptionMap} and propagates it.
   *
   * <p>Matching order: exact class-name match first, then (if {@code isAndChildren}) subclass
   * match, then the first entry with an error code but no class name as a catch-all default.
   *
   * @param wrapped when {@code true} and {@code e} is an {@link ActivitiActivityExecutionException},
   *        the cause is unwrapped and matched instead
   * @return {@code true} if a mapping matched and the error was propagated, {@code false} otherwise
   */
  public static boolean mapException(Exception e, ActivityExecution execution, List<MapExceptionEntry> exceptionMap, boolean wrapped) {
    if (exceptionMap == null) {
      return false;
    }
    if (wrapped && e instanceof ActivitiActivityExecutionException) {
      e = (Exception) e.getCause();
    }
    String defaultMap = null;
    for (MapExceptionEntry me : exceptionMap) {
      String exceptionClass = me.getClassName();
      String errorCode = me.getErrorCode();
      // save the first mapping with no exception class as default map
      if (StringUtils.isNotEmpty(errorCode) && StringUtils.isEmpty(exceptionClass) && defaultMap == null) {
        defaultMap = errorCode;
        continue;
      }
      // ignore if error code or class are not defined
      if (StringUtils.isEmpty(errorCode) || StringUtils.isEmpty(exceptionClass))
        continue;
      if (e.getClass().getName().equals(exceptionClass)) {
        propagateError(errorCode, execution);
        return true;
      }
      if (me.isAndChildren()) {
        // Entry also catches subclasses of the configured exception class.
        Class<?> exceptionClassClass = ReflectUtil.loadClass(exceptionClass);
        if (exceptionClassClass.isAssignableFrom(e.getClass())) {
          propagateError(errorCode, execution);
          return true;
        }
      }
    }
    if (defaultMap != null) {
      propagateError(defaultMap, execution);
      return true;
    }
    return false;
  }
}
/* * Copyright 2015 The SageTV Authors. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package sage; import sage.io.BufferedSageFile; import sage.io.LocalSageFile; import sage.io.SageDataFile; public class FreetypeFont extends MetaFont { // Freetype is NOT designed for multi-threading so we need to ensure not more than a single thread goes into the native freetype code at one time private static final Object ftLock = new Object(); private static long ftLibPtr; private static java.util.Map faceCacheMap = java.util.Collections.synchronizedMap(new java.util.HashMap()); private static void ensureLibLoaded() { if (ftLibPtr == 0) { synchronized (ftLock) { if (ftLibPtr != 0) return; sage.Native.loadLibrary("FreetypeFontJNI"); ftLibPtr = loadFreetypeLib0(); if (ftLibPtr == 0) { throw new RuntimeException("Can't load freetype lib!"); } } } } public FreetypeFont(String fontName, int fontStyle, int fontSize) throws java.io.FileNotFoundException { super(fontName, fontStyle, fontSize); ensureLibLoaded(); if (new java.io.File(fontName).isFile()) this.fontPath = fontName; else if (fontStyle == PLAIN) this.fontPath = Sage.getPath("fonts") + fontName + ".ttf"; else if (fontStyle == ITALIC) { this.fontPath = Sage.getPath("fonts") + fontName + "Italic.ttf"; if (!new java.io.File(fontPath).isFile()) this.fontPath = Sage.getPath("fonts") + fontName + "I.ttf"; fontStyle = 0; } else if (fontStyle == BOLD) { this.fontPath = Sage.getPath("fonts") + fontName + "Bold.ttf"; if (!new 
java.io.File(fontPath).isFile()) { this.fontPath = Sage.getPath("fonts") + fontName + "bd.ttf"; if (!new java.io.File(fontPath).isFile()) this.fontPath = Sage.getPath("fonts") + fontName + "B.ttf"; } fontStyle = 0; } else // BOLD + ITALIC { this.fontPath = Sage.getPath("fonts") + fontName + "BoldItalic.ttf"; if (!new java.io.File(fontPath).isFile()) this.fontPath = Sage.getPath("fonts") + fontName + "BI.ttf"; fontStyle = 0; } synchronized (ftLock) { if (faceCacheMap.containsKey(fontPath)) { if (Sage.DBG) System.out.println("Deriving FreeType font face for " + fontName + " size=" + fontSize + " style=" + fontStyle + " from=" + fontPath); parentFont = (FreetypeFont) faceCacheMap.get(fontPath); fontFacePtr = deriveFontFace0(parentFont.fontFacePtr, size, fontStyle); if (fontFacePtr == 0) { throw new RuntimeException("Can't derive freetype font name=" + fontName + " style=" + fontStyle + " size=" + fontSize + " path=" + fontPath + " err=" + ftErr); } } else { if (!new java.io.File(fontPath).isFile()) { throw new java.io.FileNotFoundException(); } if (Sage.DBG) System.out.println("Creating new FreeType font face for " + fontName + " size=" + fontSize + " style=" + fontStyle + " from=" + fontPath); fontFacePtr = loadFontFace0(ftLibPtr, fontPath, size, fontStyle); if (fontFacePtr == 0) { throw new RuntimeException("Can't load freetype font name=" + fontName + " style=" + fontStyle + " size=" + fontSize + " path=" + fontPath + " err=" + ftErr); } faceCacheMap.put(fontPath, this); parentFont = this; } } height = getFontHeight0(fontFacePtr) >> 6; descent = -1 * (getFontDescent0(fontFacePtr) >> 6); ascent = getFontAscent0(fontFacePtr) >> 6; leading = height - ascent - descent; // Ensure our sizing information is fully loaded...this corrects issues with incorrect font heights SageRenderer.getAcceleratedFont(this); } public MetaFont deriveFontSize(int newSize, UIManager uiLoader) { return UIManager.getCachedMetaFont(name, style, newSize, uiLoader); } public MetaFont 
deriveFont(int newStyle, UIManager uiLoader) { return UIManager.getCachedMetaFont(name, newStyle, size, uiLoader); } public int stringWidth(String str) { int rv = 0; for (int i = 0; i < str.length(); i++) { int glyphCode = getGlyphForChar(str.charAt(i)); int gwidth = getGlyphAdvance(glyphCode); rv += gwidth; } return rv; } // UGLY---FINALIZE!!!! BAD FOR PERFORMANCE protected void finalize() { releaseNativeFont(); } // Override to optimize public /*EMBEDDED_SWITCH*/float/*/int/**/ charWidth(char c) { return getGlyphAdvance(getGlyphForChar(c)); } public int getNumGlyphs() { return getNumGlyphs0(fontFacePtr); } public int getGlyphForChar(char c) { return getGlyphForChar0(fontFacePtr, c); } public int renderGlyph(int glyphCode, java.awt.image.BufferedImage bi, int x, int y) { synchronized (ftLock) { loadGlyph(glyphCode); return renderGlyph0(fontFacePtr, bi, x, y); } } public int getGlyphAdvance(int glyphCode) { if (accelerator != null) { java.awt.geom.Rectangle2D.Float grect = accelerator.getLogicalRect(glyphCode); if (grect != null) return (int)grect.width; } synchronized (ftLock) { loadGlyph(glyphCode); return getGlyphAdvance0(fontFacePtr) >> 6; } } public int getGlyphPixWidth(int glyphCode) { if (accelerator != null) { java.awt.geom.Rectangle2D.Float grect = accelerator.getPixelRect(glyphCode); if (grect != null) return (int)grect.width; } synchronized (ftLock) { loadGlyph(glyphCode); return getGlyphWidth0(fontFacePtr) >> 6; } } public int getGlyphHeight(int glyphCode) { if (accelerator != null) { java.awt.geom.Rectangle2D.Float grect = accelerator.getPixelRect(glyphCode); if (grect != null) return (int)grect.height; } synchronized (ftLock) { loadGlyph(glyphCode); return getGlyphHeight0(fontFacePtr) >> 6; } } private void loadGlyph(int glyphCode) { if (currLoadedGlyph != glyphCode) { currLoadedGlyph = glyphCode; loadGlyph0(fontFacePtr, glyphCode); } } public MetaFont.GlyphVector createGlyphVector(String str) { int advance = 0; int visAdvance = 0; int[] glyphCodes = 
new int[str.length()];
    float[] glyphPos = new float[glyphCodes.length];
    java.awt.geom.Rectangle2D.Float bounder = new java.awt.geom.Rectangle2D.Float();
    int trailingWS = 0;
    int strlen = str.length();
    // Single-line layout: accumulate per-glyph x positions and the overall
    // advance; visAdvance excludes trailing whitespace so alignment can ignore it.
    for (int i = 0; i < strlen; i++)
    {
      char c = str.charAt(i);
      glyphCodes[i] = getGlyphForChar(c);
      java.awt.geom.Rectangle2D.Float pixRect = accelerator != null ? accelerator.getPixelRect(glyphCodes[i]) : null;
      java.awt.geom.Rectangle2D.Float logRect = accelerator != null ? accelerator.getLogicalRect(glyphCodes[i]) : null;
      int gwidth = getGlyphAdvance(glyphCodes[i]);
      int gheight = getGlyphHeight(glyphCodes[i]);
      // When a glyph atlas exists, grow the height by the pixel/logical rect delta
      // so oversized glyphs aren't clipped.
      if (pixRect != null && logRect != null)
        gheight += pixRect.y - logRect.y + getAscent();
      glyphPos[i] = advance;
      advance += gwidth;
      if (c == ' ')
      {
        // If we're whitespace then add us to the whitespace list
        trailingWS += gwidth;
      }
      else
      {
        // We're a char; so if there was trailing whitespace we should add it in
        visAdvance += trailingWS + gwidth;
        trailingWS = 0;
      }
      bounder.height = Math.max(bounder.height, gheight);
    }
    bounder.width = advance;
    return new FreetypeGlyphVector(advance, visAdvance, str, glyphCodes, glyphPos, bounder);
  }

  /**
   * Lays out {@code s} into one or more glyph vectors, wrapping at {@code wrapWidth} pixels.
   * Prefers soft wraps at the last seen space or '-' (tracked via lastWrapIndex); otherwise
   * hard-wraps mid-word. A space that would start the next line is consumed onto the end of
   * the prior line instead.
   */
  public MetaFont.GlyphVector[] createGlyphVectors(String s, int wrapWidth)
  {
    java.util.ArrayList rv = new java.util.ArrayList();
    int advance = 0;
    int visAdvance = 0;
    int[] glyphCodes = new int[s.length()];
    float[] glyphPos = new float[glyphCodes.length];
    java.awt.geom.Rectangle2D.Float bounder = new java.awt.geom.Rectangle2D.Float();
    int lastStart = 0;       // index of the first char on the line being built
    int trailingWS = 0;      // pixel width of whitespace at the end of the current line
    int lastWrapIndex = 0;   // best soft-wrap point seen so far on this line (0 = none)
    int wsWidth = 0;         // width of the whitespace at lastWrapIndex (0 if it was a '-')
    int strlen = s.length();
    for (int i = 0; i < strlen; i++)
    {
      char c = s.charAt(i);
      glyphCodes[i] = getGlyphForChar(c);
      int gwidth = getGlyphAdvance(glyphCodes[i]);
      java.awt.geom.Rectangle2D.Float pixRect = accelerator != null ? accelerator.getPixelRect(glyphCodes[i]) : null;
      java.awt.geom.Rectangle2D.Float logRect = accelerator != null ? accelerator.getLogicalRect(glyphCodes[i]) : null;
      int gheight = getGlyphHeight(glyphCodes[i]);
      if (pixRect != null && logRect != null)
        gheight += pixRect.y - logRect.y + getAscent();
      if (lastStart < i && gwidth + advance > wrapWidth)
      {
        // There is another case where whitespace will be the first char on the next line. We want
        // to avoid that and put it at the end of the prior line. So we need to consume that char in this case.
        boolean consumedChar = false;
        if (c == ' ')
        {
          // Consume this char now
          trailingWS += gwidth;
          glyphPos[i] = advance;
          advance += gwidth;
          bounder.height = Math.max(bounder.height, gheight);
          bounder.width = advance;
          consumedChar = true;
          lastWrapIndex = 0;
        }
        // Wrap onto another line. Find the best point for this.
        if (lastWrapIndex > 0 && lastWrapIndex < i - 1)
        {
          // soft line wrapping
          // This line we're adding goes from lastStart until lastWrapIndex(inclusive).
          // Then we have to take the chars after lastWrapIndex and put them onto the 'new' current line
          // Move those chars onto the next line first and figure out how much of the advance to subtract in the process
          int xoff = (int)(advance - glyphPos[lastWrapIndex + 1]);
          int xshift = (int)glyphPos[lastWrapIndex + 1];
          for (int j = lastWrapIndex + 1; j < i; j++)
          {
            glyphPos[j] -= xshift;
          }
          int[] newgc = new int[lastWrapIndex + 1 - lastStart];
          System.arraycopy(glyphCodes, lastStart, newgc, 0, newgc.length);
          float[] newgp = new float[newgc.length];
          System.arraycopy(glyphPos, lastStart, newgp, 0, newgp.length);
          rv.add(new FreetypeGlyphVector(xshift, xshift - wsWidth, s.substring(lastStart, lastWrapIndex + 1), newgc, newgp,
              new java.awt.geom.Rectangle2D.Float(bounder.x, bounder.y, advance, bounder.height)));
          bounder.y += height;
          bounder.height = 0;
          advance = visAdvance = xoff/* - wsWidth*/;
          lastStart = lastWrapIndex + 1;
          trailingWS = 0; // we wrapped so there's nothing trailing anymore
        }
        else
        {
          // hard line wrapping
          int theEnd = consumedChar ? (i + 1) : i;
          int[] newgc = new int[theEnd - lastStart];
          System.arraycopy(glyphCodes, lastStart, newgc, 0, newgc.length);
          float[] newgp = new float[newgc.length];
          System.arraycopy(glyphPos, lastStart, newgp, 0, newgp.length);
          rv.add(new FreetypeGlyphVector(advance, visAdvance, s.substring(lastStart, theEnd), newgc, newgp,
              new java.awt.geom.Rectangle2D.Float(bounder.x, bounder.y, advance, bounder.height)));
          bounder.y += height;
          bounder.height = 0;
          advance = visAdvance = 0;
          lastStart = theEnd;
          trailingWS = 0; // we wrapped so there's nothing trailing anymore
        }
        lastWrapIndex = 0;
        wsWidth = 0;
        if (consumedChar)
          continue;
      }
      if (c == ' ')
      {
        // We found whitespace. Add this to the trailingWS count
        trailingWS += gwidth;
        // Set this point as the last wrap
        lastWrapIndex = i;
        wsWidth = gwidth;
      }
      else
      {
        // We're a char; so if there was trailing whitespace we should add it in
        visAdvance += trailingWS + gwidth;
        trailingWS = 0;
        if (c == '-')
        {
          // Set this point as the last wrap
          lastWrapIndex = i;
          wsWidth = 0;
        }
      }
      glyphPos[i] = advance;
      advance += gwidth;
      bounder.height = Math.max(bounder.height, gheight);
      bounder.width = advance;
    }
    // Emit whatever remains as the final line.
    int[] newgc = new int[s.length() - lastStart];
    System.arraycopy(glyphCodes, lastStart, newgc, 0, newgc.length);
    float[] newgp = new float[newgc.length];
    System.arraycopy(glyphPos, lastStart, newgp, 0, newgp.length);
    rv.add(new FreetypeGlyphVector(advance, visAdvance, s.substring(lastStart), newgc, newgp,
        new java.awt.geom.Rectangle2D.Float(bounder.x, bounder.y, advance, bounder.height)));
    return (MetaFont.GlyphVector[]) rv.toArray(new MetaFont.GlyphVector[0]);
  }

  // JNI bindings to the native FreeType wrapper. The *0 suffix marks raw native
  // entry points; facePtr is the opaque native FT_Face handle.
  private static native long loadFreetypeLib0();
  private static native boolean closeFreetypeLib0(long libPtr);
  private native boolean closeFontFace0(long facePtr);
  private native long loadFontFace0(long libPtr, String fontPath, int ptSize, int style);
  private native long deriveFontFace0(long parentFacePtr, int ptSize, int style);
  private native int getGlyphForChar0(long facePtr, char c);
  private native void loadGlyph0(long facePtr, int glyphCode);
  private native int renderGlyph0(long facePtr, java.awt.image.BufferedImage bi, int x, int y);
  private native sage.media.image.RawImage renderGlyphRaw0(long facePtr, sage.media.image.RawImage img, int imgWidth, int imgHeight, int x, int y);
  private native int getNumGlyphs0(long facePtr);
  private native int getGlyphWidth0(long facePtr);
  private native int getGlyphHeight0(long facePtr);
  private native int getGlyphBearingX0(long facePtr);
  private native int getGlyphBearingY0(long facePtr);
  private native int getGlyphAdvance0(long facePtr);
  private native int getFontHeight0(long facePtr);
  private native int getFontDescent0(long facePtr);
  private native int getFontAscent0(long facePtr);

  // Font metric accessors (the EMBEDDED_SWITCH markers toggle float/int builds).
  public /*EMBEDDED_SWITCH*/float/*/int/**/ getHeight() { return height; }
  public /*EMBEDDED_SWITCH*/float/*/int/**/ getAscent() { return ascent; }
  public /*EMBEDDED_SWITCH*/float/*/int/**/ getDescent() { return descent; }
  public /*EMBEDDED_SWITCH*/float/*/int/**/ getLeading() { return leading; }

  /** Releases the native font face handle; safe to call more than once. */
  public void releaseNativeFont()
  {
    if (fontFacePtr != 0)
      closeFontFace0(fontFacePtr);
    fontFacePtr = 0;
    //closeFreetypeLib(ftLibPtr);
  }

  public long getNativeFontHandle() { return fontFacePtr; }

  /**
   * Computes (or loads from the on-disk font cache) the atlas layout for every glyph:
   * which atlas image each glyph lands in plus its pixel and logical rectangles.
   * Does not render pixels; rendering happens in loadJavaFontImage/loadRawFontImage.
   * NOTE(review): the >> 6 shifts below presumably convert FreeType 26.6 fixed-point
   * metrics to pixels - confirm against the native wrapper.
   */
  public SageRenderer.CachedFontGlyphs loadAcceleratedFont(int maxRequiredGlyphCode, int width, int height)
  {
    int x = 0;
    int y = 0;
    int maxHeightForRow = 0;
    int maxWidthForGlyph = 0;
    boolean fixedGlyphCacheWidth = !Sage.getBoolean("ui/load_complete_glyph_maps", false);
    if (fixedGlyphCacheWidth)
    {
      // Sample a wide Latin and a CJK glyph to size the fixed-width slots.
      synchronized (ftLock)
      {
        loadGlyph(getGlyphForChar('W'));
        maxWidthForGlyph = (getGlyphAdvance0(fontFacePtr) >> 6) + 4;
        loadGlyph(getGlyphForChar('\u5355'));
        maxWidthForGlyph = Math.max(maxWidthForGlyph, (getGlyphAdvance0(fontFacePtr) >> 6) + 4);
      }
    }
    int orgMaxRequiredGlyphCode = maxRequiredGlyphCode;
    maxRequiredGlyphCode = Integer.MAX_VALUE;
    SageRenderer.CachedFontGlyphs rv = new SageRenderer.CachedFontGlyphs();
    rv.font = this;
    rv.width = width;
    rv.height = height;
    int imageCount = 0;
    int numCachedGlyphs = 0;
    int numGlyphs = getNumGlyphs();
    rv.numGlyphs = numGlyphs;
    rv.imageIndexByGlyphCode = new int[numGlyphs];
    java.util.Arrays.fill(rv.imageIndexByGlyphCode, -1);
    rv.pixelRectByGlyphCode = new java.awt.geom.Rectangle2D.Float[numGlyphs];
    rv.logicalRectByGlyphCode = new java.awt.geom.Rectangle2D.Float[numGlyphs];
    int i = 0;
    // See if the cache file exists :
    java.io.File cacheFile;
    cacheFile = new java.io.File(System.getProperty("user.dir"), "fontcache" + java.io.File.separator + width+"x"+height+"_"+hashCode()+"_size_"+size+"_style_"+style);
    if (cacheFile.isFile())
    {
      // Verify the number of glyph
      SageDataFile cacheIn = null;
      try
      {
        cacheIn = new SageDataFile(new BufferedSageFile(new LocalSageFile(cacheFile, true)), Sage.I18N_CHARSET);
        if(numGlyphs==cacheIn.readInt())
        {
          // Cache layout: numGlyphs, imageCount+1, then per-glyph
          // (imageIndex, pixelRect xywh, logicalRect xywh), then glyphCounts.
          imageCount=cacheIn.readInt()-1;
          rv.glyphCounts = new int[imageCount+1];
          rv.glyphCounts[imageCount]=numGlyphs;
          for (int j=0; j < numGlyphs; j++)
          {
            rv.imageIndexByGlyphCode[j]=cacheIn.readInt();
            rv.pixelRectByGlyphCode[j] = new java.awt.geom.Rectangle2D.Float();
            rv.pixelRectByGlyphCode[j].x=cacheIn.readFloat();
            rv.pixelRectByGlyphCode[j].y=cacheIn.readFloat();
            rv.pixelRectByGlyphCode[j].width=cacheIn.readFloat();
            rv.pixelRectByGlyphCode[j].height=cacheIn.readFloat();
            rv.logicalRectByGlyphCode[j] = new java.awt.geom.Rectangle2D.Float();
            rv.logicalRectByGlyphCode[j].x=cacheIn.readFloat();
            rv.logicalRectByGlyphCode[j].y=cacheIn.readFloat();
            rv.logicalRectByGlyphCode[j].width=cacheIn.readFloat();
            rv.logicalRectByGlyphCode[j].height=cacheIn.readFloat();
          }
          for(int j=0; j < imageCount+1; j++)
          {
            rv.glyphCounts[j]=cacheIn.readInt();
          }
          accelerator = rv;
          return rv;
        }
      }
      catch (Exception e)
      {
        // A bad cache file is non-fatal; fall through and rebuild the layout.
        System.out.println("Error reading font cache : " + e);
        e.printStackTrace(System.out);
      }
      finally
      {
        if (cacheIn != null)
        {
          try{cacheIn.close();}catch(Exception e1){}
        }
      }
    }
    int[] tmpGlyphCounts = new int[1024]; // way more then we'd ever have
    synchronized (ftLock)
    {
      // Shelf-pack glyphs left-to-right, top-to-bottom, spilling to new images.
      for (; i < numGlyphs; i++)
      {
        int glyphPixWidth;
        int glyphHeight;
        int glyphAdvance;
        int glyphBearingX;
        int glyphBearingY;
        if (fixedGlyphCacheWidth && i > orgMaxRequiredGlyphCode)
        {
          // Beyond the required range: reserve a fixed-size slot without loading the glyph.
          glyphPixWidth = glyphAdvance = maxWidthForGlyph;
          glyphBearingX = -2;
          glyphBearingY = (int)getAscent();
          glyphHeight = (int)getHeight();
        }
        else
        {
          loadGlyph(i);
          glyphPixWidth = getGlyphWidth0(fontFacePtr) >> 6;
          glyphHeight = getGlyphHeight0(fontFacePtr) >> 6;
          glyphAdvance = getGlyphAdvance0(fontFacePtr) >> 6;
          glyphBearingX = getGlyphBearingX0(fontFacePtr) >> 6;
          glyphBearingY = getGlyphBearingY0(fontFacePtr) >> 6;
        }
        if (x + glyphPixWidth >= width)
        {
          // Move us on to the next line
          x = 0;
          y += maxHeightForRow + 1;
          maxHeightForRow = 0;
        }
        if (y + glyphHeight >= height)
        {
          // Move on to the next image
          if (i > maxRequiredGlyphCode)
            break;
          tmpGlyphCounts[imageCount] = i;
          imageCount++;
          if (tmpGlyphCounts.length <= imageCount)
          {
            int[] newTemp = new int[tmpGlyphCounts.length * 2];
            System.arraycopy(tmpGlyphCounts, 0, newTemp, 0, tmpGlyphCounts.length);
            tmpGlyphCounts = newTemp;
          }
          x = 0;
          y = 0;
          maxHeightForRow = 0;
        }
        x -= glyphBearingX; // skip over the blank space on the left of the glyph, or if it goes over the left then move us right
        numCachedGlyphs = i;
        //renderGlyph(i, currImage, x, y);
        rv.imageIndexByGlyphCode[i] = imageCount;
        rv.pixelRectByGlyphCode[i] = new java.awt.geom.Rectangle2D.Float(x + glyphBearingX, y, glyphPixWidth, glyphHeight);
        rv.logicalRectByGlyphCode[i] = new java.awt.geom.Rectangle2D.Float(x, y + glyphBearingY, glyphAdvance, glyphHeight);
        // Adjust font height for any glyphs that are larger than it
        this.height = Math.max(this.height, (int)Math.ceil(glyphHeight - glyphBearingY + getAscent()));
        maxHeightForRow = Math.max(maxHeightForRow, glyphHeight);
        x += glyphPixWidth + glyphBearingX + 1;
      }
      if (Sage.DBG) System.out.println("There are "+numGlyphs+" glyphs");
    }
    tmpGlyphCounts[imageCount] = i;
    rv.glyphCounts = new int[imageCount + 1];
    System.arraycopy(tmpGlyphCounts, 0, rv.glyphCounts, 0, imageCount + 1);
    {
      // Persist the freshly-computed layout so the next startup can skip packing.
      if (Sage.DBG) System.out.println("Saving cache version");
      cacheFile.getParentFile().mkdirs();
      SageDataFile cacheOut = null;
      try
      {
        // assumes font name is valid file string
        cacheOut = new SageDataFile(new BufferedSageFile(new LocalSageFile(cacheFile, false)), Sage.I18N_CHARSET);
        cacheOut.writeInt(numGlyphs);
        cacheOut.writeInt(imageCount+1);
        int j;
        for (j=0; j < numGlyphs; j++)
        {
          cacheOut.writeInt(rv.imageIndexByGlyphCode[j]);
          cacheOut.writeFloat(rv.pixelRectByGlyphCode[j].x);
          cacheOut.writeFloat(rv.pixelRectByGlyphCode[j].y);
          cacheOut.writeFloat(rv.pixelRectByGlyphCode[j].width);
          cacheOut.writeFloat(rv.pixelRectByGlyphCode[j].height);
          cacheOut.writeFloat(rv.logicalRectByGlyphCode[j].x);
          cacheOut.writeFloat(rv.logicalRectByGlyphCode[j].y);
          cacheOut.writeFloat(rv.logicalRectByGlyphCode[j].width);
          cacheOut.writeFloat(rv.logicalRectByGlyphCode[j].height);
        }
        for(j=0; j < imageCount+1; j++)
        {
          cacheOut.writeInt(rv.glyphCounts[j]);
        }
        cacheOut.close();
      }
      catch (Exception e)
      {
        System.out.println("Error writing font cache : " + e);
      }
      finally
      {
        if (cacheOut != null)
        {
          try{cacheOut.close();}catch(Exception e){}
        }
      }
    }
    accelerator = rv;
    return rv;
  }

  /**
   * Renders one atlas page (imageIndex) of the cached glyph layout into an ARGB
   * BufferedImage, optionally premultiplying alpha for 3D font maps.
   */
  public java.awt.image.BufferedImage loadJavaFontImage(SageRenderer.CachedFontGlyphs cacheData, int imageIndex)
  {
    int x = 0;
    int y = 0;
    int maxHeightForRow = 0;
    java.awt.image.BufferedImage currImage = new java.awt.image.BufferedImage(cacheData.width, cacheData.height, java.awt.image.BufferedImage.TYPE_INT_ARGB_PRE);
    int numGlyphs = getNumGlyphs();
    int startGlyph = (imageIndex == 0) ? 0 : cacheData.glyphCounts[imageIndex - 1];
    int endGlyph = Math.min(numGlyphs, cacheData.glyphCounts[imageIndex] - 1);
    synchronized (ftLock)
    {
      for (int i = startGlyph; i <= endGlyph; i++)
      {
        renderGlyph(i, currImage, (int)cacheData.logicalRectByGlyphCode[i].x, (int)cacheData.logicalRectByGlyphCode[i].y);
      }
    }
    // Fix the alpha for the image
    if (UIManager.shouldAntialiasFont(getSize()) && !Sage.getBoolean("ui/dont_premultiply_alpha_for_3dfontmaps", false))
    {
      int[] texturedata = ((java.awt.image.DataBufferInt) currImage.getRaster().getDataBuffer()).getData();
      for (int j = 0; j < texturedata.length; j++)
      {
        // Expand the 8-bit alpha into all four channels (white glyph, premultiplied).
        int tmp = (texturedata[j] >> 24) & 0xFF;
        if (tmp != 0)
        {
          tmp = tmp | (tmp << 8);
          texturedata[j] = tmp | (tmp << 16);
        }
      }
    }
    if (Sage.DBG) System.out.println("Rendered new font to cache index=" + imageIndex + " font=" + this);
    if (Sage.getBoolean("ui/dump_font_cache", false))
    {
      // Debug aid: dump the atlas page to a PNG in the working directory.
      try
      {
        java.io.FileOutputStream os = new java.io.FileOutputStream(getName() + "-" + getStyle() + "-" + getSize() + "_" + imageIndex + ".png");
        javax.imageio.ImageIO.write(currImage, "png", os);
        os.close();
      }catch(Exception e){}
    }
    return currImage;
  }

  /**
   * Same as loadJavaFontImage but renders through the native RawImage path
   * (renderGlyphRaw0 allocates/returns the image on first call).
   */
  public sage.media.image.RawImage loadRawFontImage(SageRenderer.CachedFontGlyphs cacheData, int imageIndex)
  {
    int x = 0;
    int y = 0;
    int maxHeightForRow = 0;
    sage.media.image.RawImage rv = null;
    int numGlyphs = getNumGlyphs();
    int startGlyph = (imageIndex == 0) ? 0 : cacheData.glyphCounts[imageIndex - 1];
    int endGlyph = Math.min(numGlyphs, cacheData.glyphCounts[imageIndex] - 1);
    synchronized (ftLock)
    {
      for (int i = startGlyph; i <= endGlyph; i++)
      {
        loadGlyph(i);
        rv = renderGlyphRaw0(fontFacePtr, rv, cacheData.width, cacheData.height,
            (int)cacheData.logicalRectByGlyphCode[i].x, (int)cacheData.logicalRectByGlyphCode[i].y);
      }
    }
    if (Sage.DBG) System.out.println("Rendered new font to raw cache index=" + imageIndex + " font=" + this);
    if (Sage.getBoolean("ui/dump_font_cache", false))
    {
      try
      {
        java.io.FileOutputStream os = new java.io.FileOutputStream(getName() + "-" + getStyle() + "-" + getSize() + "_" + imageIndex + ".png");
        javax.imageio.ImageIO.write(rv.convertToBufferedImage(), "png", os);
        os.close();
      }catch(Exception e){}
    }
    return rv;
  }

  private String fontPath;              // source file the face was loaded from
  private long fontFacePtr;             // opaque native FT_Face handle; 0 when released
  private int currLoadedGlyph = -1;     // glyph currently loaded in the native face slot
  private int ftErr; // error code from native code
  private FreetypeFont parentFont; // for shared font face information
  private SageRenderer.CachedFontGlyphs accelerator; // atlas layout, set by loadAcceleratedFont

  /** Immutable layout result for one string; fields are inherited from MetaFont.GlyphVector. */
  public class FreetypeGlyphVector extends MetaFont.GlyphVector
  {
    public FreetypeGlyphVector(int inAdvance, int inVisAdvance, String inText, int[] inGlyphCodes, float[] inPos, java.awt.geom.Rectangle2D.Float inBox)
    {
      advance = inAdvance;
      visAdvance = inVisAdvance;
      text = inText;
      glyphCodes = inGlyphCodes;
      glyphPos = inPos;
      boundingBox = inBox;
      font = FreetypeFont.this;
    }
  }
}
package ve.com.abicelis.chefbuddy.ui.home; import android.content.Intent; import android.os.Bundle; import android.os.Handler; import android.support.annotation.Nullable; import android.support.design.widget.AppBarLayout; import android.support.design.widget.FloatingActionButton; import android.support.design.widget.TabLayout; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.view.ViewPager; import android.support.v7.app.AppCompatActivity; import android.support.v7.widget.Toolbar; import android.view.Menu; import android.view.MenuItem; import android.view.View; import com.miguelcatalan.materialsearchview.MaterialSearchView; import java.util.ArrayList; import java.util.List; import javax.inject.Inject; import butterknife.BindView; import butterknife.ButterKnife; import ve.com.abicelis.chefbuddy.R; import ve.com.abicelis.chefbuddy.app.ChefBuddyApplication; import ve.com.abicelis.chefbuddy.model.RecipeSource; import ve.com.abicelis.chefbuddy.ui.about.AboutActivity; import ve.com.abicelis.chefbuddy.ui.addEditRecipe.AddEditRecipeActivity; import ve.com.abicelis.chefbuddy.ui.home.presenter.HomePresenter; import ve.com.abicelis.chefbuddy.ui.home.view.HomeView; import ve.com.abicelis.chefbuddy.ui.home_history.HistoryFragment; import ve.com.abicelis.chefbuddy.ui.home_recipeList.RecipeListFragment; import ve.com.abicelis.chefbuddy.ui.home_spinWheel.SpinWheelFragment; import ve.com.abicelis.chefbuddy.ui.searchOnlineRecipe.SearchOnlineRecipeActivity; import ve.com.abicelis.chefbuddy.ui.settings.SettingsActivity; /** * Created by abicelis on 8/7/2017. 
*/ public class HomeActivity extends AppCompatActivity implements HomeView, SearchViewListener { //CONST private static final String TAG = HomeActivity.class.getSimpleName(); private static final int RECIPE_TAB_POSITION = 0; private static final int SPINWHEEL_TAB_POSITION = 1; private static final int HISTORY_TAB_POSITION = 2; //DATA @Inject HomePresenter presenter; //UI @BindView(R.id.activity_home_viewpager) ViewPager mViewpager; @BindView(R.id.activity_home_tab_layout) TabLayout mTabLayout; @BindView(R.id.activity_home_search_view) MaterialSearchView mSearchView; @BindView(R.id.activity_home_appbar) AppBarLayout mAppBarLayout; @BindView(R.id.activity_home_toolbar) Toolbar mToolbar; @BindView(R.id.activity_home_fab_add_recipe) FloatingActionButton mFabAdd; private HomeViewPagerAdapter mHomeViewPagerAdapter; private RecipeListFragment mRecipeListFragment; private SpinWheelFragment mSpinWheelFragment; private HistoryFragment mHistoryFragment; //DATA private List<String> titleList = new ArrayList<>(); private List<Fragment> fragmentList = new ArrayList<>(); @Override protected void onCreate(@Nullable Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_home); ButterKnife.bind(this); ((ChefBuddyApplication)getApplication()).getAppComponent().inject(this); presenter.attachView(this); //Setup toolbar setSupportActionBar(mToolbar); getSupportActionBar().setTitle(R.string.app_name); getSupportActionBar().setLogo(R.drawable.ic_toolbar_home); setupViewPagerAndTabLayout(); setupSearchView(); setupFab(); } private void setupViewPagerAndTabLayout() { //Clear lists titleList.clear(); fragmentList.clear(); //Populate title list titleList.add(getResources().getString(R.string.activity_home_tab_recipe_title)); titleList.add(getResources().getString(R.string.activity_home_tab_spin_wheel_title)); titleList.add(getResources().getString(R.string.activity_home_tab_history_title)); //Populate fragment list mRecipeListFragment = new 
RecipeListFragment(); fragmentList.add(mRecipeListFragment); mSpinWheelFragment = new SpinWheelFragment(); fragmentList.add(mSpinWheelFragment); mHistoryFragment = new HistoryFragment(); fragmentList.add(mHistoryFragment); //Setup adapter, viewpager and tabLayout mHomeViewPagerAdapter = new HomeViewPagerAdapter(getSupportFragmentManager(), titleList, fragmentList); mViewpager.setAdapter(mHomeViewPagerAdapter); mViewpager.setCurrentItem(0); //Start at page 1 mViewpager.addOnPageChangeListener(new ViewPager.OnPageChangeListener() { @Override public void onPageScrolled(int position, float positionOffset, int positionOffsetPixels) {} @Override public void onPageSelected(int position) { if (position != 0 && mSearchView.isSearchOpen()) mSearchView.closeSearch(); if(position == RECIPE_TAB_POSITION) mFabAdd.show(); else mFabAdd.hide(); //Expand appBar when not in recipe tab if(position != RECIPE_TAB_POSITION) mAppBarLayout.setExpanded(true, true); if(position == SPINWHEEL_TAB_POSITION) { //Notify fragment SpinWheelFragment f = ((SpinWheelFragment)mHomeViewPagerAdapter.getRegisteredFragment(SPINWHEEL_TAB_POSITION)); if(f != null) f.refreshWheel(); } if(position == HISTORY_TAB_POSITION) { //Notify fragment HistoryFragment f = ((HistoryFragment)mHomeViewPagerAdapter.getRegisteredFragment(HISTORY_TAB_POSITION)); if(f != null) f.refresh(); } } @Override public void onPageScrollStateChanged(int state) {} }); mTabLayout.setupWithViewPager(mViewpager); } private void setupSearchView() { mSearchView.setVoiceSearch(true); mSearchView.setOnQueryTextListener(new MaterialSearchView.OnQueryTextListener() { @Override public boolean onQueryTextSubmit(String query) { if(mHomeViewPagerAdapter.getRegisteredFragment(0) != null) { if (query.isEmpty()) ((RecipeListFragment) mHomeViewPagerAdapter.getRegisteredFragment(0)).cancelFilterRecipes(); else ((RecipeListFragment) mHomeViewPagerAdapter.getRegisteredFragment(0)).filterRecipes(query); } return false; } @Override public boolean 
onQueryTextChange(String newText) { if(mHomeViewPagerAdapter.getRegisteredFragment(0) != null) { if (newText.isEmpty()) ((RecipeListFragment) mHomeViewPagerAdapter.getRegisteredFragment(0)).cancelFilterRecipes(); else ((RecipeListFragment) mHomeViewPagerAdapter.getRegisteredFragment(0)).filterRecipes(newText); } return false; } }); mSearchView.setOnSearchViewListener(new MaterialSearchView.SearchViewListener() { @Override public void onSearchViewShown() { //mAppBarLayout.setExpanded(false, true); mTabLayout.setVisibility(View.GONE); } @Override public void onSearchViewClosed() { //mAppBarLayout.setExpanded(true, true); mTabLayout.setVisibility(View.VISIBLE); ((RecipeListFragment) mHomeViewPagerAdapter.getRegisteredFragment(0)).cancelFilterRecipes(); } }); } private void setupFab() { mFabAdd.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { FragmentManager fm = getSupportFragmentManager(); SelectRecipeSourceDialogFragment dialog = SelectRecipeSourceDialogFragment.newInstance(); dialog.setListener(new SelectRecipeSourceDialogFragment.RecipeSourceSelectedListener() { @Override public void onSourceSelected(RecipeSource recipeSource) { switch (recipeSource) { case DATABASE: Intent addRecipeIntent = new Intent(HomeActivity.this, AddEditRecipeActivity.class); startActivity(addRecipeIntent); break; case ONLINE: Intent goToSearchIntent = new Intent(HomeActivity.this, SearchOnlineRecipeActivity.class); startActivity(goToSearchIntent); break; } } }); dialog.show(fm, "dialog"); } }); } // @Override // protected void onActivityResult(int requestCode, int resultCode, Intent data) { // if (requestCode == MaterialSearchView.REQUEST_VOICE && resultCode == RESULT_OK) { // ArrayList<String> matches = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS); // if (matches != null && matches.size() > 0) { // String searchWrd = matches.get(0); // if (!TextUtils.isEmpty(searchWrd)) { // mSearchView.setQuery(searchWrd, false); // } // } // // 
return; // } // super.onActivityResult(requestCode, resultCode, data); // } @Override protected void onDestroy() { super.onDestroy(); //presenter.detachView(); } @Override public boolean onCreateOptionsMenu(Menu menu) { getMenuInflater().inflate(R.menu.menu_home, menu); MenuItem item = menu.findItem(R.id.menu_home_search); mSearchView.setMenuItem(item); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { int id = item.getItemId(); switch (id) { case R.id.menu_home_about: Intent goToAboutPage = new Intent(this, AboutActivity.class); startActivity(goToAboutPage); break; case R.id.menu_home_settings: Intent goToSettingsPage = new Intent(this, SettingsActivity.class); startActivity(goToSettingsPage); break; } return super.onOptionsItemSelected(item); } /* SearchViewListener interface implementation */ @Override public void closeSearchView(){ new Handler().postDelayed(new Runnable() { @Override public void run() { mSearchView.closeSearch(); } }, 500); } }
package io.cattle.platform.iaas.api.auth.integration.external; import io.cattle.platform.api.auth.Identity; import io.cattle.platform.core.constants.ProjectConstants; import io.cattle.platform.core.model.Account; import io.cattle.platform.core.model.AuthToken; import io.cattle.platform.iaas.api.auth.SecurityConstants; import io.cattle.platform.iaas.api.auth.dao.AuthTokenDao; import io.cattle.platform.iaas.api.auth.identity.Token; import io.cattle.platform.json.JsonMapper; import io.cattle.platform.object.util.DataAccessor; import io.cattle.platform.token.TokenException; import io.cattle.platform.token.TokenService; import io.cattle.platform.util.type.CollectionUtils; import io.github.ibuildthecloud.gdapi.context.ApiContext; import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException; import io.github.ibuildthecloud.gdapi.request.ApiRequest; import io.github.ibuildthecloud.gdapi.util.ResponseCodes; import org.apache.commons.lang3.StringUtils; import org.apache.http.HttpResponse; import org.apache.http.client.fluent.Request; import org.apache.http.conn.HttpHostConnectException; import org.apache.http.entity.ContentType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Set; public class ExternalServiceAuthProvider { private static final Logger log = LoggerFactory.getLogger(ExternalServiceAuthProvider.class); private JsonMapper jsonMapper; TokenService tokenService; ExternalServiceTokenUtil tokenUtil; AuthTokenDao authTokenDao; public ExternalServiceAuthProvider(JsonMapper jsonMapper, TokenService tokenService, ExternalServiceTokenUtil tokenUtil, AuthTokenDao authTokenDao) { super(); this.jsonMapper = jsonMapper; this.tokenService = tokenService; this.tokenUtil = tokenUtil; this.authTokenDao = 
authTokenDao; } public Token getToken(ApiRequest request) { Map<String, Object> requestBody = CollectionUtils.toMap(request.getRequestObject()); String code = Objects.toString(requestBody.get(SecurityConstants.CODE), null); //get the token from the auth service StringBuilder authUrl = new StringBuilder(ServiceAuthConstants.AUTH_SERVICE_URL.get()); authUrl.append("/token"); HttpResponse response; try { Map<String, String> data = new HashMap<>(); data.put("code", code); String jsonString = jsonMapper.writeValueAsString(data); Request temp = Request.Post(authUrl.toString()) .addHeader(ServiceAuthConstants.ACCEPT, ServiceAuthConstants.APPLICATION_JSON) .bodyString(jsonString, ContentType.APPLICATION_JSON); response = temp.execute().returnResponse(); int statusCode = response.getStatusLine().getStatusCode(); if(statusCode >= 300) { log.error("Got error from Auth service. statusCode: {}", statusCode); throw new ClientVisibleException(ResponseCodes.SERVICE_UNAVAILABLE, ServiceAuthConstants.AUTH_ERROR, "Error Response from Auth service", "Status code from Auth Service: " + Integer.toString(statusCode)); } Map<String, Object> jsonData = jsonMapper.readValue(response.getEntity().getContent()); String encryptedToken = (String)jsonData.get(ServiceAuthConstants.JWT_KEY); Map<String, Object> decryptedToken = tokenService.getJsonPayload(encryptedToken, false); String accessToken = (String)decryptedToken.get("access_token"); request.setAttribute(ServiceAuthConstants.ACCESS_TOKEN, accessToken); List<?> identityList = CollectionUtils.toList(jsonData.get("identities")); Set<Identity> identities = new HashSet<>(); if (identityList != null && !identityList.isEmpty()) { for(Object identity : identityList) { Map<String, Object> jsonIdentity = CollectionUtils.toMap(identity); identities.add(tokenUtil.jsonToIdentity(jsonIdentity)); } } Token token = tokenUtil.createToken(identities, null); return token; } catch(HttpHostConnectException ex) { log.error("Auth Service not reachable at [{}]", 
ServiceAuthConstants.AUTH_SERVICE_URL); return null; } catch (IOException e) { log.error("Failed to get token from Auth Service.", e); throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, ServiceAuthConstants.AUTH_ERROR, "Failed to get Auth token.", null); } catch (TokenException e) { log.error("Failed to decrypt the token from Auth Service.", e); return null; } } public Token refreshToken(String accessToken) { //get the token from the auth service StringBuilder authUrl = new StringBuilder(ServiceAuthConstants.AUTH_SERVICE_URL.get()); authUrl.append("/token"); HttpResponse response; try { Map<String, String> data = new HashMap<>(); data.put("accessToken", accessToken); String jsonString = jsonMapper.writeValueAsString(data); Request temp = Request.Post(authUrl.toString()).addHeader(ServiceAuthConstants.ACCEPT, ServiceAuthConstants.APPLICATION_JSON) .bodyString(jsonString, ContentType.APPLICATION_JSON); response = temp.execute().returnResponse(); int statusCode = response.getStatusLine().getStatusCode(); if(statusCode >= 300) { log.error("Got error from Auth service. 
statusCode: {}", statusCode); throw new ClientVisibleException(ResponseCodes.SERVICE_UNAVAILABLE, ServiceAuthConstants.AUTH_ERROR, "Error Response from Auth service", "Status code from Auth Service: " + Integer.toString(statusCode)); } Map<String, Object> jsonData = jsonMapper.readValue(response.getEntity().getContent()); String encryptedToken = (String)jsonData.get(ServiceAuthConstants.JWT_KEY); Map<String, Object> decryptedToken = tokenService.getJsonPayload(encryptedToken, false); String newAccessToken = (String)decryptedToken.get("access_token"); ApiRequest request = ApiContext.getContext().getApiRequest(); request.setAttribute(ServiceAuthConstants.ACCESS_TOKEN, newAccessToken); List<?> identityList = CollectionUtils.toList(jsonData.get("identities")); Set<Identity> identities = new HashSet<>(); if (identityList != null && !identityList.isEmpty()) { for(Object identity : identityList) { Map<String, Object> jsonIdentity = CollectionUtils.toMap(identity); identities.add(tokenUtil.jsonToIdentity(jsonIdentity)); } } Token token = tokenUtil.createToken(identities, null); return token; } catch(HttpHostConnectException ex) { log.error("Auth Service not reachable at [{}]", ServiceAuthConstants.AUTH_SERVICE_URL); return null; } catch (IOException e) { log.error("Failed to get token from Auth Service.", e); throw new ClientVisibleException(ResponseCodes.INTERNAL_SERVER_ERROR, ServiceAuthConstants.AUTH_ERROR, "Failed to get Auth token.", null); } catch (TokenException e) { log.error("Failed to decrypt the token from Auth Service.", e); return null; } } public List<Identity> searchIdentities(String name, boolean exactMatch) { if (!isConfigured()) { return new ArrayList<>(); } List<Identity> identities = new ArrayList<>(); StringBuilder authUrl = new StringBuilder(ServiceAuthConstants.AUTH_SERVICE_URL.get()); HttpResponse response; try { authUrl.append("/identities?name=").append(URLEncoder.encode(name, "UTF-8")); Request temp = 
// Tail of searchIdentities(): issue the prepared GET against the auth service,
// forward the caller's external access token as a Bearer header, and map each
// entry of the JSON "data" array to an Identity.
Request.Get(authUrl.toString()).addHeader(ServiceAuthConstants.ACCEPT, ServiceAuthConstants.APPLICATION_JSON);
String externalAccessToken = (String) ApiContext.getContext().getApiRequest().getAttribute(ServiceAuthConstants.ACCESS_TOKEN);
// NOTE(review): the literal has a leading space (" Bearer ") — the header value
// will start with a blank. Most servers tolerate it, but confirm it is intended.
String bearerToken = " Bearer "+ externalAccessToken;
temp.addHeader(ServiceAuthConstants.AUTHORIZATION, bearerToken);
response = temp.execute().returnResponse();
int statusCode = response.getStatusLine().getStatusCode();
if(statusCode >= 300) {
    // Non-2xx from the auth service: log and return whatever was collected so far.
    log.error("searchIdentities: Got error from Auth service. statusCode: {}", statusCode);
    return identities;
}
Map<String, Object> jsonData = jsonMapper.readValue(response.getEntity().getContent());
List<?> identityList = CollectionUtils.toList(jsonData.get("data"));
if (identityList != null && !identityList.isEmpty()) {
    for(Object identity : identityList) {
        Map<String, Object> jsonIdentity = CollectionUtils.toMap(identity);
        identities.add(tokenUtil.jsonToIdentity(jsonIdentity));
    }
}
} catch(HttpHostConnectException ex) {
    // Service down/unreachable: degrade to an empty (or partial) result.
    log.error("Auth Service not reachable at [{}]", ServiceAuthConstants.AUTH_SERVICE_URL);
} catch (ClientVisibleException e) {
    // NOTE(review): this branch and the generic catch below do the same thing;
    // consider collapsing them unless ClientVisibleException needs distinct handling.
    log.error("Failed to search identities from Auth Service.", e);
} catch (Exception e) {
    log.error("Failed to search identities from Auth Service.", e);
}
return identities;
}

/**
 * Looks up a single identity by external id and scope.
 * <p>
 * When the provider does not support identity lookup (NO_IDENTITY_LOOKUP_SUPPORTED,
 * per the inline comment this is the SAML case), the identity is resolved from the
 * identities embedded in the current JWT instead of calling the auth service.
 * Otherwise performs GET /identities?externalId=...&externalIdType=... against the
 * auth service, forwarding the caller's access token as a Bearer header.
 *
 * @param id    external identifier of the identity
 * @param scope external id type (e.g. user/group kind as defined by the provider)
 * @return the resolved Identity, or {@code null} if auth is not configured, the
 *         service returns an error status, or an I/O failure occurs
 */
public Identity getIdentity(String id, String scope) {
    if (!isConfigured()) {
        return null;
    }
    //check if the setting 'support.identity.lookup = false', if yes then lookup the identity from token
    if(ServiceAuthConstants.NO_IDENTITY_LOOKUP_SUPPORTED.get()) {
        // This means it is saml (among github and saml)
        log.debug("Identity lookup is not supported at the provider");
        if (tokenUtil.findAndSetJWT()) {
            Set<Identity> identitiesInToken = tokenUtil.getIdentities();
            log.debug("Found identitiesInToken {}", identitiesInToken);
            for (Identity identity : identitiesInToken) {
                // Match on both external id and type; mark as a user when the
                // type equals the configured USER_TYPE.
                if(identity != null && id.equals(identity.getExternalId()) && scope.equals(identity.getExternalIdType())) {
                    if (StringUtils.equals(identity.getExternalIdType(), ServiceAuthConstants.USER_TYPE.get())) {
                        identity.setUser(true);
                    }
                    return identity;
                }
            }
        }
    }
    StringBuilder authUrl = new StringBuilder(ServiceAuthConstants.AUTH_SERVICE_URL.get());
    HttpResponse response;
    try {
        authUrl.append("/identities?externalId=").append(URLEncoder.encode(id, "UTF-8")).append("&externalIdType=")
                .append(URLEncoder.encode(scope, "UTF-8"));
        Request temp = Request.Get(authUrl.toString()).addHeader(ServiceAuthConstants.ACCEPT, ServiceAuthConstants.APPLICATION_JSON);
        String externalAccessToken = (String) ApiContext.getContext().getApiRequest().getAttribute(ServiceAuthConstants.ACCESS_TOKEN);
        // NOTE(review): same leading-space " Bearer " literal as in searchIdentities.
        String bearerToken = " Bearer "+ externalAccessToken;
        temp.addHeader(ServiceAuthConstants.AUTHORIZATION, bearerToken);
        response = temp.execute().returnResponse();
        int statusCode = response.getStatusLine().getStatusCode();
        if(statusCode >= 300) {
            log.error("getIdentity: Got error from Auth service. statusCode: {}", statusCode);
            return null;
        }
        Map<String, Object> jsonData = jsonMapper.readValue(response.getEntity().getContent());
        return tokenUtil.jsonToIdentity(jsonData);
    } catch(HttpHostConnectException ex) {
        log.error("Auth Service not reachable at [{}]", ServiceAuthConstants.AUTH_SERVICE_URL);
        return null;
    } catch (IOException e) {
        log.error("Failed to get token from Auth Service.", e);
        return null;
    }
}

/**
 * Resolves the set of identities for the given account.
 * <p>
 * Fast path: if a JWT is already present on the request (findAndSetJWT), just
 * attach the stored access token and delegate to tokenUtil. Otherwise, when
 * security is enabled and an access token exists, reuse a cached AuthToken for
 * the account or mint one via refreshToken(), then attach both the JWT and the
 * access token to the current API request before delegating.
 *
 * @param account account whose ACCESS_TOKEN data field is consulted
 * @return identities resolved from the token, or an empty set when auth is not
 *         configured, no usable JWT can be obtained, or the auth service errors
 */
public Set<Identity> getIdentities(Account account) {
    if (!isConfigured()) {
        return new HashSet<>();
    }
    String accessToken = (String) DataAccessor.fields(account).withKey(ServiceAuthConstants.ACCESS_TOKEN).get();
    if (tokenUtil.findAndSetJWT()) {
        ApiRequest request = ApiContext.getContext().getApiRequest();
        request.setAttribute(ServiceAuthConstants.ACCESS_TOKEN, accessToken);
        return tokenUtil.getIdentities();
    }
    String jwt = null;
    if (!StringUtils.isBlank(accessToken) && SecurityConstants.SECURITY.get()) {
        AuthToken authToken = authTokenDao.getTokenByAccountId(account.getId());
        if (authToken == null) {
            try {
                //refresh token API.
                Token token = refreshToken(accessToken);
                if (token != null) {
                    // NOTE(review): this assignment is immediately overwritten by
                    // jwt = authToken.getKey() below — dead store; confirm which
                    // value (prefixed raw JWT vs. stored key) is the intended one.
                    jwt = ProjectConstants.AUTH_TYPE + token.getJwt();
                    authToken = authTokenDao.createToken(token.getJwt(), token.getAuthProvider(), account.getId(), account.getId());
                    jwt = authToken.getKey();
                    // Re-read the access token; presumably refreshToken() updated
                    // the account's data field — TODO confirm.
                    accessToken = (String) DataAccessor.fields(account).withKey(ServiceAuthConstants.ACCESS_TOKEN).get();
                }
            } catch (ClientVisibleException e) {
                log.error("Got error from Auth service.error", e);
                return Collections.emptySet();
            }
        } else {
            // Cached token for this account — reuse its key.
            jwt = authToken.getKey();
        }
    }
    if (StringUtils.isBlank(jwt)){
        return Collections.emptySet();
    }
    ApiRequest request = ApiContext.getContext().getApiRequest();
    request.setAttribute(tokenUtil.tokenType(), jwt);
    request.setAttribute(ServiceAuthConstants.ACCESS_TOKEN, accessToken);
    return tokenUtil.getIdentities();
}

/**
 * An external auth provider is considered configured when a provider name is set,
 * it is neither the "no provider" sentinel nor one of the internal providers, and
 * the external-auth-provider flag is on.
 */
public boolean isConfigured() {
    return SecurityConstants.AUTH_PROVIDER.get() != null
            && !SecurityConstants.NO_PROVIDER.equalsIgnoreCase(SecurityConstants.AUTH_PROVIDER.get())
            && !SecurityConstants.INTERNAL_AUTH_PROVIDERS.contains(SecurityConstants.AUTH_PROVIDER.get())
            && ServiceAuthConstants.IS_EXTERNAL_AUTH_PROVIDER.get();
}

/** Identity pass-through: this provider applies no inverse transformation. */
public Identity untransform(Identity identity) {
    return identity;
}

/** Identity pass-through: this provider applies no transformation. */
public Identity transform(Identity identity) {
    return identity;
}

/**
 * Fetches the provider's login redirect URL from GET /redirectUrl on the auth
 * service. Returns the "redirectUrl" field of the JSON response, or the empty
 * string on any error (non-2xx status, unreachable service, I/O failure, or a
 * response without that field).
 */
public String getRedirectUrl() {
    StringBuilder authUrl = new StringBuilder(ServiceAuthConstants.AUTH_SERVICE_URL.get());
    authUrl.append("/redirectUrl");
    HttpResponse response;
    try {
        Request temp = Request.Get(authUrl.toString()).addHeader(ServiceAuthConstants.ACCEPT, ServiceAuthConstants.APPLICATION_JSON);
        response = temp.execute().returnResponse();
        int statusCode = response.getStatusLine().getStatusCode();
        if(statusCode >= 300) {
            log.error("Got error from Auth service. statusCode: {}", statusCode);
            return "";
        }
        Map<String, Object> jsonData = jsonMapper.readValue(response.getEntity().getContent());
        if( jsonData != null && !jsonData.isEmpty()) {
            if (jsonData.containsKey("redirectUrl")) {
                return (String)jsonData.get("redirectUrl");
            }
        }
    } catch(HttpHostConnectException ex) {
        log.error("Auth Service not reachable at [{}]", ServiceAuthConstants.AUTH_SERVICE_URL);
    } catch (IOException e) {
        log.error("Failed to get the redirectUrl from Auth Service.", e);
    }
    return "";
}

/**
 * Returns the current token (from tokenUtil) decorated with the provider's
 * redirect URL, or {@code null} when no current token exists.
 */
public Token readCurrentToken() {
    // NOTE(review): the freshly constructed Token is immediately replaced by
    // retrieveCurrentToken() — the first assignment is a dead store.
    Token token = new Token();
    token = tokenUtil.retrieveCurrentToken();
    if (token != null) {
        String redirect = getRedirectUrl();
        token.setRedirectUrl(redirect);
    }
    return token;
}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.text.tests.java.text;

import java.text.DateFormat;
import java.text.DateFormatSymbols;
import java.text.FieldPosition;
import java.text.ParseException;
import java.text.ParsePosition;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.SimpleTimeZone;
import java.util.TimeZone;

import tests.support.Support_SimpleDateFormat;

/**
 * JUnit 3 test suite for java.text.SimpleDateFormat (Apache Harmony).
 * The two shared formatter instances below are reconfigured (applyPattern,
 * setTimeZone) by the helper methods of the inner TestFormat class.
 */
public class SimpleDateFormatTest extends junit.framework.TestCase {

    // Shared formatter used by TestFormat.test()/verifyFormatTimezone().
    static SimpleDateFormat format = new SimpleDateFormat("", Locale.ENGLISH);

    // Shared formatter used by TestFormat.parse().
    static SimpleDateFormat pFormat = new SimpleDateFormat("", Locale.ENGLISH);

    /**
     * Helper harness: formats/parses with the shared formatters and asserts on
     * the result plus the reported FieldPosition / ParsePosition.
     */
    static class TestFormat extends junit.framework.TestCase {
        boolean testsFailed = false;

        public TestFormat(String name) {
            super(name);
        }

        /**
         * Formats cal with the given pattern and checks the produced text and
         * the begin/end indices recorded in the FieldPosition for `field`.
         * On a Sun VM failures are only printed (see inline comment).
         */
        public void test(String pattern, Calendar cal, String expected, int field) {
            StringBuffer buffer = new StringBuffer();
            FieldPosition position = new FieldPosition(field);
            format.applyPattern(pattern);
            format.format(cal.getTime(), buffer, position);
            String result = buffer.toString();
            if (!System.getProperty("java.vendor", "None").substring(0, 3)
                    .equals("Sun")) {
                assertTrue("Wrong format: \"" + pattern + "\" expected: "
                        + expected + " result: " + result, result
                        .equals(expected));
                // Patterns all start with one literal space, so the formatted
                // field is expected to begin at index 1.
                assertTrue("Wrong begin position: " + pattern + " expected: "
                        + expected + " field: " + field, position
                        .getBeginIndex() == 1);
                assertTrue("Wrong end position: " + pattern + " expected: "
                        + expected + " field: " + field,
                        position.getEndIndex() == result.length());
            } else {
                // Print the failure but don't use assert as this
                // will stop subsequent tests from running
                if (!result.equals(expected)) {
                    System.out
                            .println("Wrong format: \"" + pattern
                                    + "\" expected: " + expected + " result: "
                                    + result);
                    testsFailed = true;
                }
            }
        }

        /** @return true when a failure was recorded on the Sun-VM print path. */
        public boolean testsFailed() {
            return testsFailed;
        }

        /**
         * Parses `input` from `start` with the given pattern and asserts both
         * the resulting Date and the final parse index `end`.
         */
        public void parse(String pattern, String input, Date expected,
                int start, int end) {
            pFormat.applyPattern(pattern);
            ParsePosition position = new ParsePosition(start);
            Date result = pFormat.parse(input, position);
            assertTrue("Wrong result: " + pattern + " input: " + input
                    + " expected: " + expected + " result: " + result, expected
                    .equals(result));
            assertTrue("Wrong end position: " + pattern + " input: " + input,
                    position.getIndex() == end);
        }

        /**
         * Checks "z, zzzz" (short/long zone name) and "Z, ZZZZ" (RFC-822 offset)
         * formatting of `date` for the given time zone id.
         */
        public void verifyFormatTimezone(String timeZoneId, String expected1,
                String expected2, Date date) {
            format.setTimeZone(SimpleTimeZone.getTimeZone(timeZoneId));
            format.applyPattern("z, zzzz");
            assertEquals("Test z for TimeZone : " + timeZoneId, expected1,
                    format.format(date));
            format.applyPattern("Z, ZZZZ");
            assertEquals("Test Z for TimeZone : " + timeZoneId, expected2,
                    format.format(date));
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#SimpleDateFormat()
     */
    public void test_Constructor() {
        // Test for method java.text.SimpleDateFormat()
        // Default constructor must match the SHORT/SHORT date-time instance
        // for the default locale and use the default symbols.
        SimpleDateFormat f2 = new SimpleDateFormat();
        assertTrue("Wrong class", f2.getClass() == SimpleDateFormat.class);
        assertTrue("Wrong default", f2.equals(DateFormat.getDateTimeInstance(
                DateFormat.SHORT, DateFormat.SHORT, Locale.getDefault())));
        assertTrue("Wrong symbols", f2.getDateFormatSymbols().equals(
                new DateFormatSymbols()));
        assertTrue("Doesn't work", f2.format(new Date()).getClass() == String.class);
    }

    /**
     * @tests java.text.SimpleDateFormat#SimpleDateFormat(java.lang.String)
     */
    public void test_ConstructorLjava_lang_String() {
        // Test for method java.text.SimpleDateFormat(java.lang.String)
        // A valid pattern is preserved and the default locale/symbols apply.
        SimpleDateFormat f2 = new SimpleDateFormat("yyyy");
        assertTrue("Wrong class", f2.getClass() == SimpleDateFormat.class);
        assertEquals("Wrong pattern", "yyyy", f2.toPattern());
        assertTrue("Wrong locale", f2.equals(new SimpleDateFormat("yyyy",
                Locale.getDefault())));
        assertTrue("Wrong symbols", f2.getDateFormatSymbols().equals(
                new DateFormatSymbols()));
        assertTrue("Doesn't work", f2.format(new Date()).getClass() == String.class);

        // Invalid constructor value.
        try {
            new SimpleDateFormat(
                    "this is an invalid simple date format");
            fail("Expected test_ConstructorLjava_lang_String to throw IAE.");
        } catch (IllegalArgumentException ex) {
            // expected
        } catch (Throwable ex) {
            fail("Expected test_ConstructorLjava_lang_String to throw IAE, not "
                    + ex.getClass().getName());
        }

        // Null string value
        try {
            new SimpleDateFormat(null);
            fail("Expected test_ConstructorLjava_lang_String to throw NPE.");
        } catch (NullPointerException ex) {
            // expected
        } catch (Throwable ex) {
            fail("Expected test_ConstructorLjava_lang_String to throw NPE, not "
                    + ex.getClass().getName());
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#SimpleDateFormat(java.lang.String,
     *        java.text.DateFormatSymbols)
     */
    public void test_ConstructorLjava_lang_StringLjava_text_DateFormatSymbols() {
        // Test for method java.text.SimpleDateFormat(java.lang.String,
        // java.text.DateFormatSymbols)
        // Caller-supplied symbols (with custom eras) must be retained.
        DateFormatSymbols symbols = new DateFormatSymbols(Locale.ENGLISH);
        symbols.setEras(new String[] { "Before", "After" });
        SimpleDateFormat f2 = new SimpleDateFormat("y'y'yy", symbols);
        assertTrue("Wrong class", f2.getClass() == SimpleDateFormat.class);
        assertEquals("Wrong pattern", "y'y'yy", f2.toPattern());
        assertTrue("Wrong symbols", f2.getDateFormatSymbols().equals(symbols));
        assertTrue("Doesn't work", f2.format(new Date()).getClass() == String.class);
    }

    /**
     * @tests java.text.SimpleDateFormat#SimpleDateFormat(java.lang.String,
     *        java.util.Locale)
     */
    public void test_ConstructorLjava_lang_StringLjava_util_Locale() {
        // Test for method java.text.SimpleDateFormat(java.lang.String,
        // java.util.Locale)
        // Symbols must come from the supplied locale (German here).
        SimpleDateFormat f2 = new SimpleDateFormat("'yyyy' MM yy",
                Locale.GERMAN);
        assertTrue("Wrong class", f2.getClass() == SimpleDateFormat.class);
        assertEquals("Wrong pattern", "'yyyy' MM yy", f2.toPattern());
        assertTrue("Wrong symbols", f2.getDateFormatSymbols().equals(
                new DateFormatSymbols(Locale.GERMAN)));
        assertTrue("Doesn't work", f2.format(new Date()).getClass() == String.class);
    }

    /**
     * @tests java.text.SimpleDateFormat#applyLocalizedPattern(java.lang.String)
     */
    public void test_applyLocalizedPatternLjava_lang_String() {
        // Test for method void
        // java.text.SimpleDateFormat.applyLocalizedPattern(java.lang.String)
        // de_CH localizes 'y'->'u' and 'd'->'t'; applying the localized form
        // must yield the canonical pattern from toPattern().
        SimpleDateFormat f2 = new SimpleDateFormat("y", new Locale("de", "CH"));
        f2.applyLocalizedPattern("GuMtkHmsSEDFwWahKz");
        String pattern = f2.toPattern();
        assertTrue("Wrong pattern: " + pattern, pattern
                .equals("GyMdkHmsSEDFwWahKz"));

        // test the new "Z" pattern char
        f2 = new SimpleDateFormat("y", new Locale("de", "CH"));
        f2.applyLocalizedPattern("G u M t Z");
        pattern = f2.toPattern();
        assertTrue("Wrong pattern: " + pattern, pattern.equals("G y M d Z"));

        // test invalid patterns
        try {
            f2.applyLocalizedPattern("b");
            fail("Expected IllegalArgumentException for pattern with invalid pattern letter: b");
        } catch (IllegalArgumentException e) {
        }

        try {
            // 'y' is not a valid localized letter in this locale ('u' is).
            f2.applyLocalizedPattern("y");
            fail("Expected IllegalArgumentException for pattern with invalid pattern letter: y");
        } catch (IllegalArgumentException e) {
        }

        try {
            f2.applyLocalizedPattern("a '");
            fail("Expected IllegalArgumentException for pattern with unterminated quote: a '");
        } catch (IllegalArgumentException e) {
        }

        try {
            f2.applyLocalizedPattern(null);
            fail("Expected NullPointerException for null pattern");
        } catch (NullPointerException e) {
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#applyPattern(java.lang.String)
     */
    public void test_applyPatternLjava_lang_String() {
        // Test for method void
        // java.text.SimpleDateFormat.applyPattern(java.lang.String)
        SimpleDateFormat f2 = new SimpleDateFormat("y", new Locale("de", "CH"));
        f2.applyPattern("GyMdkHmsSEDFwWahKz");
        assertEquals("Wrong pattern", "GyMdkHmsSEDFwWahKz", f2.toPattern());

        // test invalid patterns
        try {
            f2.applyPattern("b");
            fail("Expected IllegalArgumentException for pattern with invalid patter letter: b");
        } catch (IllegalArgumentException e) {
        }

        try {
            // 'u' is only valid as a *localized* letter for this locale.
            f2.applyPattern("u");
            fail("Expected IllegalArgumentException for pattern with invalid patter letter: u");
        } catch (IllegalArgumentException e) {
        }

        try {
            f2.applyPattern("a '");
            fail("Expected IllegalArgumentException for pattern with unterminated quote: a '");
        } catch (IllegalArgumentException e) {
        }

        try {
            f2.applyPattern(null);
            fail("Expected NullPointerException for null pattern");
        } catch (NullPointerException e) {
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#clone()
     */
    public void test_clone() {
        // Test for method java.lang.Object java.text.SimpleDateFormat.clone()
        // A clone is equal until its pattern diverges.
        SimpleDateFormat f2 = new SimpleDateFormat();
        SimpleDateFormat clone = (SimpleDateFormat) f2.clone();
        assertTrue("Invalid clone", f2.equals(clone));
        clone.applyPattern("y");
        assertTrue("Format modified", !f2.equals(clone));
        clone = (SimpleDateFormat) f2.clone();
        // Date date = clone.get2DigitYearStart();
        // date.setTime(0);
        // assertTrue("Equal after date change: " +
        // f2.get2DigitYearStart().getTime() + " " +
        // clone.get2DigitYearStart().getTime(), !f2.equals(clone));
    }

    /**
     * @tests java.text.SimpleDateFormat#equals(java.lang.Object)
     */
    public void test_equalsLjava_lang_Object() {
        // Test for method boolean
        // java.text.SimpleDateFormat.equals(java.lang.Object)
        SimpleDateFormat format = (SimpleDateFormat) DateFormat.getInstance();
        SimpleDateFormat clone = (SimpleDateFormat) format.clone();
        assertTrue("clone not equal", format.equals(clone));
        // Formatting must not mutate equality-relevant state.
        format.format(new Date());
        assertTrue("not equal after format", format.equals(clone));
    }

    public void test_equals_afterFormat() {
        // Regression test for HARMONY-209
        SimpleDateFormat df = new SimpleDateFormat();
        df.format(new Date());
        assertEquals(df, new SimpleDateFormat());
    }

    /**
     * @tests java.text.SimpleDateFormat#formatToCharacterIterator(java.lang.Object)
     */
    public void test_formatToCharacterIteratorLjava_lang_Object() {
        try {
            // Regression for HARMONY-466
            new SimpleDateFormat().formatToCharacterIterator(null);
            fail("NullPointerException expected");
        } catch (NullPointerException e) {
            // expected
        }

        // Test for method formatToCharacterIterator(java.lang.Object)
        new Support_SimpleDateFormat(
                "test_formatToCharacterIteratorLjava_lang_Object")
                .t_formatToCharacterIterator();
    }

    /**
     * @tests java.text.SimpleDateFormat#format(java.util.Date,
     *        java.lang.StringBuffer, java.text.FieldPosition)
     */
    public void test_formatLjava_util_DateLjava_lang_StringBufferLjava_text_FieldPosition() {
        // Test for method java.lang.StringBuffer
        // java.text.SimpleDateFormat.format(java.util.Date,
        // java.lang.StringBuffer, java.text.FieldPosition)
        new Support_SimpleDateFormat(
                "test_formatLjava_util_DateLjava_lang_StringBufferLjava_text_FieldPosition")
                .t_format_with_FieldPosition();

        TestFormat test = new TestFormat(
                "test_formatLjava_util_DateLjava_lang_StringBufferLjava_text_FieldPosition");

        // Reference instant: 1999-06-02 15:03:06 (a Wednesday).
        Calendar cal = new GregorianCalendar(1999, Calendar.JUNE, 2, 15, 3, 6);

        // Era (G) — any repetition count yields the short era name.
        test.test(" G", cal, " AD", DateFormat.ERA_FIELD);
        test.test(" GG", cal, " AD", DateFormat.ERA_FIELD);
        test.test(" GGG", cal, " AD", DateFormat.ERA_FIELD);
        test.test(" G", new GregorianCalendar(-1999, Calendar.JUNE, 2), " BC",
                DateFormat.ERA_FIELD);

        // Year (y).
        test.test(" y", cal, " 99", DateFormat.YEAR_FIELD);
        test.test(" yy", cal, " 99", DateFormat.YEAR_FIELD);
        test.test(" yy", new
 GregorianCalendar(2001, Calendar.JUNE, 2), " 01",
                DateFormat.YEAR_FIELD);
        test.test(" yy", new GregorianCalendar(2000, Calendar.JUNE, 2), " 00",
                DateFormat.YEAR_FIELD);
        test.test(" yyy", new GregorianCalendar(2000, Calendar.JUNE, 2), " 00",
                DateFormat.YEAR_FIELD);
        test.test(" yyy", cal, " 99", DateFormat.YEAR_FIELD);
        test.test(" yyyy", cal, " 1999", DateFormat.YEAR_FIELD);
        test.test(" yyyyy", cal, " 01999", DateFormat.YEAR_FIELD);

        // Month (M) — numeric, zero-padded, and text forms.
        test.test(" M", cal, " 6", DateFormat.MONTH_FIELD);
        test.test(" M", new GregorianCalendar(1999, Calendar.NOVEMBER, 2),
                " 11", DateFormat.MONTH_FIELD);
        test.test(" MM", cal, " 06", DateFormat.MONTH_FIELD);
        test.test(" MMM", cal, " Jun", DateFormat.MONTH_FIELD);
        test.test(" MMMM", cal, " June", DateFormat.MONTH_FIELD);
        test.test(" MMMMM", cal, " June", DateFormat.MONTH_FIELD);

        // Day of month (d).
        test.test(" d", cal, " 2", DateFormat.DATE_FIELD);
        test.test(" d", new GregorianCalendar(1999, Calendar.NOVEMBER, 12),
                " 12", DateFormat.DATE_FIELD);
        test.test(" dd", cal, " 02", DateFormat.DATE_FIELD);
        test.test(" dddd", cal, " 0002", DateFormat.DATE_FIELD);

        // Hour 1-12 (h).
        test.test(" h", cal, " 3", DateFormat.HOUR1_FIELD);
        test.test(" h", new GregorianCalendar(1999, Calendar.NOVEMBER, 12),
                " 12", DateFormat.HOUR1_FIELD);
        test.test(" hh", cal, " 03", DateFormat.HOUR1_FIELD);
        test.test(" hhhh", cal, " 0003", DateFormat.HOUR1_FIELD);

        // Hour 0-23 (H).
        test.test(" H", cal, " 15", DateFormat.HOUR_OF_DAY0_FIELD);
        test.test(" H",
                new GregorianCalendar(1999, Calendar.NOVEMBER, 12, 4, 0), " 4",
                DateFormat.HOUR_OF_DAY0_FIELD);
        test.test(" H",
                new GregorianCalendar(1999, Calendar.NOVEMBER, 12, 12, 0),
                " 12", DateFormat.HOUR_OF_DAY0_FIELD);
        test.test(" H", new GregorianCalendar(1999, Calendar.NOVEMBER, 12),
                " 0", DateFormat.HOUR_OF_DAY0_FIELD);
        test.test(" HH", cal, " 15", DateFormat.HOUR_OF_DAY0_FIELD);
        test.test(" HHHH", cal, " 0015", DateFormat.HOUR_OF_DAY0_FIELD);

        // Minute (m).
        test.test(" m", cal, " 3", DateFormat.MINUTE_FIELD);
        test.test(" m",
                new GregorianCalendar(1999, Calendar.NOVEMBER, 12, 4, 47),
                " 47", DateFormat.MINUTE_FIELD);
        test.test(" mm", cal, " 03", DateFormat.MINUTE_FIELD);
        test.test(" mmmm", cal, " 0003", DateFormat.MINUTE_FIELD);

        // Second (s).
        test.test(" s", cal, " 6", DateFormat.SECOND_FIELD);
        test.test(" s",
                new GregorianCalendar(1999, Calendar.NOVEMBER, 12, 4, 47, 13),
                " 13", DateFormat.SECOND_FIELD);
        test.test(" ss", cal, " 06", DateFormat.SECOND_FIELD);
        test.test(" ssss", cal, " 0006", DateFormat.SECOND_FIELD);

        // Millisecond (S).
        test.test(" S", cal, " 0", DateFormat.MILLISECOND_FIELD);
        Calendar temp = new GregorianCalendar();
        temp.set(Calendar.MILLISECOND, 961);
        test.test(" SS", temp, " 961", DateFormat.MILLISECOND_FIELD);
        test.test(" SSSS", cal, " 0000", DateFormat.MILLISECOND_FIELD);
        test.test(" SS", cal, " 00", DateFormat.MILLISECOND_FIELD);

        // Day of week (E).
        test.test(" E", cal, " Wed", DateFormat.DAY_OF_WEEK_FIELD);
        test.test(" EE", cal, " Wed", DateFormat.DAY_OF_WEEK_FIELD);
        test.test(" EEE", cal, " Wed", DateFormat.DAY_OF_WEEK_FIELD);
        test.test(" EEEE", cal, " Wednesday", DateFormat.DAY_OF_WEEK_FIELD);
        test.test(" EEEEE", cal, " Wednesday", DateFormat.DAY_OF_WEEK_FIELD);

        // Day of year (D).
        test.test(" D", cal, " 153", DateFormat.DAY_OF_YEAR_FIELD);
        test.test(" DD", cal, " 153", DateFormat.DAY_OF_YEAR_FIELD);
        test.test(" DDDD", cal, " 0153", DateFormat.DAY_OF_YEAR_FIELD);

        // Day of week in month (F).
        test.test(" F", cal, " 1", DateFormat.DAY_OF_WEEK_IN_MONTH_FIELD);
        test.test(" F", new GregorianCalendar(1999, Calendar.NOVEMBER, 14),
                " 2", DateFormat.DAY_OF_WEEK_IN_MONTH_FIELD);
        test.test(" FF", cal, " 01", DateFormat.DAY_OF_WEEK_IN_MONTH_FIELD);
        test.test(" FFFF", cal, " 0001", DateFormat.DAY_OF_WEEK_IN_MONTH_FIELD);

        // Week of year (w).
        test.test(" w", cal, " 23", DateFormat.WEEK_OF_YEAR_FIELD);
        test.test(" ww", cal, " 23", DateFormat.WEEK_OF_YEAR_FIELD);
        test.test(" wwww", cal, " 0023", DateFormat.WEEK_OF_YEAR_FIELD);

        // Week of month (W).
        test.test(" W", cal, " 1", DateFormat.WEEK_OF_MONTH_FIELD);
        test.test(" W", new GregorianCalendar(1999, Calendar.NOVEMBER, 14),
                " 3", DateFormat.WEEK_OF_MONTH_FIELD);
        test.test(" WW", cal, " 01", DateFormat.WEEK_OF_MONTH_FIELD);
        test.test(" WWWW", cal, " 0001", DateFormat.WEEK_OF_MONTH_FIELD);

        // AM/PM marker (a) — repetition count is ignored.
        test.test(" a", cal, " PM", DateFormat.AM_PM_FIELD);
        test.test(" a", new GregorianCalendar(1999, Calendar.NOVEMBER, 14),
                " AM", DateFormat.AM_PM_FIELD);
        test.test(" a", new GregorianCalendar(1999, Calendar.NOVEMBER, 14, 12,
                0), " PM", DateFormat.AM_PM_FIELD);
        test.test(" aa", cal, " PM", DateFormat.AM_PM_FIELD);
        test.test(" aaa", cal, " PM", DateFormat.AM_PM_FIELD);
        test.test(" aaaa", cal, " PM", DateFormat.AM_PM_FIELD);
        test.test(" aaaaa", cal, " PM", DateFormat.AM_PM_FIELD);

        // Hour 1-24 (k) — midnight formats as 24.
        test.test(" k", cal, " 15", DateFormat.HOUR_OF_DAY1_FIELD);
        test.test(" k",
                new GregorianCalendar(1999, Calendar.NOVEMBER, 12, 4, 0), " 4",
                DateFormat.HOUR_OF_DAY1_FIELD);
        test.test(" k",
                new GregorianCalendar(1999, Calendar.NOVEMBER, 12, 12, 0),
                " 12", DateFormat.HOUR_OF_DAY1_FIELD);
        test.test(" k", new GregorianCalendar(1999, Calendar.NOVEMBER, 12),
                " 24", DateFormat.HOUR_OF_DAY1_FIELD);
        test.test(" kk", cal, " 15", DateFormat.HOUR_OF_DAY1_FIELD);
        test.test(" kkkk", cal, " 0015", DateFormat.HOUR_OF_DAY1_FIELD);

        // Hour 0-11 (K).
        test.test(" K", cal, " 3", DateFormat.HOUR0_FIELD);
        test.test(" K", new GregorianCalendar(1999, Calendar.NOVEMBER, 12),
                " 0", DateFormat.HOUR0_FIELD);
        test.test(" KK", cal, " 03", DateFormat.HOUR0_FIELD);
        test.test(" KKKK", cal, " 0003", DateFormat.HOUR0_FIELD);

        // Time zone (z) — named zones plus custom SimpleTimeZone offsets.
        format.setTimeZone(TimeZone.getTimeZone("EST"));
        test.test(" z", cal, " EDT", DateFormat.TIMEZONE_FIELD);
        Calendar temp2 = new GregorianCalendar(1999, Calendar.JANUARY, 12);
        test.test(" z", temp2, " EST", DateFormat.TIMEZONE_FIELD);
        test.test(" zz", cal, " EDT", DateFormat.TIMEZONE_FIELD);
        test.test(" zzz", cal, " EDT", DateFormat.TIMEZONE_FIELD);
        test.test(" zzzz", cal, " Eastern Daylight Time",
                DateFormat.TIMEZONE_FIELD);
        test.test(" zzzz", temp2, " Eastern Standard Time",
                DateFormat.TIMEZONE_FIELD);
        test.test(" zzzzz", cal, " Eastern Daylight Time",
                DateFormat.TIMEZONE_FIELD);
        format.setTimeZone(new SimpleTimeZone(60000, "ONE MINUTE"));
        test.test(" z", cal, " GMT+00:01", DateFormat.TIMEZONE_FIELD);
        test.test(" zzzz", cal, " GMT+00:01", DateFormat.TIMEZONE_FIELD);
        format.setTimeZone(new SimpleTimeZone(5400000, "ONE HOUR, THIRTY"));
        test.test(" z", cal, " GMT+01:30", DateFormat.TIMEZONE_FIELD);
        format
                .setTimeZone(new SimpleTimeZone(-5400000, "NEG ONE HOUR, THIRTY"));
        test.test(" z", cal, " GMT-01:30", DateFormat.TIMEZONE_FIELD);

        // Quoting/escaping of literal text in patterns.
        format.applyPattern("'Mkz''':.@5");
        assertEquals("Wrong output", "Mkz':.@5", format.format(new Date()));

        assertTrue("Tests failed", !test.testsFailed());

        // Test invalid args to format.
        SimpleDateFormat dateFormat = new SimpleDateFormat();
        try {
            dateFormat.format(null, new StringBuffer(), new FieldPosition(1));
            fail("Expected test to throw NPE.");
        } catch (NullPointerException ex) {
            // expected
        } catch (Throwable ex) {
            fail("Expected test to throw NPE, not " + ex.getClass().getName());
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#format(java.util.Date)
     */
    public void test_timeZoneFormatting() {
        // tests specific to formatting of timezones
        // Uses one DST and one standard-time instant per zone.
        Date summerDate = new GregorianCalendar(1999, Calendar.JUNE, 2, 15, 3,
                6).getTime();
        Date winterDate = new GregorianCalendar(1999, Calendar.JANUARY, 12)
                .getTime();

        TestFormat test = new TestFormat(
                "test_formatLjava_util_DateLjava_lang_StringBufferLjava_text_FieldPosition");

        test.verifyFormatTimezone("PST", "PDT, Pacific Daylight Time",
                "-0700, -0700", summerDate);
        test.verifyFormatTimezone("PST", "PST, Pacific Standard Time",
                "-0800, -0800", winterDate);

        test.verifyFormatTimezone("GMT-7", "GMT-07:00, GMT-07:00",
                "-0700, -0700", summerDate);
        test.verifyFormatTimezone("GMT-7", "GMT-07:00, GMT-07:00",
                "-0700, -0700", winterDate);

        // Pacific/Kiritimati is one of the timezones supported only in mJava
        test.verifyFormatTimezone("Pacific/Kiritimati", "LINT, Line Is. Time",
                "+1400, +1400", summerDate);
        test.verifyFormatTimezone("Pacific/Kiritimati", "LINT, Line Is. Time",
                "+1400, +1400", winterDate);

        test.verifyFormatTimezone("EST", "EDT, Eastern Daylight Time",
                "-0400, -0400", summerDate);
        test.verifyFormatTimezone("EST", "EST, Eastern Standard Time",
                "-0500, -0500", winterDate);

        test.verifyFormatTimezone("GMT+14", "GMT+14:00, GMT+14:00",
                "+1400, +1400", summerDate);
        test.verifyFormatTimezone("GMT+14", "GMT+14:00, GMT+14:00",
                "+1400, +1400", winterDate);
    }

    /**
     * @tests java.text.SimpleDateFormat#get2DigitYearStart()
     */
    public void test_get2DigitYearStart() {
        // Test for method java.util.Date
        // java.text.SimpleDateFormat.get2DigitYearStart()
        // Default two-digit-year window starts 80 years before "now".
        SimpleDateFormat f1 = new SimpleDateFormat("y");
        Date date = f1.get2DigitYearStart();
        Calendar cal = new GregorianCalendar();
        int year = cal.get(Calendar.YEAR);
        cal.setTime(date);
        assertTrue("Wrong default year start", cal.get(Calendar.YEAR) == (year - 80));
    }

    /**
     * @tests java.text.SimpleDateFormat#getDateFormatSymbols()
     */
    public void test_getDateFormatSymbols() {
        // Test for method java.text.DateFormatSymbols
        // java.text.SimpleDateFormat.getDateFormatSymbols()
        // The getter must return a defensive copy, not the internal instance.
        SimpleDateFormat df = (SimpleDateFormat) DateFormat.getInstance();
        DateFormatSymbols dfs = df.getDateFormatSymbols();
        assertTrue("Symbols identical", dfs != df.getDateFormatSymbols());
    }

    /**
     * @tests java.text.SimpleDateFormat#parse(java.lang.String,
     *        java.text.ParsePosition)
     */
    public void test_parseLjava_lang_StringLjava_text_ParsePosition() {
        // Test for method java.util.Date
        // java.text.SimpleDateFormat.parse(java.lang.String,
        // java.text.ParsePosition)
        TestFormat test = new TestFormat(
                "test_formatLjava_util_DateLjava_lang_StringBufferLjava_text_FieldPosition");

        // Hour patterns around midnight (epoch day).
        Calendar cal = new GregorianCalendar(1970, Calendar.JANUARY, 1);
        Date time = cal.getTime();
        test.parse("h", " 12", time, 1, 3);
        test.parse("H", " 0", time, 1, 2);
        test.parse("k", " 24", time, 1, 3);
        test.parse("K", " 0", time, 1, 2);

        cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 1, 0);
        time = cal.getTime();
        test.parse("h", "1", time, 0, 1);
        test.parse("H", "1 ", time, 0, 1);
        test.parse("k", "1", time, 0, 1);
        test.parse("K", "1", time, 0, 1);

        cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 11, 0);
        time = cal.getTime();
        // Leading zeros are consumed by the numeric parser.
        test.parse("h", "0011 ", time, 0, 4);
        test.parse("K", "11", time, 0, 2);
        cal = new GregorianCalendar(1970, Calendar.JANUARY, 1, 23, 0);
        time = cal.getTime();
        test.parse("H", "23", time, 0, 2);
        test.parse("k", "23", time, 0, 2);

        // AM/PM parsing is case-insensitive ("pm" below).
        test.parse("h a", " 3 AM", new GregorianCalendar(1970,
                Calendar.JANUARY, 1, 3, 0).getTime(), 1, 5);
        test.parse("K a", " 3 pm ", new GregorianCalendar(1970,
                Calendar.JANUARY, 1, 15, 0).getTime(), 1, 5);

        // Minutes and seconds, with and without separators.
        test.parse("m:s", "0:59 ", new GregorianCalendar(1970,
                Calendar.JANUARY, 1, 0, 0, 59).getTime(), 0, 4);
        test.parse("m:s", "59:0", new GregorianCalendar(1970, Calendar.JANUARY,
                1, 0, 59, 0).getTime(), 0, 4);
        test.parse("ms", "059", new GregorianCalendar(1970, Calendar.JANUARY,
                1, 0, 0, 59).getTime(), 0, 3);

        // Milliseconds and zone names.
        cal = new GregorianCalendar(1970, Calendar.JANUARY, 1);
        test.parse("S", "0", cal.getTime(), 0, 1);
        cal.setTimeZone(TimeZone.getTimeZone("HST"));
        cal.set(Calendar.MILLISECOND, 999);
        test.parse("S z", "999 HST", cal.getTime(), 0, 7);

        // Era parsing is case-insensitive ("Bc").
        cal = new GregorianCalendar(1970, Calendar.JANUARY, 1);
        cal.set(Calendar.ERA, GregorianCalendar.BC);
        test.parse("G", "Bc ", cal.getTime(), 0, 2);

        // Year parsing: two-digit years map into the default century window
        // for "y"/"yy"; longer patterns take the literal value.
        test.parse("y", "00", new GregorianCalendar(2000, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("y", "99", new GregorianCalendar(1999, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("y", "1", new GregorianCalendar(1, Calendar.JANUARY, 1)
                .getTime(), 0, 1);
        test.parse("y", "-1", new GregorianCalendar(-1, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("y", "001", new GregorianCalendar(1, Calendar.JANUARY, 1)
                .getTime(), 0, 3);
        test.parse("y", "2005", new GregorianCalendar(2005, Calendar.JANUARY,
                1).getTime(), 0, 4);
        test.parse("yy", "00", new GregorianCalendar(2000, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("yy", "99", new GregorianCalendar(1999, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("yy", "1", new GregorianCalendar(1, Calendar.JANUARY, 1)
                .getTime(), 0, 1);
        test.parse("yy", "-1", new GregorianCalendar(-1, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("yy", "001", new GregorianCalendar(1, Calendar.JANUARY, 1)
                .getTime(), 0, 3);
        test.parse("yy", "2005", new GregorianCalendar(2005, Calendar.JANUARY,
                1).getTime(), 0, 4);
        test.parse("yyy", "99", new GregorianCalendar(99, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("yyy", "1", new GregorianCalendar(1, Calendar.JANUARY, 1)
                .getTime(), 0, 1);
        test.parse("yyy", "-1", new GregorianCalendar(-1, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("yyy", "001", new GregorianCalendar(1, Calendar.JANUARY, 1)
                .getTime(), 0, 3);
        test.parse("yyy", "2005", new GregorianCalendar(2005, Calendar.JANUARY,
                1).getTime(), 0, 4);
        test.parse("yyyy", "99", new GregorianCalendar(99, Calendar.JANUARY, 1)
                .getTime(), 0, 2);
        test.parse("yyyy", " 1999", new GregorianCalendar(1999,
                Calendar.JANUARY, 1).getTime(), 2, 6);

        // Month names, quoted literals, week/day-of-week combinations.
        test.parse("MM'M'", "4M", new GregorianCalendar(1970, Calendar.APRIL,
                1).getTime(), 0, 2);
        test.parse("MMM", "Feb", new GregorianCalendar(1970,
                Calendar.FEBRUARY, 1).getTime(), 0, 3);
        test.parse("MMMM d", "April 14 ", new GregorianCalendar(1970,
                Calendar.APRIL, 14).getTime(), 0, 8);
        test.parse("MMMMd", "April14 ", new GregorianCalendar(1970,
                Calendar.APRIL, 14).getTime(), 0, 7);
        test.parse("E w", "Mon 12", new GregorianCalendar(1970, Calendar.MARCH,
                16).getTime(), 0, 6);
        test.parse("Ew", "Mon12", new GregorianCalendar(1970, Calendar.MARCH,
                16).getTime(), 0, 5);
        test.parse("M EE ''W", "5 Tue '2", new GregorianCalendar(1970,
                Calendar.MAY, 5).getTime(), 0, 8);
        test.parse("MEE''W", "5Tue'2", new GregorianCalendar(1970,
                Calendar.MAY, 5).getTime(), 0, 6);
        test.parse("MMM EEE F", " JUL Sunday 3", new GregorianCalendar(1970,
                Calendar.JULY, 19).getTime(), 1, 13);
        test.parse("MMMEEEF", " JULSunday3", new GregorianCalendar(1970,
                Calendar.JULY, 19).getTime(), 1, 11);

        // Zone parsing: named zones, GMT offsets (including out-of-range
        // hour offsets that wrap) and RFC-822 style +/-HHmm offsets.
        cal = new GregorianCalendar(1970, Calendar.JANUARY, 1);
        cal.setTimeZone(TimeZone.getTimeZone("GMT+0:1"));
        cal.set(Calendar.DAY_OF_YEAR, 243);
        test.parse("D z", "243 GMT+0:0", cal.getTime(), 0, 11);
        cal.setTimeZone(TimeZone.getTimeZone("EST"));
        cal.set(1970, Calendar.JANUARY, 1, 4, 30);
        test.parse("h:m z", "4:30 GMT-5 ", cal.getTime(), 0, 10);
        test.parse("h z", "14 GMT-24 ", new Date(51840000), 0, 9);
        test.parse("h z", "14 GMT-23 ", new Date(133200000), 0, 9);
        test.parse("h z", "14 GMT-0001 ", new Date(54000000), 0, 11);
        test.parse("h z", "14 GMT+24 ", new Date(48960000), 0, 9);
        test.parse("h z", "14 GMT+23 ", new Date(-32400000), 0, 9);
        test.parse("h z", "14 GMT+0001 ", new Date(46800000), 0, 11);
        test.parse("h z", "14 +0001 ", new Date(46800000), 0, 8);
        test.parse("h z", "14 -0001 ", new Date(54000000), 0, 8);

        // Fully numeric timestamp without separators.
        test.parse("yyyyMMddHHmmss", "19990913171901", new GregorianCalendar(
                1999, Calendar.SEPTEMBER, 13, 17, 19, 01).getTime(), 0, 14);

        // Round-trip format->parse checks, plus parsing of derived fields
        // (F, w, W, D) combined with explicit dates. 1015822800000L is
        // 2002-03-11 00:00 EST.
        Date d = new Date(1015822800000L);
        SimpleDateFormat df = new SimpleDateFormat("", new Locale("en", "US"));
        df.setTimeZone(TimeZone.getTimeZone("EST"));
        try {
            df.applyPattern("dd MMMM yyyy EEEE");
            String output = df.format(d);
            Date date = df.parse(output);
            assertTrue("Invalid result 1: " + date, d.equals(date));
            df.applyPattern("dd MMMM yyyy F");
            output = df.format(d);
            date = df.parse(output);
            assertTrue("Invalid result 2: " + date, d.equals(date));
            df.applyPattern("dd MMMM yyyy w");
            output = df.format(d);
            date = df.parse(output);
            assertTrue("Invalid result 3: " + date, d.equals(date));
            df.applyPattern("dd MMMM yyyy W");
            output = df.format(d);
            date = df.parse(output);
            assertTrue("Invalid result 4: " + date, d.equals(date));
            df.applyPattern("dd MMMM yyyy D");
            // Conflicting D (day-of-year) wins over the explicit dd/MMMM here.
            date = df.parse("5 January 2002 70");
            assertTrue("Invalid result 5: " + date, d.equals(date));
            df.applyPattern("W w dd MMMM yyyy EEEE");
            output = df.format(d);
            date = df.parse("3 12 5 March 2002 Monday");
            assertTrue("Invalid result 6: " + date, d.equals(date));
            df.applyPattern("w W dd MMMM yyyy EEEE");
            output = df.format(d);
            date = df.parse("12 3 5 March 2002 Monday");
            assertTrue("Invalid result 6a: " + date, d.equals(date));
            df.applyPattern("F dd MMMM yyyy EEEE");
            output = df.format(d);
            date = df.parse("2 5 March 2002 Monday");
            assertTrue("Invalid result 7: " + date, d.equals(date));
            df.applyPattern("w dd MMMM yyyy EEEE");
            output = df.format(d);
            date = df.parse("11 5 January 2002 Monday");
            assertTrue("Invalid result 8: " + date, d.equals(date));
            df.applyPattern("w dd yyyy EEEE MMMM");
            output = df.format(d);
            date = df.parse("11 5 2002 Monday January");
            assertTrue("Invalid result 9: " + date, d.equals(date));
            df.applyPattern("w yyyy EEEE MMMM dd");
            output = df.format(d);
            date = df.parse("17 2002 Monday March 11");
            assertTrue("Invalid result 10: " + date, d.equals(date));
            df.applyPattern("dd D yyyy MMMM");
            output = df.format(d);
            date = df.parse("5 70 2002 January");
            assertTrue("Invalid result 11: " + date, d.equals(date));
            df.applyPattern("D dd yyyy MMMM");
            output = df.format(d);
            date = df.parse("240 11 2002 March");
            assertTrue("Invalid result 12: " + date, d.equals(date));
        } catch (ParseException e) {
            fail("unexpected: " + e);
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#set2DigitYearStart(java.util.Date)
     */
    public void test_set2DigitYearStartLjava_util_Date() {
        // Test for method void
        // java.text.SimpleDateFormat.set2DigitYearStart(java.util.Date)
        // With the window anchored at 1950, "49" -> 2049 and "50" -> 1950.
        SimpleDateFormat f1 = new SimpleDateFormat("yy");
        f1.set2DigitYearStart(new GregorianCalendar(1950, Calendar.JANUARY, 1)
                .getTime());
        Calendar cal = new GregorianCalendar();
        try {
            cal.setTime(f1.parse("49"));
            assertEquals("Incorrect year 2049", 2049, cal.get(Calendar.YEAR));
            cal.setTime(f1.parse("50"));
            int year = cal.get(Calendar.YEAR);
            assertTrue("Incorrect year 1950: " + year, year == 1950);
            f1.applyPattern("y");
            cal.setTime(f1.parse("00"));
            assertEquals("Incorrect year 2000", 2000, cal.get(Calendar.YEAR));
            // "yyy" takes the literal value; no century mapping.
            f1.applyPattern("yyy");
            cal.setTime(f1.parse("50"));
            assertEquals("Incorrect year 50", 50,
 cal.get(Calendar.YEAR));
        } catch (ParseException e) {
            fail("ParseException");
        }
    }

    /**
     * @tests java.text.SimpleDateFormat#setDateFormatSymbols(java.text.DateFormatSymbols)
     */
    public void test_setDateFormatSymbolsLjava_text_DateFormatSymbols() {
        // Test for method void
        // java.text.SimpleDateFormat.setDateFormatSymbols(java.text.DateFormatSymbols)
        // The setter must store a clone: later mutation of the caller's
        // symbols object must not affect the formatter.
        SimpleDateFormat f1 = new SimpleDateFormat("a");
        DateFormatSymbols symbols = new DateFormatSymbols();
        symbols.setAmPmStrings(new String[] { "morning", "night" });
        f1.setDateFormatSymbols(symbols);
        DateFormatSymbols newSym = f1.getDateFormatSymbols();
        assertTrue("Set incorrectly", newSym.equals(symbols));
        assertTrue("Not a clone", f1.getDateFormatSymbols() != symbols);
        String result = f1.format(new GregorianCalendar(1999, Calendar.JUNE,
                12, 3, 0).getTime());
        assertEquals("Incorrect symbols used", "morning", result);
        symbols.setEras(new String[] { "before", "after" });
        assertTrue("Identical symbols", !f1.getDateFormatSymbols().equals(
                symbols));
    }

    /**
     * @tests java.text.SimpleDateFormat#toLocalizedPattern()
     */
    public void test_toLocalizedPattern() {
        // Test for method java.lang.String
        // java.text.SimpleDateFormat.toLocalizedPattern()
        // Inverse of applyLocalizedPattern: de_CH maps 'y'->'u', 'd'->'t'.
        SimpleDateFormat f2 = new SimpleDateFormat("GyMdkHmsSEDFwWahKz",
                new Locale("de", "CH"));
        String pattern = f2.toLocalizedPattern();
        assertTrue("Wrong pattern: " + pattern, pattern
                .equals("GuMtkHmsSEDFwWahKz"));

        // test the new "Z" pattern char
        f2 = new SimpleDateFormat("G y M d Z", new Locale("de", "CH"));
        pattern = f2.toLocalizedPattern();
        assertTrue("Wrong pattern: " + pattern, pattern.equals("G u M t Z"));
    }

    /**
     * @tests java.text.SimpleDateFormat#parse(java.lang.String,
     *        java.text.ParsePosition)
     */
    public void test_parse_with_spaces() {
        // Regression for HARMONY-502
        // Strict (non-lenient) parsing must accept tab/space before a numeric
        // field but reject all other whitespace-like characters.
        SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
        df.setLenient(false);

        char allowed_chars[] = { 0x9, 0x20 };
        String allowed_char_names[] = { "tab", "space" };

        for (int i = 0; i < allowed_chars.length; i++) {
            Date expected = new GregorianCalendar(1970, Calendar.JANUARY, 1, 9,
                    7, 6).getTime();
            ParsePosition pp = new ParsePosition(0);
            Date d = df.parse(allowed_chars[i] + "9:07:06", pp);
            assertNotNull("hour may be prefixed by " + allowed_char_names[i], d);
            assertEquals(expected, d);

            pp = new ParsePosition(0);
            d = df.parse("09:" + allowed_chars[i] + "7:06", pp);
            assertNotNull("minute may be prefixed by " + allowed_char_names[i],
                    d);
            assertEquals(expected, d);

            pp = new ParsePosition(0);
            d = df.parse("09:07:" + allowed_chars[i] + "6", pp);
            assertNotNull("second may be prefixed by " + allowed_char_names[i],
                    d);
            assertEquals(expected, d);
        }

        char not_allowed_chars[] = {
                // whitespace
                0x1c, 0x1d, 0x1e, 0x1f, 0xa, 0xb, 0xc, 0xd, 0x2001, 0x2002,
                0x2003, 0x2004, 0x2005, 0x2006, 0x2008, 0x2009, 0x200a, 0x200b,
                0x2028, 0x2029, 0x3000,
                // non-breaking space
                0xA0, 0x2007, 0x202F };

        for (int i = 0; i < not_allowed_chars.length; i++) {
            ParsePosition pp = new ParsePosition(0);
            Date d = df.parse(not_allowed_chars[i] + "9:07", pp);
            assertNull(d);

            pp = new ParsePosition(0);
            d = df.parse("09:" + not_allowed_chars[i] + "7", pp);
            assertNull(d);

            pp = new ParsePosition(0);
            d = df.parse("09:07:" + not_allowed_chars[i] + "6", pp);
            assertNull(d);
        }
    }
}
package main;

import java.awt.Font;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

import org.newdawn.slick.AppGameContainer;
import org.newdawn.slick.BasicGame;
import org.newdawn.slick.Color;
import org.newdawn.slick.GameContainer;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.SlickException;
import org.newdawn.slick.TrueTypeFont;
import org.newdawn.slick.util.Log;

/**
 * Conway's Game of Life on a fixed grid, rendered with Slick2D.
 *
 * <p>The board is a flat {@code boolean[]} of size GRID_SIZE_X * GRID_SIZE_Y;
 * index/coordinate conversion is delegated to the project-local {@code Util}
 * class. Presets can be saved to / loaded from {@code save_N.dat} files via
 * Java serialization of {@link Complex}.
 */
public class Game extends BasicGame {

    // Board geometry (pixels and cells). Mutable statics are read before
    // the window is created in main().
    public static int GRID_SIZE_X = 50;
    public static int GRID_SIZE_Y = 50;
    public static int PX_PER_GRID = 10;
    public static int BORDER_SIZE = 10;

    /** Simulation speed; ticks of the automaton per wall-clock second. */
    public static float ticksPerSec = 5F;

    public static final Color GRID_COLOR = new Color(1F, 1F, 1F, 0.5F);
    public static final Color BCKG_COLOR = Color.black;
    public static final Color OFF_COLOR = Color.gray;   // border while paused
    public static final Color ON_COLOR = Color.cyan;    // border while simulating
    public static final Color ALIVE_COLOR = Color.blue;
    public static final Color DEAD_COLOR = Color.yellow;

    /** Built-in patterns written to disk by {@link #resetSaves()}. */
    public static final Complex[] PRESETS = new Complex[]{
        new Complex(new int[][]{ // 0 - GLIDER
            new int[]{0,1,0},
            new int[]{0,0,1},
            new int[]{1,1,1}
        }),
        new Complex(new int[][]{ // 1 - GLIDER GUN
            new int[]{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0},
            new int[]{0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0},
            new int[]{0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1},
            new int[]{0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,1,1},
            new int[]{1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
            new int[]{1,1,0,0,0,0,0,0,0,0,1,0,0,0,1,0,1,1,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0},
            new int[]{0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0},
            new int[]{0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0},
            new int[]{0,0,0,0,0,0,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0}
        })
    };

    public GameContainer gameContainer;
    private boolean closeRequested = false;
    protected org.newdawn.slick.Font smallFont;
    protected org.newdawn.slick.Font regularFont;
    protected static int WIDTH;
    protected static int HEIGHT;

    /** Rolling average of render-loop updates per second, shown on screen. */
    int updatesPerSecond = 0;
    /** Seconds left until the next automaton tick while simulating. */
    float tickTimer = 0F;
    boolean[] map;
    boolean isSimulating = false;
    boolean showTutorial = true;

    public Game(String gamename) {
        super(gamename);
        map = new boolean[GRID_SIZE_X * GRID_SIZE_Y];
    }

    @Override
    public void init(GameContainer gc) throws SlickException {
        gameContainer = gc;
        gameContainer.setAlwaysRender(true);
        smallFont = new TrueTypeFont(new Font("Arial", Font.PLAIN, 10), true);
        regularFont = new TrueTypeFont(new Font("Arial", Font.BOLD, 20), true);
        resetMap();
    }

    /**
     * Clears the board, pauses the simulation and re-registers fresh input
     * listeners (old ones are removed so they are not invoked twice).
     */
    public void resetMap() {
        map = new boolean[GRID_SIZE_X * GRID_SIZE_Y];
        if (isSimulating) toggleSimulation();
        closeRequested = false;
        gameContainer.getInput().removeAllKeyListeners();
        gameContainer.getInput().addKeyListener(new InputInterface(this));
        gameContainer.getInput().removeAllMouseListeners();
        gameContainer.getInput().addMouseListener(new InputInterface(this));
    }

    @Override
    public void update(GameContainer container, int delta) throws SlickException {
        // FIX: guard delta == 0 — a zero-millisecond frame used to throw
        // ArithmeticException (division by zero) here.
        if (delta > 0) {
            updatesPerSecond = (updatesPerSecond + 1000 / delta) / 2;
        }
        if (isSimulating) {
            if (tickTimer <= 0F) {
                tickTimer = 1 / ticksPerSec;
                tick();
            } else {
                tickTimer -= delta / 1000F;
            }
        }
        if (closeRequested) {
            Log.info("Close has been requested, exiting...");
            container.exit();
        }
    }

    @Override
    public void render(GameContainer gc, Graphics g) throws SlickException {
        // draw borders
        g.setColor(isSimulating ? ON_COLOR : OFF_COLOR);
        g.fillRect(0, 0, WIDTH, HEIGHT);
        g.setColor(BCKG_COLOR);
        g.fillRect(BORDER_SIZE, BORDER_SIZE, WIDTH - 2 * BORDER_SIZE, HEIGHT - 2 * BORDER_SIZE);
        // draw grid
        g.setColor(GRID_COLOR);
        for (int i = 1; i < GRID_SIZE_X; i++) { // vertical lines
            g.drawLine(i * PX_PER_GRID + BORDER_SIZE, BORDER_SIZE,
                       i * PX_PER_GRID + BORDER_SIZE, HEIGHT - BORDER_SIZE);
        }
        for (int i = 1; i < GRID_SIZE_Y; i++) { // horizontal lines
            g.drawLine(BORDER_SIZE, i * PX_PER_GRID + BORDER_SIZE,
                       WIDTH - BORDER_SIZE, i * PX_PER_GRID + BORDER_SIZE);
        }
        // draw cells (inset by 1px so the grid lines stay visible)
        for (int i = 0; i < map.length; i++) {
            int[] absCoords = Util.fieldToAbsCoords(i);
            g.setColor(map[i] ? ALIVE_COLOR : DEAD_COLOR);
            g.fillRect(absCoords[0] + 1, absCoords[2] + 1,
                       Math.abs(absCoords[1] - absCoords[0]) - 2,
                       Math.abs(absCoords[3] - absCoords[2]) - 2);
        }
        // draw update counter
        g.setColor(Color.black);
        g.setFont(smallFont);
        g.drawString("" + updatesPerSecond, 2, -2);
        // draw tutorial overlay (covers the whole board while shown)
        if (showTutorial) {
            g.setColor(Color.black);
            g.fillRect(0, 0, WIDTH, HEIGHT);
            g.setColor(Color.white);
            g.setFont(regularFont);
            float scale = regularFont.getLineHeight();
            float offsetY = -80F;
            g.drawString("Tutorial - Game Of Life", BORDER_SIZE + 10, HEIGHT / 2F - 6 * scale + offsetY);
            g.drawString("The rules are:", BORDER_SIZE + 10, HEIGHT / 2F - 4 * scale + offsetY);
            g.drawString("A live cell with less than two live neighbours dies", BORDER_SIZE + 10, HEIGHT / 2F - 3 * scale + offsetY);
            g.drawString("A live cell with more than three live neighbours dies", BORDER_SIZE + 10, HEIGHT / 2F - 2 * scale + offsetY);
            g.drawString("A live cell with two or three live neighbours lives on", BORDER_SIZE + 10, HEIGHT / 2F - 1 * scale + offsetY);
            g.drawString("A dead cell with exactly three live neighbours lives", BORDER_SIZE + 10, HEIGHT / 2F + offsetY);
            g.drawString("SPACE - toggle simulation", BORDER_SIZE + 10, HEIGHT / 2F + 2 * scale + offsetY);
            g.drawString("F - do one tick", BORDER_SIZE + 10, HEIGHT / 2F + 3 * scale + offsetY);
            g.drawString("R - reset board", BORDER_SIZE + 10, HEIGHT / 2F + 4 * scale + offsetY);
            g.drawString("SHIFT + R - reset saves", BORDER_SIZE + 10, HEIGHT / 2F + 5 * scale + offsetY);
            g.drawString("ESC - quit", BORDER_SIZE + 10, HEIGHT / 2F + 6 * scale + offsetY);
            g.drawString("LEFT - decrease speed", BORDER_SIZE + 10, HEIGHT / 2F + 7 * scale + offsetY);
            g.drawString("RIGHT - increase speed", BORDER_SIZE + 10, HEIGHT / 2F + 8 * scale + offsetY);
            g.drawString("T - toggle tutorial", BORDER_SIZE + 10, HEIGHT / 2F + 9 * scale + offsetY);
            g.drawString("1-9 - load presets", BORDER_SIZE + 10, HEIGHT / 2F + 10 * scale + offsetY);
            g.drawString("SHIFT + 1-9 - save presets", BORDER_SIZE + 10, HEIGHT / 2F + 11 * scale + offsetY);
        }
    }

    /**
     * Advances the automaton one generation using the standard Game-of-Life
     * rules. Changes are collected first and applied afterwards so that all
     * neighbour counts are computed against the previous generation.
     */
    public void tick() {
        HashMap<Integer, Boolean> changes = new HashMap<Integer, Boolean>();
        for (int x = 0; x < GRID_SIZE_X; x++) {
            for (int y = 0; y < GRID_SIZE_Y; y++) {
                int newVal = -1; // -1 = unchanged, 0 = dies, 1 = is born
                boolean isLive = map[Util.coordsToField(x, y)];
                int adjLive = 0;
                for (int _x = x - 1; _x <= x + 1; _x++) {
                    for (int _y = y - 1; _y <= y + 1; _y++) {
                        if (_x < 0 || _x >= GRID_SIZE_X || _y < 0 || _y >= GRID_SIZE_Y) continue;
                        if (_x == x && _y == y) continue;
                        if (map[Util.coordsToField(_x, _y)]) adjLive++;
                    }
                }
                if (isLive) {
                    if (adjLive < 2 || adjLive > 3) newVal = 0;
                } else if (adjLive == 3) {
                    newVal = 1;
                }
                if (newVal != -1) changes.put(Util.coordsToField(x, y), newVal == 1);
            }
        }
        // Apply the buffered generation. Iterating entrySet avoids the extra
        // lookup per key that keySet()+get() performed.
        for (Map.Entry<Integer, Boolean> change : changes.entrySet()) {
            map[change.getKey()] = change.getValue();
        }
    }

    public static void main(String[] args) {
        try {
            WIDTH = PX_PER_GRID * GRID_SIZE_X + BORDER_SIZE * 2;
            HEIGHT = PX_PER_GRID * GRID_SIZE_Y + BORDER_SIZE * 2;
            AppGameContainer appgc;
            appgc = new AppGameContainer(new Game("Game Of Life"));
            appgc.setDisplayMode(WIDTH, HEIGHT, false);
            appgc.setShowFPS(false);
            appgc.start();
        } catch (SlickException ex) {
            Logger.getLogger(Game.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    /**
     * Loads preset slot {@code n} from {@code save_n.dat} and applies it.
     * Missing or unreadable files are logged and ignored.
     */
    public void load(int n) {
        Complex out = null;
        // FIX: try-with-resources — the streams previously leaked when
        // readObject() threw.
        try (FileInputStream fis = new FileInputStream(new File("save_" + n + ".dat"));
             ObjectInputStream ois = new ObjectInputStream(fis)) {
            out = (Complex) ois.readObject();
        } catch (IOException | ClassNotFoundException e) {
            e.printStackTrace();
        }
        if (out != null) load(out);
    }

    public void load(int[][] cells) {
        Complex newCom = new Complex(cells);
        load(newCom);
    }

    /** Resets the board and stamps {@code com} into its top-left corner. */
    public void load(Complex com) {
        resetMap();
        for (int x = 0; x < com.x; x++) {
            for (int y = 0; y < com.y; y++) {
                boolean val = com.cells[x + y * com.x];
                map[x + y * GRID_SIZE_X] = val;
            }
        }
    }

    public void close() {
        closeRequested = true;
    }

    public void toggleSimulation() {
        isSimulating = !isSimulating;
    }

    /** Shows/hides the tutorial; opening it pauses a running simulation. */
    public void toggleTutorial() {
        showTutorial = !showTutorial;
        if (showTutorial && isSimulating) toggleSimulation();
    }

    public boolean isInputEnabled() {
        return !showTutorial;
    }

    /** Rewrites every save slot with the corresponding built-in preset. */
    public void resetSaves() {
        for (int i = 0; i < PRESETS.length; i++) {
            resetMap();
            load(PRESETS[i]);
            save(i);
        }
    }

    /**
     * Serializes the bounding box of all live cells to {@code save_n.dat}.
     */
    public void save(int n) {
        // Find the extent of the live region so only its bounding box is stored.
        int largestX = 0;
        int largestY = 0;
        for (int i = 0; i < map.length; i++) {
            if (!map[i]) continue;
            int x, y;
            x = Util.fieldToCoords(i)[0];
            y = Util.fieldToCoords(i)[1];
            largestX = Math.max(largestX, x);
            largestY = Math.max(largestY, y);
        }
        boolean[] cells = new boolean[(largestX + 1) * (largestY + 1)];
        for (int i = 0; i < cells.length; i++) {
            int x, y;
            x = i % (largestX + 1);
            y = i / (largestX + 1);
            cells[i] = map[Util.coordsToField(x, y)];
        }
        Complex saveGame = new Complex(cells, largestX + 1, largestY + 1);
        File saveFile = new File("save_" + n + ".dat");
        // FIX: try-with-resources — the streams previously leaked when
        // writeObject() threw. (FileOutputStream truncates an existing file,
        // so the old delete/createNewFile dance is unnecessary.)
        try (FileOutputStream fos = new FileOutputStream(saveFile);
             ObjectOutputStream oos = new ObjectOutputStream(fos)) {
            oos.writeObject(saveGame);
            oos.flush();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

/**
 * Serializable rectangular cell pattern: {@code cells[cx + cy * x]} holds the
 * state of column {@code cx}, row {@code cy}.
 */
class Complex implements Serializable {

    private static final long serialVersionUID = -1235168268596715350L;

    public boolean[] cells;
    public int x, y; // width and height in cells

    public Complex(boolean[] _cells, int width, int height) {
        cells = _cells;
        x = width;
        y = height;
    }

    /**
     * Builds a pattern from a row-major 0/1 matrix.
     *
     * @throws IllegalArgumentException if the matrix is empty or ragged
     */
    public Complex(int[][] cells) {
        // FIX: an empty matrix used to surface as a raw ArrayIndexOutOfBounds
        // on cells[0]; fail with a meaningful exception instead.
        if (cells.length == 0 || cells[0].length == 0)
            throw new IllegalArgumentException("Cells-Array must not be empty");
        x = cells[0].length;
        y = cells.length;
        for (int i = 0; i < cells.length; i++)
            if (cells[i].length != x) throw new IllegalArgumentException("Cells-Array must be rectangular");
        boolean[] _cells = new boolean[x * y];
        for (int _x = 0; _x < x; _x++) {
            for (int _y = 0; _y < y; _y++) {
                _cells[_x + _y * x] = cells[_y][_x] == 1;
            }
        }
        this.cells = _cells;
    }

    private void writeObject(ObjectOutputStream out) throws IOException {
        out.defaultWriteObject();
    }

    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        in.defaultReadObject();
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package com.datatorrent.contrib.redis;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;

import redis.clients.jedis.ScanParams;

import com.datatorrent.api.DAG;
import com.datatorrent.api.DefaultInputPort;
import com.datatorrent.api.LocalMode;
import com.datatorrent.common.util.BaseOperator;
import com.datatorrent.contrib.redis.RedisInputOperatorTest.CollectorModule;
import com.datatorrent.lib.helper.OperatorContextTestHelper;
import com.datatorrent.lib.util.FieldInfo;
import com.datatorrent.lib.util.FieldInfo.SupportType;
import com.datatorrent.lib.util.KeyValPair;

/**
 * Integration tests for {@code RedisPOJOOutputOperator} and
 * {@code RedisPOJOInputOperator}.
 *
 * NOTE(review): these tests require a reachable Redis instance — they connect
 * through {@code RedisStore} and read/write real keys (prefixed "test_").
 */
public class RedisPOJOOperatorTest {

  // Store used by the operator under test.
  private RedisStore operatorStore;
  // Separate store used by the test itself to seed/clean fixture data.
  private RedisStore testStore;

  /** Simple POJO mapped to/from a Redis hash with columns column1/column2. */
  public static class TestClass {

    private Integer intValue;
    private String stringValue;

    public TestClass() {
    }

    public TestClass(int v1, String v2) {
      intValue = v1;
      stringValue = v2;
    }

    public Integer getIntValue() {
      return intValue;
    }

    public void setIntValue(int intValue) {
      this.intValue = intValue;
    }

    public String getStringValue() {
      return stringValue;
    }

    public void setStringValue(String stringValue) {
      this.stringValue = stringValue;
    }
  }

  /**
   * Writes one TestClass through the output operator and verifies the
   * resulting Redis hash fields.
   */
  @Test
  public void testOutputOperator() throws IOException {
    this.operatorStore = new RedisStore();

    operatorStore.connect();
    String appId = "test_appid";
    int operatorId = 0;

    // Clear any committed-window bookkeeping left behind by earlier runs.
    operatorStore.removeCommittedWindowId(appId, operatorId);
    operatorStore.disconnect();

    RedisPOJOOutputOperator outputOperator = new RedisPOJOOutputOperator();

    ArrayList<FieldInfo> fields = new ArrayList<FieldInfo>();
    fields.add(new FieldInfo("column1", "intValue", SupportType.INTEGER));
    // NOTE(review): "getStringValue()" is a getter *expression* while column1
    // uses the field name "intValue" — presumably both forms are accepted by
    // FieldInfo; confirm this asymmetry is intentional.
    fields.add(new FieldInfo("column2", "getStringValue()", SupportType.STRING));

    outputOperator.setDataColumns(fields);

    try {
      com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap attributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
      attributes.put(DAG.APPLICATION_ID, appId);
      outputOperator.setStore(operatorStore);

      outputOperator.setup(new OperatorContextTestHelper.TestIdOperatorContext(operatorId, attributes));
      outputOperator.beginWindow(101);

      KeyValPair<String, Object> keyVal = new KeyValPair<String, Object>("test_abc1", new TestClass(1, "abc"));

      outputOperator.input.process(keyVal);

      outputOperator.endWindow();

      outputOperator.teardown();

      // Read the hash back directly and check both mapped columns.
      operatorStore.connect();

      Map<String, String> out = operatorStore.getMap("test_abc1");
      Assert.assertEquals("1", out.get("column1"));
      Assert.assertEquals("abc", out.get("column2"));
    } finally {
      // Always remove the fixture key so reruns start clean.
      operatorStore.remove("test_abc1");
      operatorStore.disconnect();
    }
  }

  /** Collects deserialized POJOs emitted by the input operator under test. */
  public static class ObjectCollectorModule extends BaseOperator {
    // Static so the test thread can observe tuples processed inside the
    // local-cluster threads.
    volatile static Map<String, Object> resultMap = new HashMap<String, Object>();
    static long resultCount = 0;

    public final transient DefaultInputPort<KeyValPair<String, Object>> inputPort = new DefaultInputPort<KeyValPair<String, Object>>()
    {
      @Override
      public void process(KeyValPair<String, Object> tuple)
      {
        resultMap.put(tuple.getKey(), tuple.getValue());
        resultCount++;
      }
    };
  }

  /**
   * Seeds three Redis hashes, runs a local DAG (input operator -> collector)
   * and asserts the hashes arrive as TestClass POJOs.
   */
  @Test
  public void testInputOperator() throws IOException {
    // Force the janino compiler onto the classpath; the input operator
    // generates the POJO setters at runtime.
    @SuppressWarnings("unused")
    Class<?> clazz = org.codehaus.janino.CompilerFactory.class;

    this.operatorStore = new RedisStore();
    this.testStore = new RedisStore();

    testStore.connect();
    ScanParams params = new ScanParams();
    params.count(100);

    // Fixture rows keyed test_*_in, one hash per expected POJO.
    Map<String, String> value = new HashMap<String, String>();
    value.put("Column1", "abc");
    value.put("Column2", "1");

    Map<String, String> value1 = new HashMap<String, String>();
    value1.put("Column1", "def");
    value1.put("Column2", "2");

    Map<String, String> value2 = new HashMap<String, String>();
    value2.put("Column1", "ghi");
    value2.put("Column2", "3");

    testStore.put("test_abc_in", value);
    testStore.put("test_def_in", value1);
    testStore.put("test_ghi_in", value2);

    try {
      LocalMode lma = LocalMode.newInstance();
      DAG dag = lma.getDAG();

      RedisPOJOInputOperator inputOperator = dag.addOperator("input", new RedisPOJOInputOperator());
      final ObjectCollectorModule collector = dag.addOperator("collector", new ObjectCollectorModule());

      ArrayList<FieldInfo> fields = new ArrayList<FieldInfo>();
      fields.add(new FieldInfo("Column1", "stringValue", SupportType.STRING));
      fields.add(new FieldInfo("Column2", "intValue", SupportType.INTEGER));

      inputOperator.setDataColumns(fields);
      inputOperator.setOutputClass(TestClass.class.getName());

      inputOperator.setStore(operatorStore);
      dag.addStream("stream", inputOperator.outputPort, collector.inputPort);
      final LocalMode.Controller lc = lma.getController();

      // Watchdog thread: wait (up to 10s) for all three tuples, then shut
      // the local cluster down so lc.run() below returns.
      new Thread("LocalClusterController")
      {
        @Override
        public void run()
        {
          long startTms = System.currentTimeMillis();
          long timeout = 10000L;
          try {
            Thread.sleep(1000);
            while (System.currentTimeMillis() - startTms < timeout) {
              if (ObjectCollectorModule.resultMap.size() < 3) {
                Thread.sleep(10);
              } else {
                break;
              }
            }
          } catch (InterruptedException ex) {
          }
          lc.shutdown();
        }
      }.start();

      lc.run();

      Assert.assertTrue(ObjectCollectorModule.resultMap.containsKey("test_abc_in"));
      Assert.assertTrue(ObjectCollectorModule.resultMap.containsKey("test_def_in"));
      Assert.assertTrue(ObjectCollectorModule.resultMap.containsKey("test_ghi_in"));

      TestClass a = (TestClass) ObjectCollectorModule.resultMap.get("test_abc_in");
      Assert.assertNotNull(a);

      Assert.assertEquals("abc", a.stringValue);
      Assert.assertEquals("1", a.intValue.toString());
    } finally {
      // Remove whatever keys the sibling test's collector recorded, then
      // disconnect the fixture store.
      for (KeyValPair<String, String> entry : CollectorModule.resultMap) {
        testStore.remove(entry.getKey());
      }
      testStore.disconnect();
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.undertow;

import java.io.Closeable;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.channels.Channel;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingDeque;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.function.Consumer;
import java.util.stream.Collectors;

import io.undertow.client.ClientCallback;
import io.undertow.client.ClientConnection;
import io.undertow.client.ClientExchange;
import io.undertow.client.ClientRequest;
import io.undertow.util.HeaderMap;
import io.undertow.util.HttpString;
import org.apache.camel.AsyncCallback;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.http.common.HttpHelper;
import org.apache.camel.http.common.HttpOperationFailedException;
import org.apache.camel.support.ExchangeHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xnio.ChannelExceptionHandler;
import org.xnio.ChannelListener;
import org.xnio.ChannelListeners;
import org.xnio.IoUtils;
import org.xnio.channels.StreamSinkChannel;

/**
 * Undertow {@link ClientCallback} that will get notified when the HTTP
 * connection is ready or when the client failed to connect. It will also handle
 * writing the request and reading the response in
 * {@link #writeRequest(ClientExchange)} and
 * {@link #setupResponseListener(ClientExchange)}. The main entry point is
 * {@link #completed(ClientConnection)} or {@link #failed(IOException)} in case
 * of errors, every error condition that should terminate Camel {@link Exchange}
 * should go to {@link #hasFailedWith(Throwable)} and successful execution of
 * the exchange should end with {@link #finish(Message)}. Any
 * {@link ClientCallback}s that are added here should extend
 * {@link ErrorHandlingClientCallback}, best way to do that is to use the
 * {@link #on(Consumer)} helper method.
 */
class UndertowClientCallback implements ClientCallback<ClientConnection> {

    /**
     * {@link ClientCallback} that handles failures automatically by propagating
     * the exception to Camel {@link Exchange} and notifies Camel that the
     * exchange finished by calling {@link AsyncCallback#done(boolean)}.
     */
    final class ErrorHandlingClientCallback<T> implements ClientCallback<T> {

        private final Consumer<T> consumer;

        private ErrorHandlingClientCallback(final Consumer<T> consumer) {
            this.consumer = consumer;
        }

        @Override
        public void completed(final T result) {
            consumer.accept(result);
        }

        @Override
        public void failed(final IOException e) {
            hasFailedWith(e);
        }
    }

    private static final Logger LOG = LoggerFactory.getLogger(UndertowClientCallback.class);

    protected final UndertowEndpoint endpoint;

    protected final Exchange exchange;

    protected final ClientRequest request;

    protected final AsyncCallback callback;

    /**
     * A queue of resources that will be closed when the exchange ends, add more
     * resources via {@link #deferClose(Closeable)}.
     */
    protected final BlockingDeque<Closeable> closables = new LinkedBlockingDeque<>();

    /** Request body, or {@code null} for body-less requests. */
    private final ByteBuffer body;

    /**
     * May be {@code null} when not configured on the endpoint — always test
     * via {@code Boolean.TRUE.equals(...)}, never auto-unbox.
     */
    private final Boolean throwExceptionOnFailure;

    UndertowClientCallback(final Exchange exchange, final AsyncCallback callback, final UndertowEndpoint endpoint,
                           final ClientRequest request, final ByteBuffer body) {
        this.exchange = exchange;
        this.callback = callback;
        this.endpoint = endpoint;
        this.request = request;
        this.body = body;
        this.throwExceptionOnFailure = endpoint.getThrowExceptionOnFailure();
    }

    @Override
    public void completed(final ClientConnection connection) {
        // we have established connection, make sure we close it
        deferClose(connection);

        // now we can send the request and perform the exchange: writing the
        // request and reading the response
        connection.sendRequest(request, on(this::performClientExchange));
    }

    @Override
    public void failed(final IOException e) {
        hasFailedWith(e);
    }

    /**
     * Write listener used when the initial synchronous write could not drain
     * the whole body; keeps writing until done, then flushes.
     */
    ChannelListener<StreamSinkChannel> asyncWriter(final ByteBuffer body) {
        return channel -> {
            try {
                write(channel, body);

                if (body.hasRemaining()) {
                    channel.resumeWrites();
                } else {
                    flush(channel);
                }
            } catch (final IOException e) {
                hasFailedWith(e);
            }
        };
    }

    /** Registers a resource to be closed when the exchange finishes. */
    void deferClose(final Closeable closeable) {
        try {
            closables.putFirst(closeable);
        } catch (final InterruptedException e) {
            hasFailedWith(e);
        }
    }

    protected void finish(final Message result) {
        finish(result, true);
    }

    /**
     * Terminates the Camel exchange: optionally closes deferred resources,
     * stores the result message and signals the async callback.
     */
    protected void finish(final Message result, boolean close) {
        if (close) {
            closables.forEach(IoUtils::safeClose);
        }
        if (result != null) {
            if (ExchangeHelper.isOutCapable(exchange)) {
                exchange.setOut(result);
            } else {
                exchange.setIn(result);
            }
        }
        callback.done(false);
    }

    /**
     * Central error sink: records the exception on the exchange (only when
     * throwExceptionOnFailure is enabled) and finishes the exchange.
     */
    void hasFailedWith(final Throwable e) {
        LOG.trace("Exchange has failed with", e);
        if (Boolean.TRUE.equals(throwExceptionOnFailure)) {
            exchange.setException(e);
        }

        finish(null);
    }

    /** Wraps a consumer in an {@link ErrorHandlingClientCallback}. */
    protected <T> ClientCallback<T> on(final Consumer<T> consumer) {
        return new ErrorHandlingClientCallback<>(consumer);
    }

    void performClientExchange(final ClientExchange clientExchange) {
        // add response listener to the exchange, we could receive the response
        // at any time (async)
        setupResponseListener(clientExchange);

        // write the request
        writeRequest(clientExchange);
    }

    void setupResponseListener(final ClientExchange clientExchange) {
        clientExchange.setResponseListener(on((ClientExchange response) -> {
            LOG.trace("completed: {}", clientExchange);

            try {
                storeCookies(clientExchange);

                final UndertowHttpBinding binding = endpoint.getUndertowHttpBinding();
                final Message result = binding.toCamelMessage(clientExchange, exchange);

                // if there was a http error code then check if we should throw an exception
                final int code = clientExchange.getResponse().getResponseCode();
                LOG.debug("Http responseCode: {}", code);

                final boolean ok = HttpHelper.isStatusCodeOk(code, "200-299");
                // FIX: throwExceptionOnFailure is a nullable Boolean; the
                // previous auto-unboxing check threw NullPointerException when
                // it was unset. Use the null-safe test already used in
                // hasFailedWith().
                if (!ok && Boolean.TRUE.equals(throwExceptionOnFailure)) {
                    // operation failed so populate exception to throw
                    final String uri = endpoint.getHttpURI().toString();
                    final String statusText = clientExchange.getResponse().getStatus();

                    // Convert Message headers (Map<String, Object>) to Map<String, String> as expected by HttpOperationsFailedException
                    // using Message versus clientExchange as its header values have extra formatting
                    final Map<String, String> headers = result.getHeaders().entrySet()
                        .stream()
                        .collect(Collectors.toMap(Map.Entry::getKey, entry -> entry.getValue().toString()));

                    // Since result (Message) isn't associated with an Exchange yet, you can not use result.getBody(String.class)
                    final String bodyText = ExchangeHelper.convertToType(exchange, String.class, result.getBody());

                    final Exception cause = new HttpOperationFailedException(uri, code, statusText, null, headers, bodyText);

                    if (ExchangeHelper.isOutCapable(exchange)) {
                        exchange.setOut(result);
                    } else {
                        exchange.setIn(result);
                    }

                    // make sure to fail with HttpOperationFailedException
                    hasFailedWith(cause);
                } else {
                    // we end Camel exchange here
                    finish(result);
                }
            } catch (Throwable e) {
                hasFailedWith(e);
            }
        }));
    }

    /**
     * Propagates response cookies to the endpoint's cookie handler, if one is
     * configured.
     */
    void storeCookies(final ClientExchange clientExchange) throws IOException, URISyntaxException {
        if (endpoint.getCookieHandler() != null) {
            // creating the url to use takes 2-steps
            final String url = UndertowHelper.createURL(exchange, endpoint);
            final URI uri = UndertowHelper.createURI(exchange, url, endpoint);
            final HeaderMap headerMap = clientExchange.getResponse().getResponseHeaders();
            final Map<String, List<String>> m = new HashMap<>();
            for (final HttpString headerName : headerMap.getHeaderNames()) {
                final List<String> headerValue = new LinkedList<>();
                for (int i = 0; i < headerMap.count(headerName); i++) {
                    headerValue.add(headerMap.get(headerName, i));
                }
                m.put(headerName.toString(), headerValue);
            }
            endpoint.getCookieHandler().storeCookies(exchange, uri, m);
        }
    }

    protected void writeRequest(final ClientExchange clientExchange) {
        final StreamSinkChannel requestChannel = clientExchange.getRequestChannel();
        if (body != null) {
            try {
                // try writing, we could be on IO thread and ready to write to
                // the socket (or not)
                write(requestChannel, body);

                if (body.hasRemaining()) {
                    // we did not write all of body (or at all) register a write
                    // listener to write asynchronously
                    requestChannel.getWriteSetter().set(asyncWriter(body));
                    requestChannel.resumeWrites();
                } else {
                    // we are done, we need to flush the request
                    flush(requestChannel);
                }
            } catch (final IOException e) {
                hasFailedWith(e);
            }
        }
    }

    static void flush(final StreamSinkChannel channel) throws IOException {
        // the canonical way of flushing Xnio channels
        channel.shutdownWrites();
        if (!channel.flush()) {
            final ChannelListener<StreamSinkChannel> safeClose = IoUtils::safeClose;
            final ChannelExceptionHandler<Channel> closingChannelExceptionHandler = ChannelListeners
                .closingChannelExceptionHandler();
            final ChannelListener<StreamSinkChannel> flushingChannelListener = ChannelListeners
                .flushingChannelListener(safeClose, closingChannelExceptionHandler);
            channel.getWriteSetter().set(flushingChannelListener);
            channel.resumeWrites();
        }
    }

    /** Writes as much of {@code body} as the channel currently accepts. */
    static void write(final StreamSinkChannel channel, final ByteBuffer body) throws IOException {
        int written = 1;
        while (body.hasRemaining() && written > 0) {
            written = channel.write(body);
        }
    }
}
/*
 * Copyright 2000-2012 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.util.containers;

import gnu.trove.THashMap;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;

import java.lang.ref.ReferenceQueue;
import java.util.*;

/**
 * Base class for maps whose keys are held through references (subclasses
 * supply weak or soft {@link Key} wrappers via {@link #createKey(Object,
 * ReferenceQueue)}). Keys cleared by the GC are drained from
 * {@code myReferenceQueue} and evicted in {@link #processQueue()}.
 *
 * NOTE(review): the shared {@code myHardKeyInstance} singleton used for
 * lookups makes this class non-thread-safe — presumably callers confine each
 * instance to one thread or synchronize externally; confirm at use sites.
 */
abstract class RefHashMap<K, V> extends AbstractMap<K, V> implements Map<K, V> {
  private final MyMap myMap;
  private final ReferenceQueue<K> myReferenceQueue = new ReferenceQueue<K>();
  // Reusable hard-reference Key wrapper so lookups do not allocate. "singleton"
  private final HardKey<K> myHardKeyInstance = new HardKey<K>(); // "singleton"
  private Set<Entry<K, V>> entrySet = null; // lazily created view, see entrySet()
  // True while processQueue() is removing dead keys; suppresses compaction.
  private boolean processingQueue;

  public RefHashMap(int initialCapacity, float loadFactor, @NotNull TObjectHashingStrategy<Key<K>> strategy) {
    myMap = new MyMap(initialCapacity, loadFactor, strategy);
  }

  public RefHashMap(int initialCapacity, float loadFactor) {
    this(initialCapacity, loadFactor, ContainerUtil.<Key<K>>canonicalStrategy());
  }

  public RefHashMap(int initialCapacity) {
    this(initialCapacity, 0.8f);
  }

  public RefHashMap() {
    this(4);
  }

  public RefHashMap(@NotNull Map<K, V> t) {
    this(Math.max(2 * t.size(), 11), 0.75f);
    putAll(t);
  }

  public RefHashMap(@NotNull final TObjectHashingStrategy<K> hashingStrategy) {
    // Adapt a strategy over K into one over Key<K> by unwrapping via get().
    this(4, 0.8f, new TObjectHashingStrategy<Key<K>>() {
      @Override
      public int computeHashCode(final Key<K> object) {
        return hashingStrategy.computeHashCode(object.get());
      }

      @Override
      public boolean equals(final Key<K> o1, final Key<K> o2) {
        return hashingStrategy.equals(o1.get(), o2.get());
      }
    });
  }

  /** Trove map specialized to skip compaction mid-eviction and to drop gc'ed keys on rehash. */
  private class MyMap extends THashMap<Key<K>,V> {
    private MyMap(int initialCapacity, float loadFactor, @NotNull TObjectHashingStrategy<Key<K>> strategy) {
      super(initialCapacity, loadFactor, strategy);
    }

    @Override
    public void compact() {
      // do not compact the map during many gced references removal because it's bad for performance
      if (!processingQueue) {
        super.compact();
      }
    }

    private void compactIfNecessary() {
      if (_deadkeys > _size && capacity() > 42) {
        // Compact if more than 50% of all keys are dead. Also, don't trash small maps
        compact();
      }
    }

    @Override
    protected void rehash(int newCapacity) {
      // rehash should discard gced keys
      // because otherwise there is a remote probability of
      // having two (Weak|Soft)Keys with accidentally equal hashCodes and different but gced key values
      int oldCapacity = _set.length;
      Object[] oldKeys = _set;
      V[] oldVals = _values;

      _set = new Object[newCapacity];
      _values = (V[]) new Object[newCapacity];

      for (int i = oldCapacity; i-- > 0;) {
        Object o = oldKeys[i];
        if (o == null || o == REMOVED) continue;
        Key<K> k = (Key<K>)o;
        K key = k.get();
        if (key == null) continue; // referent already collected — drop the entry
        int index = insertionIndex(k);
        if (index < 0) {
          // make 'key' alive till this point to not allow 'o.referent' to be gced
          throwObjectContractViolation(_set[-index -1], o + "; key: "+key);
        }
        _set[index] = o;
        _values[index] = oldVals[i];
      }
    }
  }

  /** Reference-like wrapper the map actually stores; get() yields the user key (or null once collected). */
  protected interface Key<T> {
    T get();
  }

  /** Subclasses create the weak/soft wrapper registered with the given queue. */
  protected abstract <T> Key<T> createKey(@NotNull T k, @NotNull ReferenceQueue<? super T> q);

  /**
   * Strongly-referencing Key used only for lookups/removals, so queries can be
   * answered without allocating a reference object. hashCode is cached in
   * set() to match the wrapped object's hash at insertion time.
   */
  private static class HardKey<T> implements Key<T> {
    private T myObject;
    private int myHash;

    @Override
    public T get() {
      return myObject;
    }

    private void set(@NotNull T object) {
      myObject = object;
      myHash = object.hashCode();
    }

    private void clear() {
      myObject = null;
      myHash = 0;
    }

    public boolean equals(Object o) {
      if (this == o) return true;
      if (!(o instanceof Key)) return false;
      T t = myObject;
      T u = ((Key<T>)o).get();
      return t == u || t.equals(u);
    }

    public int hashCode() {
      return myHash;
    }
  }

  /**
   * Drains the reference queue, evicting entries whose keys were collected,
   * then compacts the backing map if enough entries died. Compaction is
   * suppressed (via processingQueue) while the removals are in flight.
   */
  void processQueue() {
    try {
      processingQueue = true;
      Key<K> wk;
      while ((wk = (Key<K>)myReferenceQueue.poll()) != null) {
        myMap.remove(wk);
      }
    }
    finally {
      processingQueue = false;
    }
    myMap.compactIfNecessary();
  }

  V removeKey(@NotNull Key<K> key) {
    return myMap.remove(key);
  }

  @NotNull
  Key<K> createKey(@NotNull K key) {
    return createKey(key, myReferenceQueue);
  }

  V putKey(@NotNull Key<K> weakKey, V value) {
    return myMap.put(weakKey, value);
  }

  // size/isEmpty go through the entry set so gc'ed-but-not-yet-evicted keys
  // are not counted.
  @Override
  public int size() {
    return entrySet().size();
  }

  @Override
  public boolean isEmpty() {
    return entrySet().isEmpty();
  }

  @Override
  public boolean containsKey(Object key) {
    if (key == null) return false;
    // optimization:
    myHardKeyInstance.set((K)key);
    boolean result = myMap.containsKey(myHardKeyInstance);
    myHardKeyInstance.clear();
    return result;
  }

  @Override
  public V get(Object key) {
    if (key == null) return null;
    myHardKeyInstance.set((K)key);
    V result = myMap.get(myHardKeyInstance);
    myHardKeyInstance.clear();
    return result;
  }

  @Override
  public V put(@NotNull K key, V value) {
    processQueue(); // evict dead entries before inserting
    return putKey(createKey(key), value);
  }

  @Override
  public V remove(@NotNull Object key) {
    processQueue();
    // optimization:
    myHardKeyInstance.set((K)key);
    V result = myMap.remove(myHardKeyInstance);
    myHardKeyInstance.clear();
    return result;
  }

  @Override
  public void clear() {
    processQueue();
    myMap.clear();
  }

  /** Map.Entry view that pins the unwrapped key strongly for its own lifetime. */
  private static class MyEntry<K, V> implements Entry<K, V> {
    private final Entry<?, V> ent;
    private final K key; /* Strong reference to key, so that the GC will leave it alone as long as this Entry exists */

    private MyEntry(@NotNull Entry<?, V> ent, K key) {
      this.ent = ent;
      this.key = key;
    }

    @Override
    public K getKey() {
      return key;
    }

    @Override
    public V getValue() {
      return ent.getValue();
    }

    @Override
    public V setValue(V value) {
      return ent.setValue(value);
    }

    private static boolean valEquals(Object o1, Object o2) {
      return o1 == null ? o2 == null : o1.equals(o2);
    }

    public boolean equals(Object o) {
      if (!(o instanceof Entry)) return false;
      Entry e = (Entry)o;
      return valEquals(key, e.getKey()) && valEquals(getValue(), e.getValue());
    }

    public int hashCode() {
      V v;
      return (key == null ? 0 : key.hashCode()) ^ ((v = getValue()) == null ? 0 : v.hashCode());
    }
  }

  /* Internal class for entry sets */
  private class EntrySet extends AbstractSet<Entry<K, V>> {
    private final Set<Entry<Key<K>, V>> hashEntrySet = myMap.entrySet();

    @NotNull
    @Override
    public Iterator<Entry<K, V>> iterator() {
      return new Iterator<Entry<K, V>>() {
        private final Iterator<Entry<Key<K>, V>> hashIterator = hashEntrySet.iterator();
        private MyEntry<K, V> next = null;

        @Override
        public boolean hasNext() {
          // Skip entries whose keys were collected; wrap live ones in MyEntry
          // so the unwrapped key stays strongly reachable.
          while (hashIterator.hasNext()) {
            Entry<Key<K>, V> ent = hashIterator.next();
            Key<K> wk = ent.getKey();
            K k = null;
            if (wk != null && (k = wk.get()) == null) {
              /* Weak key has been cleared by GC */
              continue;
            }
            next = new MyEntry<K, V>(ent, k);
            return true;
          }
          return false;
        }

        @Override
        public Entry<K, V> next() {
          if (next == null && !hasNext()) {
            throw new NoSuchElementException();
          }
          Entry<K, V> e = next;
          next = null;
          return e;
        }

        @Override
        public void remove() {
          hashIterator.remove();
        }
      };
    }

    @Override
    public boolean isEmpty() {
      return !iterator().hasNext();
    }

    @Override
    public int size() {
      // Must iterate: the backing map may still hold gc'ed keys the iterator skips.
      int j = 0;
      for (Iterator i = iterator(); i.hasNext(); i.next()) j++;
      return j;
    }

    @Override
    public boolean remove(Object o) {
      processQueue();
      if (!(o instanceof Entry)) return false;
      Entry<K, V> e = (Entry<K, V>)o;
      V ev = e.getValue();

      // optimization: do not recreate the key
      myHardKeyInstance.set(e.getKey());
      Key<K> key = myHardKeyInstance;

      V hv = myMap.get(key);
      // Remove only when both key and value match the stored entry.
      boolean toRemove = hv == null ? ev == null && myMap.containsKey(key) : hv.equals(ev);
      if (toRemove) {
        myMap.remove(key);
      }
      myHardKeyInstance.clear();
      return toRemove;
    }

    public int hashCode() {
      int h = 0;
      for (Entry entry : hashEntrySet) {
        Key wk = (Key)entry.getKey();
        if (wk == null) continue;
        Object v;
        h += wk.hashCode() ^ ((v = entry.getValue()) == null ? 0 : v.hashCode());
      }
      return h;
    }
  }

  @NotNull
  @Override
  public Set<Entry<K, V>> entrySet() {
    if (entrySet == null) entrySet = new EntrySet();
    return entrySet;
  }
}
package de.lessvoid.nifty.render;

import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Logger;

import javax.annotation.Nonnull;
import javax.annotation.Nullable;

import de.lessvoid.nifty.NiftyStopwatch;
import de.lessvoid.nifty.screen.Screen;
import de.lessvoid.nifty.spi.render.RenderDevice;
import de.lessvoid.nifty.spi.render.RenderFont;
import de.lessvoid.nifty.spi.render.RenderImage;
import de.lessvoid.nifty.tools.Color;

/**
 * This is the default implementation of the render engine.
 *
 * It wraps the supplied {@link RenderDevice} in a {@code ScalingRenderDevice} and layers the
 * following state on top of raw device calls: a current position (global + relative offsets),
 * a current color with separate "color changed" / "alpha changed" dirty flags, a current font,
 * text/image scale factors, a clipping rectangle (with intersection semantics), a blend mode,
 * and a save/restore stack for all of the above.
 *
 * NOTE(review): this class keeps mutable render state and is used from the render path;
 * nothing here is synchronized, so it presumably must only be touched from the render
 * thread — confirm against the caller before using it elsewhere.
 *
 * @author void
 * @author Martin Karing &lt;nitram@illarion.org&gt;
 */
public class NiftyRenderEngineImpl implements NiftyRenderEngine {
  @Nonnull
  private static final Logger log = Logger.getLogger(NiftyRenderEngineImpl.class.getName());

  /**
   * RenderDevice. This is actually the ScalingRenderDevice wrapper created in the constructor,
   * not the raw device passed in (the raw device is kept by the image manager only).
   */
  @Nonnull
  private final RenderDevice renderDevice;

  /**
   * Display width and height. This is always the base resolution (when scaling is enabled).
   */
  private int displayWidth;
  private int displayHeight;

  /**
   * This is always the native display resolution.
   */
  private int nativeDisplayWidth;
  private int nativeDisplayHeight;

  // auto-scaling state: when enabled, displayWidth/Height hold the virtual base resolution
  // and the scale/offset values below map virtual coordinates to native ones.
  private boolean autoScaling = false;
  // explicit scale overrides; when null, getScaleX()/getScaleY() fall back to native/base ratio
  @Nullable
  private Float autoScalingScaleX = null;
  @Nullable
  private Float autoScalingScaleY = null;
  private float autoScalingOffsetX = 0;
  private float autoScalingOffsetY = 0;

  /**
   * global position x.
   */
  private float globalPosX = 0;

  /**
   * global position y.
   */
  private float globalPosY = 0;

  /**
   * current x position.
   */
  private float currentX = 0;

  /**
   * current y position.
   */
  private float currentY = 0;

  /**
   * font.
   */
  @Nullable
  private RenderFont font;

  /**
   * current color.
   */
  @Nonnull
  private final Color color = new Color(1.0f, 1.0f, 1.0f, 1.0f);

  /**
   * color changed.
   */
  private boolean colorChanged = false;

  /**
   * color alpha changed.
   */
  private boolean colorAlphaChanged = false;

  /**
   * current imageScale.
   */
  private float imageScale = 1.0f;

  /**
   * current textScale.
   */
  private float textScale = 1.0f;

  /**
   * font cache. Maps the font filename to the font created for it; note that a null result
   * from the device is cached too (containsKey is checked before get in createFont).
   */
  @Nonnull
  private final Map<String, RenderFont> fontCache = new HashMap<String, RenderFont>();

  /**
   * stack to save data. Used by saveStates()/restoreStates(); pre-sized to 20 entries.
   */
  @Nonnull
  private final Deque<SavedRenderState> stack = new ArrayDeque<SavedRenderState>(20);

  // shared scratch color used by renderImage() when the image has no color of its own;
  // only its alpha is mutated per call
  @Nonnull
  private final Color whiteColor = new Color("#ffff");

  private boolean clipEnabled;
  @Nonnull
  private final Clip clip = new Clip(0, 0, 0, 0);
  // NOTE(review): absoluteClip is never null (assigned once at field init) but lacks the
  // @Nonnull annotation its sibling `clip` has — presumably just an omission.
  private final Clip absoluteClip = new Clip(0, 0, 0, 0);

  @Nonnull
  private BlendMode blendMode = BlendMode.BLEND;

  @Nonnull
  private final NiftyImageManager imageManager;

  private boolean absoluteClipEnabled;

  /**
   * create the device.
   *
   * Wraps the given device in a ScalingRenderDevice (so all rendering goes through the
   * coordinate conversion below) while the image manager talks to the raw device directly.
   * The absolute clip starts out covering the whole display.
   *
   * @param renderDeviceParam RenderDevice
   */
  public NiftyRenderEngineImpl(@Nonnull final RenderDevice renderDeviceParam) {
    renderDevice = new ScalingRenderDevice(this, renderDeviceParam);
    displayWidth = renderDevice.getWidth();
    displayHeight = renderDevice.getHeight();
    nativeDisplayWidth = renderDevice.getWidth();
    nativeDisplayHeight = renderDevice.getHeight();
    imageManager = new NiftyImageManager(renderDeviceParam);
    absoluteClip.x0 = 0;
    absoluteClip.y0 = 0;
    absoluteClip.x1 = displayWidth;
    absoluteClip.y1 = displayHeight;
  }

  @Override
  public int getWidth() {
    return displayWidth;
  }

  @Override
  public int getHeight() {
    return displayHeight;
  }

  @Override
  public void beginFrame() {
    renderDevice.beginFrame();
    // a new frame starts with a pristine color state
    colorChanged = false;
  }

  @Override
  public void endFrame() {
    renderDevice.endFrame();
  }

  @Override
  public void clear() {
    renderDevice.clear();
  }

  /**
   * Create a NiftyImage for the given filename, or null when the image manager
   * could not load/register it.
   */
  @Nullable
  @Override
  public NiftyImage createImage(
      @Nonnull final Screen screen,
      @Nonnull final String filename,
      final boolean filterLinear) {
    final RenderImage image = imageManager.registerImage(filename, filterLinear, screen);
    if (image == null) {
      return null;
    }
    return new NiftyImage(this, image);
  }

  /**
   * Create (or fetch from cache) the font for the given filename. The creation time is
   * measured with NiftyStopwatch. A null result from the device is cached as well, so
   * a failing font file is only attempted once.
   */
  @Override
  @Nullable
  public RenderFont createFont(@Nonnull final String filename) {
    if (fontCache.containsKey(filename)) {
      return fontCache.get(filename);
    } else {
      NiftyStopwatch.start();
      RenderFont newFont = renderDevice.createFont(filename);
      fontCache.put(filename, newFont);
      NiftyStopwatch.stop("RenderDevice.createFont(" + filename + ")");
      return newFont;
    }
  }

  /**
   * Reverse lookup: find the filename a font was loaded from. Throws
   * IllegalArgumentException when the font is unknown to this engine's cache.
   */
  @Nonnull
  @Override
  public String getFontname(@Nonnull final RenderFont font) {
    for (Map.Entry<String, RenderFont> entry : fontCache.entrySet()) {
      if (entry.getValue().equals(font)) {
        return entry.getKey();
      }
    }
    throw new IllegalArgumentException("Font not found in storage of this render engine. Maybe it was load by another" +
        " engine?");
  }

  @Override
  public void renderQuad(final int x, final int y, final int width, final int height) {
    // all render calls offset the caller-supplied coordinates by the current position
    renderDevice.renderQuad(x + getX(), y + getY(), width, height, color);
  }

  /**
   * Render a gradient quad. When the alpha of the current color has been changed explicitly,
   * that alpha overrides the alpha of all four corner colors.
   */
  @Override
  public void renderQuad(
      final int x,
      final int y,
      final int width,
      final int height,
      @Nonnull final Color topLeft,
      @Nonnull final Color topRight,
      @Nonnull final Color bottomRight,
      @Nonnull final Color bottomLeft) {
    if (isColorAlphaChanged()) {
      Color a = new Color(topLeft, color.getAlpha());
      Color b = new Color(topRight, color.getAlpha());
      Color c = new Color(bottomRight, color.getAlpha());
      Color d = new Color(bottomLeft, color.getAlpha());
      renderDevice.renderQuad(x + getX(), y + getY(), width, height, a, b, c, d);
    } else {
      renderDevice.renderQuad(x + getX(), y + getY(), width, height, topLeft, topRight, bottomRight, bottomLeft);
    }
  }

  /**
   * renderImage. When the image carries no color of its own, a shared white color with the
   * current alpha is used instead.
   *
   * @param image image
   * @param x x
   * @param y y
   * @param width width
   * @param height height
   */
  @Override
  public void renderImage(
      @Nonnull final NiftyImage image,
      final int x,
      final int y,
      final int width,
      final int height) {
    Color c = image.getColor();
    if (c == null) {
      float alpha = color.getAlpha();
      whiteColor.setAlpha(alpha);
      c = whiteColor;
    }
    image.render(x + getX(), y + getY(), width, height, c, imageScale);
  }

  /**
   * renderText. Dispatches to renderSelectionText() when a selection range is active
   * (selectionStart/selectionEnd both -1 means "no selection"); otherwise renders the
   * plain text, warning and bailing out when no font is set.
   *
   * @param text text
   * @param x x
   * @param y y
   * @param selectionStart selection start
   * @param selectionEnd selection end
   * @param textSelectionColor textSelectionColor
   */
  @Override
  public void renderText(
      @Nonnull final String text,
      final int x,
      final int y,
      final int selectionStart,
      final int selectionEnd,
      @Nonnull final Color textSelectionColor) {
    if (isSelection(selectionStart, selectionEnd)) {
      renderSelectionText(
          text, x + getX(), y + getY(), color, textSelectionColor, textScale, textScale, selectionStart, selectionEnd);
    } else {
      if (font == null) {
        log.warning("missing font in renderText! could it be that you're using <text> elements without a font or " +
            "style attribute? in case you've replaced <label> with <text> you're probably missing style='nifty-label'" +
            " :)");
        return;
      }
      renderDevice.renderFont(font, text, x + getX(), y + getY(), color, textScale, textScale);
    }
  }

  /**
   * Render a Text with some text selected. Splits the text into up to three runs
   * (before / selected / after) and renders each with the appropriate color, advancing
   * x by the measured width of the preceding runs. Negative selection bounds are
   * clamped to 0; a missing font makes this a silent no-op.
   *
   * @param text text
   * @param x x
   * @param y y
   * @param textColor color
   * @param textSelectionColor textSelectionColor
   * @param textSizeX text size
   * @param textSizeY text size
   * @param selectionStartParam selection start
   * @param selectionEndParam selection end
   */
  protected void renderSelectionText(
      @Nonnull final String text,
      final int x,
      final int y,
      @Nonnull final Color textColor,
      @Nonnull final Color textSelectionColor,
      final float textSizeX,
      final float textSizeY,
      final int selectionStartParam,
      final int selectionEndParam) {
    int selectionStart = selectionStartParam;
    int selectionEnd = selectionEndParam;
    if (selectionStart < 0) {
      selectionStart = 0;
    }
    if (selectionEnd < 0) {
      selectionEnd = 0;
    }
    if (font == null) {
      return;
    }
    if (isEverythingSelected(text, selectionStart, selectionEnd)) {
      renderDevice.renderFont(font, text, x, y, textSelectionColor, textSizeX, textSizeY);
    } else if (isSelectionAtBeginning(selectionStart)) {
      String selectedString = text.substring(selectionStart, selectionEnd);
      String unselectedString = text.substring(selectionEnd);
      renderDevice.renderFont(font, selectedString, x, y, textSelectionColor, textSizeX, textSizeY);
      renderDevice.renderFont(font, unselectedString, x + font.getWidth(selectedString), y, textColor, textSizeX,
          textSizeY);
    } else if (isSelectionAtEnd(text, selectionEnd)) {
      String unselectedString = text.substring(0, selectionStart);
      String selectedString = text.substring(selectionStart, selectionEnd);
      renderDevice.renderFont(font, unselectedString, x, y, textColor, textSizeX, textSizeY);
      renderDevice.renderFont(font, selectedString, x + font.getWidth(unselectedString), y, textSelectionColor,
          textSizeX, textSizeY);
    } else {
      String unselectedString1 = text.substring(0, selectionStart);
      String selectedString = text.substring(selectionStart, selectionEnd);
      String unselectedString2 = text.substring(selectionEnd, text.length());
      renderDevice.renderFont(font, unselectedString1, x, y, textColor, textSizeX, textSizeY);
      int unselectedString1Len = font.getWidth(unselectedString1);
      renderDevice.renderFont(font, selectedString, x + unselectedString1Len, y, textSelectionColor, textSizeX,
          textSizeY);
      int selectedStringLen = font.getWidth(selectedString);
      renderDevice.renderFont(font, unselectedString2, x + unselectedString1Len + selectedStringLen, y, textColor,
          textSizeX, textSizeY);
    }
  }

  /**
   * Returns true of selection is at the end of the string.
   *
   * @param text text
   * @param selectionEnd selection end
   * @return true or false
   */
  private boolean isSelectionAtEnd(@Nonnull final String text, final int selectionEnd) {
    return selectionEnd == text.length();
  }

  /**
   * Returns true if selection starts at the beginning.
   *
   * @param selectionStart selection start
   * @return true or false
   */
  private boolean isSelectionAtBeginning(final int selectionStart) {
    return selectionStart == 0;
  }

  /**
   * Returns true when everything is selected.
   *
   * @param text text
   * @param selectionStart selection start
   * @param selectionEnd selection end
   * @return true when everything is selected
   */
  private boolean isEverythingSelected(@Nonnull final String text, final int selectionStart, final int selectionEnd) {
    return isSelectionAtBeginning(selectionStart) && isSelectionAtEnd(text, selectionEnd);
  }

  /**
   * set font.
   *
   * @param newFont font
   */
  @Override
  public void setFont(@Nullable final RenderFont newFont) {
    this.font = newFont;
  }

  /**
   * get font.
   *
   * @return font
   */
  @Nullable
  @Override
  public RenderFont getFont() {
    return this.font;
  }

  /**
   * Set the full current color (RGBA). Marks both the color and the alpha as changed.
   */
  @Override
  public void setColor(@Nonnull final Color colorParam) {
    color.setRed(colorParam.getRed());
    color.setGreen(colorParam.getGreen());
    color.setBlue(colorParam.getBlue());
    color.setAlpha(colorParam.getAlpha());
    colorChanged = true;
    colorAlphaChanged = true;
  }

  /**
   * set only the color alpha.
   *
   * @param newColorAlpha new alpha value
   */
  @Override
  public void setColorAlpha(final float newColorAlpha) {
    color.setAlpha(newColorAlpha);
    colorAlphaChanged = true;
  }

  /**
   * Set only the color component of the given color. This assumes that alpha has already been changed.
   *
   * NOTE(review): despite the "ignore alpha" name, when the alpha was previously changed and
   * is larger than the new color's alpha, the new (smaller) alpha is adopted — looks like a
   * deliberate "never exceed the element's alpha" rule; confirm before changing.
   *
   * @param newColor color
   */
  @Override
  public void setColorIgnoreAlpha(@Nonnull final Color newColor) {
    color.setRed(newColor.getRed());
    color.setGreen(newColor.getGreen());
    color.setBlue(newColor.getBlue());
    colorChanged = true;
    if (colorAlphaChanged && color.getAlpha() > newColor.getAlpha()) {
      color.setAlpha(newColor.getAlpha());
      colorAlphaChanged = true;
    }
  }

  /**
   * return true when color has been changed.
   *
   * @return color changed
   */
  @Override
  public boolean isColorChanged() {
    return colorChanged;
  }

  @Override
  public boolean isColorAlphaChanged() {
    return colorAlphaChanged;
  }

  @Override
  public void moveTo(final float xParam, final float yParam) {
    this.currentX = xParam;
    this.currentY = yParam;
  }

  @Override
  public void moveToRelative(final float xParam, final float yParam) {
    currentX = currentX + xParam;
    currentY = currentY + yParam;
  }

  /**
   * Remember an absolute clip rectangle (device coordinates, not offset by the current
   * position) and enable it; it is only pushed to the device by applyAbsoluteClip().
   */
  @Override
  public void setAbsoluteClip(int x0, int y0, int x1, int y1) {
    absoluteClipEnabled = true;
    absoluteClip.x0 = x0;
    absoluteClip.y0 = y0;
    absoluteClip.x1 = x1;
    absoluteClip.y1 = y1;
  }

  @Override
  public void applyAbsoluteClip() {
    if (absoluteClipEnabled) {
      updateClip(true, absoluteClip.x0, absoluteClip.y0, absoluteClip.x1, absoluteClip.y1);
    }
  }

  @Override
  public void disableAbsoluteClip() {
    absoluteClipEnabled = false;
  }

  /**
   * Enable a clip rectangle given relative to the current position.
   */
  @Override
  public void enableClip(final int cx0, final int cy0, final int cx1, final int cy1) {
    // Issue #138:
    // In case there already is a clipping area we change it to the intersection of the existing with the requested new
    // one. This way you can further restrict a clipping rectangle but can't override (e.g. make it bigger).
    int x0 = cx0 + getX();
    int y0 = cy0 + getY();
    int x1 = cx1 + getX();
    int y1 = cy1 + getY();
    if (clipEnabled) {
      // if the new clip rectangle is completely outside the current clipping area we don't modify the existing one
      if (isOutsideClippingRectangle(x0, y0, x1, y1)) {
        return;
      }
      // if the new clip rectangle is completely inside the current clipping area we can use it as-is directly
      if (isInsideClippingRectangle(x0, y0, x1, y1)) {
        updateClip(true, x0, y0, x1, y1);
        return;
      }
      // we need to clip
      int newX0 = Math.max(x0, clip.x0);
      int newY0 = Math.max(y0, clip.y0);
      int newX1 = Math.min(x1, clip.x1);
      int newY1 = Math.min(y1, clip.y1);
      updateClip(true, newX0, newY0, newX1, newY1);
      return;
    }
    updateClip(true, x0, y0, x1, y1);
  }

  @Override
  public void disableClip() {
    updateClip(false, 0, 0, 0, 0);
  }

  /**
   * Store the clip state and push it to the device (or disable device clipping when
   * enabled is false). Package-private so SavedRenderState can restore through it.
   */
  void updateClip(final boolean enabled, final int x0, final int y0, final int x1, final int y1) {
    clipEnabled = enabled;
    clip.init(x0, y0, x1, y1);
    if (!clipEnabled) {
      renderDevice.disableClip();
    } else {
      clip.apply();
    }
  }

  @Override
  public void setRenderTextSize(final float size) {
    this.textScale = size;
  }

  @Override
  public void setImageScale(final float scale) {
    this.imageScale = scale;
  }

  @Override
  public void setGlobalPosition(final float xPos, final float yPos) {
    globalPosX = xPos;
    globalPosY = yPos;
  }

  /**
   * React to a native resolution change. The native size is always refreshed; the logical
   * (base) size only follows when auto-scaling is off, since with auto-scaling on the
   * logical size stays at the configured base resolution.
   */
  @Override
  public void displayResolutionChanged() {
    if (!autoScaling) {
      displayWidth = renderDevice.getWidth();
      displayHeight = renderDevice.getHeight();
    }
    nativeDisplayWidth = renderDevice.getWidth();
    nativeDisplayHeight = renderDevice.getHeight();
  }

  /**
   * get x.
   *
   * @return x (global position plus current relative position, truncated to int)
   */
  private int getX() {
    return (int) (globalPosX + currentX);
  }

  /**
   * get y.
   *
   * @return y (global position plus current relative position, truncated to int)
   */
  private int getY() {
    return (int) (globalPosY + currentY);
  }

  /**
   * has selection. Only the pair (-1, -1) means "no selection".
   *
   * @param selectionStart selection start
   * @param selectionEnd selection end
   * @return true or false
   */
  private boolean isSelection(final int selectionStart, final int selectionEnd) {
    return !(selectionStart == -1 && selectionEnd == -1);
  }

  /**
   * Push a snapshot of the full render state (position, color, alpha, font, text/image
   * scale, clip, blend mode) onto the save stack.
   */
  @Override
  public void saveStates() {
    SavedRenderState savedRenderState = new SavedRenderState();
    savedRenderState.save();
    stack.push(savedRenderState);
  }

  /**
   * Pop the most recent snapshot and restore it. NOTE(review): an unbalanced call (empty
   * stack) will throw from Deque.pop() — callers are expected to pair save/restore.
   */
  @Override
  public void restoreStates() {
    SavedRenderState restored = stack.pop();
    restored.restore();
  }

  @Override
  public void setBlendMode(@Nonnull final BlendMode blendModeParam) {
    blendMode = blendModeParam;
    renderDevice.setBlendMode(blendModeParam);
  }

  @Override
  @Nonnull
  public RenderDevice getRenderDevice() {
    return renderDevice;
  }

  @Override
  public void disposeImage(@Nonnull final RenderImage image) {
    imageManager.unregisterImage(image);
  }

  @Override
  @Nonnull
  public RenderImage reload(@Nonnull final RenderImage image) {
    return imageManager.reload(image);
  }

  /**
   * A snapshot of the engine's mutable render state. save() copies the outer engine's
   * state into this object; restore() writes it back (going through setBlendMode() and
   * updateClip() so the device is updated too, while the rest is written directly).
   */
  private class SavedRenderState {
    private float x;
    private float y;
    private float colorR;
    private float colorG;
    private float colorB;
    private boolean colorChanged;
    private float colorAlpha;
    private boolean colorAlphaChanged;
    @Nullable
    private RenderFont font;
    private float textSize;
    private float imageScale;
    private boolean clipEnabled;
    @Nonnull
    private final Clip clip = new Clip(0, 0, 0, 0);
    private BlendMode blendMode;

    public SavedRenderState() {
    }

    // capture every piece of engine state
    public void save() {
      savePosition();
      saveColor();
      saveColorAlpha();
      saveTextSize();
      saveImageSize();
      saveFont();
      saveClipEnabled();
      saveBlendMode();
    }

    // write every piece of engine state back
    public void restore() {
      restorePosition();
      restoreColor();
      restoreAlpha();
      restoreFont();
      restoreTextSize();
      restoreImageScale();
      restoreClip();
      restoreBlend();
    }

    private void saveBlendMode() {
      blendMode = NiftyRenderEngineImpl.this.blendMode;
    }

    private void saveClipEnabled() {
      clipEnabled = NiftyRenderEngineImpl.this.clipEnabled;
      clip.init(NiftyRenderEngineImpl.this.clip.x0, NiftyRenderEngineImpl.this.clip.y0,
          NiftyRenderEngineImpl.this.clip.x1, NiftyRenderEngineImpl.this.clip.y1);
    }

    private void saveFont() {
      font = NiftyRenderEngineImpl.this.font;
    }

    private void saveImageSize() {
      imageScale = NiftyRenderEngineImpl.this.imageScale;
    }

    private void saveTextSize() {
      textSize = NiftyRenderEngineImpl.this.textScale;
    }

    private void saveColorAlpha() {
      colorAlpha = NiftyRenderEngineImpl.this.color.getAlpha();
      colorAlphaChanged = NiftyRenderEngineImpl.this.colorAlphaChanged;
    }

    private void saveColor() {
      colorR = NiftyRenderEngineImpl.this.color.getRed();
      colorG = NiftyRenderEngineImpl.this.color.getGreen();
      colorB = NiftyRenderEngineImpl.this.color.getBlue();
      colorChanged = NiftyRenderEngineImpl.this.colorChanged;
    }

    private void savePosition() {
      x = NiftyRenderEngineImpl.this.currentX;
      y = NiftyRenderEngineImpl.this.currentY;
    }

    private void restoreBlend() {
      // goes through the setter so the device blend mode is updated as well
      NiftyRenderEngineImpl.this.setBlendMode(blendMode);
    }

    private void restoreClip() {
      // goes through updateClip() so the device clip state is updated as well
      NiftyRenderEngineImpl.this.updateClip(clipEnabled, clip.x0, clip.y0, clip.x1, clip.y1);
    }

    private void restoreImageScale() {
      NiftyRenderEngineImpl.this.imageScale = this.imageScale;
    }

    private void restoreTextSize() {
      NiftyRenderEngineImpl.this.textScale = this.textSize;
    }

    private void restoreFont() {
      NiftyRenderEngineImpl.this.font = font;
    }

    private void restoreAlpha() {
      NiftyRenderEngineImpl.this.color.setAlpha(colorAlpha);
      NiftyRenderEngineImpl.this.colorAlphaChanged = colorAlphaChanged;
    }

    private void restoreColor() {
      NiftyRenderEngineImpl.this.color.setRed(colorR);
      NiftyRenderEngineImpl.this.color.setGreen(colorG);
      NiftyRenderEngineImpl.this.color.setBlue(colorB);
      NiftyRenderEngineImpl.this.colorChanged = colorChanged;
    }

    private void restorePosition() {
      NiftyRenderEngineImpl.this.currentX = this.x;
      NiftyRenderEngineImpl.this.currentY = this.y;
    }
  }

  /**
   * A simple mutable clip rectangle (x0,y0)-(x1,y1). Non-static inner class on purpose:
   * apply() pushes the rectangle to the enclosing engine's renderDevice.
   */
  public class Clip {
    private int x0;
    private int y0;
    private int x1;
    private int y1;

    public Clip(final int x0, final int y0, final int x1, final int y1) {
      init(x0, y0, x1, y1);
    }

    public void init(final int x0, final int y0, final int x1, final int y1) {
      this.x0 = x0;
      this.y0 = y0;
      this.x1 = x1;
      this.y1 = y1;
    }

    public void apply() {
      renderDevice.enableClip(x0, y0, x1, y1);
    }
  }

  @Override
  public int getNativeWidth() {
    return nativeDisplayWidth;
  }

  @Override
  public int getNativeHeight() {
    return nativeDisplayHeight;
  }

  // positions floor after scaling; sizes ceil — so scaled rectangles never shrink below
  // their intended native coverage
  @Override
  public int convertToNativeX(final int x) {
    return (int) Math.floor(x * getScaleX() + autoScalingOffsetX);
  }

  @Override
  public int convertToNativeY(final int y) {
    return (int) Math.floor(y * getScaleY() + autoScalingOffsetY);
  }

  @Override
  public int convertToNativeWidth(final int x) {
    return (int) Math.ceil(x * getScaleX());
  }

  @Override
  public int convertToNativeHeight(final int y) {
    return (int) Math.ceil(y * getScaleY());
  }

  @Override
  public int convertFromNativeX(final int x) {
    return (int) Math.ceil((x - autoScalingOffsetX) * (1.0f / getScaleX()));
  }

  @Override
  public int convertFromNativeY(final int y) {
    return (int) Math.ceil((y - autoScalingOffsetY) * (1.0f / getScaleY()));
  }

  @Override
  public float convertToNativeTextSizeX(final float size) {
    return size * getScaleX();
  }

  @Override
  public float convertToNativeTextSizeY(final float size) {
    return size * getScaleY();
  }

  // explicit override wins; otherwise the ratio of native to logical size
  private float getScaleX() {
    if (autoScalingScaleX != null) {
      return autoScalingScaleX;
    }
    return (float) getNativeWidth() / getWidth();
  }

  private float getScaleY() {
    if (autoScalingScaleY != null) {
      return autoScalingScaleY;
    }
    return (float) getNativeHeight() / getHeight();
  }

  /**
   * Enable auto-scaling against the given base resolution; scale factors are derived
   * from the native/base ratio (no explicit override, no offset).
   */
  @Override
  public void enableAutoScaling(final int baseResolutionX, final int baseResolutionY) {
    autoScaling = true;
    displayWidth = baseResolutionX;
    displayHeight = baseResolutionY;
    autoScalingScaleX = null;
    autoScalingScaleY = null;
    autoScalingOffsetX = 0;
    autoScalingOffsetY = 0;
  }

  /**
   * Enable auto-scaling with an additional explicit scale factor on top of the
   * native/base ratio; the offsets keep the scaled content centered on screen.
   * NOTE(review): getNativeWidth() / 2 is integer division, so the offsets can be off
   * by a fraction of a pixel for odd native sizes — presumably acceptable here.
   */
  @Override
  public void enableAutoScaling(
      final int baseResolutionX,
      final int baseResolutionY,
      final float scaleX,
      final float scaleY) {
    autoScaling = true;
    displayWidth = baseResolutionX;
    displayHeight = baseResolutionY;
    autoScalingScaleX = ((float) getNativeWidth() / getWidth()) * scaleX;
    autoScalingScaleY = ((float) getNativeHeight() / getHeight()) * scaleY;
    autoScalingOffsetX = getNativeWidth() / 2 - getNativeWidth() / 2 * scaleX;
    autoScalingOffsetY = getNativeHeight() / 2 - getNativeHeight() / 2 * scaleY;
  }

  /**
   * Turn auto-scaling off and snap the logical size back to the native resolution.
   */
  @Override
  public void disableAutoScaling() {
    autoScaling = false;
    displayWidth = nativeDisplayWidth;
    displayHeight = nativeDisplayHeight;
    autoScalingScaleX = null;
    autoScalingScaleY = null;
    autoScalingOffsetX = 0;
    autoScalingOffsetY = 0;
  }

  @Override
  public void screenStarted(@Nonnull final Screen screen) {
    imageManager.uploadScreenImages(screen);
  }

  @Override
  public void screenEnded(@Nonnull final Screen screen) {
    imageManager.unloadScreenImages(screen);
  }

  @Override
  public void screensClear(@Nonnull final Collection<Screen> screens) {
    for (Screen screen : screens) {
      imageManager.unloadScreenImages(screen);
      imageManager.screenRemoved(screen);
    }
  }

  @Override
  public void screenAdded(@Nonnull final Screen screen) {
    imageManager.screenAdded(screen);
  }

  @Override
  public void screenRemoved(@Nonnull final Screen screen) {
    imageManager.screenRemoved(screen);
  }

  // true when the given rectangle has no overlap at all with the current clip rectangle
  private boolean isOutsideClippingRectangle(final int x0, final int y0, final int x1, final int y1) {
    if (x0 > clip.x1) {
      return true;
    }
    if (x1 < clip.x0) {
      return true;
    }
    if (y0 > clip.y1) {
      return true;
    }
    if (y1 < clip.y0) {
      return true;
    }
    return false;
  }

  // true when the given rectangle lies entirely within the current clip rectangle
  private boolean isInsideClippingRectangle(final int x0, final int y0, final int x1, final int y1) {
    if (x0 >= clip.x0 && x0 <= clip.x1 && x1 >= clip.x0 && x1 <= clip.x1 && y0 >= clip.y0 && y0 <= clip.y1 &&
        y1 >= clip.y0 && y1 <= clip.y1) {
      return true;
    }
    return false;
  }
}
package com.newsblur.service; import android.app.Service; import android.content.ComponentCallbacks2; import android.content.ContentValues; import android.content.Context; import android.content.Intent; import android.database.Cursor; import android.os.IBinder; import android.os.PowerManager; import android.os.Process; import android.text.TextUtils; import android.util.Log; import android.widget.Toast; import com.newsblur.R; import com.newsblur.activity.NbActivity; import com.newsblur.database.BlurDatabaseHelper; import static com.newsblur.database.BlurDatabaseHelper.closeQuietly; import com.newsblur.database.DatabaseConstants; import com.newsblur.domain.Feed; import com.newsblur.domain.Folder; import com.newsblur.domain.SocialFeed; import com.newsblur.domain.Story; import com.newsblur.network.APIConstants; import com.newsblur.network.APIManager; import com.newsblur.network.domain.FeedFolderResponse; import com.newsblur.network.domain.NewsBlurResponse; import com.newsblur.network.domain.StoriesResponse; import com.newsblur.network.domain.UnreadCountResponse; import com.newsblur.network.domain.UnreadStoryHashesResponse; import com.newsblur.util.AppConstants; import com.newsblur.util.FeedSet; import com.newsblur.util.FileCache; import com.newsblur.util.NetworkUtils; import com.newsblur.util.PrefsUtils; import com.newsblur.util.ReadingAction; import com.newsblur.util.ReadFilter; import com.newsblur.util.StateFilter; import com.newsblur.util.StoryOrder; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ExecutorService; import java.util.concurrent.TimeUnit; /** * A background service to handle synchronisation with the NB servers. * * It is the design goal of this service to handle all communication with the API. 
* Activities and fragments should enqueue actions in the DB or use the methods * provided herein to request an action and let the service handle things. * * Per the contract of the Service class, at most one instance shall be created. It * will be preserved and re-used where possible. Additionally, regularly scheduled * invocations are requested via the Main activity and BootReceiver. * * The service will notify all running activities of an update before, during, and * after sync operations are performed. Activities can then refresh views and * query this class to see if progress indicators should be active. */ public class NBSyncService extends Service { /** * Mode switch for which newly received stories are suitable for display so * that they don't disrupt actively visible pager and list offsets. */ public enum ActivationMode { ALL, OLDER, NEWER }; private static final Object WAKELOCK_MUTEX = new Object(); private static final Object PENDING_FEED_MUTEX = new Object(); private volatile static boolean ActionsRunning = false; private volatile static boolean FFSyncRunning = false; private volatile static boolean StorySyncRunning = false; private volatile static boolean HousekeepingRunning = false; private volatile static boolean RecountsRunning = false; private volatile static boolean DoFeedsFolders = false; private volatile static boolean DoUnreads = false; private volatile static boolean HaltNow = false; private volatile static ActivationMode ActMode = ActivationMode.ALL; private volatile static long ModeCutoff = 0L; /** Informational flag only, as to whether we were offline last time we cycled. 
*/ public volatile static boolean OfflineNow = false; public volatile static Boolean isPremium = null; public volatile static Boolean isStaff = null; private volatile static boolean isMemoryLow = false; private static long lastFeedCount = 0L; private static long lastFFConnMillis = 0L; private static long lastFFReadMillis = 0L; private static long lastFFParseMillis = 0L; private static long lastFFWriteMillis = 0L; /** Feed set that we need to sync immediately for the UI. */ private static FeedSet PendingFeed; private static Integer PendingFeedTarget = 0; /** Feed sets that the API has said to have no more pages left. */ private static Set<FeedSet> ExhaustedFeeds; static { ExhaustedFeeds = new HashSet<FeedSet>(); } /** The number of pages we have collected for the given feed set. */ private static Map<FeedSet,Integer> FeedPagesSeen; static { FeedPagesSeen = new HashMap<FeedSet,Integer>(); } /** The number of stories we have collected for the given feed set. */ private static Map<FeedSet,Integer> FeedStoriesSeen; static { FeedStoriesSeen = new HashMap<FeedSet,Integer>(); } /** Actions that may need to be double-checked locally due to overlapping API calls. */ private static List<ReadingAction> FollowupActions; static { FollowupActions = new ArrayList<ReadingAction>(); } /** Feed IDs (API stype) that have been acted upon and need a double-check for counts. */ private static Set<FeedSet> RecountCandidates; static { RecountCandidates = new HashSet<FeedSet>(); } private volatile static boolean FlushRecounts = false; Set<String> orphanFeedIds; private ExecutorService primaryExecutor; CleanupService cleanupService; OriginalTextService originalTextService; UnreadsService unreadsService; ImagePrefetchService imagePrefetchService; PowerManager.WakeLock wl = null; APIManager apiManager; BlurDatabaseHelper dbHelper; private int lastStartIdCompleted = -1; /** The time of the last hard API failure we encountered. 
Used to implement back-off so that the sync service doesn't spin in the background chewing up battery when the API is unavailable. */ private static long lastAPIFailure = 0; @Override public void onCreate() { super.onCreate(); if (AppConstants.VERBOSE_LOG) Log.d(this.getClass().getName(), "onCreate"); HaltNow = false; PowerManager pm = (PowerManager) getApplicationContext().getSystemService(POWER_SERVICE); wl = pm.newWakeLock(PowerManager.PARTIAL_WAKE_LOCK, this.getClass().getSimpleName()); wl.setReferenceCounted(true); primaryExecutor = Executors.newFixedThreadPool(1); } /** * Services can be constructed synchrnously by the Main thread, so don't do expensive * parts of construction in onCreate, but save them for when we are in our own thread. */ private void finishConstruction() { if (apiManager == null) { apiManager = new APIManager(this); dbHelper = new BlurDatabaseHelper(this); cleanupService = new CleanupService(this); originalTextService = new OriginalTextService(this); unreadsService = new UnreadsService(this); imagePrefetchService = new ImagePrefetchService(this); } } /** * Called serially, once per "start" of the service. This serves as a wakeup call * that the service should check for outstanding work. */ @Override public int onStartCommand(Intent intent, int flags, final int startId) { // only perform a sync if the app is actually running or background syncs are enabled if (PrefsUtils.isOfflineEnabled(this) || (NbActivity.getActiveActivityCount() > 0)) { // Services actually get invoked on the main system thread, and are not // allowed to do tangible work. We spawn a thread to do so. 
Runnable r = new Runnable() { public void run() { doSync(startId); } }; primaryExecutor.execute(r); } else { Log.d(this.getClass().getName(), "Skipping sync: app not active and background sync not enabled."); stopSelf(startId); } // indicate to the system that the service should be alive when started, but // needn't necessarily persist under memory pressure return Service.START_NOT_STICKY; } /** * Do the actual work of syncing. */ private synchronized void doSync(final int startId) { try { if (HaltNow) return; incrementRunningChild(); finishConstruction(); if (AppConstants.VERBOSE_LOG) Log.d(this.getClass().getName(), "starting primary sync"); if (NbActivity.getActiveActivityCount() < 1) { // if the UI isn't running, politely run at background priority Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND); } else { // if the UI is running, run just one step below normal priority so we don't step on async tasks that are updating the UI Process.setThreadPriority(Process.THREAD_PRIORITY_DEFAULT + Process.THREAD_PRIORITY_LESS_FAVORABLE); } Thread.currentThread().setName(this.getClass().getName()); if (OfflineNow) { OfflineNow = false; NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS); } // do this even if background syncs aren't enabled, because it absolutely must happen // on all devices housekeeping(); // check to see if we are on an allowable network only after ensuring we have CPU if (!(PrefsUtils.isBackgroundNetworkAllowed(this) || (NbActivity.getActiveActivityCount() > 0))) { Log.d(this.getClass().getName(), "Abandoning sync: app not active and network type not appropriate for background sync."); return; } // ping activities to indicate that housekeeping is done, and the DB is safe to use NbActivity.updateAllActivities(NbActivity.UPDATE_DB_READY); originalTextService.start(startId); // first: catch up syncActions(); // these requests are expressly enqueued by the UI/user, do them next syncPendingFeedStories(); syncMetadata(startId); 
checkRecounts(); unreadsService.start(startId); imagePrefetchService.start(startId); finishActions(); if (AppConstants.VERBOSE_LOG) Log.d(this.getClass().getName(), "finishing primary sync"); } catch (Exception e) { Log.e(this.getClass().getName(), "Sync error.", e); } finally { decrementRunningChild(startId); } } /** * Check for upgrades and wipe the DB if necessary, and do DB maintenance */ private void housekeeping() { try { boolean upgraded = PrefsUtils.checkForUpgrade(this); if (upgraded) { HousekeepingRunning = true; NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS | NbActivity.UPDATE_REBUILD); // wipe the local DB dbHelper.dropAndRecreateTables(); NbActivity.updateAllActivities(NbActivity.UPDATE_METADATA); // in case this is the first time we have run since moving the cache to the new location, // blow away the old version entirely. This line can be removed some time well after // v61+ is widely deployed FileCache.cleanUpOldCache(this); PrefsUtils.updateVersion(this); } boolean autoVac = PrefsUtils.isTimeToVacuum(this); // this will lock up the DB for a few seconds, only do it if the UI is hidden if (NbActivity.getActiveActivityCount() > 0) autoVac = false; if (upgraded || autoVac) { HousekeepingRunning = true; NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS); Log.i(this.getClass().getName(), "rebuilding DB . . ."); dbHelper.vacuum(); Log.i(this.getClass().getName(), ". . . . done rebuilding DB"); PrefsUtils.updateLastVacuumTime(this); } } finally { if (HousekeepingRunning) { HousekeepingRunning = false; NbActivity.updateAllActivities(NbActivity.UPDATE_METADATA); } } } /** * Perform any reading actions the user has done before we do anything else. 
*/ private void syncActions() { if (stopSync()) return; if (backoffBackgroundCalls()) return; Cursor c = null; try { c = dbHelper.getActions(false); if (c.getCount() < 1) return; ActionsRunning = true; NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS); actionsloop : while (c.moveToNext()) { String id = c.getString(c.getColumnIndexOrThrow(DatabaseConstants.ACTION_ID)); ReadingAction ra; try { ra = ReadingAction.fromCursor(c); } catch (IllegalArgumentException e) { Log.e(this.getClass().getName(), "error unfreezing ReadingAction", e); dbHelper.clearAction(id); continue actionsloop; } NewsBlurResponse response = ra.doRemote(apiManager); if (response == null) { Log.e(this.getClass().getName(), "Discarding reading action with client-side error."); dbHelper.clearAction(id); } else if (response.isProtocolError) { // the network failed or we got a non-200, so be sure we retry Log.i(this.getClass().getName(), "Holding reading action with server-side or network error."); noteHardAPIFailure(); continue actionsloop; } else if (response.isError()) { Log.e(this.getClass().getName(), "Discarding reading action with user error."); dbHelper.clearAction(id); String message = response.getErrorMessage(null); if (message != null) NbActivity.toastError(message); } else { // success! dbHelper.clearAction(id); FollowupActions.add(ra); } } } finally { closeQuietly(c); ActionsRunning = false; NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS); } } /** * Some actions have a final, local step after being done remotely to ensure in-flight * API actions didn't race-overwrite them. Do these, and then clean up the DB. */ private void finishActions() { if (HaltNow) return; if (FollowupActions.size() < 1) return; for (ReadingAction ra : FollowupActions) { ra.doLocal(dbHelper); } if (PendingFeed == null) { FollowupActions.clear(); } } /** * The very first step of a sync - get the feed/folder list, unread counts, and * unread hashes. Doing this resets pagination on the server! 
 */
private void syncMetadata(int startId) {
    // only sync metadata when explicitly requested or when the autosync interval has elapsed
    if (DoFeedsFolders || PrefsUtils.isTimeToAutoSync(this)) {
        PrefsUtils.updateLastSyncTime(this);
        DoFeedsFolders = false;
    } else {
        return;
    }
    if (stopSync()) return;
    if (backoffBackgroundCalls()) return;
    // NOTE(review): metadata sync is skipped unless every story may be activated — presumably to
    // avoid clobbering a partially-activated story set; confirm against ActivationMode's contract.
    if (ActMode != ActivationMode.ALL) return;
    FFSyncRunning = true;
    NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS);
    // there is an issue with feeds that have no folder or folders that list feeds that do not exist. capture them for workarounds.
    Set<String> debugFeedIdsFromFolders = new HashSet<String>();
    Set<String> debugFeedIdsFromFeeds = new HashSet<String>();
    orphanFeedIds = new HashSet<String>();
    try {
        // a metadata sync invalidates pagination and feed status
        ExhaustedFeeds.clear();
        FeedPagesSeen.clear();
        FeedStoriesSeen.clear();
        UnreadsService.clear();
        RecountCandidates.clear();
        FeedFolderResponse feedResponse = apiManager.getFolderFeedMapping(true);
        if (feedResponse == null) {
            noteHardAPIFailure();
            return;
        }
        // if the response says we aren't logged in, clear the DB and prompt for login. We test this
        // here, since this is the first sync call we make on launch if we believe we are cookied.
        if (! feedResponse.isAuthenticated) {
            PrefsUtils.logout(this);
            return;
        }
        // record server-reported timings for the debug speed display
        lastFFConnMillis = feedResponse.connTime;
        lastFFReadMillis = feedResponse.readTime;
        lastFFParseMillis = feedResponse.parseTime;
        long startTime = System.currentTimeMillis();
        isPremium = feedResponse.isPremium;
        isStaff = feedResponse.isStaff;
        // clean out the feed / folder tables
        dbHelper.cleanupFeedsFolders();
        // note all feeds that belong to some folder so we can find orphans
        for (Folder folder : feedResponse.folders) {
            debugFeedIdsFromFolders.addAll(folder.feedIds);
        }
        // data for the feeds table
        List<ContentValues> feedValues = new ArrayList<ContentValues>();
        feedaddloop: for (Feed feed : feedResponse.feeds) {
            // note all feeds for which the API returned data
            debugFeedIdsFromFeeds.add(feed.feedId);
            // sanity-check that the returned feeds actually exist in a folder or at the root
            // if they do not, they should neither display nor count towards unread numbers
            if (! debugFeedIdsFromFolders.contains(feed.feedId)) {
                Log.w(this.getClass().getName(), "Found and ignoring orphan feed (in feeds but not folders): " + feed.feedId );
                orphanFeedIds.add(feed.feedId);
                continue feedaddloop;
            }
            if (! feed.active) {
                // the feed is disabled/hidden, pretend it doesn't exist
                continue feedaddloop;
            }
            feedValues.add(feed.getValues());
        }
        // prune out missing feed IDs from folders
        for (String id : debugFeedIdsFromFolders) {
            if (! debugFeedIdsFromFeeds.contains(id)) {
                Log.w(this.getClass().getName(), "Found and ignoring orphan feed (in folders but not feeds): " + id );
                orphanFeedIds.add(id);
            }
        }
        // data for the folder table
        List<ContentValues> folderValues = new ArrayList<ContentValues>();
        for (Folder folder : feedResponse.folders) {
            // prune out orphans before pushing to the DB
            folder.removeOrphanFeedIds(orphanFeedIds);
            folderValues.add(folder.getValues());
        }
        // data for the social feeds table
        List<ContentValues> socialFeedValues = new ArrayList<ContentValues>();
        for (SocialFeed feed : feedResponse.socialFeeds) {
            socialFeedValues.add(feed.getValues());
        }
        dbHelper.insertFeedsFolders(folderValues, feedValues, socialFeedValues);
        // populate the starred stories count table
        dbHelper.updateStarredStoriesCount(feedResponse.starredCount);
        lastFFWriteMillis = System.currentTimeMillis() - startTime;
        lastFeedCount = feedValues.size();
        // a fresh feed/folder list changes what needs cleanup and what counts as unread
        cleanupService.start(startId);
        unreadsService.start(startId);
        UnreadsService.doMetadata();
    } finally {
        FFSyncRunning = false;
        NbActivity.updateAllActivities(NbActivity.UPDATE_METADATA);
    }
}

/**
 * See if any feeds have been touched in a way that require us to double-check unread counts;
 */
private void checkRecounts() {
    if (!FlushRecounts) return;
    try {
        if (RecountCandidates.size() < 1) return;
        RecountsRunning = true;
        NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS);
        // of all candidate feeds that were touched, now check to see if
        // any of them have mismatched local and remote counts we need to reconcile
        Set<FeedSet> dirtySets = new HashSet<FeedSet>();
        for (FeedSet fs : RecountCandidates) {
            if (dbHelper.getUnreadCount(fs, StateFilter.SOME) != dbHelper.getLocalUnreadCount(fs, StateFilter.SOME)) {
                dirtySets.add(fs);
            }
        }
        if (dirtySets.size() < 1) {
            RecountCandidates.clear();
            return;
        }
        // NOTE(review): dirtySets is only used for the emptiness check above; the recounts below
        // iterate all of RecountCandidates. Possibly intentional (recount everything once any one
        // set is dirty) — confirm before narrowing to dirtySets.
        // if we are offline, the best we can do is perform a local unread recount and
        // save the true one for when we go back online.
        if (!NetworkUtils.isOnline(this)) {
            for (FeedSet fs : RecountCandidates) {
                dbHelper.updateLocalFeedCounts(fs);
            }
        } else {
            if (stopSync()) return;
            Set<String> apiIds = new HashSet<String>();
            for (FeedSet fs : RecountCandidates) {
                apiIds.addAll(fs.getFlatFeedIds());
            }
            // if any reading activities are pending, it makes no sense to recount yet
            if (dbHelper.getActions(false).getCount() > 0) return;
            UnreadCountResponse apiResponse = apiManager.getFeedUnreadCounts(apiIds);
            if ((apiResponse == null) || (apiResponse.isError())) {
                Log.w(this.getClass().getName(), "Bad response to feed_unread_count");
                return;
            }
            if (apiResponse.feeds != null ) {
                for (Map.Entry<String,UnreadCountResponse.UnreadMD> entry : apiResponse.feeds.entrySet()) {
                    dbHelper.updateFeedCounts(entry.getKey(), entry.getValue().getValues());
                }
            }
            if (apiResponse.socialFeeds != null ) {
                for (Map.Entry<String,UnreadCountResponse.UnreadMD> entry : apiResponse.socialFeeds.entrySet()) {
                    // social feed keys come back prefixed; strip the prefix to get the bare feed ID
                    String feedId = entry.getKey().replaceAll(APIConstants.VALUE_PREFIX_SOCIAL, "");
                    dbHelper.updateSocialFeedCounts(feedId, entry.getValue().getValues());
                }
            }
            // only forget the candidates when the remote recount actually succeeded
            RecountCandidates.clear();
        }
    } finally {
        if (RecountsRunning) {
            RecountsRunning = false;
            NbActivity.updateAllActivities(NbActivity.UPDATE_METADATA | NbActivity.UPDATE_STATUS);
        }
        FlushRecounts = false;
    }
}

/**
 * Fetch stories needed because the user is actively viewing a feed or folder.
 */
private void syncPendingFeedStories() {
    // snapshot the pending feedset; other threads may change PendingFeed while we page
    FeedSet fs = PendingFeed;
    boolean finished = false;
    if (fs == null) {
        return;
    }
    try {
        if (ExhaustedFeeds.contains(fs)) {
            Log.i(this.getClass().getName(), "No more stories for feed set: " + fs);
            finished = true;
            return;
        }
        // first page for this feedset this session: reset pagination bookkeeping
        if (!FeedPagesSeen.containsKey(fs)) {
            FeedPagesSeen.put(fs, 0);
            FeedStoriesSeen.put(fs, 0);
            workaroundReadStoryTimestamp = (new Date()).getTime();
        }
        int pageNumber = FeedPagesSeen.get(fs);
        int totalStoriesSeen = FeedStoriesSeen.get(fs);
        StoryOrder order = PrefsUtils.getStoryOrder(this, fs);
        ReadFilter filter = PrefsUtils.getReadFilter(this, fs);
        while (totalStoriesSeen < PendingFeedTarget) {
            if (stopSync()) return;
            // recounts take priority over further pagination
            if (FlushRecounts) return;
            if (!fs.equals(PendingFeed)) {
                // the active view has changed
                // NOTE(review): fs is checked non-null above and never reassigned, so this branch
                // is dead; was "PendingFeed == null" intended? confirm before changing.
                if (fs == null) finished = true;
                return;
            }
            StorySyncRunning = true;
            NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS);
            pageNumber++;
            StoriesResponse apiResponse = apiManager.getStories(fs, pageNumber, order, filter);
            if (! isStoryResponseGood(apiResponse)) return;
            // commit pagination progress only after a good response
            FeedPagesSeen.put(fs, pageNumber);
            totalStoriesSeen += apiResponse.stories.length;
            FeedStoriesSeen.put(fs, totalStoriesSeen);
            // lock in the activation cutoff based upon the timestamp of the first
            // story received for a given pagination session. it will be the newest
            // or oldest story for the feedset, as dictated by order.
            if ((pageNumber == 1) && (apiResponse.stories.length > 0)) {
                ModeCutoff = apiResponse.stories[0].timestamp;
            }
            insertStories(apiResponse, fs);
            NbActivity.updateAllActivities(NbActivity.UPDATE_STORY);
            if (apiResponse.stories.length == 0) {
                // an empty page means the server has no more stories for this feedset
                ExhaustedFeeds.add(fs);
                finished = true;
                return;
            }
        }
        finished = true;
    } finally {
        if (StorySyncRunning) {
            StorySyncRunning = false;
            NbActivity.updateAllActivities(NbActivity.UPDATE_STATUS);
        }
        synchronized (PENDING_FEED_MUTEX) {
            // only clear the request if it is still for the feedset we just finished
            if (finished && fs.equals(PendingFeed)) PendingFeed = null;
        }
    }
}

/**
 * Sanity-check a story API response before use.
 *
 * @return true iff the response and its stories array are both non-null.
 */
private boolean isStoryResponseGood(StoriesResponse response) {
    if (response == null) {
        Log.e(this.getClass().getName(), "Null response received while loading stories.");
        return false;
    }
    if (response.stories == null) {
        Log.e(this.getClass().getName(), "Null stories member received while loading stories.");
        return false;
    }
    return true;
}

// fake last-read timestamp used by the "read stories" ordering hack in insertStories()
private long workaroundReadStoryTimestamp;

/**
 * Store a page of stories, applying per-feedset fixups for the "read" and "saved" views
 * before delegating to the DB helper.
 */
private void insertStories(StoriesResponse apiResponse, FeedSet fs) {
    if (fs.isAllRead()) {
        // Ugly Hack Warning: the API doesn't vend the sortation key necessary to display
        // stories when in the "read stories" view. It does, however, return them in the
        // correct order, so we can fudge a fake last-read-stamp so they will show up.
        // Stories read locally with have the correct stamp and show up fine. When local
        // and remote stories are integrated, the remote hack will override the ordering
        // so they get put into the correct sequence recorded by the API (the authority).
        for (Story story : apiResponse.stories) {
            // this fake TS was set when we fetched the first page. have it decrease as
            // we page through, so they append to the list as if most-recent-first.
            workaroundReadStoryTimestamp --;
            story.lastReadTimestamp = workaroundReadStoryTimestamp;
        }
    }
    if (fs.isAllSaved() || fs.isAllRead()) {
        // Note: for reasons relating to the impl. of the web UI, the API returns incorrect
        // intel values for stories from these two APIs. Fix them so they don't show green
        // when they really aren't.
        for (Story story : apiResponse.stories) {
            story.intelligence.intelligenceFeed--;
        }
    }
    dbHelper.insertStories(apiResponse, ActMode, ModeCutoff);
}

// variant used when no feedset-specific fixups are needed
void insertStories(StoriesResponse apiResponse) {
    dbHelper.insertStories(apiResponse, ActMode, ModeCutoff);
}

// take one reference on the shared (reference-counted) wakelock for a child task
void incrementRunningChild() {
    synchronized (WAKELOCK_MUTEX) {
        wl.acquire();
    }
}

// release one wakelock reference; if we were the last holder and the UI is hidden,
// stop the service
void decrementRunningChild(int startId) {
    synchronized (WAKELOCK_MUTEX) {
        // NOTE(review): incrementRunningChild() does not null-check wl the way this does —
        // confirm whether wl can actually be null here.
        if (wl == null) return;
        if (wl.isHeld()) {
            wl.release();
        }
        // our wakelock reference counts. only stop the service if it is in the background and if
        // we are the last thread to release the lock.
        if (!wl.isHeld()) {
            if (NbActivity.getActiveActivityCount() < 1) {
                stopSelf(startId);
            }
            lastStartIdCompleted = startId;
        }
    }
}

/**
 * Should syncing stop? True on a soft interrupt, or (when a context is available)
 * when the device is offline. Going offline also sets the OfflineNow flag as a
 * side effect.
 */
static boolean stopSync(Context context) {
    if (HaltNow) {
        if (AppConstants.VERBOSE_LOG) Log.d(NBSyncService.class.getName(), "stopping sync, soft interrupt set.");
        return true;
    }
    if (context == null) return false;
    if (!NetworkUtils.isOnline(context)) {
        OfflineNow = true;
        return true;
    }
    return false;
}

boolean stopSync() {
    return stopSync(this);
}

// record the time of a hard API failure so background calls can back off
private void noteHardAPIFailure() {
    lastAPIFailure = System.currentTimeMillis();
}

// true when we are in the background and a recent API failure means we should not retry yet
private boolean backoffBackgroundCalls() {
    if (NbActivity.getActiveActivityCount() > 0) return false;
    if (System.currentTimeMillis() > (lastAPIFailure + AppConstants.API_BACKGROUND_BACKOFF_MILLIS)) return false;
    Log.i(this.getClass().getName(), "abandoning background sync due to recent API failures.");
    return true;
}

public void onTrimMemory (int level) {
    if (level > ComponentCallbacks2.TRIM_MEMORY_UI_HIDDEN) {
        isMemoryLow = true;
    }
    // this is also called when the UI is hidden, so double check if we need to
    // stop
    if ( (lastStartIdCompleted != -1) && (NbActivity.getActiveActivityCount() < 1)) {
        stopSelf(lastStartIdCompleted);
    }
}

/**
 * Is the main feed/folder list sync running?
 */
public static boolean isFeedFolderSyncRunning() {
    return (HousekeepingRunning || ActionsRunning || RecountsRunning || FFSyncRunning || CleanupService.running() || UnreadsService.running() || StorySyncRunning || OriginalTextService.running() || ImagePrefetchService.running());
}

// true while any stage that changes feed unread counts is in flight
public static boolean isFeedCountSyncRunning() {
    return (HousekeepingRunning || RecountsRunning || FFSyncRunning);
}

public static boolean isHousekeepingRunning() {
    return HousekeepingRunning;
}

/**
 * Is there a sync for a given FeedSet running?
 */
public static boolean isFeedSetSyncing(FeedSet fs, Context context) {
    return (fs.equals(PendingFeed) && (!stopSync(context)));
}

// true iff we have already seen at least one story for this feedset this session
public static boolean isFeedSetStoriesFresh(FeedSet fs) {
    Integer count = FeedStoriesSeen.get(fs);
    if (count == null) return false;
    if (count < 1) return false;
    return true;
}

/**
 * Human-readable description of what the sync service is currently doing, or null if
 * there is nothing worth reporting. In brief mode only the offline state is reported
 * unless verbose logging is enabled.
 */
public static String getSyncStatusMessage(Context context, boolean brief) {
    if (OfflineNow) return context.getResources().getString(R.string.sync_status_offline);
    if (brief && !AppConstants.VERBOSE_LOG) return null;
    if (HousekeepingRunning) return context.getResources().getString(R.string.sync_status_housekeeping);
    if (ActionsRunning||RecountsRunning) return context.getResources().getString(R.string.sync_status_actions);
    if (FFSyncRunning) return context.getResources().getString(R.string.sync_status_ffsync);
    if (CleanupService.running()) return context.getResources().getString(R.string.sync_status_cleanup);
    if (StorySyncRunning) return context.getResources().getString(R.string.sync_status_stories);
    if (UnreadsService.running()) return String.format(context.getResources().getString(R.string.sync_status_unreads), UnreadsService.getPendingCount());
    if (OriginalTextService.running()) return String.format(context.getResources().getString(R.string.sync_status_text), OriginalTextService.getPendingCount());
    if (ImagePrefetchService.running()) return String.format(context.getResources().getString(R.string.sync_status_images), ImagePrefetchService.getPendingCount());
    return null;
}

/**
 * Force a refresh of feed/folder data on the next sync, even if enough time
 * hasn't passed for an autosync.
 */
public static void forceFeedsFolders() {
    DoFeedsFolders = true;
}

// request that unread counts be reconciled on the next sync pass
public static void flushRecounts() {
    FlushRecounts = true;
}

/**
 * Tell the service which stories can be activated if received. See ActivationMode.
 */
public static void setActivationMode(ActivationMode actMode) {
    ActMode = actMode;
}

public static void setActivationMode(ActivationMode actMode, long modeCutoff) {
    ActMode = actMode;
    ModeCutoff = modeCutoff;
}

/**
 * Requests that the service fetch additional stories for the specified feed/folder. Returns
 * true if more will be fetched as a result of this request.
 *
 * @param fs the feed/folder to fetch stories for.
 * @param desiredStoryCount the minimum number of stories to fetch.
 * @param callerSeen the number of stories the caller thinks they have seen for the FeedSet
 *        or a negative number if the caller trusts us to track for them
 */
public static boolean requestMoreForFeed(FeedSet fs, int desiredStoryCount, int callerSeen) {
    if (ExhaustedFeeds.contains(fs)) {
        if (AppConstants.VERBOSE_LOG) Log.i(NBSyncService.class.getName(), "rejecting request for feedset that is exhaused");
        return false;
    }
    synchronized (PENDING_FEED_MUTEX) {
        Integer alreadyPending = 0;
        if (fs.equals(PendingFeed)) alreadyPending = PendingFeedTarget;
        Integer alreadySeen = FeedStoriesSeen.get(fs);
        if (alreadySeen == null) alreadySeen = 0;
        if (callerSeen < alreadySeen) {
            // the caller is probably filtering and thinks they have fewer than we do, so
            // update our count to agree with them, and force-allow another request
            alreadySeen = callerSeen;
            FeedStoriesSeen.put(fs, callerSeen);
            alreadyPending = 0;
        }
        if (AppConstants.VERBOSE_LOG) Log.d(NBSyncService.class.getName(), "have:" + alreadySeen + " want:" + desiredStoryCount + " pending:" + alreadyPending);
        if (desiredStoryCount <= alreadySeen) {
            return false;
        }
        if (desiredStoryCount <= alreadyPending) {
            return false;
        }
        PendingFeed = fs;
        PendingFeedTarget = desiredStoryCount;
    }
    return true;
}

public static void clearPendingStoryRequest() {
    synchronized (PENDING_FEED_MUTEX) {
        PendingFeed = null;
    }
}

// forget all pagination state so feedsets will be re-fetched from page one
public static void resetFeeds() {
    ExhaustedFeeds.clear();
    FeedPagesSeen.clear();
    FeedStoriesSeen.clear();
}

public static void getOriginalText(String hash) {
    OriginalTextService.addHash(hash);
}

public static void addRecountCandidates(FeedSet fs) {
    if (fs != null) {
        RecountCandidates.add(fs);
    }
}

public static void addRecountCandidates(Set<FeedSet> fs) {
    RecountCandidates.addAll(fs);
}

// politely ask any running sync to stop at the next safe checkpoint
public static void softInterrupt() {
    if (AppConstants.VERBOSE_LOG) Log.d(NBSyncService.class.getName(), "soft stop");
    HaltNow = true;
}

/**
 * Resets any internal temp vars or queues. Called when switching accounts.
 */
public static void clearState() {
    clearPendingStoryRequest();
    FollowupActions.clear();
    RecountCandidates.clear();
    resetFeeds();
    OriginalTextService.clear();
    UnreadsService.clear();
    ImagePrefetchService.clear();
}

public static void resumeFromInterrupt() {
    HaltNow = false;
}

@Override
public void onDestroy() {
    try {
        if (AppConstants.VERBOSE_LOG) Log.d(this.getClass().getName(), "onDestroy - stopping execution");
        // signal all workers to stop before tearing anything down
        HaltNow = true;
        if (cleanupService != null) cleanupService.shutdown();
        if (unreadsService != null) unreadsService.shutdown();
        if (originalTextService != null) originalTextService.shutdown();
        if (imagePrefetchService != null) imagePrefetchService.shutdown();
        if (primaryExecutor != null) {
            // orderly shutdown with a bounded wait, then forceful shutdown on interrupt
            primaryExecutor.shutdown();
            try {
                primaryExecutor.awaitTermination(AppConstants.SHUTDOWN_SLACK_SECONDS, TimeUnit.SECONDS);
            } catch (InterruptedException e) {
                primaryExecutor.shutdownNow();
                Thread.currentThread().interrupt();
            }
        }
        // only close the DB after all workers that might touch it are stopped
        if (dbHelper != null) dbHelper.close();
        if (AppConstants.VERBOSE_LOG) Log.d(this.getClass().getName(), "onDestroy - execution halted");
        super.onDestroy();
    } catch (Exception ex) {
        Log.e(this.getClass().getName(), "unclean shutdown", ex);
    }
}

@Override
public IBinder onBind(Intent intent) {
    // start-only service; binding is not supported
    return null;
}

public static boolean isMemoryLow() {
    return isMemoryLow;
}

// timing summary of the last feed/folder sync, for debug display
public static String getSpeedInfo() {
    StringBuilder s = new StringBuilder();
    s.append(lastFeedCount).append(" feeds in ");
    s.append(" conn:").append(lastFFConnMillis);
    s.append(" read:").append(lastFFReadMillis);
    s.append(" parse:").append(lastFFParseMillis);
    s.append(" store:").append(lastFFWriteMillis);
    return s.toString();
}
}