gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to you under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.apache.wiki.render;

import java.util.Properties;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import net.sf.ehcache.CacheManager;

import org.apache.commons.lang.time.StopWatch;
import org.apache.wiki.TestEngine;
import org.apache.wiki.WikiContext;
import org.apache.wiki.WikiPage;
import org.apache.wiki.parser.MarkupParser;
import org.apache.wiki.parser.WikiDocument;

/**
 * Tests for the {@link RenderingManager}, comparing the speed of the
 * rendered-DOM cache against parsing the page markup on every request.
 */
public class RenderingManagerTest extends TestCase
{
    RenderingManager m_manager;
    TestEngine       m_engine;

    protected void setUp() throws Exception
    {
        // Start from a cold cache so timings are not skewed by earlier tests.
        CacheManager.getInstance().removeAllCaches();
        Properties props = TestEngine.getTestProperties();
        m_engine = new TestEngine( props );
        m_manager = m_engine.getRenderingManager();
    }

    protected void tearDown() throws Exception
    {
        m_engine.deletePage( "TestPage" );
    }

    /**
     * Tests the relative speed of the DOM cache with respect to
     * the page being parsed every single time.
     *
     * @throws Exception if page saving or rendering fails
     */
    public void testCache() throws Exception
    {
        m_engine.saveText( "TestPage", TEST_TEXT );

        StopWatch sw = new StopWatch();

        // Pass 1: bypass the cache by explicitly parsing the markup each time.
        // System.out.println("DOM cache speed test:");
        sw.start();
        for( int i = 0; i < 300; i++ )
        {
            WikiPage page = m_engine.getPage( "TestPage" );
            String pagedata = m_engine.getPureText( page );
            WikiContext context = new WikiContext( m_engine, page );
            MarkupParser p = m_manager.getParser( context, pagedata );
            WikiDocument d = p.parse();
            String html = m_manager.getHTML( context, d );
            assertNotNull( "noncached got null response", html );
        }
        sw.stop();
        // System.out.println("  Nocache took "+sw);
        long nocachetime = sw.getTime();

        // Pass 2: go through the rendering manager, which may serve cached DOMs.
        sw.reset();
        sw.start();
        for( int i = 0; i < 300; i++ )
        {
            WikiPage page = m_engine.getPage( "TestPage" );
            String pagedata = m_engine.getPureText( page );
            WikiContext context = new WikiContext( m_engine, page );
            String html = m_manager.getHTML( context, pagedata );
            assertNotNull( "cached got null response", html );
        }
        sw.stop();
        // System.out.println("  Cache took "+sw);

        // FIX: guard against division by zero -- on a fast machine the cached
        // pass can finish in under one millisecond, making getTime() return 0
        // and the original division throw ArithmeticException.
        long speedup = nocachetime / Math.max( 1, sw.getTime() );
        // System.out.println("  Approx speedup: "+speedup+"x");
    }

    public static Test suite()
    {
        return new TestSuite( RenderingManagerTest.class );
    }

    // Representative wiki markup exercising headings, lists, monospace
    // blocks, bold/italic spans and hyperlinks.
    private static final String TEST_TEXT =
        "Please ''check [RecentChanges].\n" +
        "\n" +
        "Testing. fewfwefe\n" +
        "\n" +
        "CHeck [testpage]\n" +
        "\n" +
        "More testing.\n" +
        "dsadsadsa''\n" +
        "Is this {{truetype}} or not?\n" +
        "What about {{{This}}}?\n" +
        "How about {{this?\n" +
        "\n" +
        "{{{\n" +
        "{{text}}\n" +
        "}}}\n" +
        "goo\n" +
        "\n" +
        "<b>Not bold</b>\n" +
        "\n" +
        "motto\n" +
        "\n" +
        "* This is a list which we\n" +
        "shall continue on a other line.\n" +
        "* There is a list item here.\n" +
        "* Another item.\n" +
        "* More stuff, which continues\n" +
        "on a second line. And on\n" +
        "a third line as well.\n" +
        "And a fourth line.\n" +
        "* Third item.\n" +
        "\n" +
        "Foobar.\n" +
        "\n" +
        "----\n" +
        "\n" +
        "!!!Really big heading\n" +
        "Text.\n" +
        "!! Just a normal heading [with a hyperlink|Main]\n" +
        "More text.\n" +
        "!Just a small heading.\n" +
        "\n" +
        "This should be __bold__ text.\n" +
        "\n" +
        "__more bold text continuing\n" +
        "on the next line.__\n" +
        "\n" +
        "__more bold text continuing\n" +
        "\n" +
        "on the next paragraph.__\n" +
        "\n" +
        "\n" +
        "This should be normal.\n" +
        "\n" +
        "Now, let's try ''italic text''.\n" +
        "\n" +
        "Bulleted lists:\n" +
        "* One\n" +
        "Or more.\n" +
        "* Two\n" +
        "\n" +
        "** Two.One\n" +
        "\n" +
        "*** Two.One.One\n" +
        "\n" +
        "* Three\n" +
        "\n" +
        "Numbered lists.\n" +
        "# One\n" +
        "# Two\n" +
        "# Three\n" +
        "## Three.One\n" +
        "## Three.Two\n" +
        "## Three.Three\n" +
        "### Three.Three.One\n" +
        "# Four\n" +
        "\n" +
        "End?\n" +
        "\n" +
        "No, let's {{break}} things.\\ {{{ {{{ {{text}} }}} }}}\n" +
        "\n" +
        "More breaking.\n" +
        "\n" +
        "{{{\n" +
        "code.}}\n" +
        "----\n" +
        "author: [Asser], [Ebu], [JanneJalkanen], [Jarmo|mailto:jarmo@regex.com.au]\n";
}
package io.swagger.api;

import io.swagger.model.*;
import com.wordnik.swagger.annotations.*;
// FIX: com.sun.jersey.multipart.FormDataParam was imported twice; duplicate removed.
import com.sun.jersey.multipart.FormDataParam;
import com.sun.jersey.core.header.FormDataContentDisposition;

import io.swagger.api.NotFoundException;

import java.io.InputStream;
import java.util.List;

import javax.ws.rs.core.Response;
import javax.ws.rs.*;

/**
 * Generated JAX-RS resource stub for the {@code /apis} collection.
 *
 * Every handler currently returns the placeholder {@link ApiResponseMessage}
 * ("magic!") until real logic is implemented.  Path parameters declared in
 * the {@code @Path} templates (apiId, documentId) are not yet bound with
 * {@code @PathParam} in the method signatures.
 */
@Path("/apis")
@Api(value = "/apis", description = "the apis API")
public class ApisApi {

    /** Lists the available APIs. */
    @GET
    // Object
    @ApiOperation(value = "", notes = "Get a list of available APIs", response = Object.class, responseContainer = "List")
    @ApiResponses(value = { })
    public Response apisGet(@ApiParam(value = "Size of array", required = true) @QueryParam("size") Double size)
            throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Creates a new API. */
    @POST
    // Void
    @ApiOperation(value = "", notes = "Create a new API", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisPost() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Fetches the details of a single API. */
    @GET
    @Path("/{apiId}")
    // Void
    @ApiOperation(value = "", notes = "Get details of an API", response = Void.class)
    @ApiResponses(value = { })
    public Response apisApiIdGet() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Updates an existing API. */
    @PUT
    @Path("/{apiId}")
    // Void
    @ApiOperation(value = "", notes = "Update an existing API", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdPut() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Deletes an existing API. */
    @DELETE
    @Path("/{apiId}")
    // Void
    @ApiOperation(value = "", notes = "Delete an existing API", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdDelete() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Creates a new API as a copy of an existing one. */
    @POST
    @Path("/{apiId}/copy")
    // Void
    // FIX: corrected user-facing doc string typo ("coping and existing").
    @ApiOperation(value = "", notes = "Create a new API by copying an existing", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdCopyPost() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Lists the documents attached to an API. */
    @GET
    @Path("/{apiId}/documents")
    // Void
    @ApiOperation(value = "", notes = "Get a list of documents belonging to an API", response = Void.class)
    @ApiResponses(value = { })
    public Response apisApiIdDocumentsGet() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Attaches a new document to an API. */
    @POST
    @Path("/{apiId}/documents")
    // Void
    @ApiOperation(value = "", notes = "Add a new document to an API", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdDocumentsPost() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Fetches the details of one document of an API. */
    @GET
    @Path("/{apiId}/documents/{documentId}")
    // Void
    @ApiOperation(value = "", notes = "Get the document details of an API", response = Void.class)
    @ApiResponses(value = { })
    public Response apisApiIdDocumentsDocumentIdGet() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Updates the details of one document of an API. */
    @PUT
    @Path("/{apiId}/documents/{documentId}")
    // Void
    @ApiOperation(value = "", notes = "Update document details", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdDocumentsDocumentIdPut() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Removes one document from an API. */
    @DELETE
    @Path("/{apiId}/documents/{documentId}")
    // Void
    @ApiOperation(value = "", notes = "Delete a document of an API", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdDocumentsDocumentIdDelete() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }

    /** Transitions an API to a different lifecycle state. */
    @POST
    @Path("/{apiId}/lifecycle")
    // Void
    @ApiOperation(value = "", notes = "Change the lifecycle of an API", response = Void.class)
    @ApiResponses(value = { @ApiResponse(code = 400, message = "Invalid request or validation error") })
    public Response apisApiIdLifecyclePost() throws NotFoundException {
        // do some magic!
        return Response.ok().entity(new ApiResponseMessage(ApiResponseMessage.OK, "magic!")).build();
    }
}
package tech.acodesigner.dao;

import tech.acodesigner.dto.AboutDto;
import tech.acodesigner.dto.ArticleDto;
import tech.acodesigner.dto.ArticleLiteDto;
import tech.acodesigner.dto.UserDto;
import tech.acodesigner.po.ArticlePo;
import tech.acodesigner.po.CategoryPo;
import tech.acodesigner.util.DBUtil;
import tech.acodesigner.util.DateUtil;
import tech.acodesigner.util.PageUtil;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;

/**
 * JDBC data-access object for blog articles.
 *
 * Conventions visible in this schema: categoryId 1 is the special "about"
 * page; real articles have categoryId &gt; 1.
 *
 * FIX summary for this revision:
 * - search(): the user-supplied key was concatenated into the SQL string
 *   (SQL injection); it is now bound as a LIKE parameter.
 * - pagination(): LIMIT values are now bound parameters.
 * - every method now closes the connection in a finally block so an
 *   exception mid-query no longer leaks the connection.
 *
 * Created by 77239 on 2017/2/13/0013.
 */
public class ArticleDao {

    /** Returns the "about" page content (categoryId = 1), or null if absent. */
    public static AboutDto getAbout() throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "select content from blog_article where categoryId = 1;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            ResultSet rs = pstmt.executeQuery();
            AboutDto about = null;
            if (rs.next()) {
                about = new AboutDto();
                about.setContent(rs.getString("content"));
            }
            return about;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Replaces the content of the "about" page (categoryId = 1). */
    public static void updateAbout(String content) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "update blog_article set content=? where categoryId=1;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setString(1, content);
            pstmt.executeUpdate();
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /**
     * Full-text-ish title search over real articles, newest first.
     *
     * @param key substring to match against the article title
     */
    public static ArrayList<ArticleDto> search(String key) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            ArrayList<ArticleDto> articles = new ArrayList<ArticleDto>();
            // FIX: 'key' was concatenated directly into the SQL string,
            // allowing SQL injection; bind it as a LIKE parameter instead.
            String sql = "select * from blog_article t1,blog_category t2,blog_user t3 "
                    + "where t1.categoryId=t2.categoryId and t1.userId = t3.userId "
                    + "and title like ? and t1.categoryId>1 "
                    + "ORDER BY pubDate DESC";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setString(1, "%" + key + "%");
            ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                ArticleDto article = new ArticleDto();
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
                article.setContent(rs.getString("content"));
                article.setPubDate(rs.getString("pubDate"));
                article.setImage(rs.getString("image"));
                article.setClicks(rs.getInt("clicks"));
                UserDto userDto = new UserDto();
                userDto.setId(rs.getInt("userId"));
                // NOTE(review): unlike pagination(), username is not populated
                // here - preserved as-is; confirm whether callers rely on it.
                userDto.setImage(rs.getString("image"));
                article.setUser(userDto);
                CategoryPo category = new CategoryPo();
                category.setCategoryId(rs.getInt("categoryId"));
                category.setCategoryName(rs.getString("categoryName"));
                article.setCategory(category);
                articles.add(article);
            }
            return articles;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Returns one page of articles (newest first), window taken from pageUtil. */
    public static ArrayList<ArticleDto> pagination(PageUtil pageUtil) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            ArrayList<ArticleDto> articles = new ArrayList<ArticleDto>();
            // FIX: LIMIT values are bound instead of concatenated.
            String sql = "select * from blog_article t1,blog_category t2,blog_user t3 "
                    + "where t1.categoryId=t2.categoryId and t1.userId = t3.userId "
                    + "and t1.categoryId>1 ORDER BY pubDate DESC limit ?,?";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, pageUtil.getStart());
            pstmt.setInt(2, pageUtil.getPageSize());
            ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                ArticleDto article = new ArticleDto();
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
                article.setContent(rs.getString("content"));
                article.setPubDate(rs.getString("pubDate"));
                article.setImage(rs.getString("image"));
                article.setClicks(rs.getInt("clicks"));
                UserDto userDto = new UserDto();
                userDto.setId(rs.getInt("userId"));
                userDto.setUsername(rs.getString("username"));
                userDto.setImage(rs.getString("image"));
                article.setUser(userDto);
                CategoryPo category = new CategoryPo();
                category.setCategoryId(rs.getInt("categoryId"));
                category.setCategoryName(rs.getString("categoryName"));
                article.setCategory(category);
                articles.add(article);
            }
            return articles;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /**
     * Returns the article immediately preceding {@code id} (by articleId),
     * or an empty ArticleLiteDto when there is none.
     */
    public static ArticleLiteDto getPreArticle(int id) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT * FROM blog_article WHERE articleId = "
                    + "(SELECT articleId FROM blog_article WHERE articleId < ? and categoryId > 1 ORDER BY articleId DESC LIMIT 1);";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, id);
            ResultSet rs = pstmt.executeQuery();
            ArticleLiteDto article = new ArticleLiteDto();
            if (rs.next()) {
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
            }
            return article;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /**
     * Returns the article immediately following {@code id} (by articleId),
     * or an empty ArticleLiteDto when there is none.
     */
    public static ArticleLiteDto getNextArticle(int id) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT * FROM blog_article WHERE articleId = "
                    + "(SELECT articleId FROM blog_article WHERE articleId > ? and categoryId > 1 ORDER BY articleId ASC LIMIT 1);";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, id);
            ResultSet rs = pstmt.executeQuery();
            ArticleLiteDto article = new ArticleLiteDto();
            if (rs.next()) {
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
            }
            return article;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Loads a full article (with user and category) by primary key. */
    public static ArticleDto getArticleById(int id) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT * FROM blog_article t1,blog_category t2,blog_user t3"
                    + " WHERE t1.categoryId=t2.categoryId AND t1.userId=t3.userId AND t1.articleId=?;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, id);
            ResultSet rs = pstmt.executeQuery();
            ArticleDto article = new ArticleDto();
            if (rs.next()) {
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
                article.setContent(rs.getString("content"));
                article.setPubDate(rs.getString("pubDate"));
                article.setImage(rs.getString("image"));
                article.setClicks(rs.getInt("clicks"));
                UserDto userDto = new UserDto();
                userDto.setId(rs.getInt("userId"));
                userDto.setUsername(rs.getString("username"));
                userDto.setImage(rs.getString("image"));
                article.setUser(userDto);
                CategoryPo category = new CategoryPo();
                category.setCategoryId(rs.getInt("categoryId"));
                category.setCategoryName(rs.getString("categoryName"));
                article.setCategory(category);
            }
            return article;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Returns id/title/content/pubDate of all real articles, newest first. */
    public static ArrayList<ArticleDto> getArticles() throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT articleId,title,content,pubDate FROM blog_article WHERE categoryId>1 ORDER BY pubDate DESC;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            ResultSet rs = pstmt.executeQuery();
            ArrayList<ArticleDto> articles = new ArrayList<ArticleDto>();
            while (rs.next()) {
                ArticleDto article = new ArticleDto();
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
                article.setContent(rs.getString("content"));
                article.setPubDate(rs.getString("pubDate"));
                articles.add(article);
            }
            return articles;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Returns the lightweight article list for one category, newest first. */
    public static ArrayList<ArticleLiteDto> getArticlesByCategoryId(int id) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT articleId,title,pubDate FROM blog_article WHERE categoryId=? ORDER BY pubDate DESC;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, id);
            ResultSet rs = pstmt.executeQuery();
            ArrayList<ArticleLiteDto> articles = new ArrayList<ArticleLiteDto>();
            while (rs.next()) {
                ArticleLiteDto article = new ArticleLiteDto();
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
                article.setPubDate(rs.getString("pubDate"));
                articles.add(article);
            }
            return articles;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Returns the five most recently published articles (id and title only). */
    public static ArrayList<ArticleLiteDto> getRecentArticles() throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT articleId,title FROM blog_article WHERE categoryId>1 ORDER BY pubDate DESC LIMIT 0,5;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            ArrayList<ArticleLiteDto> articles = new ArrayList<ArticleLiteDto>();
            ResultSet rs = pstmt.executeQuery();
            while (rs.next()) {
                ArticleLiteDto article = new ArticleLiteDto();
                article.setId(rs.getInt("articleId"));
                article.setTitle(rs.getString("title"));
                articles.add(article);
            }
            return articles;
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Updates category, title, content and image of an existing article. */
    public static void updateArticle(ArticlePo article) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "UPDATE blog_article set categoryId=?,title=?,content=?,image=? WHERE articleId=?;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, article.getCategoryId());
            pstmt.setString(2, article.getTitle());
            pstmt.setString(3, article.getContent());
            pstmt.setString(4, article.getImage());
            pstmt.setInt(5, article.getArticleId());
            pstmt.executeUpdate();
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /**
     * Inserts a new article; pubDate is set to the current time.
     * Column order follows the blog_article table definition
     * (auto-id, categoryId, fixed userId 1, title, content, pubDate, clicks, image).
     */
    public static void saveArticle(ArticlePo article) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "INSERT INTO blog_article VALUES (null,?,1,?,?,?,?,?);";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, article.getCategoryId());
            pstmt.setString(2, article.getTitle());
            pstmt.setString(3, article.getContent());
            pstmt.setString(4, DateUtil.formatLong(System.currentTimeMillis(), "yyyy-MM-dd HH:mm:ss"));
            pstmt.setInt(5, article.getClicks());
            pstmt.setString(6, article.getImage());
            pstmt.executeUpdate();
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Deletes an article by primary key. */
    public static void deleteArticle(int id) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "DELETE FROM blog_article WHERE articleId = ?;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, id);
            pstmt.executeUpdate();
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Overwrites the click counter of an article with an absolute value. */
    public static void addClicks(int clicks, int id) throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "UPDATE blog_article SET clicks=? WHERE articleId=?;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            pstmt.setInt(1, clicks);
            pstmt.setInt(2, id);
            pstmt.executeUpdate();
        } finally {
            DBUtil.closeCon(conn);
        }
    }

    /** Counts all real articles (categoryId &gt; 1). */
    public static int count() throws SQLException, ClassNotFoundException {
        Connection conn = DBUtil.getCon();
        try {
            String sql = "SELECT COUNT(*) AS total FROM blog_article WHERE categoryId>1;";
            PreparedStatement pstmt = conn.prepareStatement(sql);
            ResultSet rs = pstmt.executeQuery();
            int count = 0;
            if (rs.next()) {
                count = rs.getInt("total");
            }
            return count;
        } finally {
            DBUtil.closeCon(conn);
        }
    }
}
/* * Copyright 2000-2015 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.editor.impl.view; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.diagnostic.Logger; import com.intellij.openapi.editor.*; import com.intellij.openapi.editor.colors.EditorColors; import com.intellij.openapi.editor.ex.*; import com.intellij.openapi.editor.highlighter.HighlighterIterator; import com.intellij.openapi.editor.impl.DocumentMarkupModel; import com.intellij.openapi.editor.markup.EffectType; import com.intellij.openapi.editor.markup.HighlighterLayer; import com.intellij.openapi.editor.markup.HighlighterTargetArea; import com.intellij.openapi.editor.markup.TextAttributes; import com.intellij.openapi.util.Comparing; import com.intellij.util.ArrayUtilRt; import com.intellij.util.CommonProcessors; import com.intellij.util.containers.ContainerUtil; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.awt.*; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; /** * Iterator over editor's text contents. Each iteration step corresponds to a text fragment having common graphical attributes * (font style, foreground and background color, effect type and color). 
*/ // This class should replace com.intellij.openapi.editor.impl.IterationState when new editor rendering engine will become default public class IterationState { private static final Logger LOG = Logger.getInstance(IterationState.class); private static final Comparator<RangeHighlighterEx> BY_LAYER_THEN_ATTRIBUTES = new Comparator<RangeHighlighterEx>() { @Override public int compare(RangeHighlighterEx o1, RangeHighlighterEx o2) { final int result = LayerComparator.INSTANCE.compare(o1, o2); if (result != 0) { return result; } // There is a possible case when more than one highlighter target the same region (e.g. 'identifier under caret' and 'identifier'). // We want to prefer the one that defines foreground color to the one that doesn't define (has either fore- or background colors // while the other one has only foreground color). See IDEA-85697 for concrete example. final TextAttributes a1 = o1.getTextAttributes(); final TextAttributes a2 = o2.getTextAttributes(); if (a1 == null ^ a2 == null) { return a1 == null ? 1 : -1; } if (a1 == null) { return result; } final Color fore1 = a1.getForegroundColor(); final Color fore2 = a2.getForegroundColor(); if (fore1 == null ^ fore2 == null) { return fore1 == null ? 1 : -1; } final Color back1 = a1.getBackgroundColor(); final Color back2 = a2.getBackgroundColor(); if (back1 == null ^ back2 == null) { return back1 == null ? 
1 : -1; } return result; } }; private static final Comparator<RangeHighlighterEx> BY_AFFECTED_END_OFFSET_REVERSED = new Comparator<RangeHighlighterEx>() { @Override public int compare(RangeHighlighterEx r1, RangeHighlighterEx r2) { return r2.getAffectedAreaEndOffset() - r1.getAffectedAreaEndOffset(); } }; private final TextAttributes myMergedAttributes = new TextAttributes(); @Nullable private final HighlighterIterator myHighlighterIterator; private final HighlighterSweep myView; private final HighlighterSweep myDoc; private int myStartOffset; private int myEndOffset; private final int myEnd; private final int[] mySelectionStarts; private final int[] mySelectionEnds; private int myCurrentSelectionIndex = 0; private Color myCurrentBackgroundColor; private Color myLastBackgroundColor; private final List<RangeHighlighterEx> myCurrentHighlighters = new ArrayList<RangeHighlighterEx>(); private final FoldingModelEx myFoldingModel; private final TextAttributes myFoldTextAttributes; private FoldRegion myCurrentFold; private final TextAttributes mySelectionAttributes; private final TextAttributes myCaretRowAttributes; private final Color myDefaultBackground; private final Color myDefaultForeground; private final int myCaretRowStart; private final int myCaretRowEnd; private final boolean myCaretRowStartsWithSoftWrap; private final boolean myCaretRowEndsWithSoftWrap; private final List<TextAttributes> myCachedAttributesList = new ArrayList<TextAttributes>(5); private final DocumentEx myDocument; private final EditorEx myEditor; private final Color myReadOnlyColor; private final boolean myUseOnlyFullLineHighlighters; private final boolean myReverseIteration; public IterationState(@NotNull EditorEx editor, int start, int end, boolean useCaretAndSelection, boolean useOnlyFullLineHighlighters, boolean useFoldRegions, boolean iterateBackwards) { ApplicationManager.getApplication().assertReadAccessAllowed(); myDocument = editor.getDocument(); myStartOffset = start; myEnd = end; 
myEditor = editor; myUseOnlyFullLineHighlighters = useOnlyFullLineHighlighters; myReverseIteration = iterateBackwards; LOG.assertTrue(myReverseIteration ? myStartOffset >= myEnd : myStartOffset <= myEnd); myHighlighterIterator = useOnlyFullLineHighlighters ? null : editor.getHighlighter().createIterator(start); if (!useCaretAndSelection) { mySelectionStarts = ArrayUtilRt.EMPTY_INT_ARRAY; mySelectionEnds = ArrayUtilRt.EMPTY_INT_ARRAY; } else { List<Caret> carets = editor.getCaretModel().getAllCarets(); int caretCount = carets.size(); mySelectionStarts = new int[caretCount]; mySelectionEnds = new int[caretCount]; for (int i = 0; i < caretCount; i++) { Caret caret = carets.get(i); mySelectionStarts[iterateBackwards ? caretCount - i - 1 : i] = caret.getSelectionStart(); mySelectionEnds[iterateBackwards ? caretCount - i - 1 : i] = caret.getSelectionEnd(); } } myFoldingModel = useFoldRegions ? editor.getFoldingModel() : null; myFoldTextAttributes = useFoldRegions ? myFoldingModel.getPlaceholderAttributes() : null; mySelectionAttributes = editor.getSelectionModel().getTextAttributes(); myReadOnlyColor = myEditor.getColorsScheme().getColor(EditorColors.READONLY_FRAGMENT_BACKGROUND_COLOR); CaretModel caretModel = editor.getCaretModel(); myCaretRowAttributes = editor.isRendererMode() ? null : caretModel.getTextAttributes(); myDefaultBackground = editor.getColorsScheme().getDefaultBackground(); myDefaultForeground = editor.getColorsScheme().getDefaultForeground(); myCaretRowStart = useCaretAndSelection ? caretModel.getVisualLineStart() : -1; int visualLineEnd = caretModel.getVisualLineEnd(); if (visualLineEnd == myDocument.getTextLength() && myDocument.getLineCount() > 0 && visualLineEnd > myDocument.getLineStartOffset(myDocument.getLineCount() - 1)) { visualLineEnd++; } myCaretRowEnd = useCaretAndSelection ? 
visualLineEnd : -1; myCaretRowStartsWithSoftWrap = editor.getSoftWrapModel().getSoftWrap(myCaretRowStart) != null; myCaretRowEndsWithSoftWrap = editor.getSoftWrapModel().getSoftWrap(myCaretRowEnd) != null; MarkupModelEx editorMarkup = editor.getMarkupModel(); myView = new HighlighterSweep(editorMarkup, start, myEnd, useOnlyFullLineHighlighters); final MarkupModelEx docMarkup = (MarkupModelEx)DocumentMarkupModel.forDocument(editor.getDocument(), editor.getProject(), true); myDoc = new HighlighterSweep(docMarkup, start, myEnd, useOnlyFullLineHighlighters); myEndOffset = myStartOffset; advance(); } private class HighlighterSweep { private RangeHighlighterEx myNextHighlighter; int i; private final RangeHighlighterEx[] highlighters; private HighlighterSweep(@NotNull MarkupModelEx markupModel, int start, int end, final boolean onlyFullLine) { // we have to get all highlighters in advance and sort them by affected offsets // since these can be different from the real offsets the highlighters are sorted by in the tree. (See LINES_IN_RANGE perverts) final List<RangeHighlighterEx> list = new ArrayList<RangeHighlighterEx>(); markupModel.processRangeHighlightersOverlappingWith(myReverseIteration ? end : start, myReverseIteration ? start : end, new CommonProcessors.CollectProcessor<RangeHighlighterEx>(list) { @Override protected boolean accept(RangeHighlighterEx ex) { return !onlyFullLine || ex.getTargetArea() == HighlighterTargetArea.LINES_IN_RANGE; } }); highlighters = list.isEmpty() ? RangeHighlighterEx.EMPTY_ARRAY : list.toArray(new RangeHighlighterEx[list.size()]); Arrays.sort(highlighters, myReverseIteration ? BY_AFFECTED_END_OFFSET_REVERSED : RangeHighlighterEx.BY_AFFECTED_START_OFFSET); while (i < highlighters.length) { RangeHighlighterEx highlighter = highlighters[i++]; if (!skipHighlighter(highlighter)) { myNextHighlighter = highlighter; break; } } } private void advance() { if (myNextHighlighter != null) { if (myReverseIteration ? 
myNextHighlighter.getAffectedAreaEndOffset() < myStartOffset :
    myNextHighlighter.getAffectedAreaStartOffset() > myStartOffset) {
  return;
}
myCurrentHighlighters.add(myNextHighlighter);
myNextHighlighter = null;
}
// Pull every remaining highlighter that already covers myStartOffset into
// myCurrentHighlighters; the first one lying beyond it (in the direction of
// iteration) is cached in myNextHighlighter and the scan stops there.
while (i < highlighters.length) {
  RangeHighlighterEx highlighter = highlighters[i++];
  if (!skipHighlighter(highlighter)) {
    if (myReverseIteration ?
        highlighter.getAffectedAreaEndOffset() < myStartOffset :
        highlighter.getAffectedAreaStartOffset() > myStartOffset) {
      myNextHighlighter = highlighter;
      break;
    }
    else {
      myCurrentHighlighters.add(highlighter);
    }
  }
}
}

// Boundary of the cached next highlighter: its affected end when iterating
// backwards, its affected start otherwise. When nothing is cached, returns a
// sentinel that can never win the min/max comparison in
// getMinSegmentHighlightersEnd().
private int getMinSegmentHighlighterEnd() {
  if (myNextHighlighter != null) {
    return myReverseIteration ?
        myNextHighlighter.getAffectedAreaEndOffset():
        myNextHighlighter.getAffectedAreaStartOffset();
  }
  return myReverseIteration ? Integer.MIN_VALUE : Integer.MAX_VALUE;
}
}

// A highlighter is irrelevant for this iteration if it is invalid, rendered
// after line end, carries no text attributes, is entirely hidden inside a
// single collapsed fold region, or is filtered out for this editor.
// NOTE(review): "avaliableIn" is the (misspelled) upstream API method name;
// it must stay as-is to compile against that API.
private boolean skipHighlighter(@NotNull RangeHighlighterEx highlighter) {
  if (!highlighter.isValid() || highlighter.isAfterEndOfLine() || highlighter.getTextAttributes() == null) return true;
  final FoldRegion region = myFoldingModel == null ? null : myFoldingModel.getCollapsedRegionAtOffset(highlighter.getAffectedAreaStartOffset());
  if (region != null && region == myFoldingModel.getCollapsedRegionAtOffset(highlighter.getAffectedAreaEndOffset())) return true;
  return !highlighter.getEditorFilter().avaliableIn(myEditor);
}

// Moves the iteration to the next segment. The new segment starts where the
// previous one ended; its end is either the far side of a collapsed fold
// (consumed as one segment) or the nearest of all interesting boundaries:
// syntax highlighter token, selection edge, segment-highlighter edge, fold
// range, caret row, guarded block.
public void advance() {
  myStartOffset = myEndOffset;
  advanceSegmentHighlighters();
  advanceCurrentSelectionIndex();
  if (!myUseOnlyFullLineHighlighters) {
    // When iterating backwards, the fold relevant to this segment is the one
    // covering the character just before myStartOffset.
    myCurrentFold = myFoldingModel == null ? null : myFoldingModel.getCollapsedRegionAtOffset(myReverseIteration ? myStartOffset - 1 : myStartOffset);
  }
  if (myCurrentFold != null) {
    myEndOffset = myReverseIteration ? myCurrentFold.getStartOffset() : myCurrentFold.getEndOffset();
  }
  else {
    myEndOffset = getHighlighterEnd(myStartOffset);
    setEndOffsetIfCloser(getSelectionEnd());
    setEndOffsetIfCloser(getMinSegmentHighlightersEnd());
    setEndOffsetIfCloser(getFoldRangesEnd(myStartOffset));
    setEndOffsetIfCloser(getCaretEnd(myStartOffset));
    setEndOffsetIfCloser(getGuardedBlockEnd(myStartOffset));
  }
  reinit();
}

// Shrinks myEndOffset toward myStartOffset if the candidate boundary is
// closer, respecting iteration direction.
private void setEndOffsetIfCloser(int offset) {
  if (myReverseIteration ? offset > myEndOffset : offset < myEndOffset) {
    myEndOffset = offset;
  }
}

// Advances the syntax-highlighter iterator past `start` and returns the first
// token boundary strictly beyond it (before it, in reverse); myEnd when the
// iterator is exhausted or absent.
private int getHighlighterEnd(int start) {
  if (myHighlighterIterator == null) {
    return myEnd;
  }
  while (!myHighlighterIterator.atEnd()) {
    int end = myReverseIteration ? myHighlighterIterator.getStart() : myHighlighterIterator.getEnd();
    if (myReverseIteration ? end < start : end > start) {
      return end;
    }
    if (myReverseIteration) {
      myHighlighterIterator.retreat();
    }
    else {
      myHighlighterIterator.advance();
    }
  }
  return myEnd;
}

// Nearest caret-row boundary (row start or row end) ahead of `start`.
private int getCaretEnd(int start) {
  return getNearestValueAhead(start, myCaretRowStart, myCaretRowEnd);
}

// Of the two range boundaries, returns the one nearest to `offset` in the
// direction of iteration; myEnd if both are behind.
private int getNearestValueAhead(int offset, int rangeStart, int rangeEnd) {
  if (myReverseIteration) {
    if (rangeEnd < offset) {
      return rangeEnd;
    }
    if (rangeStart < offset) {
      return rangeStart;
    }
  }
  else {
    if (rangeStart > offset) {
      return rangeStart;
    }
    if (rangeEnd > offset) {
      return rangeEnd;
    }
  }
  return myEnd;
}

// Nearest guarded-block boundary ahead of `start` across all of the
// document's guarded blocks; myEnd when full-line-only mode skips them.
private int getGuardedBlockEnd(int start) {
  if (myUseOnlyFullLineHighlighters) {
    return myEnd;
  }
  List<RangeMarker> blocks = myDocument.getGuardedBlocks();
  int result = myEnd;
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < blocks.size(); i++) {
    RangeMarker block = blocks.get(i);
    int nearestValue = getNearestValueAhead(start, block.getStartOffset(), block.getEndOffset());
    result = myReverseIteration ? Math.max(result, nearestValue) : Math.min(result, nearestValue);
  }
  return result;
}

// Skips selection ranges that the iteration has already passed completely.
private void advanceCurrentSelectionIndex() {
  while (myCurrentSelectionIndex < mySelectionEnds.length && (myReverseIteration ?
      myStartOffset <= mySelectionStarts[myCurrentSelectionIndex] :
      myStartOffset >= mySelectionEnds[myCurrentSelectionIndex])) {
    myCurrentSelectionIndex++;
  }
}

// Nearest boundary of the current selection range ahead of myStartOffset;
// myEnd when all selections are exhausted.
private int getSelectionEnd() {
  if (myCurrentSelectionIndex >= mySelectionStarts.length) {
    return myEnd;
  }
  return getNearestValueAhead(myStartOffset, mySelectionStarts[myCurrentSelectionIndex], mySelectionEnds[myCurrentSelectionIndex]);
}

// True when the current segment start lies inside the current selection range.
private boolean isInSelection() {
  return myCurrentSelectionIndex < mySelectionStarts.length && (myReverseIteration ?
      myStartOffset <= mySelectionEnds[myCurrentSelectionIndex] :
      myStartOffset >= mySelectionStarts[myCurrentSelectionIndex]);
}

// Advances both highlighter sweeps and evicts from myCurrentHighlighters every
// highlighter the iteration has moved past. At the exact end of the document a
// LINES_IN_RANGE highlighter ending there is deliberately kept (strict <).
private void advanceSegmentHighlighters() {
  myDoc.advance();
  myView.advance();
  boolean fileEnd = myStartOffset == myDocument.getTextLength();
  for (int i = myCurrentHighlighters.size() - 1; i >= 0; i--) {
    RangeHighlighterEx highlighter = myCurrentHighlighters.get(i);
    if (myReverseIteration ?
        highlighter.getAffectedAreaStartOffset() >= myStartOffset :
        fileEnd && highlighter.getTargetArea() == HighlighterTargetArea.LINES_IN_RANGE ?
            highlighter.getAffectedAreaEndOffset() < myStartOffset :
            highlighter.getAffectedAreaEndOffset() <= myStartOffset) {
      myCurrentHighlighters.remove(i);
    }
  }
}

// Nearest top-level collapsed-fold boundary ahead of startOffset, scanning
// outward from the last collapsed region before it; myEnd when folds are not
// considered.
private int getFoldRangesEnd(int startOffset) {
  if (myUseOnlyFullLineHighlighters || myFoldingModel == null) {
    return myEnd;
  }
  int end = myEnd;
  FoldRegion[] topLevelCollapsed = myFoldingModel.fetchTopLevel();
  if (topLevelCollapsed != null) {
    if (myReverseIteration) {
      for (int i = myFoldingModel.getLastCollapsedRegionBefore(startOffset);
           i >= 0 && i < topLevelCollapsed.length;
           i--) {
        FoldRegion range = topLevelCollapsed[i];
        if (!range.isValid()) continue;
        int rangeEnd = range.getEndOffset();
        if (rangeEnd < startOffset) {
          if (rangeEnd > end) {
            end = rangeEnd;
          }
          else {
            // Regions are ordered; once one is no closer, none after it is.
            break;
          }
        }
      }
    }
    else {
      for (int i = myFoldingModel.getLastCollapsedRegionBefore(startOffset) + 1;
           i >= 0 && i < topLevelCollapsed.length;
           i++) {
        FoldRegion range = topLevelCollapsed[i];
        if (!range.isValid()) continue;
        int rangeEnd = range.getStartOffset();
        if (rangeEnd > startOffset) {
          if (rangeEnd < end) {
            end = rangeEnd;
          }
          else {
            break;
          }
        }
      }
    }
  }
  return end;
}

// Nearest boundary among all active segment highlighters plus the two sweeps'
// cached next highlighters.
private int getMinSegmentHighlightersEnd() {
  int end = myEnd;
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < myCurrentHighlighters.size(); i++) {
    RangeHighlighterEx highlighter = myCurrentHighlighters.get(i);
    if (myReverseIteration) {
      if (highlighter.getAffectedAreaStartOffset() > end) {
        end = highlighter.getAffectedAreaStartOffset();
      }
    }
    else {
      if (highlighter.getAffectedAreaEndOffset() < end) {
        end = highlighter.getAffectedAreaEndOffset();
      }
    }
  }
  end = myReverseIteration ?
      Math.max(end, myDoc.getMinSegmentHighlighterEnd()) :
      Math.min(end, myDoc.getMinSegmentHighlighterEnd());
  end = myReverseIteration ?
      Math.max(end, myView.getMinSegmentHighlighterEnd()) :
      Math.min(end, myView.getMinSegmentHighlighterEnd());
  return end;
}

// Recomputes myMergedAttributes for the current segment: stacks selection,
// syntax, fold, guarded-block and caret-row attributes interleaved with the
// segment highlighters (sorted by layer), then merges the stack channel by
// channel (first non-default value wins).
private void reinit() {
  boolean isInSelection = isInSelection();
  boolean isInCaretRow = isInCaretRow(!myReverseIteration, myReverseIteration);
  boolean isInGuardedBlock = !myUseOnlyFullLineHighlighters && myDocument.getOffsetGuard(myReverseIteration ? myStartOffset - 1 : myStartOffset) != null;

  TextAttributes syntax = myHighlighterIterator == null || myHighlighterIterator.atEnd() ? null : myHighlighterIterator.getTextAttributes();
  TextAttributes selection = isInSelection ? mySelectionAttributes : null;
  TextAttributes caret = isInCaretRow ? myCaretRowAttributes : null;
  TextAttributes fold = myCurrentFold != null ? myFoldTextAttributes : null;
  TextAttributes guard = isInGuardedBlock ? new TextAttributes(null, myReadOnlyColor, null, EffectType.BOXED, Font.PLAIN) : null;

  final int size = myCurrentHighlighters.size();
  if (size > 1) {
    ContainerUtil.quickSort(myCurrentHighlighters, BY_LAYER_THEN_ATTRIBUTES);
  }

  // An ERASE_MARKER highlighter suppresses syntax coloring for this segment.
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < size; i++) {
    RangeHighlighterEx highlighter = myCurrentHighlighters.get(i);
    if (highlighter.getTextAttributes() == TextAttributes.ERASE_MARKER) {
      syntax = null;
    }
  }

  List<TextAttributes> cachedAttributes = myCachedAttributesList;
  cachedAttributes.clear();

  // Insert each pseudo-attribute just before the first highlighter whose
  // layer falls below that attribute's layer threshold.
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < size; i++) {
    RangeHighlighterEx highlighter = myCurrentHighlighters.get(i);
    if (highlighter.getLayer() < HighlighterLayer.SELECTION) {
      if (selection != null) {
        cachedAttributes.add(selection);
        selection = null;
      }
    }
    if (syntax != null && highlighter.getLayer() < HighlighterLayer.SYNTAX) {
      if (fold != null) {
        cachedAttributes.add(fold);
        fold = null;
      }
      cachedAttributes.add(syntax);
      syntax = null;
    }
    if (guard != null && highlighter.getLayer() < HighlighterLayer.GUARDED_BLOCKS) {
      cachedAttributes.add(guard);
      guard = null;
    }
    if (caret != null && highlighter.getLayer() < HighlighterLayer.CARET_ROW) {
      cachedAttributes.add(caret);
      caret = null;
    }
    TextAttributes textAttributes = highlighter.getTextAttributes();
    if (textAttributes != null && textAttributes != TextAttributes.ERASE_MARKER) {
      cachedAttributes.add(textAttributes);
    }
  }

  // Anything not yet consumed goes to the bottom of the stack.
  if (selection != null) cachedAttributes.add(selection);
  if (fold != null) cachedAttributes.add(fold);
  if (guard != null) cachedAttributes.add(guard);
  if (caret != null) cachedAttributes.add(caret);
  if (syntax != null) cachedAttributes.add(syntax);

  Color fore = null;
  Color back = isInGuardedBlock ? myReadOnlyColor : null;
  Color effect = null;
  EffectType effectType = null;
  int fontType = 0;
  // Merge the stack: for each channel the first value differing from the
  // editor default wins.
  //noinspection ForLoopReplaceableByForEach
  for (int i = 0; i < cachedAttributes.size(); i++) {
    TextAttributes attrs = cachedAttributes.get(i);
    if (fore == null) {
      fore = ifDiffers(attrs.getForegroundColor(), myDefaultForeground);
    }
    if (back == null) {
      back = ifDiffers(attrs.getBackgroundColor(), myDefaultBackground);
    }
    if (fontType == Font.PLAIN) {
      fontType = attrs.getFontType();
    }
    if (effect == null) {
      effect = attrs.getEffectColor();
      effectType = attrs.getEffectType();
    }
  }
  if (fore == null) fore = myDefaultForeground;
  if (back == null) back = myDefaultBackground;
  if (effectType == null) effectType = EffectType.BOXED;
  myMergedAttributes.setAttributes(fore, back, effect, null, effectType, fontType);

  myLastBackgroundColor = myCurrentBackgroundColor;
  myCurrentBackgroundColor = back;
}

// True when myStartOffset lies on the caret row; the flags control whether
// the exact row start / row end offsets count as inside.
private boolean isInCaretRow(boolean includeLineStart, boolean includeLineEnd) {
  return myStartOffset > myCaretRowStart && myStartOffset < myCaretRowEnd
         || includeLineStart && myStartOffset == myCaretRowStart
         || includeLineEnd && myStartOffset == myCaretRowEnd;
}

// Returns c1 unless it equals c2, in which case null (i.e. "same as default").
@Nullable
private static Color ifDiffers(final Color c1, final Color c2) {
  return Comparing.equal(c1, c2) ? null : c1;
}

public boolean atEnd() {
  return myReverseIteration ? myStartOffset <= myEnd : myStartOffset >= myEnd;
}

public int getStartOffset() {
  return myStartOffset;
}

public int getEndOffset() {
  return myEndOffset;
}

@NotNull
public TextAttributes getMergedAttributes() {
  return myMergedAttributes;
}

// NOTE(review): mutates and returns the shared myMergedAttributes instance;
// callers must not cache the result across advance() calls.
@NotNull
public TextAttributes getPastLineEndBackgroundAttributes() {
  myMergedAttributes.setBackgroundColor(myEditor.getSoftWrapModel().getSoftWrap(myStartOffset) != null ? getBreakBackgroundColor(true) : myCurrentBackgroundColor);
  return myMergedAttributes;
}

@NotNull
public TextAttributes getBeforeLineStartBackgroundAttributes() {
  return new TextAttributes(null, getBreakBackgroundColor(false), null, null, 0);
}

// Background to paint at a visual line break: keep the current background when
// it matches the previous segment's; otherwise fall back to the caret-row
// background (depending on soft-wrap state at the break) or the default.
private Color getBreakBackgroundColor(boolean lineEnd) {
  return Comparing.equal(myCurrentBackgroundColor, myLastBackgroundColor) ? myCurrentBackgroundColor :
         isInCaretRow(!myCaretRowStartsWithSoftWrap || !lineEnd, myCaretRowEndsWithSoftWrap && lineEnd) ?
             myCaretRowAttributes.getBackgroundColor() : myDefaultBackground;
}

// Orders highlighters by layer, higher layer first; ties are broken so the
// more specific (shorter affected range) highlighter comes first.
private static class LayerComparator implements Comparator<RangeHighlighterEx> {
  private static final LayerComparator INSTANCE = new LayerComparator();
  @Override
  public int compare(RangeHighlighterEx o1, RangeHighlighterEx o2) {
    int layerDiff = o2.getLayer() - o1.getLayer();
    if (layerDiff != 0) {
      return layerDiff;
    }
    // prefer more specific region
    int o1Length = o1.getAffectedAreaEndOffset() - o1.getAffectedAreaStartOffset();
    int o2Length = o2.getAffectedAreaEndOffset() - o2.getAffectedAreaStartOffset();
    return o1Length - o2Length;
  }
}
}
/* * Copyright (C) 2007 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package android.test; import com.android.internal.util.Predicate; import com.android.internal.util.Predicates; import android.app.Activity; import android.app.Instrumentation; import android.os.Bundle; import android.os.Debug; import android.os.Looper; import android.os.Parcelable; import android.os.PerformanceCollector; import android.os.PerformanceCollector.PerformanceResultsWriter; import android.test.suitebuilder.TestMethod; import android.test.suitebuilder.TestPredicates; import android.test.suitebuilder.TestSuiteBuilder; import android.test.suitebuilder.annotation.HasAnnotation; import android.test.suitebuilder.annotation.LargeTest; import android.util.Log; import java.io.ByteArrayOutputStream; import java.io.File; import java.io.PrintStream; import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.ArrayList; import java.util.List; import junit.framework.AssertionFailedError; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestListener; import junit.framework.TestResult; import junit.framework.TestSuite; import junit.runner.BaseTestRunner; import junit.textui.ResultPrinter; /** * An {@link Instrumentation} that runs various types of {@link junit.framework.TestCase}s against * an Android package (application). 
 *
 * <div class="special reference">
 * <h3>Developer Guides</h3>
 * <p>For more information about application testing, read the
 * <a href="{@docRoot}guide/topics/testing/index.html">Testing</a> developer guide.</p>
 * </div>
 *
 * <h3>Typical Usage</h3>
 * <ol>
 * <li>Write {@link junit.framework.TestCase}s that perform unit, functional, or performance tests
 * against the classes in your package. Typically these are subclassed from:
 * <ul><li>{@link android.test.ActivityInstrumentationTestCase2}</li>
 * <li>{@link android.test.ActivityUnitTestCase}</li>
 * <li>{@link android.test.AndroidTestCase}</li>
 * <li>{@link android.test.ApplicationTestCase}</li>
 * <li>{@link android.test.InstrumentationTestCase}</li>
 * <li>{@link android.test.ProviderTestCase}</li>
 * <li>{@link android.test.ServiceTestCase}</li>
 * <li>{@link android.test.SingleLaunchActivityTestCase}</li></ul>
 * <li>Set the <code>android:targetPackage</code> attribute of the <code>&lt;instrumentation&gt;</code>
 * element in the test package's manifest. You should set the attribute value
 * to the package name of the target application under test.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with no optional arguments, to run all tests (except performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e func true' to run all functional tests. These are tests that derive from
 * {@link android.test.InstrumentationTestCase}.
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e unit true' to run all unit tests. These are tests that <i>do not</i> derive
 * from {@link android.test.InstrumentationTestCase} (and are not performance tests).
 * <li>Run the instrumentation using "adb shell am instrument -w",
 * with the argument '-e class' set to run an individual {@link junit.framework.TestCase}.
* </ol> * <p/> * <b>Running all tests:</b> adb shell am instrument -w * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running all small tests:</b> adb shell am instrument -w * -e size small * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running all medium tests:</b> adb shell am instrument -w * -e size medium * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running all large tests:</b> adb shell am instrument -w * -e size large * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Filter test run to tests with given annotation:</b> adb shell am instrument -w * -e annotation com.android.foo.MyAnnotation * com.android.foo/android.test.InstrumentationTestRunner * <p/> * If used with other options, the resulting test run will contain the union of the two options. * e.g. "-e size large -e annotation com.android.foo.MyAnnotation" will run only tests with both * the {@link LargeTest} and "com.android.foo.MyAnnotation" annotations. 
* <p/> * <b>Filter test run to tests <i>without</i> given annotation:</b> adb shell am instrument -w * -e notAnnotation com.android.foo.MyAnnotation * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running a single testcase:</b> adb shell am instrument -w * -e class com.android.foo.FooTest * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running a single test:</b> adb shell am instrument -w * -e class com.android.foo.FooTest#testFoo * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running multiple tests:</b> adb shell am instrument -w * -e class com.android.foo.FooTest,com.android.foo.TooTest * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Running all tests in a java package:</b> adb shell am instrument -w * -e package com.android.foo.subpkg * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>Including performance tests:</b> adb shell am instrument -w * -e perf true * com.android.foo/android.test.InstrumentationTestRunner * <p/> * <b>To debug your tests, set a break point in your code and pass:</b> * -e debug true * <p/> * <b>To run in 'log only' mode</b> * -e log true * This option will load and iterate through all test classes and methods, but will bypass actual * test execution. Useful for quickly obtaining info on the tests to be executed by an * instrumentation command. * <p/> * <b>To generate EMMA code coverage:</b> * -e coverage true * Note: this requires an emma instrumented build. By default, the code coverage results file * will be saved in a /data/<app>/coverage.ec file, unless overridden by coverageFile flag (see * below) * <p/> * <b> To specify EMMA code coverage results file path:</b> * -e coverageFile /sdcard/myFile.ec * <br/> * in addition to the other arguments. * @deprecated Use * <a href="{@docRoot}reference/android/support/test/runner/AndroidJUnitRunner.html"> * AndroidJUnitRunner</a> instead. 
New tests should be written using the * <a href="{@docRoot}tools/testing-support-library/index.html">Android Testing Support Library</a>. */ /* (not JavaDoc) * Although not necessary in most case, another way to use this class is to extend it and have the * derived class return the desired test suite from the {@link #getTestSuite()} method. The test * suite returned from this method will be used if no target class is defined in the meta-data or * command line argument parameters. If a derived class is used it needs to be added as an * instrumentation to the AndroidManifest.xml and the command to run it would look like: * <p/> * adb shell am instrument -w com.android.foo/<i>com.android.FooInstrumentationTestRunner</i> * <p/> * Where <i>com.android.FooInstrumentationTestRunner</i> is the derived class. * * This model is used by many existing app tests, but can probably be deprecated. */ @Deprecated public class InstrumentationTestRunner extends Instrumentation implements TestSuiteProvider { /** @hide */ public static final String ARGUMENT_TEST_CLASS = "class"; /** @hide */ public static final String ARGUMENT_TEST_PACKAGE = "package"; /** @hide */ public static final String ARGUMENT_TEST_SIZE_PREDICATE = "size"; /** @hide */ public static final String ARGUMENT_DELAY_MSEC = "delay_msec"; private static final String SMALL_SUITE = "small"; private static final String MEDIUM_SUITE = "medium"; private static final String LARGE_SUITE = "large"; private static final String ARGUMENT_LOG_ONLY = "log"; /** @hide */ static final String ARGUMENT_ANNOTATION = "annotation"; /** @hide */ static final String ARGUMENT_NOT_ANNOTATION = "notAnnotation"; /** * This constant defines the maximum allowed runtime (in ms) for a test included in the "small" * suite. It is used to make an educated guess at what suite an unlabeled test belongs. 
*/ private static final float SMALL_SUITE_MAX_RUNTIME = 100; /** * This constant defines the maximum allowed runtime (in ms) for a test included in the * "medium" suite. It is used to make an educated guess at what suite an unlabeled test belongs. */ private static final float MEDIUM_SUITE_MAX_RUNTIME = 1000; /** * The following keys are used in the status bundle to provide structured reports to * an IInstrumentationWatcher. */ /** * This value, if stored with key {@link android.app.Instrumentation#REPORT_KEY_IDENTIFIER}, * identifies InstrumentationTestRunner as the source of the report. This is sent with all * status messages. */ public static final String REPORT_VALUE_ID = "InstrumentationTestRunner"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * identifies the total number of tests that are being run. This is sent with all status * messages. */ public static final String REPORT_KEY_NUM_TOTAL = "numtests"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * identifies the sequence number of the current test. This is sent with any status message * describing a specific test being started or completed. */ public static final String REPORT_KEY_NUM_CURRENT = "current"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * identifies the name of the current test class. This is sent with any status message * describing a specific test being started or completed. */ public static final String REPORT_KEY_NAME_CLASS = "class"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * identifies the name of the current test. This is sent with any status message * describing a specific test being started or completed. 
*/ public static final String REPORT_KEY_NAME_TEST = "test"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * reports the run time in seconds of the current test. */ private static final String REPORT_KEY_RUN_TIME = "runtime"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * reports the number of total iterations of the current test. */ private static final String REPORT_KEY_NUM_ITERATIONS = "numiterations"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * reports the guessed suite assignment for the current test. */ private static final String REPORT_KEY_SUITE_ASSIGNMENT = "suiteassignment"; /** * If included in the status or final bundle sent to an IInstrumentationWatcher, this key * identifies the path to the generated code coverage file. */ private static final String REPORT_KEY_COVERAGE_PATH = "coverageFilePath"; /** * The test is starting. */ public static final int REPORT_VALUE_RESULT_START = 1; /** * The test completed successfully. */ public static final int REPORT_VALUE_RESULT_OK = 0; /** * The test completed with an error. */ public static final int REPORT_VALUE_RESULT_ERROR = -1; /** * The test completed with a failure. */ public static final int REPORT_VALUE_RESULT_FAILURE = -2; /** * If included in the status bundle sent to an IInstrumentationWatcher, this key * identifies a stack trace describing an error or failure. This is sent with any status * message describing a specific test being completed. 
*/ public static final String REPORT_KEY_STACK = "stack"; // Default file name for code coverage private static final String DEFAULT_COVERAGE_FILE_NAME = "coverage.ec"; private static final String LOG_TAG = "InstrumentationTestRunner"; private final Bundle mResults = new Bundle(); private Bundle mArguments; private AndroidTestRunner mTestRunner; private boolean mDebug; private boolean mJustCount; private boolean mSuiteAssignmentMode; private int mTestCount; private String mPackageOfTests; private boolean mCoverage; private String mCoverageFilePath; private int mDelayMsec; @Override public void onCreate(Bundle arguments) { super.onCreate(arguments); mArguments = arguments; // Apk paths used to search for test classes when using TestSuiteBuilders. String[] apkPaths = {getTargetContext().getPackageCodePath(), getContext().getPackageCodePath()}; ClassPathPackageInfoSource.setApkPaths(apkPaths); Predicate<TestMethod> testSizePredicate = null; Predicate<TestMethod> testAnnotationPredicate = null; Predicate<TestMethod> testNotAnnotationPredicate = null; String testClassesArg = null; boolean logOnly = false; if (arguments != null) { // Test class name passed as an argument should override any meta-data declaration. 
testClassesArg = arguments.getString(ARGUMENT_TEST_CLASS); mDebug = getBooleanArgument(arguments, "debug"); mJustCount = getBooleanArgument(arguments, "count"); mSuiteAssignmentMode = getBooleanArgument(arguments, "suiteAssignment"); mPackageOfTests = arguments.getString(ARGUMENT_TEST_PACKAGE); testSizePredicate = getSizePredicateFromArg( arguments.getString(ARGUMENT_TEST_SIZE_PREDICATE)); testAnnotationPredicate = getAnnotationPredicate( arguments.getString(ARGUMENT_ANNOTATION)); testNotAnnotationPredicate = getNotAnnotationPredicate( arguments.getString(ARGUMENT_NOT_ANNOTATION)); logOnly = getBooleanArgument(arguments, ARGUMENT_LOG_ONLY); mCoverage = getBooleanArgument(arguments, "coverage"); mCoverageFilePath = arguments.getString("coverageFile"); try { Object delay = arguments.get(ARGUMENT_DELAY_MSEC); // Accept either string or int if (delay != null) mDelayMsec = Integer.parseInt(delay.toString()); } catch (NumberFormatException e) { Log.e(LOG_TAG, "Invalid delay_msec parameter", e); } } TestSuiteBuilder testSuiteBuilder = new TestSuiteBuilder(getClass().getName(), getTargetContext().getClassLoader()); if (testSizePredicate != null) { testSuiteBuilder.addRequirements(testSizePredicate); } if (testAnnotationPredicate != null) { testSuiteBuilder.addRequirements(testAnnotationPredicate); } if (testNotAnnotationPredicate != null) { testSuiteBuilder.addRequirements(testNotAnnotationPredicate); } if (testClassesArg == null) { if (mPackageOfTests != null) { testSuiteBuilder.includePackages(mPackageOfTests); } else { TestSuite testSuite = getTestSuite(); if (testSuite != null) { testSuiteBuilder.addTestSuite(testSuite); } else { // no package or class bundle arguments were supplied, and no test suite // provided so add all tests in application testSuiteBuilder.includePackages(""); } } } else { parseTestClasses(testClassesArg, testSuiteBuilder); } testSuiteBuilder.addRequirements(getBuilderRequirements()); mTestRunner = getAndroidTestRunner(); 
mTestRunner.setContext(getTargetContext()); mTestRunner.setInstrumentation(this); mTestRunner.setSkipExecution(logOnly); mTestRunner.setTest(testSuiteBuilder.build()); mTestCount = mTestRunner.getTestCases().size(); if (mSuiteAssignmentMode) { mTestRunner.addTestListener(new SuiteAssignmentPrinter()); } else { WatcherResultPrinter resultPrinter = new WatcherResultPrinter(mTestCount); mTestRunner.addTestListener(new TestPrinter("TestRunner", false)); mTestRunner.addTestListener(resultPrinter); mTestRunner.setPerformanceResultsWriter(resultPrinter); } start(); } /** * Get the arguments passed to this instrumentation. * * @return the Bundle object */ public Bundle getArguments() { return mArguments; } /** * Add a {@link TestListener} * @hide */ protected void addTestListener(TestListener listener){ if(mTestRunner!=null && listener!=null){ mTestRunner.addTestListener(listener); } } List<Predicate<TestMethod>> getBuilderRequirements() { return new ArrayList<Predicate<TestMethod>>(); } /** * Parses and loads the specified set of test classes * * @param testClassArg - comma-separated list of test classes and methods * @param testSuiteBuilder - builder to add tests to */ private void parseTestClasses(String testClassArg, TestSuiteBuilder testSuiteBuilder) { String[] testClasses = testClassArg.split(","); for (String testClass : testClasses) { parseTestClass(testClass, testSuiteBuilder); } } /** * Parse and load the given test class and, optionally, method * * @param testClassName - full package name of test class and optionally method to add. 
* Expected format: com.android.TestClass#testMethod * @param testSuiteBuilder - builder to add tests to */ private void parseTestClass(String testClassName, TestSuiteBuilder testSuiteBuilder) { int methodSeparatorIndex = testClassName.indexOf('#'); String testMethodName = null; if (methodSeparatorIndex > 0) { testMethodName = testClassName.substring(methodSeparatorIndex + 1); testClassName = testClassName.substring(0, methodSeparatorIndex); } testSuiteBuilder.addTestClassByName(testClassName, testMethodName, getTargetContext()); } protected AndroidTestRunner getAndroidTestRunner() { return new AndroidTestRunner(); } private boolean getBooleanArgument(Bundle arguments, String tag) { String tagString = arguments.getString(tag); return tagString != null && Boolean.parseBoolean(tagString); } /* * Returns the size predicate object, corresponding to the "size" argument value. */ private Predicate<TestMethod> getSizePredicateFromArg(String sizeArg) { if (SMALL_SUITE.equals(sizeArg)) { return TestPredicates.SELECT_SMALL; } else if (MEDIUM_SUITE.equals(sizeArg)) { return TestPredicates.SELECT_MEDIUM; } else if (LARGE_SUITE.equals(sizeArg)) { return TestPredicates.SELECT_LARGE; } else { return null; } } /** * Returns the test predicate object, corresponding to the annotation class value provided via * the {@link ARGUMENT_ANNOTATION} argument. * * @return the predicate or <code>null</code> */ private Predicate<TestMethod> getAnnotationPredicate(String annotationClassName) { Class<? extends Annotation> annotationClass = getAnnotationClass(annotationClassName); if (annotationClass != null) { return new HasAnnotation(annotationClass); } return null; } /** * Returns the negative test predicate object, corresponding to the annotation class value * provided via the {@link ARGUMENT_NOT_ANNOTATION} argument. * * @return the predicate or <code>null</code> */ private Predicate<TestMethod> getNotAnnotationPredicate(String annotationClassName) { Class<? 
extends Annotation> annotationClass = getAnnotationClass(annotationClassName); if (annotationClass != null) { return Predicates.not(new HasAnnotation(annotationClass)); } return null; } /** * Helper method to return the annotation class with specified name * * @param annotationClassName the fully qualified name of the class * @return the annotation class or <code>null</code> */ private Class<? extends Annotation> getAnnotationClass(String annotationClassName) { if (annotationClassName == null) { return null; } try { Class<?> annotationClass = Class.forName(annotationClassName); if (annotationClass.isAnnotation()) { return (Class<? extends Annotation>)annotationClass; } else { Log.e(LOG_TAG, String.format("Provided annotation value %s is not an Annotation", annotationClassName)); } } catch (ClassNotFoundException e) { Log.e(LOG_TAG, String.format("Could not find class for specified annotation %s", annotationClassName)); } return null; } /** * Initialize the current thread as a looper. * <p/> * Exposed for unit testing. 
*/ void prepareLooper() { Looper.prepare(); } @Override public void onStart() { prepareLooper(); if (mJustCount) { mResults.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID); mResults.putInt(REPORT_KEY_NUM_TOTAL, mTestCount); finish(Activity.RESULT_OK, mResults); } else { if (mDebug) { Debug.waitForDebugger(); } ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream(); PrintStream writer = new PrintStream(byteArrayOutputStream); try { StringResultPrinter resultPrinter = new StringResultPrinter(writer); mTestRunner.addTestListener(resultPrinter); long startTime = System.currentTimeMillis(); mTestRunner.runTest(); long runTime = System.currentTimeMillis() - startTime; resultPrinter.printResult(mTestRunner.getTestResult(), runTime); } catch (Throwable t) { // catch all exceptions so a more verbose error message can be outputted writer.println(String.format("Test run aborted due to unexpected exception: %s", t.getMessage())); t.printStackTrace(writer); } finally { mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, String.format("\nTest results for %s=%s", mTestRunner.getTestClassName(), byteArrayOutputStream.toString())); if (mCoverage) { generateCoverageReport(); } writer.close(); finish(Activity.RESULT_OK, mResults); } } } public TestSuite getTestSuite() { return getAllTests(); } /** * Override this to define all of the tests to run in your package. */ public TestSuite getAllTests() { return null; } /** * Override this to provide access to the class loader of your package. 
*/ public ClassLoader getLoader() { return null; }

    // Dumps EMMA code-coverage data to a file and reports its location via mResults.
    // EMMA is invoked reflectively so this class never statically links against the emma jar.
    private void generateCoverageReport() {
        // use reflection to call emma dump coverage method, to avoid
        // always statically compiling against emma jar
        String coverageFilePath = getCoverageFilePath();
        java.io.File coverageFile = new java.io.File(coverageFilePath);
        try {
            Class<?> emmaRTClass = Class.forName("com.vladium.emma.rt.RT");
            Method dumpCoverageMethod = emmaRTClass.getMethod("dumpCoverageData",
                    coverageFile.getClass(), boolean.class, boolean.class);
            dumpCoverageMethod.invoke(null, coverageFile, false, false);
            // output path to generated coverage file so it can be parsed by a test harness if
            // needed
            mResults.putString(REPORT_KEY_COVERAGE_PATH, coverageFilePath);
            // also output a more user friendly msg
            final String currentStream = mResults.getString(
                    Instrumentation.REPORT_KEY_STREAMRESULT);
            mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("%s\nGenerated code coverage data to %s", currentStream,
                            coverageFilePath));
        } catch (ClassNotFoundException e) {
            reportEmmaError("Is emma jar on classpath?", e);
        } catch (SecurityException e) {
            reportEmmaError(e);
        } catch (NoSuchMethodException e) {
            reportEmmaError(e);
        } catch (IllegalArgumentException e) {
            reportEmmaError(e);
        } catch (IllegalAccessException e) {
            reportEmmaError(e);
        } catch (InvocationTargetException e) {
            reportEmmaError(e);
        }
    }

    // Returns the coverage output path: the explicitly configured path if set,
    // otherwise a default file in the target context's files directory.
    private String getCoverageFilePath() {
        if (mCoverageFilePath == null) {
            return getTargetContext().getFilesDir().getAbsolutePath() + File.separator
                    + DEFAULT_COVERAGE_FILE_NAME;
        } else {
            return mCoverageFilePath;
        }
    }

    // Convenience overload: report an EMMA failure with no extra hint text.
    private void reportEmmaError(Exception e) {
        reportEmmaError("", e);
    }

    // Logs an EMMA coverage failure and appends an error line to the result stream.
    private void reportEmmaError(String hint, Exception e) {
        String msg = "Failed to generate emma coverage. " + hint;
        Log.e(LOG_TAG, msg, e);
        mResults.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "\nError: " + msg);
    }

    // TODO kill this, use status() and prettyprint model for better output
    // Minimal ResultPrinter that emits only the header and footer (no per-test detail).
    private class StringResultPrinter extends ResultPrinter {

        public StringResultPrinter(PrintStream writer) {
            super(writer);
        }

        public synchronized void printResult(TestResult result, long runTime) {
            printHeader(runTime);
            printFooter(result);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher about
     * which suite each test belongs to, based on the test's measured run time.
     */
    private class SuiteAssignmentPrinter implements TestListener {

        private Bundle mTestResult;
        private long mStartTime;
        private long mEndTime;
        // False once an error/failure is seen, since a failed run's time is meaningless.
        private boolean mTimingValid;

        public SuiteAssignmentPrinter() {
        }

        /**
         * send a status for the start of a each test, so long tests can be seen as "running"
         */
        public void startTest(Test test) {
            mTimingValid = true;
            mStartTime = System.currentTimeMillis();
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTimingValid = false;
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         *
         * Classifies the finished test into a suite bucket by elapsed wall-clock time
         * and sends the assignment back through sendStatus().
         */
        public void endTest(Test test) {
            float runTime;
            String assignmentSuite;
            mEndTime = System.currentTimeMillis();
            mTestResult = new Bundle();

            if (!mTimingValid || mStartTime < 0) {
                assignmentSuite = "NA";
                runTime = -1;
            } else {
                runTime = mEndTime - mStartTime;
                // InstrumentationTestCase subclasses are excluded from the small suite
                // regardless of speed.
                if (runTime < SMALL_SUITE_MAX_RUNTIME
                        && !InstrumentationTestCase.class.isAssignableFrom(test.getClass())) {
                    assignmentSuite = SMALL_SUITE;
                } else if (runTime < MEDIUM_SUITE_MAX_RUNTIME) {
                    assignmentSuite = MEDIUM_SUITE;
                } else {
                    assignmentSuite = LARGE_SUITE;
                }
            }
            // Clear mStartTime so that we can verify that it gets set next time.
            mStartTime = -1;

            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    test.getClass().getName() + "#" + ((TestCase) test).getName()
                            + "\nin " + assignmentSuite + " suite\nrunTime: "
                            + String.valueOf(runTime) + "\n");
            mTestResult.putFloat(REPORT_KEY_RUN_TIME, runTime);
            mTestResult.putString(REPORT_KEY_SUITE_ASSIGNMENT, assignmentSuite);

            sendStatus(0, mTestResult);
        }
    }

    /**
     * This class sends status reports back to the IInstrumentationWatcher
     */
    private class WatcherResultPrinter implements TestListener, PerformanceResultsWriter {

        // Template bundle cloned for every test result; holds the run-wide fields.
        private final Bundle mResultTemplate;
        Bundle mTestResult;
        int mTestNum = 0;
        int mTestResultCode = 0;
        String mTestClass = null;
        PerformanceCollector mPerfCollector = new PerformanceCollector();
        boolean mIsTimedTest = false;
        boolean mIncludeDetailedStats = false;

        public WatcherResultPrinter(int numTests) {
            mResultTemplate = new Bundle();
            mResultTemplate.putString(Instrumentation.REPORT_KEY_IDENTIFIER, REPORT_VALUE_ID);
            mResultTemplate.putInt(REPORT_KEY_NUM_TOTAL, numTests);
        }

        /**
         * send a status for the start of a each test, so long tests can be seen
         * as "running"
         */
        public void startTest(Test test) {
            String testClass = test.getClass().getName();
            String testName = ((TestCase) test).getName();
            mTestResult = new Bundle(mResultTemplate);
            mTestResult.putString(REPORT_KEY_NAME_CLASS, testClass);
            mTestResult.putString(REPORT_KEY_NAME_TEST, testName);
            mTestResult.putInt(REPORT_KEY_NUM_CURRENT, ++mTestNum);
            // pretty printing: emit the class header only when the class changes
            if (testClass != null && !testClass.equals(mTestClass)) {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                        String.format("\n%s:", testClass));
                mTestClass = testClass;
            } else {
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, "");
            }

            Method testMethod = null;
            try {
                testMethod = test.getClass().getMethod(testName);
                // Report total number of iterations, if test is repetitive
                if (testMethod.isAnnotationPresent(RepetitiveTest.class)) {
                    int numIterations = testMethod.getAnnotation(
                            RepetitiveTest.class).numIterations();
                    mTestResult.putInt(REPORT_KEY_NUM_ITERATIONS, numIterations);
                }
            } catch (NoSuchMethodException e) {
                // ignore- the test with given name does not exist. Will be handled during test
                // execution
            }

            // The delay_msec parameter is normally used to provide buffers of idle time
            // for power measurement purposes. To make sure there is a delay before and after
            // every test in a suite, we delay *after* every test (see endTest below) and also
            // delay *before* the first test. So, delay test1 delay test2 delay.
            try {
                if (mTestNum == 1) Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }

            sendStatus(REPORT_VALUE_RESULT_START, mTestResult);
            mTestResultCode = 0;

            mIsTimedTest = false;
            mIncludeDetailedStats = false;
            try {
                // Look for TimedTest annotation on both test class and test method
                // (method-level annotation wins).
                if (testMethod != null && testMethod.isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = testMethod.getAnnotation(
                            TimedTest.class).includeDetailedStats();
                } else if (test.getClass().isAnnotationPresent(TimedTest.class)) {
                    mIsTimedTest = true;
                    mIncludeDetailedStats = test.getClass().getAnnotation(
                            TimedTest.class).includeDetailedStats();
                }
            } catch (SecurityException e) {
                // ignore - the test with given name cannot be accessed. Will be handled during
                // test execution
            }

            if (mIsTimedTest && mIncludeDetailedStats) {
                mPerfCollector.beginSnapshot("");
            } else if (mIsTimedTest) {
                mPerfCollector.startTiming("");
            }
        }

        /**
         * @see junit.framework.TestListener#addError(Test, Throwable)
         */
        public void addError(Test test, Throwable t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_ERROR;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("\nError in %s:\n%s",
                            ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#addFailure(Test, AssertionFailedError)
         */
        public void addFailure(Test test, AssertionFailedError t) {
            mTestResult.putString(REPORT_KEY_STACK, BaseTestRunner.getFilteredTrace(t));
            mTestResultCode = REPORT_VALUE_RESULT_FAILURE;
            // pretty printing
            mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT,
                    String.format("\nFailure in %s:\n%s",
                            ((TestCase) test).getName(), BaseTestRunner.getFilteredTrace(t)));
        }

        /**
         * @see junit.framework.TestListener#endTest(Test)
         *
         * Finalizes perf collection, sends the accumulated result code/bundle,
         * then applies the configured inter-test delay.
         */
        public void endTest(Test test) {
            if (mIsTimedTest && mIncludeDetailedStats) {
                mTestResult.putAll(mPerfCollector.endSnapshot());
            } else if (mIsTimedTest) {
                writeStopTiming(mPerfCollector.stopTiming(""));
            }

            if (mTestResultCode == 0) {
                // "." marks a passing test in the pretty-printed stream.
                mTestResult.putString(Instrumentation.REPORT_KEY_STREAMRESULT, ".");
            }
            sendStatus(mTestResultCode, mTestResult);

            try { // Sleep after every test, if specified
                Thread.sleep(mDelayMsec);
            } catch (InterruptedException e) {
                throw new IllegalStateException(e);
            }
        }

        public void writeBeginSnapshot(String label) {
            // Do nothing
        }

        public void writeEndSnapshot(Bundle results) {
            // Copy all snapshot data fields into mResults, which is outputted
            // via Instrumentation.finish
            mResults.putAll(results);
        }

        public void writeStartTiming(String label) {
            // Do nothing
        }

        public void writeStopTiming(Bundle results) {
            // Copy results into mTestResult by flattening list of iterations,
            // which is outputted via WatcherResultPrinter.endTest
            int i = 0;
            for (Parcelable p :
                    results.getParcelableArrayList(PerformanceCollector.METRIC_KEY_ITERATIONS)) {
                Bundle iteration = (Bundle) p;
                // Each iteration's metrics are prefixed "iterationN." in the flat bundle.
                String index = "iteration" + i + ".";
                mTestResult.putString(index + PerformanceCollector.METRIC_KEY_LABEL,
                        iteration.getString(PerformanceCollector.METRIC_KEY_LABEL));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_CPU_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_CPU_TIME));
                mTestResult.putLong(index + PerformanceCollector.METRIC_KEY_EXECUTION_TIME,
                        iteration.getLong(PerformanceCollector.METRIC_KEY_EXECUTION_TIME));
                i++;
            }
        }

        public void writeMeasurement(String label, long value) {
            mTestResult.putLong(label, value);
        }

        public void writeMeasurement(String label, float value) {
            mTestResult.putFloat(label, value);
        }

        public void writeMeasurement(String label, String value) {
            mTestResult.putString(label, value);
        }

        // TODO report the end of the cycle
    }
}
/*
 * Copyright 2021 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.dialogflow.cx.v3;

import static com.google.cloud.dialogflow.cx.v3.ChangelogsClient.ListChangelogsPagedResponse;

import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.common.collect.Lists;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

// Generated unit tests for ChangelogsClient: each RPC is exercised against an
// in-process mock gRPC service, checking both the happy path (response and
// outgoing request contents) and the error path (INVALID_ARGUMENT propagation).
@Generated("by gapic-generator-java")
public class ChangelogsClientTest {

  private static MockChangelogs mockChangelogs;
  private static MockServiceHelper mockServiceHelper;
  private LocalChannelProvider channelProvider;
  private ChangelogsClient client;

  // Starts one mock gRPC server shared by all tests in this class.
  @BeforeClass
  public static void startStaticServer() {
    mockChangelogs = new MockChangelogs();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockChangelogs));
    mockServiceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  // Resets mock state and builds a fresh client wired to the local channel
  // with no credentials, so each test starts clean.
  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    ChangelogsSettings settings =
        ChangelogsSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = ChangelogsClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  // listChangelogs with a typed AgentName parent: verifies the paged response
  // content and that the request carried the formatted parent resource name.
  @Test
  public void listChangelogsTest() throws Exception {
    Changelog responsesElement = Changelog.newBuilder().build();
    ListChangelogsResponse expectedResponse =
        ListChangelogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllChangelogs(Arrays.asList(responsesElement))
            .build();
    mockChangelogs.addResponse(expectedResponse);

    AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");

    ListChangelogsPagedResponse pagedListResponse = client.listChangelogs(parent);

    List<Changelog> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getChangelogsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockChangelogs.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListChangelogsRequest actualRequest = ((ListChangelogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // listChangelogs error path: a server-side INVALID_ARGUMENT surfaces as
  // InvalidArgumentException on the client.
  @Test
  public void listChangelogsExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockChangelogs.addException(exception);

    try {
      AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
      client.listChangelogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // listChangelogs with a plain String parent (untyped overload).
  @Test
  public void listChangelogsTest2() throws Exception {
    Changelog responsesElement = Changelog.newBuilder().build();
    ListChangelogsResponse expectedResponse =
        ListChangelogsResponse.newBuilder()
            .setNextPageToken("")
            .addAllChangelogs(Arrays.asList(responsesElement))
            .build();
    mockChangelogs.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListChangelogsPagedResponse pagedListResponse = client.listChangelogs(parent);

    List<Changelog> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getChangelogsList().get(0), resources.get(0));

    List<AbstractMessage> actualRequests = mockChangelogs.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListChangelogsRequest actualRequest = ((ListChangelogsRequest) actualRequests.get(0));

    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // listChangelogs error path for the String-parent overload.
  @Test
  public void listChangelogsExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockChangelogs.addException(exception);

    try {
      String parent = "parent-995424086";
      client.listChangelogs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // getChangelog with a typed ChangelogName: verifies the echoed response and
  // that the request's name field used the formatted resource name.
  @Test
  public void getChangelogTest() throws Exception {
    Changelog expectedResponse =
        Changelog.newBuilder()
            .setName(
                ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString())
            .setUserEmail("userEmail315299473")
            .setDisplayName("displayName1714148973")
            .setAction("action-1422950858")
            .setType("type3575610")
            .setResource("resource-341064690")
            .setCreateTime(Timestamp.newBuilder().build())
            .build();
    mockChangelogs.addResponse(expectedResponse);

    ChangelogName name = ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]");

    Changelog actualResponse = client.getChangelog(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockChangelogs.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetChangelogRequest actualRequest = ((GetChangelogRequest) actualRequests.get(0));

    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // getChangelog error path: INVALID_ARGUMENT maps to InvalidArgumentException.
  @Test
  public void getChangelogExceptionTest() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockChangelogs.addException(exception);

    try {
      ChangelogName name = ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]");
      client.getChangelog(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // getChangelog with a plain String name (untyped overload).
  @Test
  public void getChangelogTest2() throws Exception {
    Changelog expectedResponse =
        Changelog.newBuilder()
            .setName(
                ChangelogName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[CHANGELOG]").toString())
            .setUserEmail("userEmail315299473")
            .setDisplayName("displayName1714148973")
            .setAction("action-1422950858")
            .setType("type3575610")
            .setResource("resource-341064690")
            .setCreateTime(Timestamp.newBuilder().build())
            .build();
    mockChangelogs.addResponse(expectedResponse);

    String name = "name3373707";

    Changelog actualResponse = client.getChangelog(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    List<AbstractMessage> actualRequests = mockChangelogs.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetChangelogRequest actualRequest = ((GetChangelogRequest) actualRequests.get(0));

    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // getChangelog error path for the String-name overload.
  @Test
  public void getChangelogExceptionTest2() throws Exception {
    StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
    mockChangelogs.addException(exception);

    try {
      String name = "name3373707";
      client.getChangelog(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
/*
 * Copyright 2019 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.engine.module;

import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.assets.Asset;
import org.terasology.config.Config;
import org.terasology.config.SystemConfig;
import org.terasology.engine.TerasologyConstants;
import org.terasology.engine.paths.PathManager;
import org.terasology.module.ClasspathModule;
import org.terasology.module.DependencyInfo;
import org.terasology.module.Module;
import org.terasology.module.ModuleEnvironment;
import org.terasology.module.ModuleLoader;
import org.terasology.module.ModuleMetadata;
import org.terasology.module.ModuleMetadataJsonAdapter;
import org.terasology.module.ModulePathScanner;
import org.terasology.module.ModuleRegistry;
import org.terasology.module.TableModuleRegistry;
import org.terasology.module.sandbox.APIScanner;
import org.terasology.module.sandbox.BytecodeInjector;
import org.terasology.module.sandbox.ModuleSecurityManager;
import org.terasology.module.sandbox.ModuleSecurityPolicy;
import org.terasology.module.sandbox.PermissionProviderFactory;
import org.terasology.module.sandbox.StandardPermissionProviderFactory;
import org.terasology.module.sandbox.WarnOnlyProviderFactory;
import org.terasology.naming.Name;
import org.terasology.nui.UIWidget;
import org.terasology.reflection.TypeRegistry;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.Reader;
import java.lang.reflect.ReflectPermission;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.Policy;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

/**
 * Default {@link ModuleManager} implementation: builds the module registry from the engine
 * module, the module directories on disk and any module jars found on the classpath, then
 * configures the module sandbox and loads the initial module environment.
 */
public class ModuleManagerImpl implements ModuleManager {
    private static final Logger logger = LoggerFactory.getLogger(ModuleManagerImpl.class);
    private StandardPermissionProviderFactory permissionProviderFactory = new StandardPermissionProviderFactory();
    // Wrapper that only warns (instead of denying) on permission violations; used when
    // permissive security is enabled via system property.
    private PermissionProviderFactory wrappingPermissionProviderFactory =
            new WarnOnlyProviderFactory(permissionProviderFactory);

    private ModuleRegistry registry;
    private ModuleEnvironment environment;
    private ModuleMetadataJsonAdapter metadataReader;
    private ModuleInstallManager installManager;

    public ModuleManagerImpl(String masterServerAddress) {
        this(masterServerAddress, Collections.emptyList());
    }

    /**
     * @param masterServerAddress             address of the master server used for module installs
     * @param classesOnClasspathsToAddToEngine extra classes whose containing classpath entries
     *                                         (e.g. facade jars) are granted to the engine module
     */
    public ModuleManagerImpl(String masterServerAddress, List<Class<?>> classesOnClasspathsToAddToEngine) {
        metadataReader = new ModuleMetadataJsonAdapter();
        for (ModuleExtension ext : StandardModuleExtension.values()) {
            metadataReader.registerExtension(ext.getKey(), ext.getValueType());
        }
        for (ModuleExtension ext : ExtraDataModuleExtension.values()) {
            metadataReader.registerExtension(ext.getKey(), ext.getValueType());
        }

        Module engineModule;
        // Fail fast with a clear message if the engine metadata resource is missing;
        // otherwise a bare NPE would surface from InputStreamReader.
        InputStream metadataStream = getClass().getResourceAsStream("/engine-module.txt");
        if (metadataStream == null) {
            throw new RuntimeException("Failed to locate /engine-module.txt on the classpath");
        }
        try (Reader reader = new InputStreamReader(metadataStream, TerasologyConstants.CHARSET)) {
            ModuleMetadata metadata = metadataReader.read(reader);
            List<Class<?>> additionalClassesList = new ArrayList<>(classesOnClasspathsToAddToEngine.size() + 2);
            additionalClassesList.add(Module.class); // provide access to gestalt-module.jar
            additionalClassesList.add(Asset.class); // provide access to gestalt-asset-core.jar
            additionalClassesList.add(UIWidget.class); // provide access to nui.jar
            additionalClassesList.add(TypeRegistry.class); // provide access to nui-reflect.jar
            additionalClassesList.addAll(classesOnClasspathsToAddToEngine); // provide access to any facade-provided classes
            Class<?>[] additionalClassesArray = new Class[additionalClassesList.size()];
            additionalClassesArray = additionalClassesList.toArray(additionalClassesArray);
            engineModule = ClasspathModule.create(metadata, getClass(), additionalClassesArray);
        } catch (IOException e) {
            throw new RuntimeException("Failed to read engine metadata", e);
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to convert engine library location to path", e);
        }

        registry = new TableModuleRegistry();
        registry.add(engineModule);

        loadModulesFromClassPath();

        ModulePathScanner scanner = new ModulePathScanner(new ModuleLoader(metadataReader));
        scanner.getModuleLoader().setModuleInfoPath(TerasologyConstants.MODULE_INFO_FILENAME);
        scanner.scan(registry, PathManager.getInstance().getModulePaths());

        // Every non-engine module implicitly depends on the exact engine version
        // (up to the next patch release).
        DependencyInfo engineDep = new DependencyInfo();
        engineDep.setId(engineModule.getId());
        engineDep.setMinVersion(engineModule.getVersion());
        engineDep.setMaxVersion(engineModule.getVersion().getNextPatchVersion());

        registry.stream().filter(mod -> mod != engineModule).forEach(mod -> mod.getMetadata().getDependencies().add(engineDep));

        setupSandbox();
        loadEnvironment(Sets.newHashSet(engineModule), true);
        installManager = new ModuleInstallManager(this, masterServerAddress);
    }

    public ModuleManagerImpl(Config config) {
        this(config, Collections.emptyList());
    }

    public ModuleManagerImpl(Config config, List<Class<?>> classesOnClasspathsToAddToEngine) {
        this(config.getNetwork().getMasterServer(), classesOnClasspathsToAddToEngine);
    }

    /**
     * Overrides modules in modules/ with those specified via -classpath in the JVM
     */
    private void loadModulesFromClassPath() {
        ClassLoader classLoader = ClassLoader.getSystemClassLoader();
        ModuleLoader loader = new ModuleLoader(metadataReader);
        Enumeration<URL> moduleInfosInClassPath;
        loader.setModuleInfoPath(TerasologyConstants.MODULE_INFO_FILENAME);

        // We're looking for jars on the classpath with a module.txt
        try {
            moduleInfosInClassPath = classLoader.getResources(TerasologyConstants.MODULE_INFO_FILENAME.toString());
        } catch (IOException e) {
            // Pass the exception as the last argument (no placeholder) so SLF4J logs
            // the full stack trace rather than just e.toString().
            logger.warn("Failed to search for classpath modules", e);
            return;
        }

        for (URL url : Collections.list(moduleInfosInClassPath)) {
            if (!url.getProtocol().equalsIgnoreCase("jar")) {
                continue;
            }

            try (Reader reader = new InputStreamReader(url.openStream(), TerasologyConstants.CHARSET)) {
                ModuleMetadata metaData = metadataReader.read(reader);
                String displayName = metaData.getDisplayName().toString();
                Name id = metaData.getId();

                // if the display name is empty or the id is null, this probably isn't a Terasology module
                if (null == id || displayName.equalsIgnoreCase("")) {
                    logger.warn("Found a module-like JAR on the class path with no id or display name. Skipping");
                    logger.warn("{}", url);
                    continue;
                }

                logger.info("Loading module {} from class path at {}", displayName, url.getFile());

                // the url contains a protocol, and points to the module.txt
                // we need to trim both of those away to get the module's path
                String targetFile = url.getFile()
                        .replace("file:", "")
                        .replace("!/" + TerasologyConstants.MODULE_INFO_FILENAME, "")
                        .replace("/" + TerasologyConstants.MODULE_INFO_FILENAME, "");

                // Windows specific check - Path doesn't like /C:/... style Strings indicating files
                if (targetFile.matches("/[a-zA-Z]:.*")) {
                    targetFile = targetFile.substring(1);
                }

                Path path = Paths.get(targetFile);

                Module module = loader.load(path);
                registry.add(module);
            } catch (IOException e) {
                // Include the exception so the cause of the load failure is visible in the log.
                logger.warn("Failed to load module.txt for classpath module {}", url, e);
            }
        }
    }

    /**
     * Grants whitelisted API classes/packages and gson reflection permissions to modules,
     * then installs the module security policy and manager.
     */
    private void setupSandbox() {
        ExternalApiWhitelist.CLASSES.forEach(clazz ->
                permissionProviderFactory.getBasePermissionSet().addAPIClass(clazz));
        ExternalApiWhitelist.PACKAGES.forEach(pkg ->
                permissionProviderFactory.getBasePermissionSet().addAPIPackage(pkg));

        APIScanner apiScanner = new APIScanner(permissionProviderFactory);
        registry.stream().filter(Module::isOnClasspath).forEach(apiScanner::scan);

        permissionProviderFactory.getBasePermissionSet().grantPermission("com.google.gson", ReflectPermission.class);
        permissionProviderFactory.getBasePermissionSet().grantPermission("com.google.gson.internal", ReflectPermission.class);

        Policy.setPolicy(new ModuleSecurityPolicy());
        System.setSecurityManager(new ModuleSecurityManager());
    }

    @Override
    public ModuleRegistry getRegistry() {
        return registry;
    }

    @Override
    public ModuleInstallManager getInstallManager() {
        return installManager;
    }

    @Override
    public ModuleEnvironment getEnvironment() {
        return environment;
    }

    /**
     * Builds a new environment from the given modules plus every classpath module.
     *
     * @param modules   modules to include
     * @param asPrimary whether the new environment becomes the manager's active one
     * @return the newly created environment
     */
    @Override
    public ModuleEnvironment loadEnvironment(Set<Module> modules, boolean asPrimary) {
        Set<Module> finalModules = Sets.newLinkedHashSet(modules);
        finalModules.addAll(registry.stream().filter(Module::isOnClasspath).collect(Collectors.toList()));
        ModuleEnvironment newEnvironment;
        boolean permissiveSecurityEnabled = Boolean.parseBoolean(System.getProperty(SystemConfig.PERMISSIVE_SECURITY_ENABLED_PROPERTY));
        if (permissiveSecurityEnabled) {
            newEnvironment = new ModuleEnvironment(finalModules, wrappingPermissionProviderFactory, Collections.<BytecodeInjector>emptyList());
        } else {
            newEnvironment = new ModuleEnvironment(finalModules, permissionProviderFactory, Collections.<BytecodeInjector>emptyList());
        }
        if (asPrimary) {
            environment = newEnvironment;
        }
        return newEnvironment;
    }

    @Override
    public ModuleMetadataJsonAdapter getModuleMetadataReader() {
        return metadataReader;
    }
}
package com.artificial.developmentkit;

import com.artificial.cachereader.fs.CacheSystem;
import com.artificial.cachereader.wrappers.Dynamic;
import com.artificial.cachereader.wrappers.Script;
import com.artificial.cachereader.wrappers.Wrapper;
import com.artificial.cachereader.wrappers.WrapperLoader;

import javax.swing.*;
import javax.swing.event.MenuEvent;
import javax.swing.event.MenuListener;
import javax.swing.table.DefaultTableModel;
import java.awt.*;
import java.awt.event.KeyAdapter;
import java.awt.event.KeyEvent;
import java.util.*;
import java.util.List;

/**
 * Swing browser for cache definitions: pick a loader type, filter the loaded
 * definitions by name, inspect a definition's fields, and (for dynamic objects)
 * open a frame listing its children and the varp/varpbit controlling them.
 */
class DevelopmentKit extends JFrame {
    private final CacheSystem cache;
    private final TypeLoader[] typeLoaders;
    // Definitions and their "name (id)" display strings for the currently selected loader.
    // NOTE(review): static but mutated per instance — fine while only one window exists.
    private static final List<Wrapper<?>> CACHED_DEFINITIONS = new ArrayList<>();
    private static final List<String> CACHED_DISPLAY_NAMES = new ArrayList<>();
    // Upper bound on definition ids probed when (re)loading a type.
    private static final int LOAD_LIMIT = 175000;
    private final LoadedChildrenFrame loadedChildrenFrame = new LoadedChildrenFrame();
    private ComponentExplorer componentExplorer = null;  // lazily created on first menu use
    private TypeLoader currentLoader = null;
    private Wrapper<?> selectedDefinition = null;

    DevelopmentKit(final CacheSystem cache, final TypeLoader[] typeLoaders) {
        this.cache = cache;
        this.typeLoaders = typeLoaders;
        initMenuBar();
        initComponents();
        /*initial load*/
        currentLoader = typeLoaders[0];
        loadDefinitions(currentLoader);
    }

    // Adds the "Component Explorer" menu; selecting it lazily creates and shows the explorer.
    private void initMenuBar() {
        final JMenuBar menuBar = new JMenuBar();
        final JMenu explorerItem = new JMenu("Component Explorer");
        explorerItem.addMenuListener(new MenuListener() {
            @Override
            public void menuSelected(MenuEvent e) {
                if (componentExplorer == null) {
                    componentExplorer = new ComponentExplorer(cache);
                }
                componentExplorer.setVisible(true);
                componentExplorer.requestFocus();
            }

            @Override
            public void menuDeselected(MenuEvent e) {
            }

            @Override
            public void menuCanceled(MenuEvent e) {
            }
        });
        explorerItem.setPopupMenuVisible(false);
        menuBar.add(explorerItem);
        this.setJMenuBar(menuBar);
    }

    // List selection handler: parses the id out of the trailing "(id)" in the
    // display string and loads that definition's attributes.
    private void filteredObjectsListValueChanged() {
        final String selected = filteredObjectsList.getSelectedValue();
        if (selected != null) {
            final int id = Integer.parseInt(selected.substring(selected.lastIndexOf("(") + 1, selected.lastIndexOf(")")));
            loadAttributes(id);
        }
    }

    // Populates the attributes table with the declared fields of the definition with
    // the given id, and enables the children button for dynamic definitions.
    // NOTE(review): if the id is not found but a previous selection exists,
    // selectedDefinition stays stale and its fields are re-shown — confirm intended.
    private void loadAttributes(final int id) {
        for (final Wrapper<?> def : CACHED_DEFINITIONS) {
            if (def.id() == id) {
                selectedDefinition = def;
                break;
            }
        }
        if (selectedDefinition == null) {
            return;
        }
        final List<Object[]> properties = new LinkedList<>();
        for (final Map.Entry<String, Object> entry : selectedDefinition.getDeclaredFields().entrySet()) {
            properties.add(new Object[]{entry.getKey(), entry.getValue()});
        }
        attributesTable.setModel(new DefaultTableModel(properties.toArray(new Object[properties.size()][2]), new Object[]{"Field", "Value"}));
        showChildrenButton.setEnabled(selectedDefinition instanceof Dynamic && ((Dynamic) selectedDefinition).dynamic());
    }

    private void loaderComboActionPerformed() {
        final TypeLoader selected = (TypeLoader) loaderCombo.getSelectedItem();
        /*to prevent loading the same definitions again*/
        if (selected != currentLoader) {
            loadDefinitions(selected);
            currentLoader = selected;
        }
    }

    // Rebuilds the cached definition/display lists and the list model by probing
    // every id in [0, LOAD_LIMIT) against the loader for the chosen type.
    private void loadDefinitions(final TypeLoader typeLoader) {
        CACHED_DEFINITIONS.clear();
        CACHED_DISPLAY_NAMES.clear();
        final WrapperLoader<?, ?> loader = cache.getLoader(typeLoader.getWrapperClass());
        final DefaultListModel<String> listModel = new DefaultListModel<>();
        for (int i = 0; i < LOAD_LIMIT; i++) {
            if (loader.canLoad(i)) {
                final Wrapper<?> def;
                CACHED_DEFINITIONS.add(def = loader.load(i));
                final StringBuilder builder = new StringBuilder();
                // Display "name (id)" when the definition has a name field, else just the id.
                if (def.getDeclaredFields().containsKey("name")) {
                    builder.append(def.getDeclaredFields().get("name")).append(" (").append(i).append(")");
                } else {
                    builder.append(i);
                }
                listModel.addElement(builder.toString());
                CACHED_DISPLAY_NAMES.add(builder.toString());
            }
        }
        filteredObjectsList.setModel(listModel);
    }

    // Re-filters the visible list on every keystroke in the filter field.
    private void filterTextFieldKeyReleased() {
        filteredObjectsList.clearSelection();
        final DefaultListModel<String> listModel = new DefaultListModel<>();
        final List<String> arr = filterObjects(filterTextField.getText());
        arr.forEach(listModel::addElement);
        filteredObjectsList.setModel(listModel);
    }

    // Case-insensitive substring match over the cached display names.
    private List<String> filterObjects(String text) {
        final List<String> list = new ArrayList<>();
        text = text.toLowerCase();
        for (final String i : CACHED_DISPLAY_NAMES) {
            if (i.toLowerCase().contains(text)) {
                list.add(i);
            }
        }
        return list;
    }

    // Shows the children frame for the selected dynamic definition: resolves each
    // child id to its name/actions and derives the varp/varpbit expression that
    // selects among the children (via the script definition when one is referenced).
    private void showChildrenButtonActionPerformed() {
        if (loadedChildrenFrame.isVisible()) {
            loadedChildrenFrame.requestFocus();
            return;
        }
        final Dynamic object = (Dynamic) selectedDefinition;
        if (object.dynamic()) {
            final WrapperLoader<?, ?> defLoader = cache.getLoader(currentLoader.getWrapperClass());
            final List<Object[]> children = new LinkedList<>();
            final int[] childrenIds = object.childrenIds();
            for (int i = 0; i < childrenIds.length; i++) {
                final int id = childrenIds[i];
                // -1 marks an empty child slot.
                if (id == -1 || !defLoader.canLoad(id)) continue;
                final Wrapper<?> def = defLoader.load(id);
                children.add(new Object[]{i, id, def.getDeclaredFields().get("name"), def.getDeclaredFields().get("actions")});
            }
            final int configId = object.configId();
            final int scriptId = object.scriptId();
            String varpString = configToString(configId);
            if (scriptId != -1) {
                final WrapperLoader<?, ?> scriptLoader = cache.getLoader(currentLoader.getScriptClass());
                if (!scriptLoader.canLoad(scriptId)) {
                    loadedChildrenFrame.loadChildren("Cache does not contain script definition for " + scriptId, Collections.emptyList());
                    return;
                }
                final Script scriptDef = (Script) scriptLoader.load(scriptId);
                final int varp = scriptDef.configId();
                final int lowerBitIndex = scriptDef.lowerBitIndex();
                final int upperBitIndex = scriptDef.upperBitIndex();
                varpString = scriptToString(varp, lowerBitIndex, upperBitIndex);
            }
            loadedChildrenFrame.loadChildren(
                    varpString,
                    children
            );
            loadedChildrenFrame.setVisible(true);
        }
    }

    // Renders a varpbit lookup expression for a script-controlled definition.
    private static String scriptToString(final int varp, final int lowerBitIndex, final int upperBitIndex) {
        return "ctx.varpbits.varpbit(" + varp + ", " + lowerBitIndex + ", 0x" + Integer.toHexString(getMask(upperBitIndex, lowerBitIndex)) + ");";
    }

    // Renders a plain varp lookup expression for a config-controlled definition.
    private static String configToString(final int varp) {
        return "ctx.varpbits.varpbit(" + varp + ");";
    }

    // Bit mask with (upperBitIndex - lowerBitIndex + 1) low bits set, i.e. covering
    // the inclusive bit range [lowerBitIndex, upperBitIndex] once shifted into place.
    private static int getMask(final int upperBitIndex, final int lowerBitIndex) {
        return (1 << (upperBitIndex - lowerBitIndex) + 1) - 1;
    }

    private void initComponents() {
        // JFormDesigner - Component initialization - DO NOT MODIFY  //GEN-BEGIN:initComponents
        panel1 = new JPanel();
        panel3 = new JPanel();
        panel4 = new JPanel();
        scrollPane1 = new JScrollPane();
        filteredObjectsList = new JList<>();
        filterTextField = new JTextField();
        panel2 = new JPanel();
        showChildrenButton = new JButton();
        loaderCombo = new JComboBox<TypeLoader>(new DefaultComboBoxModel<TypeLoader>(typeLoaders));
        panel5 = new JPanel();
        scrollPane2 = new JScrollPane();
        attributesTable = new JTable();

        //======== this ========
        setTitle("Development Kit by Artificial");
        setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE);
        setName("this");
        setAlwaysOnTop(true);
        Container contentPane = getContentPane();
        contentPane.setLayout(new BoxLayout(contentPane, BoxLayout.Y_AXIS));

        //======== panel1 ========
        {
            panel1.setName("panel1");
            panel1.setLayout(new BoxLayout(panel1, BoxLayout.X_AXIS));

            //======== panel3 ========
            {
                panel3.setName("panel3");
                panel3.setLayout(new BoxLayout(panel3, BoxLayout.Y_AXIS));

                //======== panel4 ========
                {
                    panel4.setName("panel4");
                    panel4.setLayout(new BoxLayout(panel4, BoxLayout.Y_AXIS));

                    //======== scrollPane1 ========
                    {
                        scrollPane1.setPreferredSize(new Dimension(250, 350));
                        scrollPane1.setName("scrollPane1");

                        //---- filteredObjectsList ----
                        filteredObjectsList.setName("filteredObjectsList");
                        filteredObjectsList.addListSelectionListener(e -> filteredObjectsListValueChanged());
                        scrollPane1.setViewportView(filteredObjectsList);
                    }
                    panel4.add(scrollPane1);
                }
                panel3.add(panel4);

                //---- filterTextField ----
                filterTextField.setName("filterTextField");
                filterTextField.addKeyListener(new KeyAdapter() {
                    @Override
                    public void keyReleased(KeyEvent e) {
                        filterTextFieldKeyReleased();
                    }
                });
                panel3.add(filterTextField);

                //======== panel2 ========
                {
                    panel2.setName("panel2");
                    panel2.setLayout(new BoxLayout(panel2, BoxLayout.X_AXIS));

                    //---- showChildrenButton ----
                    showChildrenButton.setText("Show Children");
                    showChildrenButton.setEnabled(false);
                    showChildrenButton.setName("showChildrenButton");
                    showChildrenButton.addActionListener(e -> showChildrenButtonActionPerformed());
                    panel2.add(showChildrenButton);

                    //---- loaderCombo ----
                    loaderCombo.setName("loaderCombo");
                    loaderCombo.addActionListener(e -> loaderComboActionPerformed());
                    panel2.add(loaderCombo);
                }
                panel3.add(panel2);
            }
            panel1.add(panel3);

            //======== panel5 ========
            {
                panel5.setName("panel5");
                panel5.setLayout(new BorderLayout());

                //======== scrollPane2 ========
                {
                    scrollPane2.setName("scrollPane2");

                    //---- attributesTable ----
                    attributesTable.setName("attributesTable");
                    scrollPane2.setViewportView(attributesTable);
                }
                panel5.add(scrollPane2, BorderLayout.CENTER);
            }
            panel1.add(panel5);
        }
        contentPane.add(panel1);
        pack();
        setLocationRelativeTo(getOwner());
        // JFormDesigner - End of component initialization  //GEN-END:initComponents
    }

    // JFormDesigner - Variables declaration - DO NOT MODIFY  //GEN-BEGIN:variables
    private JPanel panel1;
    private JPanel panel3;
    private JPanel panel4;
    private JScrollPane scrollPane1;
    private JList<String> filteredObjectsList;
    private JTextField filterTextField;
    private JPanel panel2;
    private JButton showChildrenButton;
    private JComboBox<TypeLoader> loaderCombo;
    private JPanel panel5;
    private JScrollPane scrollPane2;
    private JTable attributesTable;
    // JFormDesigner - End of variables declaration  //GEN-END:variables
}
/* * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package java.security; import java.security.spec.AlgorithmParameterSpec; import java.util.*; import java.io.*; import java.nio.ByteBuffer; import sun.security.jca.JCAUtil; /** * This class defines the <i>Service Provider Interface</i> (<b>SPI</b>) * for the {@code Signature} class, which is used to provide the * functionality of a digital signature algorithm. Digital signatures are used * for authentication and integrity assurance of digital data. *. * <p> All the abstract methods in this class must be implemented by each * cryptographic service provider who wishes to supply the implementation * of a particular signature algorithm. * * @author Benjamin Renaud * * * @see Signature */ public abstract class SignatureSpi { /** * Application-specified source of randomness. 
*/ protected SecureRandom appRandom = null; /** * Initializes this signature object with the specified * public key for verification operations. * * @param publicKey the public key of the identity whose signature is * going to be verified. * * @exception InvalidKeyException if the key is improperly * encoded, parameters are missing, and so on. */ protected abstract void engineInitVerify(PublicKey publicKey) throws InvalidKeyException; /** * Initializes this signature object with the specified * private key for signing operations. * * @param privateKey the private key of the identity whose signature * will be generated. * * @exception InvalidKeyException if the key is improperly * encoded, parameters are missing, and so on. */ protected abstract void engineInitSign(PrivateKey privateKey) throws InvalidKeyException; /** * Initializes this signature object with the specified * private key and source of randomness for signing operations. * * <p>This concrete method has been added to this previously-defined * abstract class. (For backwards compatibility, it cannot be abstract.) * * @param privateKey the private key of the identity whose signature * will be generated. * @param random the source of randomness * * @exception InvalidKeyException if the key is improperly * encoded, parameters are missing, and so on. */ protected void engineInitSign(PrivateKey privateKey, SecureRandom random) throws InvalidKeyException { this.appRandom = random; engineInitSign(privateKey); } /** * Updates the data to be signed or verified * using the specified byte. * * @param b the byte to use for the update. * * @exception SignatureException if the engine is not initialized * properly. */ protected abstract void engineUpdate(byte b) throws SignatureException; /** * Updates the data to be signed or verified, using the * specified array of bytes, starting at the specified offset. 
* * @param b the array of bytes * @param off the offset to start from in the array of bytes * @param len the number of bytes to use, starting at offset * * @exception SignatureException if the engine is not initialized * properly */ protected abstract void engineUpdate(byte[] b, int off, int len) throws SignatureException; /** * Updates the data to be signed or verified using the specified * ByteBuffer. Processes the {@code data.remaining()} bytes * starting at at {@code data.position()}. * Upon return, the buffer's position will be equal to its limit; * its limit will not have changed. * * @param input the ByteBuffer * @since 1.5 */ protected void engineUpdate(ByteBuffer input) { if (input.hasRemaining() == false) { return; } try { if (input.hasArray()) { byte[] b = input.array(); int ofs = input.arrayOffset(); int pos = input.position(); int lim = input.limit(); engineUpdate(b, ofs + pos, lim - pos); input.position(lim); } else { int len = input.remaining(); byte[] b = new byte[JCAUtil.getTempArraySize(len)]; while (len > 0) { int chunk = Math.min(len, b.length); input.get(b, 0, chunk); engineUpdate(b, 0, chunk); len -= chunk; } } } catch (SignatureException e) { // is specified to only occur when the engine is not initialized // this case should never occur as it is caught in Signature.java throw new ProviderException("update() failed", e); } } /** * Returns the signature bytes of all the data * updated so far. * The format of the signature depends on the underlying * signature scheme. * * @return the signature bytes of the signing operation's result. * * @exception SignatureException if the engine is not * initialized properly or if this signature algorithm is unable to * process the input data provided. */ protected abstract byte[] engineSign() throws SignatureException; /** * Finishes this signature operation and stores the resulting signature * bytes in the provided buffer {@code outbuf}, starting at * {@code offset}. 
* The format of the signature depends on the underlying * signature scheme. * * <p>The signature implementation is reset to its initial state * (the state it was in after a call to one of the * {@code engineInitSign} methods) * and can be reused to generate further signatures with the same private * key. * * This method should be abstract, but we leave it concrete for * binary compatibility. Knowledgeable providers should override this * method. * * @param outbuf buffer for the signature result. * * @param offset offset into {@code outbuf} where the signature is * stored. * * @param len number of bytes within {@code outbuf} allotted for the * signature. * Both this default implementation and the SUN provider do not * return partial digests. If the value of this parameter is less * than the actual signature length, this method will throw a * SignatureException. * This parameter is ignored if its value is greater than or equal to * the actual signature length. * * @return the number of bytes placed into {@code outbuf} * * @exception SignatureException if the engine is not * initialized properly, if this signature algorithm is unable to * process the input data provided, or if {@code len} is less * than the actual signature length. * * @since 1.2 */ protected int engineSign(byte[] outbuf, int offset, int len) throws SignatureException { byte[] sig = engineSign(); if (len < sig.length) { throw new SignatureException ("partial signatures not returned"); } if (outbuf.length - offset < sig.length) { throw new SignatureException ("insufficient space in the output buffer to store the " + "signature"); } System.arraycopy(sig, 0, outbuf, offset, sig.length); return sig.length; } /** * Verifies the passed-in signature. * * @param sigBytes the signature bytes to be verified. * * @return true if the signature was verified, false if not. 
* * @exception SignatureException if the engine is not * initialized properly, the passed-in signature is improperly * encoded or of the wrong type, if this signature algorithm is unable to * process the input data provided, etc. */ protected abstract boolean engineVerify(byte[] sigBytes) throws SignatureException; /** * Verifies the passed-in signature in the specified array * of bytes, starting at the specified offset. * * <p> Note: Subclasses should overwrite the default implementation. * * * @param sigBytes the signature bytes to be verified. * @param offset the offset to start from in the array of bytes. * @param length the number of bytes to use, starting at offset. * * @return true if the signature was verified, false if not. * * @exception SignatureException if the engine is not * initialized properly, the passed-in signature is improperly * encoded or of the wrong type, if this signature algorithm is unable to * process the input data provided, etc. * @since 1.4 */ protected boolean engineVerify(byte[] sigBytes, int offset, int length) throws SignatureException { byte[] sigBytesCopy = new byte[length]; System.arraycopy(sigBytes, offset, sigBytesCopy, 0, length); return engineVerify(sigBytesCopy); } /** * Sets the specified algorithm parameter to the specified * value. This method supplies a general-purpose mechanism through * which it is possible to set the various parameters of this object. * A parameter may be any settable parameter for the algorithm, such as * a parameter size, or a source of random bits for signature generation * (if appropriate), or an indication of whether or not to perform * a specific but optional computation. A uniform algorithm-specific * naming scheme for each parameter is desirable but left unspecified * at this time. * * @param param the string identifier of the parameter. * * @param value the parameter value. 
* * @exception InvalidParameterException if {@code param} is an * invalid parameter for this signature algorithm engine, * the parameter is already set * and cannot be set again, a security exception occurs, and so on. * * @deprecated Replaced by {@link * #engineSetParameter(java.security.spec.AlgorithmParameterSpec) * engineSetParameter}. */ @Deprecated protected abstract void engineSetParameter(String param, Object value) throws InvalidParameterException; /** * <p>This method is overridden by providers to initialize * this signature engine with the specified parameter set. * * @param params the parameters * * @exception UnsupportedOperationException if this method is not * overridden by a provider * * @exception InvalidAlgorithmParameterException if this method is * overridden by a provider and the given parameters * are inappropriate for this signature engine */ protected void engineSetParameter(AlgorithmParameterSpec params) throws InvalidAlgorithmParameterException { throw new UnsupportedOperationException(); } /** * <p>This method is overridden by providers to return the * parameters used with this signature engine, or null * if this signature engine does not use any parameters. * * <p>The returned parameters may be the same that were used to initialize * this signature engine, or may contain a combination of default and * randomly generated parameter values used by the underlying signature * implementation if this signature engine requires algorithm parameters * but was not initialized with any. * * @return the parameters used with this signature engine, or null if this * signature engine does not use any parameters * * @exception UnsupportedOperationException if this method is * not overridden by a provider * @since 1.4 */ protected AlgorithmParameters engineGetParameters() { throw new UnsupportedOperationException(); } /** * Gets the value of the specified algorithm parameter. 
* This method supplies a general-purpose mechanism through which it * is possible to get the various parameters of this object. A parameter * may be any settable parameter for the algorithm, such as a parameter * size, or a source of random bits for signature generation (if * appropriate), or an indication of whether or not to perform a * specific but optional computation. A uniform algorithm-specific * naming scheme for each parameter is desirable but left unspecified * at this time. * * @param param the string name of the parameter. * * @return the object that represents the parameter value, or null if * there is none. * * @exception InvalidParameterException if {@code param} is an * invalid parameter for this engine, or another exception occurs while * trying to get this parameter. * * @deprecated */ @Deprecated protected abstract Object engineGetParameter(String param) throws InvalidParameterException; /** * Returns a clone if the implementation is cloneable. * * @return a clone if the implementation is cloneable. * * @exception CloneNotSupportedException if this is called * on an implementation that does not support {@code Cloneable}. */ public Object clone() throws CloneNotSupportedException { if (this instanceof Cloneable) { return super.clone(); } else { throw new CloneNotSupportedException(); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.pig.test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.IOException; import java.util.Set; import org.junit.Assert; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.pig.LoadFunc; import org.apache.pig.impl.logicalLayer.FrontendException; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Test; public class TestLoadFunc { private static Path curHdfsDir; private static String curHdfsRoot = "hdfs://localhost.localdomain:12345"; @BeforeClass public static void setUpBeforeClass() throws Exception { curHdfsDir = new Path(curHdfsRoot + "/user/pig/"); } @AfterClass public static void tearDownAfterClass() throws Exception { } @Before public void setUp() throws Exception { } @After public void tearDown() throws Exception { } @Test public void testGetAbsolutePath() throws IOException { // test case: simple absolute path Assert.assertEquals("/hadoop/test/passwd", LoadFunc.getAbsolutePath("/hadoop/test/passwd", curHdfsDir)); } @Test public void testGetAbsolutePath2() throws 
IOException { // test case: simple relative path Assert.assertEquals(curHdfsRoot + "/user/pig/data/passwd", LoadFunc.getAbsolutePath("data/passwd", curHdfsDir)); } @Test public void testGetAbsolutePath3() throws IOException { // test case: remote hdfs path String absPath = "hdfs://myhost.mydomain:37765/data/passwd"; Assert.assertEquals(absPath, LoadFunc.getAbsolutePath(absPath, curHdfsDir)); } @Test public void testGetAbsolutePath4() throws IOException { // test case: non dfs scheme Assert.assertEquals("http://myhost:12345/data/passwd", LoadFunc.getAbsolutePath("http://myhost:12345/data/passwd", curHdfsDir)); } @Test public void testCommaSeparatedString() throws Exception { // test case: comma separated absolute paths Assert.assertEquals("/usr/pig/a,/usr/pig/b,/usr/pig/c", LoadFunc.getAbsolutePath("/usr/pig/a,/usr/pig/b,/usr/pig/c", curHdfsDir)); } @Test public void testCommaSeparatedString2() throws Exception { // test case: comma separated relative paths Assert.assertEquals(curHdfsRoot + "/user/pig/t?s*," + curHdfsRoot + "/user/pig/test", LoadFunc.getAbsolutePath("t?s*,test", curHdfsDir)); } @Test public void testCommaSeparatedString4() throws Exception { // test case: comma separated paths with hadoop glob Assert.assertEquals(curHdfsRoot + "/user/pig/test/{a,c}," + curHdfsRoot + "/user/pig/test/b", LoadFunc.getAbsolutePath("test/{a,c},test/b", curHdfsDir)); } @Test public void testCommaSeparatedString5() throws Exception { // test case: comma separated paths with hadoop glob Assert.assertEquals("/test/data/{a,c}," + curHdfsRoot + "/user/pig/test/b", LoadFunc.getAbsolutePath("/test/data/{a,c},test/b", curHdfsDir)); } @Test public void testCommaSeparatedString6() throws Exception { // test case: comma separated paths with hasoop glob Assert.assertEquals(curHdfsRoot + "/user/pig/test/{a,c},/test/data/b", LoadFunc.getAbsolutePath("test/{a,c},/test/data/b", curHdfsDir)); } @Test public void testCommaSeparatedString7() throws Exception { // test case: comma separated 
paths with white spaces Assert.assertEquals(curHdfsRoot + "/user/pig/test/{a,c},/test/data/b", LoadFunc.getAbsolutePath("test/{a,c}, /test/data/b", curHdfsDir)); } @Test(expected=IllegalArgumentException.class) public void testCommaSeparatedString8() throws Exception { // test case: comma separated paths with empty string Assert.assertEquals(curHdfsRoot + "/user/pig/," + curHdfsRoot + "/test/data/b", LoadFunc.getAbsolutePath(", /test/data/b", curHdfsDir)); } @Test public void testHarUrl() throws Exception { // test case: input location is a har:// url Assert.assertEquals("har:///user/pig/harfile", LoadFunc.getAbsolutePath("har:///user/pig/harfile", curHdfsDir)); } @Test public void testGlobPaths() throws IOException { final String basedir = "file://" + System.getProperty("user.dir"); final String tempdir = Long.toString(System.currentTimeMillis()); final String nonexistentpath = basedir + "/" + tempdir + "/this_path_does_not_exist"; String locationStr = null; Set<Path> paths; Configuration conf = new Configuration(); // existent path locationStr = basedir; paths = LoadFunc.getGlobPaths(locationStr, conf, true); assertFalse(paths.isEmpty()); // non-existent path locationStr = nonexistentpath; try { paths = LoadFunc.getGlobPaths(locationStr, conf, true); fail("Paths with pattern are not readable"); } catch (IOException e) { assertTrue(e.getMessage().contains("matches 0 files")); } // empty glob pattern locationStr = basedir + "/{}"; try { paths = LoadFunc.getGlobPaths(locationStr, conf, true); fail(); } catch (IOException e) { assertTrue(e.getMessage().contains("matches 0 files")); } paths = LoadFunc.getGlobPaths(locationStr, conf, false); assertTrue(paths.isEmpty()); // bad glob pattern locationStr = basedir + "/{1,"; try { LoadFunc.getGlobPaths(locationStr, conf, true); Assert.fail("Negative test to test illegal file pattern. 
Should not be succeeding!"); } catch (IOException e) { // The message of the exception for illegal file pattern is rather // long, so we simply confirm if it contains 'illegal file pattern'. assertTrue(e.getMessage().contains("Illegal file pattern")); } } }
/* * Copyright 2010-2015 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.directory.model; import java.io.Serializable; /** * <p> * Contains information about an AD Connector directory. * </p> */ public class DirectoryConnectSettingsDescription implements Serializable, Cloneable { /** * The identifier of the VPC that the AD Connector is in. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(vpc-[0-9a-f]{8})$<br/> */ private String vpcId; /** * A list of subnet identifiers in the VPC that the AD connector is in. */ private com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIds; /** * The username of the service account in the on-premises directory. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> * <b>Pattern: </b>[a-zA-Z0-9._-]+<br/> */ private String customerUserName; /** * The security group identifier for the AD Connector directory. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(sg-[0-9a-f]{8})$<br/> */ private String securityGroupId; /** * A list of the Availability Zones that the directory is in. */ private com.amazonaws.internal.ListWithAutoConstructFlag<String> availabilityZones; /** * The IP addresses of the AD Connector servers. */ private com.amazonaws.internal.ListWithAutoConstructFlag<String> connectIps; /** * The identifier of the VPC that the AD Connector is in. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(vpc-[0-9a-f]{8})$<br/> * * @return The identifier of the VPC that the AD Connector is in. 
*/ public String getVpcId() { return vpcId; } /** * The identifier of the VPC that the AD Connector is in. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(vpc-[0-9a-f]{8})$<br/> * * @param vpcId The identifier of the VPC that the AD Connector is in. */ public void setVpcId(String vpcId) { this.vpcId = vpcId; } /** * The identifier of the VPC that the AD Connector is in. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(vpc-[0-9a-f]{8})$<br/> * * @param vpcId The identifier of the VPC that the AD Connector is in. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withVpcId(String vpcId) { this.vpcId = vpcId; return this; } /** * A list of subnet identifiers in the VPC that the AD connector is in. * * @return A list of subnet identifiers in the VPC that the AD connector is in. */ public java.util.List<String> getSubnetIds() { if (subnetIds == null) { subnetIds = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(); subnetIds.setAutoConstruct(true); } return subnetIds; } /** * A list of subnet identifiers in the VPC that the AD connector is in. * * @param subnetIds A list of subnet identifiers in the VPC that the AD connector is in. */ public void setSubnetIds(java.util.Collection<String> subnetIds) { if (subnetIds == null) { this.subnetIds = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(subnetIds.size()); subnetIdsCopy.addAll(subnetIds); this.subnetIds = subnetIdsCopy; } /** * A list of subnet identifiers in the VPC that the AD connector is in. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setSubnetIds(java.util.Collection)} or {@link * #withSubnetIds(java.util.Collection)} if you want to override the * existing values. 
* <p> * Returns a reference to this object so that method calls can be chained together. * * @param subnetIds A list of subnet identifiers in the VPC that the AD connector is in. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withSubnetIds(String... subnetIds) { if (getSubnetIds() == null) setSubnetIds(new java.util.ArrayList<String>(subnetIds.length)); for (String value : subnetIds) { getSubnetIds().add(value); } return this; } /** * A list of subnet identifiers in the VPC that the AD connector is in. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param subnetIds A list of subnet identifiers in the VPC that the AD connector is in. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withSubnetIds(java.util.Collection<String> subnetIds) { if (subnetIds == null) { this.subnetIds = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<String> subnetIdsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(subnetIds.size()); subnetIdsCopy.addAll(subnetIds); this.subnetIds = subnetIdsCopy; } return this; } /** * The username of the service account in the on-premises directory. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> * <b>Pattern: </b>[a-zA-Z0-9._-]+<br/> * * @return The username of the service account in the on-premises directory. */ public String getCustomerUserName() { return customerUserName; } /** * The username of the service account in the on-premises directory. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> * <b>Pattern: </b>[a-zA-Z0-9._-]+<br/> * * @param customerUserName The username of the service account in the on-premises directory. 
*/ public void setCustomerUserName(String customerUserName) { this.customerUserName = customerUserName; } /** * The username of the service account in the on-premises directory. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> * <b>Pattern: </b>[a-zA-Z0-9._-]+<br/> * * @param customerUserName The username of the service account in the on-premises directory. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withCustomerUserName(String customerUserName) { this.customerUserName = customerUserName; return this; } /** * The security group identifier for the AD Connector directory. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(sg-[0-9a-f]{8})$<br/> * * @return The security group identifier for the AD Connector directory. */ public String getSecurityGroupId() { return securityGroupId; } /** * The security group identifier for the AD Connector directory. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(sg-[0-9a-f]{8})$<br/> * * @param securityGroupId The security group identifier for the AD Connector directory. */ public void setSecurityGroupId(String securityGroupId) { this.securityGroupId = securityGroupId; } /** * The security group identifier for the AD Connector directory. * <p> * Returns a reference to this object so that method calls can be chained together. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>^(sg-[0-9a-f]{8})$<br/> * * @param securityGroupId The security group identifier for the AD Connector directory. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withSecurityGroupId(String securityGroupId) { this.securityGroupId = securityGroupId; return this; } /** * A list of the Availability Zones that the directory is in. 
* * @return A list of the Availability Zones that the directory is in. */ public java.util.List<String> getAvailabilityZones() { if (availabilityZones == null) { availabilityZones = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(); availabilityZones.setAutoConstruct(true); } return availabilityZones; } /** * A list of the Availability Zones that the directory is in. * * @param availabilityZones A list of the Availability Zones that the directory is in. */ public void setAvailabilityZones(java.util.Collection<String> availabilityZones) { if (availabilityZones == null) { this.availabilityZones = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<String> availabilityZonesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(availabilityZones.size()); availabilityZonesCopy.addAll(availabilityZones); this.availabilityZones = availabilityZonesCopy; } /** * A list of the Availability Zones that the directory is in. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setAvailabilityZones(java.util.Collection)} or * {@link #withAvailabilityZones(java.util.Collection)} if you want to * override the existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param availabilityZones A list of the Availability Zones that the directory is in. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withAvailabilityZones(String... availabilityZones) { if (getAvailabilityZones() == null) setAvailabilityZones(new java.util.ArrayList<String>(availabilityZones.length)); for (String value : availabilityZones) { getAvailabilityZones().add(value); } return this; } /** * A list of the Availability Zones that the directory is in. * <p> * Returns a reference to this object so that method calls can be chained together. 
* * @param availabilityZones A list of the Availability Zones that the directory is in. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withAvailabilityZones(java.util.Collection<String> availabilityZones) { if (availabilityZones == null) { this.availabilityZones = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<String> availabilityZonesCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(availabilityZones.size()); availabilityZonesCopy.addAll(availabilityZones); this.availabilityZones = availabilityZonesCopy; } return this; } /** * The IP addresses of the AD Connector servers. * * @return The IP addresses of the AD Connector servers. */ public java.util.List<String> getConnectIps() { if (connectIps == null) { connectIps = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(); connectIps.setAutoConstruct(true); } return connectIps; } /** * The IP addresses of the AD Connector servers. * * @param connectIps The IP addresses of the AD Connector servers. */ public void setConnectIps(java.util.Collection<String> connectIps) { if (connectIps == null) { this.connectIps = null; return; } com.amazonaws.internal.ListWithAutoConstructFlag<String> connectIpsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(connectIps.size()); connectIpsCopy.addAll(connectIps); this.connectIps = connectIpsCopy; } /** * The IP addresses of the AD Connector servers. * <p> * <b>NOTE:</b> This method appends the values to the existing list (if * any). Use {@link #setConnectIps(java.util.Collection)} or {@link * #withConnectIps(java.util.Collection)} if you want to override the * existing values. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param connectIps The IP addresses of the AD Connector servers. * * @return A reference to this updated object so that method calls can be chained * together. 
*/ public DirectoryConnectSettingsDescription withConnectIps(String... connectIps) { if (getConnectIps() == null) setConnectIps(new java.util.ArrayList<String>(connectIps.length)); for (String value : connectIps) { getConnectIps().add(value); } return this; } /** * The IP addresses of the AD Connector servers. * <p> * Returns a reference to this object so that method calls can be chained together. * * @param connectIps The IP addresses of the AD Connector servers. * * @return A reference to this updated object so that method calls can be chained * together. */ public DirectoryConnectSettingsDescription withConnectIps(java.util.Collection<String> connectIps) { if (connectIps == null) { this.connectIps = null; } else { com.amazonaws.internal.ListWithAutoConstructFlag<String> connectIpsCopy = new com.amazonaws.internal.ListWithAutoConstructFlag<String>(connectIps.size()); connectIpsCopy.addAll(connectIps); this.connectIps = connectIpsCopy; } return this; } /** * Returns a string representation of this object; useful for testing and * debugging. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getVpcId() != null) sb.append("VpcId: " + getVpcId() + ","); if (getSubnetIds() != null) sb.append("SubnetIds: " + getSubnetIds() + ","); if (getCustomerUserName() != null) sb.append("CustomerUserName: " + getCustomerUserName() + ","); if (getSecurityGroupId() != null) sb.append("SecurityGroupId: " + getSecurityGroupId() + ","); if (getAvailabilityZones() != null) sb.append("AvailabilityZones: " + getAvailabilityZones() + ","); if (getConnectIps() != null) sb.append("ConnectIps: " + getConnectIps() ); sb.append("}"); return sb.toString(); } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getVpcId() == null) ? 
0 : getVpcId().hashCode()); hashCode = prime * hashCode + ((getSubnetIds() == null) ? 0 : getSubnetIds().hashCode()); hashCode = prime * hashCode + ((getCustomerUserName() == null) ? 0 : getCustomerUserName().hashCode()); hashCode = prime * hashCode + ((getSecurityGroupId() == null) ? 0 : getSecurityGroupId().hashCode()); hashCode = prime * hashCode + ((getAvailabilityZones() == null) ? 0 : getAvailabilityZones().hashCode()); hashCode = prime * hashCode + ((getConnectIps() == null) ? 0 : getConnectIps().hashCode()); return hashCode; } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof DirectoryConnectSettingsDescription == false) return false; DirectoryConnectSettingsDescription other = (DirectoryConnectSettingsDescription)obj; if (other.getVpcId() == null ^ this.getVpcId() == null) return false; if (other.getVpcId() != null && other.getVpcId().equals(this.getVpcId()) == false) return false; if (other.getSubnetIds() == null ^ this.getSubnetIds() == null) return false; if (other.getSubnetIds() != null && other.getSubnetIds().equals(this.getSubnetIds()) == false) return false; if (other.getCustomerUserName() == null ^ this.getCustomerUserName() == null) return false; if (other.getCustomerUserName() != null && other.getCustomerUserName().equals(this.getCustomerUserName()) == false) return false; if (other.getSecurityGroupId() == null ^ this.getSecurityGroupId() == null) return false; if (other.getSecurityGroupId() != null && other.getSecurityGroupId().equals(this.getSecurityGroupId()) == false) return false; if (other.getAvailabilityZones() == null ^ this.getAvailabilityZones() == null) return false; if (other.getAvailabilityZones() != null && other.getAvailabilityZones().equals(this.getAvailabilityZones()) == false) return false; if (other.getConnectIps() == null ^ this.getConnectIps() == null) return false; if (other.getConnectIps() != null && 
other.getConnectIps().equals(this.getConnectIps()) == false) return false; return true; } @Override public DirectoryConnectSettingsDescription clone() { try { return (DirectoryConnectSettingsDescription) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException( "Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.dongliu.apk.parser.cert.asn1.ber;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;

/**
 * {@link BerDataValueReader} which reads from an {@link InputStream} returning BER-encoded data
 * values. See {@code X.690} for the encoding.
 */
public class InputStreamBerDataValueReader implements BerDataValueReader {
    // Underlying stream; never closed by this reader.
    private final InputStream mIn;

    /**
     * Creates a reader over the given stream.
     *
     * @param in source of BER-encoded bytes; must not be null
     */
    public InputStreamBerDataValueReader(InputStream in) {
        if (in == null) {
            throw new NullPointerException("in == null");
        }
        mIn = in;
    }

    @Override
    public BerDataValue readDataValue() throws BerDataValueFormatException {
        return readDataValue(mIn);
    }

    /**
     * Returns the next data value or {@code null} if end of input has been reached.
     *
     * @throws BerDataValueFormatException if the value being read is malformed.
     */
    @SuppressWarnings("resource")
    private static BerDataValue readDataValue(InputStream input)
            throws BerDataValueFormatException {
        // Wrap the input so every byte consumed while parsing this value is recorded;
        // the recorded bytes become the value's "encoded" form and byte counts are
        // used to locate the contents within it. Exact read ordering matters.
        RecordingInputStream in = new RecordingInputStream(input);

        try {
            // First identifier octet: tag class, constructed bit, and (low) tag number.
            int firstIdentifierByte = in.read();
            if (firstIdentifierByte == -1) {
                // End of input
                return null;
            }
            int tagNumber = readTagNumber(in, firstIdentifierByte);

            // First length octet decides short form / long form / indefinite length.
            int firstLengthByte = in.read();
            if (firstLengthByte == -1) {
                throw new BerDataValueFormatException("Missing length");
            }
            boolean constructed = BerEncoding.isConstructed((byte) firstIdentifierByte);
            int contentsLength;
            int contentsOffsetInDataValue;
            if ((firstLengthByte & 0x80) == 0) {
                // short form length
                contentsLength = readShortFormLength(firstLengthByte);
                contentsOffsetInDataValue = in.getReadByteCount();
                skipDefiniteLengthContents(in, contentsLength);
            } else if ((firstLengthByte & 0xff) != 0x80) {
                // long form length
                contentsLength = readLongFormLength(in, firstLengthByte);
                contentsOffsetInDataValue = in.getReadByteCount();
                skipDefiniteLengthContents(in, contentsLength);
            } else {
                // indefinite length -- contents terminated by 0x00 0x00
                contentsOffsetInDataValue = in.getReadByteCount();
                contentsLength = constructed
                        ? skipConstructedIndefiniteLengthContents(in)
                        : skipPrimitiveIndefiniteLengthContents(in);
            }

            // Everything recorded so far is the complete encoded data value.
            byte[] encoded = in.getReadBytes();
            ByteBuffer encodedContents =
                    ByteBuffer.wrap(encoded, contentsOffsetInDataValue, contentsLength);
            return new BerDataValue(
                    ByteBuffer.wrap(encoded),
                    encodedContents,
                    BerEncoding.getTagClass((byte) firstIdentifierByte),
                    constructed,
                    tagNumber);
        } catch (IOException e) {
            throw new BerDataValueFormatException("Failed to read data value", e);
        }
    }

    /**
     * Extracts the tag number from the first identifier byte, consuming further
     * bytes only for the high-tag-number form (tag number field == 0x1f).
     */
    private static int readTagNumber(InputStream in, int firstIdentifierByte)
            throws IOException, BerDataValueFormatException {
        int tagNumber = BerEncoding.getTagNumber((byte) firstIdentifierByte);
        if (tagNumber == 0x1f) {
            // high-tag-number form
            return readHighTagNumber(in);
        } else {
            // low-tag-number form
            return tagNumber;
        }
    }

    private static int readHighTagNumber(InputStream in)
            throws IOException, BerDataValueFormatException {
        // Base-128 big-endian form, where each byte has the highest bit set, except for the last
        // byte where the highest bit is not set
        int b;
        int result = 0;
        do {
            b = in.read();
            if (b == -1) {
                throw new BerDataValueFormatException("Truncated tag number");
            }
            // Overflow guard: shifting left by 7 must not exceed Integer.MAX_VALUE.
            if (result > Integer.MAX_VALUE >>> 7) {
                throw new BerDataValueFormatException("Tag number too large");
            }
            result <<= 7;
            result |= b & 0x7f;
        } while ((b & 0x80) != 0);
        return result;
    }

    // Short form: the low 7 bits of the length octet are the length itself.
    private static int readShortFormLength(int firstLengthByte) {
        return firstLengthByte & 0x7f;
    }

    private static int readLongFormLength(InputStream in, int firstLengthByte)
            throws IOException, BerDataValueFormatException {
        // The low 7 bits of the first byte represent the number of bytes (following the first
        // byte) in which the length is in big-endian base-256 form
        int byteCount = firstLengthByte & 0x7f;
        if (byteCount > 4) {
            throw new BerDataValueFormatException("Length too large: " + byteCount + " bytes");
        }
        int result = 0;
        for (int i = 0; i < byteCount; i++) {
            int b = in.read();
            if (b == -1) {
                throw new BerDataValueFormatException("Truncated length");
            }
            // Overflow guard before shifting in the next base-256 digit.
            if (result > Integer.MAX_VALUE >>> 8) {
                throw new BerDataValueFormatException("Length too large");
            }
            result <<= 8;
            result |= b & 0xff;
        }
        return result;
    }

    /**
     * Skips exactly {@code len} content bytes (they are still captured by the
     * recording stream, whose skip() reads rather than seeks).
     */
    private static void skipDefiniteLengthContents(InputStream in, int len)
            throws IOException, BerDataValueFormatException {
        long bytesRead = 0;
        while (len > 0) {
            int skipped = (int) in.skip(len);
            if (skipped <= 0) {
                throw new BerDataValueFormatException(
                        "Truncated definite-length contents: " + bytesRead + " bytes read"
                        + ", " + len + " missing");
            }
            len -= skipped;
            bytesRead += skipped;
        }
    }

    /**
     * Skips the contents of a primitive indefinite-length value and returns the
     * number of content bytes, excluding the two-byte 0x00 0x00 terminator.
     */
    private static int skipPrimitiveIndefiniteLengthContents(InputStream in)
            throws IOException, BerDataValueFormatException {
        // Contents are terminated by 0x00 0x00
        boolean prevZeroByte = false;
        int bytesRead = 0;
        while (true) {
            int b = in.read();
            if (b == -1) {
                throw new BerDataValueFormatException(
                        "Truncated indefinite-length contents: " + bytesRead + " bytes read");
            }
            bytesRead++;
            // int overflow of the counter wraps negative -- treat as too long.
            if (bytesRead < 0) {
                throw new BerDataValueFormatException("Indefinite-length contents too long");
            }
            if (b == 0) {
                if (prevZeroByte) {
                    // End of contents reached -- we've read the value and its terminator 0x00 0x00
                    return bytesRead - 2;
                }
                prevZeroByte = true;
                continue;
            } else {
                prevZeroByte = false;
            }
        }
    }

    /**
     * Skips the contents of a constructed indefinite-length value by recursively
     * parsing its child data values; returns the content length excluding the
     * 0x00 0x00 terminator.
     */
    private static int skipConstructedIndefiniteLengthContents(RecordingInputStream in)
            throws BerDataValueFormatException {
        // Contents are terminated by 0x00 0x00. However, this data value is constructed, meaning it
        // can contain data values which are indefinite length encoded as well. As a result, we
        // must parse the direct children of this data value to correctly skip over the contents of
        // this data value.
        int readByteCountBefore = in.getReadByteCount();
        while (true) {
            // We can't easily peek for the 0x00 0x00 terminator using the provided InputStream.
            // Thus, we use the fact that 0x00 0x00 parses as a data value whose encoded form we
            // then check below to see whether it's 0x00 0x00.
            BerDataValue dataValue = readDataValue(in);
            if (dataValue == null) {
                throw new BerDataValueFormatException(
                        "Truncated indefinite-length contents: "
                        + (in.getReadByteCount() - readByteCountBefore) + " bytes read");
            }
            // getReadByteCount() wrapping to <= 0 indicates int overflow of the recorder.
            if (in.getReadByteCount() <= 0) {
                throw new BerDataValueFormatException("Indefinite-length contents too long");
            }
            ByteBuffer encoded = dataValue.getEncoded();
            if ((encoded.remaining() == 2) && (encoded.get(0) == 0) && (encoded.get(1) == 0)) {
                // 0x00 0x00 encountered
                return in.getReadByteCount() - readByteCountBefore - 2;
            }
        }
    }

    /**
     * InputStream decorator that mirrors every byte read into an in-memory
     * buffer, so the full encoded form of a parsed value can be recovered and
     * byte offsets computed. Closing or resetting does not affect the
     * underlying stream; skip() is implemented via read() so skipped bytes are
     * recorded too.
     */
    private static class RecordingInputStream extends InputStream {
        private final InputStream mIn;
        private final ByteArrayOutputStream mBuf;

        private RecordingInputStream(InputStream in) {
            mIn = in;
            mBuf = new ByteArrayOutputStream();
        }

        /** Returns a copy of all bytes read (or skipped) so far. */
        public byte[] getReadBytes() {
            return mBuf.toByteArray();
        }

        /** Returns the number of bytes read (or skipped) so far. */
        public int getReadByteCount() {
            return mBuf.size();
        }

        @Override
        public int read() throws IOException {
            int b = mIn.read();
            if (b != -1) {
                mBuf.write(b);
            }
            return b;
        }

        @Override
        public int read(byte[] b) throws IOException {
            int len = mIn.read(b);
            if (len > 0) {
                mBuf.write(b, 0, len);
            }
            return len;
        }

        @Override
        public int read(byte[] b, int off, int len) throws IOException {
            len = mIn.read(b, off, len);
            if (len > 0) {
                mBuf.write(b, off, len);
            }
            return len;
        }

        @Override
        public long skip(long n) throws IOException {
            if (n <= 0) {
                return mIn.skip(n);
            }
            // Skip by reading (at most 4096 bytes per call) so the skipped
            // bytes still land in the recording buffer.
            byte[] buf = new byte[4096];
            int len = mIn.read(buf, 0, (int) Math.min(buf.length, n));
            if (len > 0) {
                mBuf.write(buf, 0, len);
            }
            return (len < 0) ? 0 : len;
        }

        @Override
        public int available() throws IOException {
            // Delegates to InputStream.available(), not the wrapped stream.
            return super.available();
        }

        @Override
        public void close() throws IOException {
            // Delegates to InputStream.close(); the wrapped stream stays open.
            super.close();
        }

        @Override
        public synchronized void mark(int readlimit) {}

        @Override
        public synchronized void reset() throws IOException {
            throw new IOException("mark/reset not supported");
        }

        @Override
        public boolean markSupported() {
            return false;
        }
    }
}
package com.github.clans.fab;

import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.ColorStateList;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.graphics.Typeface;
import android.graphics.drawable.Drawable;
import android.os.Handler;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
import android.util.TypedValue;
import android.view.ContextThemeWrapper;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.Animation;
import android.view.animation.AnimationUtils;
import android.view.animation.AnticipateInterpolator;
import android.view.animation.Interpolator;
import android.view.animation.OvershootInterpolator;
import android.widget.ImageView;

import java.util.ArrayList;
import java.util.List;

/**
 * A {@link ViewGroup} hosting a main {@link FloatingActionButton} plus its
 * child buttons and their text labels. Children are stacked vertically
 * (opening up or down) with labels placed to the left or right; the toggle
 * icon on the menu button is rotated via animator sets when opening/closing.
 */
public class FloatingActionMenu extends ViewGroup implements FloatingMenu {

    // Duration shared by the icon-rotation and background-dim animations (ms).
    private static final int ANIMATION_DURATION = 300;
    private static final float CLOSED_PLUS_ROTATION = 0f;
    // 135 degrees counter-clockwise / clockwise for the "+" icon when opened.
    private static final float OPENED_PLUS_ROTATION_LEFT = -90f - 45f;
    private static final float OPENED_PLUS_ROTATION_RIGHT = 90f + 45f;

    private static final int OPEN_UP = 0;
    private static final int OPEN_DOWN = 1;

    private static final int LABELS_POSITION_LEFT = 0;
    private static final int LABELS_POSITION_RIGHT = 1;

    // Animator sets driving the toggle icon rotation on open/close.
    private AnimatorSet mOpenAnimatorSet = new AnimatorSet();
    private AnimatorSet mCloseAnimatorSet = new AnimatorSet();
    private AnimatorSet mIconToggleSet;

    private int mButtonSpacing = Util.dpToPx(getContext(), 0f);
    private FloatingActionButton mMenuButton;
    // Widest child button, computed in onMeasure and reused in onLayout.
    private int mMaxButtonWidth;
    private int mLabelsMargin = Util.dpToPx(getContext(), 0f);
    private int mLabelsVerticalOffset = Util.dpToPx(getContext(), 0f);
    private int mButtonsCount;
    private boolean mMenuOpened;
    private boolean mIsMenuOpening;
    private Handler mUiHandler = new Handler();

    // --- label appearance, populated from XML attributes in init() ---
    private int mLabelsShowAnimation;
    private int mLabelsHideAnimation;
    private int mLabelsPaddingTop = Util.dpToPx(getContext(), 4f);
    private int mLabelsPaddingRight = Util.dpToPx(getContext(), 8f);
    private int mLabelsPaddingBottom = Util.dpToPx(getContext(), 4f);
    private int mLabelsPaddingLeft = Util.dpToPx(getContext(), 8f);
    private ColorStateList mLabelsTextColor;
    private float mLabelsTextSize;
    private int mLabelsCornerRadius = Util.dpToPx(getContext(), 3f);
    private boolean mLabelsShowShadow;
    private int mLabelsColorNormal;
    private int mLabelsColorPressed;
    private int mLabelsColorRipple;

    // --- menu button appearance ---
    private boolean mMenuShowShadow;
    private int mMenuShadowColor;
    private float mMenuShadowRadius = 4f;
    private float mMenuShadowXOffset = 1f;
    private float mMenuShadowYOffset = 3f;
    private int mMenuColorNormal;
    private int mMenuColorPressed;
    private int mMenuColorRipple;
    private Drawable mIcon;
    private int mAnimationDelayPerItem;
    private Interpolator mOpenInterpolator;
    private Interpolator mCloseInterpolator;
    private boolean mIsAnimated = true;
    private boolean mLabelsSingleLine;
    private int mLabelsEllipsize;
    private int mLabelsMaxLines;
    private int mMenuFabSize;
    private int mLabelsStyle;
    private Typeface mCustomTypefaceFromFont;
    private boolean mIconAnimated = true;
    // Separate ImageView carrying the toggle icon, centered over the menu button.
    private ImageView mImageToggle;
    private Animation mMenuButtonShowAnimation;
    private Animation mMenuButtonHideAnimation;
    private Animation mImageToggleShowAnimation;
    private Animation mImageToggleHideAnimation;
    private boolean mIsMenuButtonAnimationRunning;
    private boolean mIsSetClosedOnTouchOutside;
    private boolean animationInProgress = false;
    private int mOpenDirection;
    private FloatingMenuToggleListener mToggleListener;

    // Animators fading the dimmed background color in/out.
    private ValueAnimator mShowBackgroundAnimator;
    private ValueAnimator mHideBackgroundAnimator;
    private int mBackgroundColor;

    private int mLabelsPosition;
    // Context wrapped with mLabelsStyle, used to inflate Label views.
    private Context mLabelsContext;
    private String mMenuLabelText;
    private boolean mUsingMenuLabel;

    public FloatingActionMenu(Context context) {
        this(context, null);
    }

    public FloatingActionMenu(Context context, AttributeSet attrs) {
        this(context, attrs, 0);
    }

    public FloatingActionMenu(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init(context, attrs);
    }

    /**
     * Reads all styleable attributes, sets up the dim-background animators,
     * creates the menu button + toggle image, and loads show/hide animations.
     */
    private void init(Context context, AttributeSet attrs) {
        TypedArray attr = context.obtainStyledAttributes(attrs, R.styleable.FloatingActionMenu, 0, 0);
        mButtonSpacing = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_buttonSpacing, mButtonSpacing);
        mLabelsMargin = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_margin, mLabelsMargin);
        mLabelsPosition = attr.getInt(R.styleable.FloatingActionMenu_menu_labels_position, LABELS_POSITION_LEFT);
        // Default slide direction of label animations depends on which side the labels sit.
        mLabelsShowAnimation = attr.getResourceId(R.styleable.FloatingActionMenu_menu_labels_showAnimation,
                mLabelsPosition == LABELS_POSITION_LEFT ? R.anim.fab_slide_in_from_right : R.anim.fab_slide_in_from_left);
        mLabelsHideAnimation = attr.getResourceId(R.styleable.FloatingActionMenu_menu_labels_hideAnimation,
                mLabelsPosition == LABELS_POSITION_LEFT ? R.anim.fab_slide_out_to_right : R.anim.fab_slide_out_to_left);
        mLabelsPaddingTop = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_paddingTop, mLabelsPaddingTop);
        mLabelsPaddingRight = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_paddingRight, mLabelsPaddingRight);
        mLabelsPaddingBottom = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_paddingBottom, mLabelsPaddingBottom);
        mLabelsPaddingLeft = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_paddingLeft, mLabelsPaddingLeft);
        mLabelsTextColor = attr.getColorStateList(R.styleable.FloatingActionMenu_menu_labels_textColor);
        // Default to white when no text color was supplied, same as TextView's default handling.
        if (mLabelsTextColor == null) {
            mLabelsTextColor = ColorStateList.valueOf(Color.WHITE);
        }
        mLabelsTextSize = attr.getDimension(R.styleable.FloatingActionMenu_menu_labels_textSize, getResources().getDimension(R.dimen.labels_text_size));
        mLabelsCornerRadius = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_cornerRadius, mLabelsCornerRadius);
        mLabelsShowShadow = attr.getBoolean(R.styleable.FloatingActionMenu_menu_labels_showShadow, true);
        mLabelsColorNormal = attr.getColor(R.styleable.FloatingActionMenu_menu_labels_colorNormal, 0xFF333333);
        mLabelsColorPressed = attr.getColor(R.styleable.FloatingActionMenu_menu_labels_colorPressed, 0xFF444444);
        mLabelsColorRipple = attr.getColor(R.styleable.FloatingActionMenu_menu_labels_colorRipple, 0x66FFFFFF);
        mMenuShowShadow = attr.getBoolean(R.styleable.FloatingActionMenu_menu_showShadow, true);
        mMenuShadowColor = attr.getColor(R.styleable.FloatingActionMenu_menu_shadowColor, 0x66000000);
        mMenuShadowRadius = attr.getDimension(R.styleable.FloatingActionMenu_menu_shadowRadius, mMenuShadowRadius);
        mMenuShadowXOffset = attr.getDimension(R.styleable.FloatingActionMenu_menu_shadowXOffset, mMenuShadowXOffset);
        mMenuShadowYOffset = attr.getDimension(R.styleable.FloatingActionMenu_menu_shadowYOffset, mMenuShadowYOffset);
        mMenuColorNormal = attr.getColor(R.styleable.FloatingActionMenu_menu_colorNormal, 0xFFDA4336);
        mMenuColorPressed = attr.getColor(R.styleable.FloatingActionMenu_menu_colorPressed, 0xFFE75043);
        mMenuColorRipple = attr.getColor(R.styleable.FloatingActionMenu_menu_colorRipple, 0x99FFFFFF);
        mAnimationDelayPerItem = attr.getInt(R.styleable.FloatingActionMenu_menu_animationDelayPerItem, 50);
        mIcon = attr.getDrawable(R.styleable.FloatingActionMenu_menu_icon);
        if (mIcon == null) {
            mIcon = getResources().getDrawable(R.drawable.fab_add);
        }
        mLabelsSingleLine = attr.getBoolean(R.styleable.FloatingActionMenu_menu_labels_singleLine, false);
        mLabelsEllipsize = attr.getInt(R.styleable.FloatingActionMenu_menu_labels_ellipsize, 0);
        mLabelsMaxLines = attr.getInt(R.styleable.FloatingActionMenu_menu_labels_maxLines, -1);
        mMenuFabSize = attr.getInt(R.styleable.FloatingActionMenu_menu_fab_size, FloatingActionButton.SIZE_NORMAL);
        mLabelsStyle = attr.getResourceId(R.styleable.FloatingActionMenu_menu_labels_style, 0);

        String customFont = attr.getString(R.styleable.FloatingActionMenu_menu_labels_customFont);
        try {
            if (!TextUtils.isEmpty(customFont)) {
                mCustomTypefaceFromFont = Typeface.createFromAsset(getContext().getAssets(), customFont);
            }
        } catch (RuntimeException ex) {
            // Surface a broken asset path as a configuration error rather than crashing later.
            throw new IllegalArgumentException("Unable to load specified custom font: " + customFont, ex);
        }

        mOpenDirection = attr.getInt(R.styleable.FloatingActionMenu_menu_openDirection, OPEN_UP);
        mBackgroundColor = attr.getColor(R.styleable.FloatingActionMenu_menu_backgroundColor, Color.TRANSPARENT);

        if (attr.hasValue(R.styleable.FloatingActionMenu_menu_fab_label)) {
            mUsingMenuLabel = true;
            mMenuLabelText = attr.getString(R.styleable.FloatingActionMenu_menu_fab_label);
        }

        // A single menu_labels_padding value overrides all four per-side paddings.
        if (attr.hasValue(R.styleable.FloatingActionMenu_menu_labels_padding)) {
            int padding = attr.getDimensionPixelSize(R.styleable.FloatingActionMenu_menu_labels_padding, 0);
            initPadding(padding);
        }

        mOpenInterpolator = new OvershootInterpolator();
        mCloseInterpolator = new AnticipateInterpolator();
        mLabelsContext = new ContextThemeWrapper(getContext(), mLabelsStyle);

        initBackgroundDimAnimation();
        createMenuButton();
        initMenuButtonAnimations(attr);

        attr.recycle();
    }

    /** Loads the menu-button and toggle-image show/hide animations from attributes. */
    private void initMenuButtonAnimations(TypedArray attr) {
        int showResId = attr.getResourceId(R.styleable.FloatingActionMenu_menu_fab_show_animation, R.anim.fab_scale_up);
        setMenuButtonShowAnimation(AnimationUtils.loadAnimation(getContext(), showResId));
        mImageToggleShowAnimation = AnimationUtils.loadAnimation(getContext(), showResId);

        int hideResId = attr.getResourceId(R.styleable.FloatingActionMenu_menu_fab_hide_animation, R.anim.fab_scale_down);
        setMenuButtonHideAnimation(AnimationUtils.loadAnimation(getContext(), hideResId));
        mImageToggleHideAnimation = AnimationUtils.loadAnimation(getContext(), hideResId);
    }

    /**
     * Builds the two ValueAnimators that fade the background color's alpha
     * between 0 and the configured color's alpha on open/close.
     */
    private void initBackgroundDimAnimation() {
        final int maxAlpha = Color.alpha(mBackgroundColor);
        final int red = Color.red(mBackgroundColor);
        final int green = Color.green(mBackgroundColor);
        final int blue = Color.blue(mBackgroundColor);

        mShowBackgroundAnimator = ValueAnimator.ofInt(0, maxAlpha);
        mShowBackgroundAnimator.setDuration(ANIMATION_DURATION);
        mShowBackgroundAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                Integer alpha = (Integer) animation.getAnimatedValue();
                setBackgroundColor(Color.argb(alpha, red, green, blue));
            }
        });

        mHideBackgroundAnimator = ValueAnimator.ofInt(maxAlpha, 0);
        mHideBackgroundAnimator.setDuration(ANIMATION_DURATION);
        mHideBackgroundAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
            @Override
            public void onAnimationUpdate(ValueAnimator animation) {
                Integer alpha = (Integer) animation.getAnimatedValue();
                setBackgroundColor(Color.argb(alpha, red, green, blue));
            }
        });
    }

    // Dimming is active only when a non-transparent background color was configured.
    private boolean isBackgroundEnabled() {
        return mBackgroundColor != Color.TRANSPARENT;
    }

    /** Applies one padding value to all four label-padding sides. */
    private void initPadding(int padding) {
        mLabelsPaddingTop = padding;
        mLabelsPaddingRight = padding;
        mLabelsPaddingBottom = padding;
        mLabelsPaddingLeft = padding;
    }

    /** Programmatically creates the main menu FAB and its toggle-icon overlay. */
    private void createMenuButton() {
        mMenuButton = new FloatingActionButton(getContext());

        mMenuButton.mShowShadow = mMenuShowShadow;
        if (mMenuShowShadow) {
            mMenuButton.mShadowRadius = Util.dpToPx(getContext(), mMenuShadowRadius);
            mMenuButton.mShadowXOffset = Util.dpToPx(getContext(), mMenuShadowXOffset);
            mMenuButton.mShadowYOffset = Util.dpToPx(getContext(), mMenuShadowYOffset);
        }
        mMenuButton.setColors(mMenuColorNormal, mMenuColorPressed, mMenuColorRipple);
        mMenuButton.mShadowColor = mMenuShadowColor;
        mMenuButton.mFabSize = mMenuFabSize;
        mMenuButton.updateBackground();
        mMenuButton.setLabelText(mMenuLabelText);

        mImageToggle = new ImageView(getContext());
        mImageToggle.setImageDrawable(mIcon);

        addView(mMenuButton, super.generateDefaultLayoutParams());
        addView(mImageToggle);

        createDefaultIconAnimation();
    }

    /**
     * Sets up the default open/close icon rotation (135 degrees), whose
     * direction depends on open direction and label side.
     */
    private void createDefaultIconAnimation() {
        float collapseAngle;
        float expandAngle;
        if (mOpenDirection == OPEN_UP) {
            collapseAngle = mLabelsPosition == LABELS_POSITION_LEFT ? OPENED_PLUS_ROTATION_LEFT : OPENED_PLUS_ROTATION_RIGHT;
            expandAngle = mLabelsPosition == LABELS_POSITION_LEFT ? OPENED_PLUS_ROTATION_LEFT : OPENED_PLUS_ROTATION_RIGHT;
        } else {
            collapseAngle = mLabelsPosition == LABELS_POSITION_LEFT ? OPENED_PLUS_ROTATION_RIGHT : OPENED_PLUS_ROTATION_LEFT;
            expandAngle = mLabelsPosition == LABELS_POSITION_LEFT ? OPENED_PLUS_ROTATION_RIGHT : OPENED_PLUS_ROTATION_LEFT;
        }

        ObjectAnimator collapseAnimator = ObjectAnimator.ofFloat(
                mImageToggle,
                "rotation",
                collapseAngle,
                CLOSED_PLUS_ROTATION
        );

        ObjectAnimator expandAnimator = ObjectAnimator.ofFloat(
                mImageToggle,
                "rotation",
                CLOSED_PLUS_ROTATION,
                expandAngle
        );

        mOpenAnimatorSet.play(expandAnimator);
        mCloseAnimatorSet.play(collapseAnimator);
        mOpenAnimatorSet.setInterpolator(mOpenInterpolator);
        mCloseAnimatorSet.setInterpolator(mCloseInterpolator);
        mOpenAnimatorSet.setDuration(ANIMATION_DURATION);
        mCloseAnimatorSet.setDuration(ANIMATION_DURATION);
    }

    @Override
    protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
        int width = 0;
        int height = 0;
        mMaxButtonWidth = 0;
        int maxLabelWidth = 0;

        measureChildWithMargins(mImageToggle, widthMeasureSpec, 0, heightMeasureSpec, 0);

        // First pass: find the widest button (needed to offset labels).
        for (int i = 0; i < mButtonsCount; i++) {
            View child = getChildAt(i);

            if (child.getVisibility() == GONE || child == mImageToggle) continue;

            measureChildWithMargins(child, widthMeasureSpec, 0, heightMeasureSpec, 0);
            mMaxButtonWidth = Math.max(mMaxButtonWidth, child.getMeasuredWidth());
        }

        // Second pass: measure labels against remaining width and accumulate height.
        for (int i = 0; i < mButtonsCount; i++) {
            int usedWidth = 0;
            View child = getChildAt(i);

            if (child.getVisibility() == GONE || child == mImageToggle) continue;

            usedWidth += child.getMeasuredWidth();
            height += child.getMeasuredHeight();

            Label label = (Label) child.getTag(R.id.fab_label);
            if (label != null) {
                int labelOffset = (mMaxButtonWidth - child.getMeasuredWidth()) / (mUsingMenuLabel ? 1 : 2);
                int labelUsedWidth = child.getMeasuredWidth() + label.calculateShadowWidth() + mLabelsMargin + labelOffset;
                measureChildWithMargins(label, widthMeasureSpec, labelUsedWidth, heightMeasureSpec, 0);
                usedWidth += label.getMeasuredWidth();
                maxLabelWidth = Math.max(maxLabelWidth, usedWidth + labelOffset);
            }
        }

        width = Math.max(mMaxButtonWidth, maxLabelWidth + mLabelsMargin) + getPaddingLeft() + getPaddingRight();

        height += mButtonSpacing * (mButtonsCount - 1) + getPaddingTop() + getPaddingBottom();
        height = adjustForOvershoot(height);

        if (getLayoutParams().width == LayoutParams.MATCH_PARENT) {
            width = getDefaultSize(getSuggestedMinimumWidth(), widthMeasureSpec);
        }

        if (getLayoutParams().height == LayoutParams.MATCH_PARENT) {
            height = getDefaultSize(getSuggestedMinimumHeight(), heightMeasureSpec);
        }

        setMeasuredDimension(width, height);
    }

    @Override
    protected void onLayout(boolean changed, int l, int t, int r, int b) {
        // Horizontal center line of all buttons, on the side opposite the labels.
        int buttonsHorizontalCenter = mLabelsPosition == LABELS_POSITION_LEFT
                ? r - l - mMaxButtonWidth / 2 - getPaddingRight()
                : mMaxButtonWidth / 2 + getPaddingLeft();
        boolean openUp = mOpenDirection == OPEN_UP;

        // Menu button is anchored at the bottom (open up) or top (open down).
        int menuButtonTop = openUp
                ? b - t - mMenuButton.getMeasuredHeight() - getPaddingBottom()
                : getPaddingTop();
        int menuButtonLeft = buttonsHorizontalCenter - mMenuButton.getMeasuredWidth() / 2;

        mMenuButton.layout(menuButtonLeft, menuButtonTop, menuButtonLeft + mMenuButton.getMeasuredWidth(),
                menuButtonTop + mMenuButton.getMeasuredHeight());

        // Toggle icon is centered over the menu button.
        int imageLeft = buttonsHorizontalCenter - mImageToggle.getMeasuredWidth() / 2;
        int imageTop = menuButtonTop + mMenuButton.getMeasuredHeight() / 2 - mImageToggle.getMeasuredHeight() / 2;

        mImageToggle.layout(imageLeft, imageTop, imageLeft + mImageToggle.getMeasuredWidth(),
                imageTop + mImageToggle.getMeasuredHeight());

        int nextY = openUp
                ? menuButtonTop + mMenuButton.getMeasuredHeight() + mButtonSpacing
                : menuButtonTop;

        // Lay out child FABs from last to first, stacking away from the menu button.
        for (int i = mButtonsCount - 1; i >= 0; i--) {
            View child = getChildAt(i);

            if (child == mImageToggle) continue;

            FloatingActionButton fab = (FloatingActionButton) child;

            if (fab.getVisibility() == GONE) continue;

            int childX = buttonsHorizontalCenter - fab.getMeasuredWidth() / 2;
            int childY = openUp ? nextY - fab.getMeasuredHeight() - mButtonSpacing : nextY;

            if (fab != mMenuButton) {
                fab.layout(childX, childY, childX + fab.getMeasuredWidth(),
                        childY + fab.getMeasuredHeight());

                // Keep child buttons hidden while the menu is closed.
                if (!mIsMenuOpening) {
                    fab.hide(false);
                }
            }

            View label = (View) fab.getTag(R.id.fab_label);
            if (label != null) {
                int labelsOffset = (mUsingMenuLabel ? mMaxButtonWidth / 2 : fab.getMeasuredWidth() / 2) + mLabelsMargin;
                int labelXNearButton = mLabelsPosition == LABELS_POSITION_LEFT
                        ? buttonsHorizontalCenter - labelsOffset
                        : buttonsHorizontalCenter + labelsOffset;

                int labelXAwayFromButton = mLabelsPosition == LABELS_POSITION_LEFT
                        ? labelXNearButton - label.getMeasuredWidth()
                        : labelXNearButton + label.getMeasuredWidth();

                int labelLeft = mLabelsPosition == LABELS_POSITION_LEFT
                        ? labelXAwayFromButton
                        : labelXNearButton;

                int labelRight = mLabelsPosition == LABELS_POSITION_LEFT
                        ? labelXNearButton
                        : labelXAwayFromButton;

                int labelTop = childY - mLabelsVerticalOffset + (fab.getMeasuredHeight()
                        - label.getMeasuredHeight()) / 2;

                label.layout(labelLeft, labelTop, labelRight, labelTop + label.getMeasuredHeight());

                if (!mIsMenuOpening) {
                    label.setVisibility(INVISIBLE);
                }
            }

            nextY = openUp
                    ? childY - mButtonSpacing
                    : childY + child.getMeasuredHeight() + mButtonSpacing;
        }
    }

    // Adds 3% headroom so the overshoot interpolator does not clip children.
    private int adjustForOvershoot(int dimension) {
        return (int) (dimension * 0.03 + dimension);
    }

    @Override
    protected void onFinishInflate() {
        super.onFinishInflate();
        bringChildToFront(mMenuButton);
        bringChildToFront(mImageToggle);
        mButtonsCount = getChildCount();
        setAnimationInProgressListener();
        createLabels();
    }

    /** Creates a Label for every child FAB and wires the menu button's click toggle. */
    private void createLabels() {
        for (int i = 0; i < mButtonsCount; i++) {

            if (getChildAt(i) == mImageToggle) continue;

            final FloatingActionButton fab = (FloatingActionButton) getChildAt(i);

            if (fab.getTag(R.id.fab_label) != null) continue;

            addLabel(fab);

            if (fab == mMenuButton) {
                mMenuButton.setOnClickListener(new OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        toggle(mIsAnimated);
                    }
                });
            }
        }
    }

    /**
     * Builds and attaches a Label view for the given FAB (no-op when the FAB
     * has no label text), storing it as a view tag under R.id.fab_label.
     */
    private void addLabel(FloatingActionButton fab) {
        String text = fab.getLabelText();

        if (TextUtils.isEmpty(text)) return;

        final Label label = new Label(mLabelsContext);
        label.setClickable(true);
        label.setFab(fab);
        label.setShowAnimation(AnimationUtils.loadAnimation(getContext(), mLabelsShowAnimation));
        label.setHideAnimation(AnimationUtils.loadAnimation(getContext(), mLabelsHideAnimation));

        if (mLabelsStyle > 0) {
            // A custom style takes over; disable programmatic colors/shadow.
            label.setTextAppearance(getContext(), mLabelsStyle);
            label.setShowShadow(false);
            label.setUsingStyle(true);
        } else {
            label.setColors(mLabelsColorNormal, mLabelsColorPressed, mLabelsColorRipple);
            label.setShowShadow(mLabelsShowShadow);
            label.setCornerRadius(mLabelsCornerRadius);
            if (mLabelsEllipsize > 0) {
                setLabelEllipsize(label);
            }
            label.setMaxLines(mLabelsMaxLines);
            label.updateBackground();

            label.setTextSize(TypedValue.COMPLEX_UNIT_PX, mLabelsTextSize);
            label.setTextColor(mLabelsTextColor);

            int left = mLabelsPaddingLeft;
            int top = mLabelsPaddingTop;
            if (mLabelsShowShadow) {
                left += fab.getShadowRadius() + Math.abs(fab.getShadowXOffset());
                top += fab.getShadowRadius() + Math.abs(fab.getShadowYOffset());
            }

            // NOTE(review): right/bottom padding reuse the left/top base values
            // (without the shadow inset) — matches the code as written; confirm intended.
            label.setPadding(
                    left,
                    top,
                    mLabelsPaddingLeft,
                    mLabelsPaddingTop
            );

            if (mLabelsMaxLines < 0 || mLabelsSingleLine) {
                label.setSingleLine(mLabelsSingleLine);
            }
        }

        if (mCustomTypefaceFromFont != null) {
            label.setTypeface(mCustomTypefaceFromFont);
        }
        label.setText(text);
        label.setOnClickListener(fab.getOnClickListener());

        addView(label);
        fab.setTag(R.id.fab_label, label);
    }

    /** Maps the menu_labels_ellipsize attribute value (1-4) onto TruncateAt. */
    private void setLabelEllipsize(Label label) {
        switch (mLabelsEllipsize) {
            case 1:
                label.setEllipsize(TextUtils.TruncateAt.START);
                break;
            case 2:
                label.setEllipsize(TextUtils.TruncateAt.MIDDLE);
                break;
            case 3:
                label.setEllipsize(TextUtils.TruncateAt.END);
                break;
            case 4:
                label.setEllipsize(TextUtils.TruncateAt.MARQUEE);
                break;
        }
    }

    @Override
    public MarginLayoutParams generateLayoutParams(AttributeSet attrs) {
        return new MarginLayoutParams(getContext(), attrs);
    }

    @Override
    protected MarginLayoutParams generateLayoutParams(LayoutParams p) {
        return new MarginLayoutParams(p);
    }

    @Override
    protected MarginLayoutParams generateDefaultLayoutParams() {
        return new MarginLayoutParams(MarginLayoutParams.WRAP_CONTENT,
                MarginLayoutParams.WRAP_CONTENT);
    }

    @Override
    protected boolean checkLayoutParams(LayoutParams p) {
        return p instanceof MarginLayoutParams;
    }

    /** Hides the menu button together with its toggle image. */
    private void hideMenuButtonWithImage(boolean animate) {
        if (!isMenuButtonHidden()) {
            mMenuButton.hide(animate);
            if (animate) {
                mImageToggle.startAnimation(mImageToggleHideAnimation);
            }
            mImageToggle.setVisibility(INVISIBLE);
            mIsMenuButtonAnimationRunning = false;
        }
    }

    /** Shows the menu button together with its toggle image. */
    private void showMenuButtonWithImage(boolean animate) {
        if (isMenuButtonHidden()) {
            mMenuButton.show(animate);
            if (animate) {
                mImageToggle.startAnimation(mImageToggleShowAnimation);
            }
            mImageToggle.setVisibility(VISIBLE);
        }
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        // When configured, a tap anywhere outside the buttons closes the menu.
        if (mIsSetClosedOnTouchOutside) {
            boolean handled = false;
            switch (event.getAction()) {
                case MotionEvent.ACTION_DOWN:
                    handled = isOpened();
                    break;
                case MotionEvent.ACTION_UP:
                    close(mIsAnimated);
                    handled = true;
            }

            return handled;
        }

        return super.onTouchEvent(event);
    }
    /** Returns true when the menu items are currently shown. */
    @Override
    public boolean isOpened() {
        return mMenuOpened;
    }

    /* ===== API methods ===== */

    /** Opens the menu when closed, closes it when open. */
    public void toggle(boolean animate) {
        if (isOpened()) {
            close(animate);
        } else {
            open(animate);
        }
    }

    /**
     * Opens the menu: starts the background/icon animations, then shows each
     * FAB (and its label) staggered by mAnimationDelayPerItem. The mMenuOpened
     * flag flips to true only after the last item's delay, at which point the
     * toggle listener is notified.
     */
    public void open(final boolean animate) {
        if (!isOpened()) {
            if (isBackgroundEnabled()) {
                mShowBackgroundAnimator.start();
            }

            if (mIconAnimated) {
                if (mIconToggleSet != null) {
                    mIconToggleSet.start();
                } else {
                    mCloseAnimatorSet.cancel();
                    mOpenAnimatorSet.start();
                }
            }

            int delay = 0;
            int counter = 0;
            mIsMenuOpening = true;
            for (int i = getChildCount() - 1; i >= 0; i--) {
                View child = getChildAt(i);
                if (child instanceof FloatingActionButton && child.getVisibility() != GONE) {
                    counter++;

                    final FloatingActionButton fab = (FloatingActionButton) child;
                    mUiHandler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            // Bail out if the menu already finished opening meanwhile.
                            if (isOpened()) return;

                            if (fab != mMenuButton) {
                                fab.show(animate);
                            }

                            Label label = (Label) fab.getTag(R.id.fab_label);
                            if (label != null && label.isHandleVisibilityChanges()) {
                                label.show(animate);
                            }
                        }
                    }, delay);
                    delay += mAnimationDelayPerItem;
                }
            }

            mUiHandler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    mMenuOpened = true;

                    if (mToggleListener != null) {
                        mToggleListener.onMenuToggle(true);
                    }
                }
            }, ++counter * mAnimationDelayPerItem);
        }
    }

    /**
     * Closes the menu, mirroring open(): hides each FAB and label with the same
     * per-item stagger, then clears mMenuOpened and notifies the listener.
     */
    public void close(final boolean animate) {
        if (isOpened()) {
            if (isBackgroundEnabled()) {
                mHideBackgroundAnimator.start();
            }

            if (mIconAnimated) {
                if (mIconToggleSet != null) {
                    mIconToggleSet.start();
                } else {
                    mCloseAnimatorSet.start();
                    mOpenAnimatorSet.cancel();
                }
            }

            int delay = 0;
            int counter = 0;
            mIsMenuOpening = false;
            for (int i = 0; i < getChildCount(); i++) {
                View child = getChildAt(i);
                if (child instanceof FloatingActionButton && child.getVisibility() != GONE) {
                    counter++;

                    final FloatingActionButton fab = (FloatingActionButton) child;
                    mUiHandler.postDelayed(new Runnable() {
                        @Override
                        public void run() {
                            // Bail out if the menu already finished closing meanwhile.
                            if (!isOpened()) return;

                            if (fab != mMenuButton) {
                                fab.hide(animate);
                            }

                            Label label = (Label) fab.getTag(R.id.fab_label);
                            if (label != null && label.isHandleVisibilityChanges()) {
                                label.hide(animate);
                            }
                        }
                    }, delay);
                    delay += mAnimationDelayPerItem;
                }
            }

            mUiHandler.postDelayed(new Runnable() {
                @Override
                public void run() {
                    mMenuOpened = false;

                    if (mToggleListener != null) {
                        mToggleListener.onMenuToggle(false);
                    }
                }
            }, ++counter * mAnimationDelayPerItem);
        }
    }

    /** X pivot used for menu scale animations. */
    @Override
    public float getMenuX() {
        return getPivotX();
    }

    /** Y pivot used for menu scale animations. */
    @Override
    public float getMenuY() {
        return getPivotY();
    }

    /**
     * Sets the {@link android.view.animation.Interpolator} for <b>FloatingActionButton's</b> icon animation.
     *
     * @param interpolator the Interpolator to be used in animation
     */
    public void setIconAnimationInterpolator(Interpolator interpolator) {
        mOpenAnimatorSet.setInterpolator(interpolator);
        mCloseAnimatorSet.setInterpolator(interpolator);
    }

    /** Interpolator for the icon's "open" animation only. */
    public void setIconAnimationOpenInterpolator(Interpolator openInterpolator) {
        mOpenAnimatorSet.setInterpolator(openInterpolator);
    }

    /** Interpolator for the icon's "close" animation only. */
    public void setIconAnimationCloseInterpolator(Interpolator closeInterpolator) {
        mCloseAnimatorSet.setInterpolator(closeInterpolator);
    }

    /** Whether open/close actions are animated. */
    public boolean isAnimated() {
        return mIsAnimated;
    }

    /**
     * Sets whether open and close actions should be animated
     *
     * @param animated if <b>false</b> - menu items will appear/disappear instantly without any animation
     */
    public void setAnimated(boolean animated) {
        mIsAnimated = animated;
        mOpenAnimatorSet.setDuration(animated ? ANIMATION_DURATION : 0);
        mCloseAnimatorSet.setDuration(animated ? ANIMATION_DURATION : 0);
    }

    public FloatingActionButton getMenuButton() {
        return mMenuButton;
    }

    public int getAnimationDelayPerItem() {
        return mAnimationDelayPerItem;
    }

    /** Per-item stagger (ms) used by open()/close(). */
    public void setAnimationDelayPerItem(int animationDelayPerItem) {
        mAnimationDelayPerItem = animationDelayPerItem;
    }

    public void setOnMenuToggleListener(FloatingMenuToggleListener listener) {
        mToggleListener = listener;
    }

    public boolean isIconAnimated() {
        return mIconAnimated;
    }

    public void setIconAnimated(boolean animated) {
        mIconAnimated = animated;
    }

    @Override
    public ImageView getMenuIconView() {
        return mImageToggle;
    }

    @Override
    public AnimatorSet getIconToggleAnimatorSet() {
        return mIconToggleSet;
    }

    /** Custom AnimatorSet that replaces the default open/close icon animation. */
    public void setIconToggleAnimatorSet(AnimatorSet toggleAnimatorSet) {
        mIconToggleSet = toggleAnimatorSet;
    }

    @Override
    public void setMenuButtonShowAnimation(Animation showAnimation) {
        mMenuButtonShowAnimation = showAnimation;
        mMenuButton.setShowAnimation(showAnimation);
    }

    @Override
    public void setMenuButtonHideAnimation(Animation hideAnimation) {
        mMenuButtonHideAnimation = hideAnimation;
        mMenuButton.setHideAnimation(hideAnimation);
    }

    /** Tracks layout-animation progress via the animationInProgress flag. */
    public void setAnimationInProgressListener() {
        setLayoutAnimationListener(new Animation.AnimationListener() {
            @Override
            public void onAnimationStart(Animation animation) {
                animationInProgress = true;
            }

            @Override
            public void onAnimationEnd(Animation animation) {
                animationInProgress = false;
            }

            @Override
            public void onAnimationRepeat(Animation animation) {
            }
        });
    }

    /**
     * Returns true if an animation on this layout has not ended yet.
     */
    public boolean isAnimating() {
        return animationInProgress;
    }

    /** True when the whole menu view is INVISIBLE. */
    public boolean isMenuHidden() {
        return getVisibility() == INVISIBLE;
    }

    public boolean isMenuButtonHidden() {
        return mMenuButton.isHidden();
    }

    /**
     * Makes the whole {@link #FloatingActionMenu} to appear and sets its visibility to {@link #VISIBLE}
     *
     * @param animate if true - plays "show animation"
     */
    public void showMenu(boolean animate) {
        setCorrectPivot();
        if (isMenuHidden()) {
            if (animate) {
                startAnimation(mMenuButtonShowAnimation);
            }
            setVisibility(VISIBLE);
        }
    }

    /**
     * Makes the {@link #FloatingActionMenu} to disappear and sets its visibility to {@link #INVISIBLE}
     *
     * @param animate if true - plays "hide animation"
     */
    public void hideMenu(final boolean animate) {
        setCorrectPivot();
        if (!isMenuHidden() && !mIsMenuButtonAnimationRunning) {
            mIsMenuButtonAnimationRunning = true;
            if (isOpened()) {
                // Close first; hide once the staggered close has had time to finish.
                close(animate);
                mUiHandler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        if (animate) {
                            startAnimation(mMenuButtonHideAnimation);
                        }
                        setVisibility(INVISIBLE);
                        mIsMenuButtonAnimationRunning = false;
                    }
                }, mAnimationDelayPerItem * mButtonsCount);
            } else {
                if (animate) {
                    startAnimation(mMenuButtonHideAnimation);
                }
                setVisibility(INVISIBLE);
                mIsMenuButtonAnimationRunning = false;
            }
        }
    }

    public void toggleMenu(boolean animate) {
        if (isMenuHidden()) {
            showMenu(animate);
        } else {
            hideMenu(animate);
        }
    }

    /**
     * Makes the {@link FloatingActionButton} to appear inside the {@link #FloatingActionMenu} and
     * sets its visibility to {@link #VISIBLE}
     *
     * @param animate if true - plays "show animation"
     */
    public void showMenuButton(boolean animate) {
        setCorrectPivot();
        if (isMenuButtonHidden()) {
            showMenuButtonWithImage(animate);
        }
    }

    /**
     * Makes the {@link FloatingActionButton} to disappear inside the {@link #FloatingActionMenu} and
     * sets its visibility to {@link #INVISIBLE}
     *
     * @param animate if true - plays "hide animation"
     */
    public void hideMenuButton(final boolean animate) {
        setCorrectPivot();
        if (!isMenuButtonHidden() && !mIsMenuButtonAnimationRunning) {
            mIsMenuButtonAnimationRunning = true;
            if (isOpened()) {
                close(animate);
                mUiHandler.postDelayed(new Runnable() {
                    @Override
                    public void run() {
                        hideMenuButtonWithImage(animate);
                    }
                }, mAnimationDelayPerItem * mButtonsCount);
            } else {
                hideMenuButtonWithImage(animate);
            }
        }
    }

    public void toggleMenuButton(boolean animate) {
        if (isMenuButtonHidden()) {
            showMenuButton(animate);
        } else {
            hideMenuButton(animate);
        }
    }

    public void setClosedOnTouchOutside(boolean close) {
        mIsSetClosedOnTouchOutside = close;
    }

    public void setMenuButtonColorNormalResId(int colorResId) {
        mMenuColorNormal = getResources().getColor(colorResId);
        mMenuButton.setColorNormalResId(colorResId);
    }

    public int getMenuButtonColorNormal() {
        return mMenuColorNormal;
    }

    public void setMenuButtonColorNormal(int color) {
        mMenuColorNormal = color;
        mMenuButton.setColorNormal(color);
    }

    public void setMenuButtonColorPressedResId(int colorResId) {
        mMenuColorPressed = getResources().getColor(colorResId);
        mMenuButton.setColorPressedResId(colorResId);
    }

    public int getMenuButtonColorPressed() {
        return mMenuColorPressed;
    }

    public void setMenuButtonColorPressed(int color) {
        mMenuColorPressed = color;
        mMenuButton.setColorPressed(color);
    }

    public void setMenuButtonColorRippleResId(int colorResId) {
        mMenuColorRipple = getResources().getColor(colorResId);
        mMenuButton.setColorRippleResId(colorResId);
    }

    public int getMenuButtonColorRipple() {
        return mMenuColorRipple;
    }

    public void setMenuButtonColorRipple(int color) {
        mMenuColorRipple = color;
        mMenuButton.setColorRipple(color);
    }

    /** Appends a FAB just before the menu button / toggle icon pair. */
    public void addMenuButton(FloatingActionButton fab) {
        addView(fab, mButtonsCount - 2);
        mButtonsCount++;
        addLabel(fab);
    }

    /** Removes a FAB and its label from the menu. */
    public void removeMenuButton(FloatingActionButton fab) {
        removeView(fab.getLabelView());
        removeView(fab);
        mButtonsCount--;
    }

    /** Inserts a FAB at the given child index, clamped to [0, buttons - 2]. */
    public void addMenuButton(FloatingActionButton fab, int index) {
        int size = mButtonsCount - 2;
        if (index < 0) {
            index = 0;
        } else if (index > size) {
            index = size;
        }

        addView(fab, index);
        mButtonsCount++;
        addLabel(fab);
    }

    /**
     * Anchors the scale pivot of the menu (and the menu button) at the center
     * of the toggle icon, measured from the right edge of the screen.
     */
    @Override
    public void setCorrectPivot() {
        int pivotX = (Util.getScreenWidth(getContext()) - (getPaddingRight() + (mMenuButton.getCircleSize() / 2)));
        float pivotY = (mImageToggle.getY() + mImageToggle.getHeight() / 2);
        setPivotX(pivotX);
        setPivotY(pivotY);
        mMenuButton.setPivotX(pivotX);
        mMenuButton.setPivotY(pivotY);
    }

    /** Removes every child FAB except the menu button itself. */
    public void removeAllMenuButtons() {
        close(true);

        List<FloatingActionButton> viewsToRemove = new ArrayList<>();
        for (int i = 0; i < getChildCount(); i++) {
            View v = getChildAt(i);
            if (v != mMenuButton && v != mImageToggle && v instanceof FloatingActionButton) {
                viewsToRemove.add((FloatingActionButton) v);
            }
        }
        for (FloatingActionButton v : viewsToRemove) {
            removeMenuButton(v);
        }
    }

    // NOTE(review): the getter reads the cached mMenuLabelText field, but the
    // setter below only forwards to the button and never updates that cache —
    // confirm whether the getter should delegate to mMenuButton instead.
    public String getMenuButtonLabelText() {
        return mMenuLabelText;
    }

    public void setMenuButtonLabelText(String text) {
        mMenuButton.setLabelText(text);
    }

    public void setOnMenuButtonClickListener(OnClickListener clickListener) {
        mMenuButton.setOnClickListener(clickListener);
    }

    public void setOnMenuButtonLongClickListener(OnLongClickListener longClickListener) {
        mMenuButton.setOnLongClickListener(longClickListener);
    }
}
/** *============================================================================ * Copyright The Ohio State University Research Foundation, The University of Chicago - * Argonne National Laboratory, Emory University, SemanticBits LLC, and * Ekagra Software Technologies Ltd. * * Distributed under the OSI-approved BSD 3-Clause License. * See http://ncip.github.com/cagrid-core/LICENSE.txt for details. *============================================================================ **/ package gov.nih.nci.cagrid.graph.geometry; import java.awt.Point; import java.awt.Rectangle; import java.awt.geom.Line2D; public class LineSegment { public Point p1; public Point p2; public static final int LEFT = 0; public static final int RIGHT = 1; public static final int UP = 2; public static final int DOWN = 3; public static final int EVEN = 4; public static final int TOP = 5; public static final int BOTTOM = 6; public int getSway(LineSegment ls, int orientation) { return EVEN; } public LineSegment(int x1, int y1, int x2, int y2) { this.p1 = new Point(x1, y1); this.p2 = new Point(x2, y2); } public FloatingPoint getIntersection(LineSegment ls) { /*ToDo: Two linesegments that are the same...what to do? * Two linesegments that are coincident, but are not the same line segment...what to do? 
*/ FloatingPoint rval = new FloatingPoint(0,0); Line2D line1 = new Line2D.Float(this.p1.x, this.p1.y, this.p2.x, this.p2.y); Line2D line2 = new Line2D.Float(ls.p1.x, ls.p1.y, ls.p2.x, ls.p2.y); if((this.p1.x == this.p2.x && this.p1.y==this.p2.y) || (ls.p1.x==ls.p2.x && ls.p1.y==ls.p2.y) ) { return null; } if(line1.intersectsLine(line2)) { if (this.isParallel(ls)) return null; float numerator = (float)( (ls.p2.x-ls.p1.x)*(this.p1.y-ls.p1.y) - (ls.p2.y-ls.p1.y)*(this.p1.x-ls.p1.x) ); float denominator = (float)( (ls.p2.y-ls.p1.y)*(this.p2.x - this.p1.x)- (ls.p2.x-ls.p1.x)*(this.p2.y-this.p1.y) ); float ua = numerator/denominator; rval.x = this.p1.x + ua*(this.p2.x-this.p1.x); rval.y = this.p1.y + ua*(this.p2.y-this.p1.y); return rval; } return null; } public boolean isVertical() { return (this.p1.x == this.p2.x); } public boolean isHorizontal() { return (this.p1.y == this.p2.y); } public boolean isParallel(LineSegment ls) { if(this.isVertical() && ls.isVertical()) return true; int rise1 = this.p2.y - this.p1.y; int run1 = this.p2.x - this.p1.x; float slope1 = (float)rise1/(float)run1; int rise2 = ls.p2.y - ls.p1.y; int run2 = ls.p2.x - ls.p1.x; float slope2 = (float)rise2/(float)run2; if(slope1 == slope2) return true; else return false; } public String toString() { return "LineSegment[" + this.p1.x + ", " + this.p1.y + ", " + this.p2.x + ", " + this.p2.y + "]"; } public float cutsRectangle(Rectangle r) { float rval = 0; float area = r.width * r.height; if(r.intersectsLine(this.p1.x, this.p1.y, this.p2.x, this.p2.y)) { LineSegment top = new LineSegment(r.x, r.y, r.x + r.width, r.y); LineSegment bot = new LineSegment(r.x, r.y + r.height, r.x + r.width, r.y + r.height); LineSegment left = new LineSegment(r.x, r.y, r.x, r.y + r.height); LineSegment rite = new LineSegment(r.x + r.width, r.y, r.x + r.width, r.y + r.height); FloatingPoint top_int = this.getIntersection(top); FloatingPoint bot_int = this.getIntersection(bot); FloatingPoint left_int = 
this.getIntersection(left); FloatingPoint rite_int = this.getIntersection(rite); if(top_int != null && bot_int == null && left_int == null && rite_int == null) { } else if(top_int == null && bot_int != null && left_int == null && rite_int == null) { } else if(top_int == null && bot_int == null && left_int != null && rite_int == null) { } else if(top_int == null && bot_int == null && left_int == null && rite_int != null) { } else if(top_int == null && bot_int == null && left_int == null && rite_int == null) { } else if(top_int != null && bot_int != null && left_int != null && rite_int != null) { return 1; } // top + others else if(top_int != null && bot_int == null && left_int != null && rite_int == null) { float triangle_area = ((float)(top_int.x - r.x) * (left_int.y - r.y))/((float)2); float area_diff = area - triangle_area; return (float)triangle_area/(float)area_diff; } else if(top_int != null && bot_int != null && left_int == null && rite_int == null) { return 1; } else if(top_int != null && bot_int == null && left_int == null && rite_int != null) { float triangle_area = ((float)(r.x + r.width - top_int.x) * (rite_int.y - r.y))/((float)2); float area_diff = area - triangle_area; return (float)triangle_area/(float)area_diff; } // bottom + others else if(top_int == null && bot_int != null && left_int != null && rite_int == null) { float triangle_area = ((float)(bot_int.x - r.x) * (r.y + r.height - left_int.y))/((float)2); float area_diff = area - triangle_area; return (float)triangle_area/(float)area_diff; } else if(top_int == null && bot_int != null && left_int == null && rite_int != null) { float triangle_area = ((float)(r.x + r.width - bot_int.x) * (r.y + r.height - rite_int.y))/((float)2); float area_diff = area - triangle_area; return (float)triangle_area/(float)area_diff; } // left + right else if(top_int == null && bot_int == null && left_int != null && rite_int != null) { return 1; } // 4 other diagonal special cases // topleft + others else if(top_int != 
null && left_int != null && bot_int != null && rite_int == null) { float triangle_area = ((float)(bot_int.x - r.x) * (r.y + r.height - left_int.y))/((float)2); float area_diff = area - triangle_area; return (float)triangle_area/(float)area_diff; } else if(top_int != null && left_int != null && bot_int == null && rite_int == null) { } // toprite + others else if(top_int != null && left_int == null && bot_int != null && rite_int != null) { } else if(top_int != null && left_int != null && bot_int != null && rite_int == null) { } // botleft + others else if(top_int == null && left_int != null && bot_int != null && rite_int != null) { } // botrite + others // else else { System.out.println("Shouldn't be here"); } } return rval; } }
/* Android Asynchronous Http Client Copyright (c) 2011 James Smith <james@loopj.com> http://loopj.com Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package com.pispower.video.sdk.net; import java.io.BufferedInputStream; import java.io.IOException; import java.io.InputStream; import java.lang.reflect.Field; import java.net.InetAddress; import java.net.Socket; import java.security.KeyManagementException; import java.security.KeyStore; import java.security.KeyStoreException; import java.security.NoSuchAlgorithmException; import java.security.UnrecoverableKeyException; import java.security.cert.Certificate; import java.security.cert.CertificateException; import java.security.cert.CertificateFactory; import java.security.cert.X509Certificate; import javax.net.ssl.HttpsURLConnection; import javax.net.ssl.SSLContext; import javax.net.ssl.TrustManager; import javax.net.ssl.X509TrustManager; import org.apache.http.HttpVersion; import org.apache.http.conn.ClientConnectionManager; import org.apache.http.conn.scheme.PlainSocketFactory; import org.apache.http.conn.scheme.Scheme; import org.apache.http.conn.scheme.SchemeRegistry; import org.apache.http.conn.ssl.SSLSocketFactory; import org.apache.http.impl.client.DefaultHttpClient; import org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager; import org.apache.http.params.BasicHttpParams; import org.apache.http.params.HttpParams; import org.apache.http.params.HttpProtocolParams; import org.apache.http.protocol.HTTP; import android.os.Build; /** * This file 
is introduced to fix HTTPS Post bug on API &lt; ICS see * http://code.google.com/p/android/issues/detail?id=13117#c14 * <p> * &nbsp; * </p> * Warning! This omits SSL certificate validation on every device, use with * caution */ public class SimpleSSLSocketFactory extends SSLSocketFactory { SSLContext sslContext = SSLContext.getInstance("TLS"); /** * Creates a new SSL Socket Factory with the given KeyStore. * * @param truststore * A KeyStore to create the SSL Socket Factory in context of * @throws NoSuchAlgorithmException * NoSuchAlgorithmException * @throws KeyManagementException * KeyManagementException * @throws KeyStoreException * KeyStoreException * @throws UnrecoverableKeyException * UnrecoverableKeyException */ public SimpleSSLSocketFactory(KeyStore truststore) throws NoSuchAlgorithmException, KeyManagementException, KeyStoreException, UnrecoverableKeyException { super(truststore); X509TrustManager tm = new X509TrustManager() { public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException { } public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException { } public X509Certificate[] getAcceptedIssuers() { return null; } }; sslContext.init(null, new TrustManager[] { tm }, null); } @Override public Socket createSocket(Socket socket, String host, int port, boolean autoClose) throws IOException { injectHostname(socket, host); return sslContext.getSocketFactory().createSocket(socket, host, port, autoClose); } @Override public Socket createSocket() throws IOException { return sslContext.getSocketFactory().createSocket(); } /** * Makes HttpsURLConnection trusts a set of certificates specified by the * KeyStore */ public void fixHttpsURLConnection() { HttpsURLConnection.setDefaultSSLSocketFactory(sslContext .getSocketFactory()); } /** * Gets a KeyStore containing the Certificate * * @param cert * InputStream of the Certificate * @return KeyStore */ public static KeyStore 
getKeystoreOfCA(InputStream cert) { // Load CAs from an InputStream InputStream caInput = null; Certificate ca = null; try { CertificateFactory cf = CertificateFactory.getInstance("X.509"); caInput = new BufferedInputStream(cert); ca = cf.generateCertificate(caInput); } catch (CertificateException e1) { e1.printStackTrace(); } finally { try { if (caInput != null) { caInput.close(); } } catch (IOException e) { e.printStackTrace(); } } // Create a KeyStore containing our trusted CAs String keyStoreType = KeyStore.getDefaultType(); KeyStore keyStore = null; try { keyStore = KeyStore.getInstance(keyStoreType); keyStore.load(null, null); keyStore.setCertificateEntry("ca", ca); } catch (Exception e) { e.printStackTrace(); } return keyStore; } /** * Gets a Default KeyStore * * @return KeyStore */ public static KeyStore getKeystore() { KeyStore trustStore = null; try { trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); trustStore.load(null, null); } catch (Throwable t) { t.printStackTrace(); } return trustStore; } /** * Returns a SSlSocketFactory which trusts all certificates * * @return SSLSocketFactory */ public static SSLSocketFactory getFixedSocketFactory() { SSLSocketFactory socketFactory; try { socketFactory = new SimpleSSLSocketFactory(getKeystore()); socketFactory .setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); } catch (Throwable t) { t.printStackTrace(); socketFactory = SSLSocketFactory.getSocketFactory(); } return socketFactory; } /** * Pre-ICS Android had a bug resolving HTTPS addresses. This workaround * fixes that bug. 
* * @param socket * The socket to alter * @param host * Hostname to connect to * @see <a * href="https://code.google.com/p/android/issues/detail?id=13117#c14">https://code.google.com/p/android/issues/detail?id=13117#c14</a> */ @SuppressWarnings("deprecation") private void injectHostname(Socket socket, String host) { try { if (Integer.valueOf(Build.VERSION.SDK) >= 4) { Field field = InetAddress.class.getDeclaredField("hostName"); field.setAccessible(true); field.set(socket.getInetAddress(), host); } } catch (Exception ignored) { } } /** * Gets a DefaultHttpClient which trusts a set of certificates specified by * the KeyStore * * @param keyStore * custom provided KeyStore instance * @return DefaultHttpClient */ public static DefaultHttpClient getDefaultHttpClient() { try { KeyStore trustStore = KeyStore.getInstance(KeyStore .getDefaultType()); trustStore.load(null, null); SSLSocketFactory sf = new SimpleSSLSocketFactory(trustStore); sf.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER); SchemeRegistry registry = new SchemeRegistry(); registry.register(new Scheme("http", PlainSocketFactory .getSocketFactory(), 80)); registry.register(new Scheme("https", sf, 443)); HttpParams params = new BasicHttpParams(); HttpProtocolParams.setVersion(params, HttpVersion.HTTP_1_1); HttpProtocolParams.setContentCharset(params, HTTP.UTF_8); ClientConnectionManager ccm = new ThreadSafeClientConnManager( params, registry); return new DefaultHttpClient(ccm, params); } catch (Exception e) { return new DefaultHttpClient(); } } }
package com.moyinoluwa.radarwatchface;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.support.v4.content.ContextCompat;
import android.support.wearable.watchface.CanvasWatchFaceService;
import android.support.wearable.watchface.WatchFaceStyle;
import android.view.SurfaceHolder;

import java.lang.ref.WeakReference;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.concurrent.TimeUnit;

/**
 * Analog watch face with a ticking second hand. In ambient mode, the second hand isn't shown. On
 * devices with low-bit ambient mode, the hands are drawn without anti-aliasing in ambient mode.
 */
public class RadarWatchFace extends CanvasWatchFaceService {
    private static final String TAG = "RadarWatchFace";

    /**
     * Update rate in milliseconds for interactive mode. We update once a second to advance the
     * second hand.
     */
    private static final long INTERACTIVE_UPDATE_RATE_MS = TimeUnit.SECONDS.toMillis(1);

    /**
     * Handler message id for updating the time periodically in interactive mode.
     */
    private static final int MSG_UPDATE_TIME = 0;

    @Override
    public Engine onCreateEngine() {
        return new Engine();
    }

    /**
     * Static handler that holds only a weak reference to the engine so pending
     * time-update messages can't leak the engine instance.
     */
    private static class EngineHandler extends Handler {
        private final WeakReference<RadarWatchFace.Engine> mWeakReference;

        public EngineHandler(RadarWatchFace.Engine reference) {
            mWeakReference = new WeakReference<>(reference);
        }

        @Override
        public void handleMessage(Message msg) {
            RadarWatchFace.Engine engine = mWeakReference.get();
            if (engine != null) {
                switch (msg.what) {
                    case MSG_UPDATE_TIME:
                        engine.handleUpdateTimeMessage();
                        break;
                }
            }
        }
    }

    private class Engine extends CanvasWatchFaceService.Engine {
        private final Handler mUpdateTimeHandler = new EngineHandler(this);
        private boolean mRegisteredTimeZoneReceiver = false;
        private Calendar mCalendar;

        // Variables for painting background
        private Paint mBackgroundPaint;

        // Watch Face Hand related objects
        private Paint mHourAndMinuteHandPaint;
        private Paint mSecondHandPaint;
        private Paint mCirclePaint;
        private Paint mTickPaint;
        private Paint radarTextPaint;
        private Paint hourTextPaint;

        private float mHourHandLength;
        private float mMinuteHandLength;
        private float mSecondHandLength;

        private boolean mAmbient;

        /*
         * Whether the display supports fewer bits for each color in ambient mode. When true, we
         * disable anti-aliasing in ambient mode.
         */
        boolean mLowBitAmbient;

        /*
         * Whether the display supports burn in protection in ambient mode.
         * When true, remove the background in ambient mode.
         */
        private boolean mBurnInProtection;

        private int mWidth;
        private int mHeight;
        private float mCenterX;
        private float mCenterY;

        // Re-reads the device time zone whenever it changes.
        final BroadcastReceiver mTimeZoneReceiver = new BroadcastReceiver() {
            @Override
            public void onReceive(Context context, Intent intent) {
                mCalendar.setTimeZone(TimeZone.getDefault());
                invalidate();
            }
        };

        @Override
        public void onCreate(SurfaceHolder holder) {
            super.onCreate(holder);

            setWatchFaceStyle(new WatchFaceStyle.Builder(RadarWatchFace.this)
                    .setCardPeekMode(WatchFaceStyle.PEEK_MODE_SHORT)
                    .setBackgroundVisibility(WatchFaceStyle.BACKGROUND_VISIBILITY_INTERRUPTIVE)
                    .setShowSystemUiTime(false)
                    .build());

            mCalendar = Calendar.getInstance();

            // Each helper builds one of the Paint objects used in onDraw().
            initializeBackground();
            initializeRadarTextPaint();
            initializeHourTextPaint();
            initializeTickPaint();
            initializeMinuteAndHourHand();
            initializeSecondHand();
            initializeCenterCircle();
        }

        /** Paint for the interactive-mode background color. */
        private void initializeBackground() {
            mBackgroundPaint = new Paint();
            mBackgroundPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.radar_background_color));
        }

        /** Shared paint for both the hour and the minute hand. */
        private void initializeMinuteAndHourHand() {
            Resources resources = RadarWatchFace.this.getResources();

            mHourAndMinuteHandPaint = new Paint();
            mHourAndMinuteHandPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.analog_hands));
            mHourAndMinuteHandPaint.setStrokeWidth(resources.getDimension(R.dimen.analog_hand_stroke));
            mHourAndMinuteHandPaint.setAntiAlias(true);
            mHourAndMinuteHandPaint.setStrokeCap(Paint.Cap.SQUARE);
        }

        /** Paint for the (thinner) second hand. */
        private void initializeSecondHand() {
            Resources resources = RadarWatchFace.this.getResources();

            mSecondHandPaint = new Paint();
            mSecondHandPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.analog_hands));
            mSecondHandPaint.setStrokeWidth(resources.getDimension(R.dimen.tick_hand_stroke));
            mSecondHandPaint.setAntiAlias(true);
            mSecondHandPaint.setStrokeCap(Paint.Cap.SQUARE);
        }

        /** Paint for the small circle at the hands' pivot. */
        private void initializeCenterCircle() {
            mCirclePaint = new Paint();
            mCirclePaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.analog_hands));
            mCirclePaint.setAntiAlias(true);
        }

        /** Paint for the minute tick marks. */
        private void initializeTickPaint() {
            Resources resources = RadarWatchFace.this.getResources();

            mTickPaint = new Paint();
            mTickPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.tick_color));
            mTickPaint.setStrokeWidth(resources.getDimension(R.dimen.tick_hand_stroke));
            mTickPaint.setAntiAlias(true);
        }

        /** Paint for the large "radar" caption drawn above the pivot. */
        private void initializeRadarTextPaint() {
            Resources resources = RadarWatchFace.this.getResources();
            Typeface radarTextTypeface = Typeface.createFromAsset(getAssets(), "fonts/NexaLight.ttf");
            float radarTextSize = 60;

            radarTextPaint = new Paint();
            radarTextPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.radar_text_color));
            radarTextPaint.setStrokeWidth(resources.getDimension(R.dimen.radar_hand_stroke));
            radarTextPaint.setAntiAlias(true);
            radarTextPaint.setTextAlign(Paint.Align.LEFT);
            radarTextPaint.setTextSize(radarTextSize);
            radarTextPaint.setTypeface(radarTextTypeface);
        }

        /** Paint for the 12 numeric hour labels. */
        private void initializeHourTextPaint() {
            Resources resources = RadarWatchFace.this.getResources();
            Typeface radarTextTypeface = Typeface.createFromAsset(getAssets(), "fonts/NexaLight.ttf");
            float hourTextSize = 20;

            hourTextPaint = new Paint();
            hourTextPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.tick_color));
            hourTextPaint.setStrokeWidth(resources.getDimension(R.dimen.radar_hand_stroke));
            hourTextPaint.setAntiAlias(true);
            hourTextPaint.setTextAlign(Paint.Align.LEFT);
            hourTextPaint.setTextSize(hourTextSize);
            hourTextPaint.setTypeface(radarTextTypeface);
        }

        @Override
        public void onDestroy() {
            // Stop any queued ticking updates before tearing down.
            mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME);
            super.onDestroy();
        }

        @Override
        public void onPropertiesChanged(Bundle properties) {
            super.onPropertiesChanged(properties);
            mLowBitAmbient = properties.getBoolean(PROPERTY_LOW_BIT_AMBIENT, false);
            mBurnInProtection = properties.getBoolean(PROPERTY_BURN_IN_PROTECTION, false);
        }

        @Override
        public void onTimeTick() {
            super.onTimeTick();
            invalidate();
        }

        @Override
        public void onAmbientModeChanged(boolean inAmbientMode) {
            super.onAmbientModeChanged(inAmbientMode);

            if (mAmbient != inAmbientMode) {
                mAmbient = inAmbientMode;

                // Low-bit / burn-in displays can't render anti-aliased strokes
                // in ambient mode, so toggle it on every paint.
                if (mLowBitAmbient || mBurnInProtection) {
                    mHourAndMinuteHandPaint.setAntiAlias(!inAmbientMode);
                    mSecondHandPaint.setAntiAlias(!inAmbientMode);
                    mTickPaint.setAntiAlias(!inAmbientMode);
                    mCirclePaint.setAntiAlias(!inAmbientMode);
                    radarTextPaint.setAntiAlias(!inAmbientMode);
                    hourTextPaint.setAntiAlias(!inAmbientMode);
                }
                invalidate();
            }

            // Whether the timer should be running depends on whether we're visible (as well as
            // whether we're in ambient mode), so we may need to start or stop the timer.
            updateTimer();
        }

        @Override
        public void onSurfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            super.onSurfaceChanged(holder, format, width, height);

            mWidth = width;
            mHeight = height;

            /*
             * Find the coordinates of the center point on the screen.
             * Ignore the window insets so that, on round watches
             * with a "chin", the watch face is centered on the entire screen,
             * not just the usable portion.
             */
            mCenterX = mWidth / 2f;
            mCenterY = mHeight / 2f;

            // Calculate the lengths of the watch hands and store them in member variables.
            mHourHandLength = mCenterX - 80;
            mMinuteHandLength = mCenterX - 50;
            mSecondHandLength = mCenterX - 20;
        }

        @Override
        public void onDraw(Canvas canvas, Rect bounds) {
            long now = System.currentTimeMillis();
            mCalendar.setTimeInMillis(now);

            // Draw the background.
            drawBackground(canvas);

            // draw 'radar' text
            drawRadarText(canvas);

            // draw the minute ticks
            drawMinuteTicks(canvas);

            // draw the hour ticks
            drawHourTicks(canvas);

            // draw the hour and minute hand
            drawHourAndMinuteHand(canvas);

            // draw second hand
            drawSecondHand(canvas);

            // draw center circle
            drawCircle(canvas);
        }

        /** Solid black in ambient mode, themed color otherwise. */
        private void drawBackground(Canvas canvas) {
            if (isInAmbientMode()) {
                canvas.drawColor(Color.BLACK);
            } else {
                canvas.drawRect(0, 0, canvas.getWidth(), canvas.getHeight(), mBackgroundPaint);
            }
        }

        /** Draws the app-name caption; white in ambient mode for contrast. */
        private void drawRadarText(Canvas canvas) {
            String radarText = getString(R.string.my_analog_name).toLowerCase();

            if (isInAmbientMode()) {
                radarTextPaint.setColor(Color.WHITE);
            } else {
                radarTextPaint.setColor(ContextCompat.getColor(RadarWatchFace.this, R.color.radar_text_color));
            }

            canvas.drawText(radarText, mCenterX - 75f, mCenterY - 40f, radarTextPaint);
        }

        // NOTE(review): positions are hand-tuned pixel offsets relative to the
        // center — they assume a particular face size; confirm on other dials.
        private void drawHourTicks(Canvas canvas) {
            // draws the hour values on specific positions on the canvas
            canvas.drawText("12", mCenterX - 10f, mCenterY - 120f, hourTextPaint);
            canvas.drawText("1", mCenterX + 60f, mCenterY - 100f, hourTextPaint);
            canvas.drawText("2", mCenterX + 105f, mCenterY - 57f, hourTextPaint);
            canvas.drawText("3", mCenterX + 120f, mCenterY + 7f, hourTextPaint);
            canvas.drawText("4", mCenterX + 105f, mCenterY + 73f, hourTextPaint);
            canvas.drawText("5", mCenterX + 55f, mCenterY + 120f, hourTextPaint);
            canvas.drawText("6", mCenterX - 3f, mCenterY + 130f, hourTextPaint);
            canvas.drawText("7", mCenterX - 69f, mCenterY + 120f, hourTextPaint);
            canvas.drawText("8", mCenterX - 115f, mCenterY + 73f, hourTextPaint);
            canvas.drawText("9", mCenterX - 132f, mCenterY + 7f, hourTextPaint);
            canvas.drawText("10", mCenterX - 115f, mCenterY - 57f, hourTextPaint);
            canvas.drawText("11", mCenterX - 69f, mCenterY - 100f, hourTextPaint);
        }

        // ---- drawMinuteTicks() continues past the end of this chunk; the
        // ---- fragment below is kept verbatim.
        private void drawMinuteTicks(Canvas canvas) {
            float innerTickRadius;

            // Doubles the length of the hour tick so that
            // there is a distinction between the minutes and hours
            for (int
tickIndex = 0; tickIndex < 60; tickIndex++) { if (tickIndex == 0 || (tickIndex % 5) == 0) { innerTickRadius = mCenterX - 20; } else { innerTickRadius = mCenterX - 10; } float tickRotation = (float) (tickIndex * Math.PI * 2 / 60); float innerX = (float) Math.sin(tickRotation) * innerTickRadius; float innerY = (float) -Math.cos(tickRotation) * innerTickRadius; float outerX = (float) Math.sin(tickRotation) * mCenterX; float outerY = (float) -Math.cos(tickRotation) * mCenterX; canvas.drawLine(mCenterX + innerX, mCenterY + innerY, mCenterX + outerX, mCenterY + outerY, mTickPaint); } } private void drawHourAndMinuteHand(Canvas canvas) { // Constant to help calculate clock hand rotations final float TWO_PI = (float) Math.PI * 2f; float seconds = mCalendar.get(Calendar.SECOND) + mCalendar.get(Calendar.MILLISECOND) / 1000f; float minutes = mCalendar.get(Calendar.MINUTE) + seconds / 60f; float minutesRotation = minutes / 60f * TWO_PI; float hours = mCalendar.get(Calendar.HOUR) + minutes / 60f; float hoursRotation = hours / 12f * TWO_PI; float minX = (float) Math.sin(minutesRotation) * mMinuteHandLength; float minY = (float) -Math.cos(minutesRotation) * mMinuteHandLength; canvas.drawLine(mCenterX, mCenterY, mCenterX + minX, mCenterY + minY, mHourAndMinuteHandPaint); float hrX = (float) Math.sin(hoursRotation) * mHourHandLength; float hrY = (float) -Math.cos(hoursRotation) * mHourHandLength; canvas.drawLine(mCenterX, mCenterY, mCenterX + hrX, mCenterY + hrY, mHourAndMinuteHandPaint); } private void drawSecondHand(Canvas canvas) { final float seconds = (mCalendar.get(Calendar.SECOND) + mCalendar.get(Calendar.MILLISECOND) / 1000f); // Constant to help calculate clock hand rotations final float TWO_PI = (float) Math.PI * 2f; final float secondsRotation = seconds / 60f * TWO_PI; if (!mAmbient) { float secX = (float) Math.sin(secondsRotation) * mSecondHandLength; float secY = (float) -Math.cos(secondsRotation) * mSecondHandLength; canvas.drawLine(mCenterX, mCenterY, mCenterX + 
secX, mCenterY + secY, mSecondHandPaint); } } private void drawCircle(Canvas canvas) { final float circleRadius = 8.5f; canvas.drawCircle(mCenterX, mCenterY, circleRadius, mCirclePaint); } @Override public void onVisibilityChanged(boolean visible) { super.onVisibilityChanged(visible); if (visible) { registerReceiver(); // Update time zone in case it changed while we weren't visible. mCalendar.setTimeZone(TimeZone.getDefault()); invalidate(); } else { unregisterReceiver(); } // Whether the timer should be running depends on whether we're visible (as well as // whether we're in ambient mode), so we may need to start or stop the timer. updateTimer(); } private void registerReceiver() { if (mRegisteredTimeZoneReceiver) { return; } mRegisteredTimeZoneReceiver = true; IntentFilter filter = new IntentFilter(Intent.ACTION_TIMEZONE_CHANGED); RadarWatchFace.this.registerReceiver(mTimeZoneReceiver, filter); } private void unregisterReceiver() { if (!mRegisteredTimeZoneReceiver) { return; } mRegisteredTimeZoneReceiver = false; RadarWatchFace.this.unregisterReceiver(mTimeZoneReceiver); } /** * Starts the {@link #mUpdateTimeHandler} timer if it should be running and isn't currently * or stops it if it shouldn't be running but currently is. */ private void updateTimer() { mUpdateTimeHandler.removeMessages(MSG_UPDATE_TIME); if (shouldTimerBeRunning()) { mUpdateTimeHandler.sendEmptyMessage(MSG_UPDATE_TIME); } } /** * Returns whether the {@link #mUpdateTimeHandler} timer should be running. The timer should * only run when we're visible and in interactive mode. */ private boolean shouldTimerBeRunning() { return isVisible() && !isInAmbientMode(); } /** * Handle updating the time periodically in interactive mode. 
*/ private void handleUpdateTimeMessage() { invalidate(); if (shouldTimerBeRunning()) { long timeMs = System.currentTimeMillis(); long delayMs = INTERACTIVE_UPDATE_RATE_MS - (timeMs % INTERACTIVE_UPDATE_RATE_MS); mUpdateTimeHandler.sendEmptyMessageDelayed(MSG_UPDATE_TIME, delayMs); } } } }
/* * Copyright (C) 2014 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package com.example.android.dragonTV.ui; import android.app.LoaderManager; import android.content.Intent; import android.content.Loader; import android.graphics.drawable.Drawable; import android.os.Bundle; import android.os.Handler; import android.support.v17.leanback.app.BackgroundManager; import android.support.v17.leanback.app.BrowseFragment; import android.support.v17.leanback.widget.ArrayObjectAdapter; import android.support.v17.leanback.widget.HeaderItem; import android.support.v17.leanback.widget.ImageCardView; import android.support.v17.leanback.widget.ListRow; import android.support.v17.leanback.widget.ListRowPresenter; import android.support.v17.leanback.widget.OnItemViewClickedListener; import android.support.v17.leanback.widget.OnItemViewSelectedListener; import android.support.v17.leanback.widget.Presenter; import android.support.v17.leanback.widget.Row; import android.support.v17.leanback.widget.RowPresenter; import android.support.v4.app.ActivityOptionsCompat; import android.util.DisplayMetrics; import android.util.Log; import android.view.View; import android.widget.Toast; import com.bumptech.glide.Glide; import com.bumptech.glide.load.resource.drawable.GlideDrawable; import com.bumptech.glide.request.animation.GlideAnimation; import com.bumptech.glide.request.target.SimpleTarget; import com.example.android.dragonTV.R; import 
com.example.android.dragonTV.data.VideoItemLoader; import com.example.android.dragonTV.data.VideoProvider; import com.example.android.dragonTV.model.Movie; import com.example.android.dragonTV.presenter.CardPresenter; import com.example.android.dragonTV.presenter.GridItemPresenter; import com.example.android.dragonTV.recommendation.UpdateRecommendationsService; import java.net.URI; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Timer; import java.util.TimerTask; /* * Main class to show BrowseFragment with header and rows of videos */ public class MainFragment extends BrowseFragment implements LoaderManager.LoaderCallbacks<HashMap<String, List<Movie>>> { private static final String TAG = "MainFragment"; private static int BACKGROUND_UPDATE_DELAY = 300; private static String mVideosUrl; private final Handler mHandler = new Handler(); private ArrayObjectAdapter mRowsAdapter; private Drawable mDefaultBackground; private DisplayMetrics mMetrics; private Timer mBackgroundTimer; private URI mBackgroundURI; private BackgroundManager mBackgroundManager; @Override public void onActivityCreated(Bundle savedInstanceState) { Log.d(TAG, "onCreate"); super.onActivityCreated(savedInstanceState); loadVideoData(); prepareBackgroundManager(); setupUIElements(); setupEventListeners(); } @Override public void onDestroy() { super.onDestroy(); if (null != mBackgroundTimer) { Log.d(TAG, "onDestroy: " + mBackgroundTimer.toString()); mBackgroundTimer.cancel(); } } private void prepareBackgroundManager() { mBackgroundManager = BackgroundManager.getInstance(getActivity()); mBackgroundManager.attach(getActivity().getWindow()); mDefaultBackground = getResources().getDrawable(R.drawable.default_background); mMetrics = new DisplayMetrics(); getActivity().getWindowManager().getDefaultDisplay().getMetrics(mMetrics); } private void setupUIElements() { setBadgeDrawable(getActivity().getResources().getDrawable(R.drawable.videos_by_google_banner)); 
setTitle(getString(R.string.browse_title)); // Badge, when set, takes precedent over title setHeadersState(HEADERS_ENABLED); setHeadersTransitionOnBackEnabled(true); // set fastLane (or headers) background color setBrandColor(getResources().getColor(R.color.fastlane_background)); // set search icon color setSearchAffordanceColor(getResources().getColor(R.color.search_opaque)); } private void loadVideoData() { VideoProvider.setContext(getActivity()); mVideosUrl = getActivity().getResources().getString(R.string.catalog_url); getLoaderManager().initLoader(0, null, this); } private void setupEventListeners() { setOnSearchClickedListener(new View.OnClickListener() { @Override public void onClick(View view) { Intent intent = new Intent(getActivity(), SearchActivity.class); startActivity(intent); } }); setOnItemViewClickedListener(new ItemViewClickedListener()); setOnItemViewSelectedListener(new ItemViewSelectedListener()); } /* * (non-Javadoc) * @see android.support.v4.app.LoaderManager.LoaderCallbacks#onCreateLoader(int, * android.os.Bundle) */ @Override public Loader<HashMap<String, List<Movie>>> onCreateLoader(int arg0, Bundle arg1) { Log.d(TAG, "VideoItemLoader created "); return new VideoItemLoader(getActivity(), mVideosUrl); } /* * (non-Javadoc) * @see android.support.v4.app.LoaderManager.LoaderCallbacks#onLoadFinished(android * .support.v4.content.Loader, java.lang.Object) */ @Override public void onLoadFinished(Loader<HashMap<String, List<Movie>>> arg0, HashMap<String, List<Movie>> data) { mRowsAdapter = new ArrayObjectAdapter(new ListRowPresenter()); CardPresenter cardPresenter = new CardPresenter(); int i = 0; for (Map.Entry<String, List<Movie>> entry : data.entrySet()) { ArrayObjectAdapter listRowAdapter = new ArrayObjectAdapter(cardPresenter); List<Movie> list = entry.getValue(); for (int j = 0; j < list.size(); j++) { listRowAdapter.add(list.get(j)); } HeaderItem header = new HeaderItem(i, entry.getKey(), null); i++; mRowsAdapter.add(new ListRow(header, 
listRowAdapter)); } setAdapter(mRowsAdapter); updateRecommendations(); } @Override public void onLoaderReset(Loader<HashMap<String, List<Movie>>> arg0) { mRowsAdapter.clear(); } protected void setDefaultBackground(Drawable background) { mDefaultBackground = background; } protected void setDefaultBackground(int resourceId) { mDefaultBackground = getResources().getDrawable(resourceId); } protected void updateBackground(String uri) { int width = mMetrics.widthPixels; int height = mMetrics.heightPixels; Glide.with(getActivity()) .load(uri) .centerCrop() .error(mDefaultBackground) .into(new SimpleTarget<GlideDrawable>(width, height) { @Override public void onResourceReady(GlideDrawable resource, GlideAnimation<? super GlideDrawable> glideAnimation) { mBackgroundManager.setDrawable(resource); } }); mBackgroundTimer.cancel(); } protected void updateBackground(Drawable drawable) { BackgroundManager.getInstance(getActivity()).setDrawable(drawable); } protected void clearBackground() { BackgroundManager.getInstance(getActivity()).setDrawable(mDefaultBackground); } private void startBackgroundTimer() { if (null != mBackgroundTimer) { mBackgroundTimer.cancel(); } mBackgroundTimer = new Timer(); mBackgroundTimer.schedule(new UpdateBackgroundTask(), BACKGROUND_UPDATE_DELAY); } private void updateRecommendations() { Intent recommendationIntent = new Intent(getActivity(), UpdateRecommendationsService.class); getActivity().startService(recommendationIntent); } private class UpdateBackgroundTask extends TimerTask { @Override public void run() { mHandler.post(new Runnable() { @Override public void run() { if (mBackgroundURI != null) { updateBackground(mBackgroundURI.toString()); } } }); } } private final class ItemViewClickedListener implements OnItemViewClickedListener { @Override public void onItemClicked(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { if (item instanceof Movie) { Movie movie = (Movie) item; Log.d(TAG, "Item: " + 
item.toString()); Intent intent = new Intent(getActivity(), MovieDetailsActivity.class); intent.putExtra(MovieDetailsActivity.MOVIE, movie); Bundle bundle = ActivityOptionsCompat.makeSceneTransitionAnimation( getActivity(), ((ImageCardView) itemViewHolder.view).getMainImageView(), MovieDetailsActivity.SHARED_ELEMENT_NAME).toBundle(); getActivity().startActivity(intent, bundle); } else if (item instanceof String) { if (((String) item).indexOf(getString(R.string.grid_view)) >= 0) { Intent intent = new Intent(getActivity(), VerticalGridActivity.class); startActivity(intent); } else if (((String) item).indexOf(getString(R.string.error_fragment)) >= 0) { Intent intent = new Intent(getActivity(), BrowseErrorActivity.class); startActivity(intent); } else { Toast.makeText(getActivity(), ((String) item), Toast.LENGTH_SHORT) .show(); } } } } private final class ItemViewSelectedListener implements OnItemViewSelectedListener { @Override public void onItemSelected(Presenter.ViewHolder itemViewHolder, Object item, RowPresenter.ViewHolder rowViewHolder, Row row) { if (item instanceof Movie) { mBackgroundURI = ((Movie) item).getBackgroundImageURI(); startBackgroundTimer(); } } } }
/** * */ package org.devgateway.ocds.persistence.mongo.spring; import java.io.Serializable; import java.lang.reflect.Field; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.List; import java.util.Set; import javax.annotation.PostConstruct; import org.apache.commons.beanutils.BeanUtils; import org.apache.commons.beanutils.PropertyUtils; import org.devgateway.ocds.persistence.mongo.Identifiable; import org.devgateway.ocds.persistence.mongo.Record; import org.devgateway.ocds.persistence.mongo.Release; import org.devgateway.ocds.persistence.mongo.Tag; import org.devgateway.ocds.persistence.mongo.merge.Merge; import org.devgateway.ocds.persistence.mongo.merge.MergeStrategy; import org.devgateway.ocds.persistence.mongo.repository.main.RecordRepository; import org.devgateway.ocds.persistence.mongo.repository.main.ReleaseRepository; import org.reflections.Reflections; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import com.google.common.collect.Sets; /** * @author mpostelnicu * */ @Service public class ReleaseCompilerService { protected static final Logger logger = LoggerFactory.getLogger(ReleaseCompilerService.class); @Autowired private ReleaseRepository releaseRepository; @Autowired private RecordRepository recordRepository; // @Autowired // private OcdsSchemaValidatorService ocdsSchemaValidatorService; // // @Autowired // private ObjectMapper jacksonObjectMapper; @Autowired protected Reflections reflections; private Set<Field> fieldsAnnotatedWithMerge; @PostConstruct protected void init() { fieldsAnnotatedWithMerge = Sets.newConcurrentHashSet(reflections.getFieldsAnnotatedWith(Merge.class)); } /** * @param left * @param right * @return * @see {@link MergeStrategy#overwrite} */ protected Object mergeFieldStrategyOverwrite(final Object left, final 
Object right) { return right; } /** * @param left * @param right * @return * @see {@link MergeStrategy#ocdsOmit} */ protected Object mergeFieldStrategyOcdsOmit(final Object left, final Object right) { return null; } /** * * @param left * @param right * @return * @see {@link MergeStrategy#ocdsVersion} */ protected Object mergeFieldStrategyOcdsVersion(final Object left, final Object right) { return right; } protected Identifiable getIdentifiableById(final Serializable id, final Collection<Identifiable> col) { for (Identifiable identifiable : col) { if (identifiable.getIdProperty().equals(id)) { return identifiable; } } return null; } /** * @param leftCollection * @param rightCollection * @return * @see {@link MergeStrategy#arrayMergeById} */ @SuppressWarnings("unchecked") protected <S extends Collection<Identifiable>> S mergeFieldStrategyArrayMergeById(final S leftCollection, final S rightCollection) { // target collections must be instantiated S target = null; try { target = (S) leftCollection.getClass().newInstance(); } catch (InstantiationException | IllegalAccessException e) { throw new RuntimeException(e); } // we add all the left target.addAll(leftCollection); // iterate all right elements for (Identifiable rightIdentifiable : rightCollection) { // if there is an existing element with the same id, perform merge // on the children and replace existing left element Identifiable leftIdentifiable = getIdentifiableById(rightIdentifiable.getIdProperty(), leftCollection); if (leftIdentifiable != null) { target.remove(leftIdentifiable); target.add(mergeOcdsBeans(leftIdentifiable, rightIdentifiable)); } else { // otherwise add the new element to the left list target.add(rightIdentifiable); } } return target; } /** * Merges the fields of the right bean into a shallow copy of the left bean * * @param leftBean * @param rightBean * @return */ @SuppressWarnings("unchecked") protected <S> S mergeOcdsBeans(final S leftBean, final S rightBean) { // if there is no data to the 
right, the merge just returns the // unmutated left if (rightBean == null) { return leftBean; } Class<?> clazz = rightBean.getClass(); if (leftBean != null && !leftBean.getClass().equals(clazz)) { throw new RuntimeException("Attempted the merging of objects of different type!"); } //we perform a shallow copy of the left bean S target; try { target = (S) BeanUtils.cloneBean(leftBean); } catch (IllegalAccessException | InstantiationException | InvocationTargetException | NoSuchMethodException e1) { throw new RuntimeException(e1); } Arrays.asList(rightBean.getClass().getDeclaredFields()).parallelStream().forEach(field -> { try { PropertyUtils.setProperty(target, field.getName(), mergeFieldFromOcdsBeans(field, leftBean, rightBean)); } catch (IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { throw new RuntimeException(e); } }); return target; } /** * Computes the output of an atomic merging operation on a specific field * * @param field the field to perform the merge on * @param leftBean the left bean * @param rightBean the right bean * @return the merged result * @throws IllegalAccessException * @throws InvocationTargetException * @throws NoSuchMethodException */ protected <S> Object mergeFieldFromOcdsBeans(final Field field, final S leftBean, final S rightBean) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException { Object rightFieldValue = PropertyUtils.getProperty(rightBean, field.getName()); Object leftFieldValue = PropertyUtils.getProperty(leftBean, field.getName()); if (fieldsAnnotatedWithMerge.contains(field)) { MergeStrategy mergeStrategy = field.getDeclaredAnnotation(Merge.class).value(); switch (mergeStrategy) { case overwrite: return mergeFieldStrategyOverwrite(leftFieldValue, rightFieldValue); case ocdsOmit: return mergeFieldStrategyOcdsOmit(leftFieldValue, rightFieldValue); case ocdsVersion: return mergeFieldStrategyOcdsVersion(leftFieldValue, rightFieldValue); case arrayMergeById: return 
mergeFieldStrategyArrayMergeById((Collection<Identifiable>) leftFieldValue, (Collection<Identifiable>) rightFieldValue); default: throw new RuntimeException("Unknown or unimplemented merge strategy!"); } } else { // if no merge strategy was defined for the given field, // recursively invoke the method on the field value return mergeOcdsBeans(leftFieldValue, rightFieldValue); } } protected Release createCompiledRelease(final Record record) { // empty records produce null compiled release if (record.getReleases().isEmpty()) { return null; } // records with just one release produce a compiled release identical to // the one release Release left = record.getReleases().get(0); if (record.getReleases().size() > 1) { // we merge each element of the list to its left partner List<Release> subList = record.getReleases().subList(1, record.getReleases().size()); for (Release right : subList) { Release compiled = mergeOcdsBeans(left, right); left = compiled; } } // this was purposefully nullified by ocdsOmit left.setTag(new ArrayList<Tag>()); left.getTag().add(Tag.compiled); return left; } public void createSaveCompiledReleaseAndSaveRecord(final Record record) { Release compiledRelease = createCompiledRelease(record); record.setCompiledRelease(releaseRepository.save(compiledRelease)); recordRepository.save(record); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.util;

import java.util.List;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlElementWrapper;
import javax.xml.bind.annotation.XmlElements;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * A model of a CamelContext stat dump from {@link org.apache.camel.api.management.mbean.ManagedCamelContextMBean#dumpRoutesStatsAsXml(boolean, boolean)}.
 * <p>
 * Pure JAXB data holder: every statistic is bound as an XML attribute of the
 * {@code <camelContextStat>} root element, and the per-route breakdown is
 * bound as nested {@code <routeStat>} elements under {@code <routeStats>}.
 */
@XmlRootElement(name = "camelContextStat")
@XmlAccessorType(XmlAccessType.FIELD)
public final class CamelContextStatDump {

    // Identity of the dumped context.
    @XmlAttribute
    private String id;

    @XmlAttribute
    private String state;

    // Aggregate exchange counters for the whole context.
    @XmlAttribute
    private Long exchangesCompleted;

    @XmlAttribute
    private Long exchangesFailed;

    @XmlAttribute
    private Long failuresHandled;

    @XmlAttribute
    private Long redeliveries;

    // Processing-time statistics (milliseconds, per the MBean dump).
    @XmlAttribute
    private Long minProcessingTime;

    @XmlAttribute
    private Long maxProcessingTime;

    @XmlAttribute
    private Long totalProcessingTime;

    @XmlAttribute
    private Long lastProcessingTime;

    @XmlAttribute
    private Long deltaProcessingTime;

    @XmlAttribute
    private Long meanProcessingTime;

    @XmlAttribute
    private Long exchangesInflight;

    @XmlAttribute
    private Long selfProcessingTime;

    // Timestamps / exchange ids are carried as preformatted strings in the dump.
    @XmlAttribute
    private String resetTimestamp;

    @XmlAttribute
    private String firstExchangeCompletedTimestamp;

    @XmlAttribute
    private String firstExchangeCompletedExchangeId;

    @XmlAttribute
    private String firstExchangeFailureTimestamp;

    @XmlAttribute
    private String firstExchangeFailureExchangeId;

    @XmlAttribute
    private String lastExchangeCompletedTimestamp;

    @XmlAttribute
    private String lastExchangeCompletedExchangeId;

    @XmlAttribute
    private String lastExchangeFailureTimestamp;

    @XmlAttribute
    private String lastExchangeFailureExchangeId;

    // Per-route statistics: <routeStats><routeStat/>...</routeStats>
    @XmlElementWrapper(name = "routeStats")
    @XmlElements({
        @XmlElement(type = RouteStatDump.class, name = "routeStat")
    })
    private List<RouteStatDump> routeStats;

    // Plain JAXB accessors below; no logic beyond get/set.

    public String getId() {
        return id;
    }

    public void setId(String id) {
        this.id = id;
    }

    public String getState() {
        return state;
    }

    public void setState(String state) {
        this.state = state;
    }

    public Long getExchangesCompleted() {
        return exchangesCompleted;
    }

    public void setExchangesCompleted(Long exchangesCompleted) {
        this.exchangesCompleted = exchangesCompleted;
    }

    public Long getExchangesFailed() {
        return exchangesFailed;
    }

    public void setExchangesFailed(Long exchangesFailed) {
        this.exchangesFailed = exchangesFailed;
    }

    public Long getFailuresHandled() {
        return failuresHandled;
    }

    public void setFailuresHandled(Long failuresHandled) {
        this.failuresHandled = failuresHandled;
    }

    public Long getRedeliveries() {
        return redeliveries;
    }

    public void setRedeliveries(Long redeliveries) {
        this.redeliveries = redeliveries;
    }

    public Long getMinProcessingTime() {
        return minProcessingTime;
    }

    public void setMinProcessingTime(Long minProcessingTime) {
        this.minProcessingTime = minProcessingTime;
    }

    public Long getMaxProcessingTime() {
        return maxProcessingTime;
    }

    public void setMaxProcessingTime(Long maxProcessingTime) {
        this.maxProcessingTime = maxProcessingTime;
    }

    public Long getTotalProcessingTime() {
        return totalProcessingTime;
    }

    public void setTotalProcessingTime(Long totalProcessingTime) {
        this.totalProcessingTime = totalProcessingTime;
    }

    public Long getLastProcessingTime() {
        return lastProcessingTime;
    }

    public void setLastProcessingTime(Long lastProcessingTime) {
        this.lastProcessingTime = lastProcessingTime;
    }

    public Long getDeltaProcessingTime() {
        return deltaProcessingTime;
    }

    public void setDeltaProcessingTime(Long deltaProcessingTime) {
        this.deltaProcessingTime = deltaProcessingTime;
    }

    public Long getMeanProcessingTime() {
        return meanProcessingTime;
    }

    public void setMeanProcessingTime(Long meanProcessingTime) {
        this.meanProcessingTime = meanProcessingTime;
    }

    public Long getSelfProcessingTime() {
        return selfProcessingTime;
    }

    public void setSelfProcessingTime(Long selfProcessingTime) {
        this.selfProcessingTime = selfProcessingTime;
    }

    public Long getExchangesInflight() {
        return exchangesInflight;
    }

    public void setExchangesInflight(Long exchangesInflight) {
        this.exchangesInflight = exchangesInflight;
    }

    public String getResetTimestamp() {
        return resetTimestamp;
    }

    public void setResetTimestamp(String resetTimestamp) {
        this.resetTimestamp = resetTimestamp;
    }

    public String getFirstExchangeCompletedTimestamp() {
        return firstExchangeCompletedTimestamp;
    }

    public void setFirstExchangeCompletedTimestamp(String firstExchangeCompletedTimestamp) {
        this.firstExchangeCompletedTimestamp = firstExchangeCompletedTimestamp;
    }

    public String getFirstExchangeCompletedExchangeId() {
        return firstExchangeCompletedExchangeId;
    }

    public void setFirstExchangeCompletedExchangeId(String firstExchangeCompletedExchangeId) {
        this.firstExchangeCompletedExchangeId = firstExchangeCompletedExchangeId;
    }

    public String getFirstExchangeFailureTimestamp() {
        return firstExchangeFailureTimestamp;
    }

    public void setFirstExchangeFailureTimestamp(String firstExchangeFailureTimestamp) {
        this.firstExchangeFailureTimestamp = firstExchangeFailureTimestamp;
    }

    public String getFirstExchangeFailureExchangeId() {
        return firstExchangeFailureExchangeId;
    }

    public void setFirstExchangeFailureExchangeId(String firstExchangeFailureExchangeId) {
        this.firstExchangeFailureExchangeId = firstExchangeFailureExchangeId;
    }

    public String getLastExchangeCompletedTimestamp() {
        return lastExchangeCompletedTimestamp;
    }

    public void setLastExchangeCompletedTimestamp(String lastExchangeCompletedTimestamp) {
        this.lastExchangeCompletedTimestamp = lastExchangeCompletedTimestamp;
    }

    public String getLastExchangeCompletedExchangeId() {
        return lastExchangeCompletedExchangeId;
    }

    public void setLastExchangeCompletedExchangeId(String lastExchangeCompletedExchangeId) {
        this.lastExchangeCompletedExchangeId = lastExchangeCompletedExchangeId;
    }

    public String getLastExchangeFailureTimestamp() {
        return lastExchangeFailureTimestamp;
    }

    public void setLastExchangeFailureTimestamp(String lastExchangeFailureTimestamp) {
        this.lastExchangeFailureTimestamp = lastExchangeFailureTimestamp;
    }

    public String getLastExchangeFailureExchangeId() {
        return lastExchangeFailureExchangeId;
    }

    public void setLastExchangeFailureExchangeId(String lastExchangeFailureExchangeId) {
        this.lastExchangeFailureExchangeId = lastExchangeFailureExchangeId;
    }

    public List<RouteStatDump> getRouteStats() {
        return routeStats;
    }

    public void setRouteStats(List<RouteStatDump> routeStats) {
        this.routeStats = routeStats;
    }
}
/* * Copyright (C) 2009 Myriad Group AG Copyright (C) 2009 The Android Open Source * Project * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package info.guardianproject.otr.app.im.app; import info.guardianproject.otr.app.im.IImConnection; import info.guardianproject.otr.app.im.R; import info.guardianproject.otr.app.im.engine.ImConnection; import info.guardianproject.otr.app.im.provider.Imps; import info.guardianproject.otr.app.im.service.ImServiceConstants; import android.app.Activity; import android.content.ContentResolver; import android.content.ContentUris; import android.content.Context; import android.content.Intent; import android.content.res.Resources; import android.database.Cursor; import android.os.AsyncTask; import android.os.Handler; import android.os.Message; import android.os.RemoteException; import android.util.Log; import android.view.View; import android.widget.CompoundButton; import android.widget.CompoundButton.OnCheckedChangeListener; import android.widget.ImageView; import android.widget.LinearLayout; import android.widget.TextView; public class ProviderListItem extends LinearLayout { private Activity mActivity; private SignInManager mSignInManager; private ContentResolver mResolver; private CompoundButton mSignInSwitch; private OnCheckedChangeListener mCheckedChangeListner = new OnCheckedChangeListener(){ @Override public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) { if (isChecked) mSignInManager.signIn(mAccountId); else 
mSignInManager.signOut(mAccountId); mUserChanged = true; } }; private boolean mUserChanged = false; private TextView mProviderName; private TextView mLoginName; private TextView mChatView; private ImageView mBtnSettings; private int mProviderIdColumn; private int mActiveAccountIdColumn; private int mActiveAccountUserNameColumn; private int mAccountPresenceStatusColumn; private int mAccountConnectionStatusColumn; private long mAccountId; private boolean mShowLongName = false; private ImApp mApp = null; private AsyncTask<Void, Void, Void> mBindTask; private Handler mHandler = new Handler() { @Override public void handleMessage(Message msg) { super.handleMessage(msg); //update notifications from async task } }; public ProviderListItem(Context context, Activity activity, SignInManager signInManager) { super(context); mActivity = activity; mSignInManager = signInManager; mApp = (ImApp)activity.getApplication(); mResolver = mApp.getContentResolver(); } public void init(Cursor c, boolean showLongName) { mShowLongName = showLongName; mProviderIdColumn = c.getColumnIndexOrThrow(Imps.Provider._ID); mSignInSwitch = (CompoundButton) findViewById(R.id.statusSwitch); mProviderName = (TextView) findViewById(R.id.providerName); mLoginName = (TextView) findViewById(R.id.loginName); mChatView = (TextView) findViewById(R.id.conversations); mBtnSettings = (ImageView)findViewById(R.id.btnSettings); mActiveAccountIdColumn = c.getColumnIndexOrThrow(Imps.Provider.ACTIVE_ACCOUNT_ID); mActiveAccountUserNameColumn = c .getColumnIndexOrThrow(Imps.Provider.ACTIVE_ACCOUNT_USERNAME); mAccountPresenceStatusColumn = c .getColumnIndexOrThrow(Imps.Provider.ACCOUNT_PRESENCE_STATUS); mAccountConnectionStatusColumn = c .getColumnIndexOrThrow(Imps.Provider.ACCOUNT_CONNECTION_STATUS); if (mSignInSwitch != null) { mProviderName.setOnClickListener(new OnClickListener () { @Override public void onClick(View v) { Intent intent = new Intent(getContext(), NewChatActivity.class); 
intent.putExtra(ImServiceConstants.EXTRA_INTENT_ACCOUNT_ID, mAccountId); getContext().startActivity(intent); } }); mLoginName.setOnClickListener(new OnClickListener () { @Override public void onClick(View v) { Intent intent = new Intent(getContext(), NewChatActivity.class); intent.putExtra(ImServiceConstants.EXTRA_INTENT_ACCOUNT_ID, mAccountId); getContext().startActivity(intent); } }); mSignInSwitch.setOnCheckedChangeListener(mCheckedChangeListner); if (mBtnSettings != null) { mBtnSettings.setOnClickListener(new OnClickListener() { @Override public void onClick(View v) { Intent intent = new Intent(Intent.ACTION_EDIT, ContentUris.withAppendedId( Imps.Account.CONTENT_URI, mAccountId)); intent.addCategory(ImApp.IMPS_CATEGORY); mActivity.startActivity(intent); } }); } } /* mStatusSwitch.setOnClickListener(new OnClickListener (){ @Override public void onClick(View v) { if (mStatusSwitch.isChecked()) mSignInManager.signIn(mAccountId); else mSignInManager.signOut(mAccountId); } });*/ } @Override protected void onAttachedToWindow() { super.onAttachedToWindow(); } public void bindView(Cursor cursor) { final Resources r = getResources(); final int providerId = cursor.getInt(mProviderIdColumn); mAccountId = cursor.getLong(mActiveAccountIdColumn); setTag(mAccountId); if (!cursor.isNull(mActiveAccountIdColumn)) { final String activeUserName = cursor.getString(mActiveAccountUserNameColumn); final int connectionStatus = cursor.getInt(mAccountConnectionStatusColumn); final String presenceString = getPresenceString(cursor, getContext()); if (mChatView != null) mChatView.setVisibility(View.GONE); runBindTask(r, providerId, activeUserName, connectionStatus, presenceString); } } @Override protected void onDetachedFromWindow() { if (mBindTask != null) mBindTask.cancel(false); mBindTask = null; super.onDetachedFromWindow(); } private void runBindTask(final Resources r, final int providerId, final String activeUserName, final int dbConnectionStatus, final String presenceString) { if 
(mBindTask != null) mBindTask.cancel(false); mBindTask = new AsyncTask<Void, Void, Void>() { private String mProviderNameText; private String mSecondRowText; private boolean mSwitchOn; @Override protected Void doInBackground(Void... params) { if (providerId != -1) { try { Cursor pCursor = mResolver.query(Imps.ProviderSettings.CONTENT_URI,new String[] {Imps.ProviderSettings.NAME, Imps.ProviderSettings.VALUE},Imps.ProviderSettings.PROVIDER + "=?",new String[] { Long.toString( providerId)},null); Imps.ProviderSettings.QueryMap settings = new Imps.ProviderSettings.QueryMap(pCursor, mResolver, providerId, false /* keep updated */, mHandler /* no handler */); String userDomain = settings.getDomain(); int connectionStatus = dbConnectionStatus; IImConnection conn = mApp.getConnection(providerId); if (conn == null) { connectionStatus = ImConnection.DISCONNECTED; } else { try { connectionStatus = conn.getState(); } catch (RemoteException e) { // TODO Auto-generated catch block e.printStackTrace(); } } if (mShowLongName) mProviderNameText = activeUserName + '@' + userDomain; else mProviderNameText = activeUserName; switch (connectionStatus) { case ImConnection.LOGGING_IN: case ImConnection.SUSPENDING: case ImConnection.SUSPENDED: mSecondRowText = r.getString(R.string.signing_in_wait); mSwitchOn = true; break; case ImConnection.LOGGED_IN: mSwitchOn = true; mSecondRowText = computeSecondRowText(presenceString, r, settings, true); break; case ImConnection.LOGGING_OUT: mSwitchOn = false; mSecondRowText = r.getString(R.string.signing_out_wait); break; default: mSwitchOn = false; mSecondRowText = computeSecondRowText(presenceString, r, settings, false); break; } settings.close(); } catch (NullPointerException npe) { Log.d(ImApp.LOG_TAG,"null on QueryMap (this shouldn't happen anymore, but just in case)",npe); } } return null; } @Override protected void onPostExecute(Void result) { if (mProviderNameText != null) applyView(mProviderNameText, mSwitchOn, mSecondRowText); } }; 
mBindTask.execute(); } private void applyView(String providerNameText, boolean switchOn, String secondRowText) { mProviderName.setText(providerNameText); if (mSignInSwitch != null && (!mUserChanged)) { mSignInSwitch.setOnCheckedChangeListener(null); mSignInSwitch.setChecked(switchOn); mSignInSwitch.setOnCheckedChangeListener(mCheckedChangeListner); } if (mLoginName != null) mLoginName.setText(secondRowText); } private String computeSecondRowText(String presenceString, Resources r, final Imps.ProviderSettings.QueryMap settings, boolean showPresence) { String secondRowText; StringBuffer secondRowTextBuffer = new StringBuffer(); if (showPresence) { secondRowTextBuffer.append(presenceString); secondRowTextBuffer.append(" - "); } if (settings.getServer() != null && settings.getServer().length() > 0) { secondRowTextBuffer.append(settings.getServer()); } else { secondRowTextBuffer.append(settings.getDomain()); } if (settings.getPort() != 5222 && settings.getPort() != 0) secondRowTextBuffer.append(':').append(settings.getPort()); if (settings.getUseTor()) { secondRowTextBuffer.append(" - "); secondRowTextBuffer.append(r.getString(R.string._via_orbot)); } secondRowText = secondRowTextBuffer.toString(); return secondRowText; } public Long getAccountID () { return mAccountId; } private String getPresenceString(Cursor cursor, Context context) { int presenceStatus = cursor.getInt(mAccountPresenceStatusColumn); switch (presenceStatus) { case Imps.Presence.AVAILABLE: return context.getString(R.string.presence_available); case Imps.Presence.IDLE: return context.getString(R.string.presence_idle); case Imps.Presence.AWAY: return context.getString(R.string.presence_away); case Imps.Presence.DO_NOT_DISTURB: return context.getString(R.string.presence_busy); case Imps.Presence.INVISIBLE: return context.getString(R.string.presence_invisible); default: return context.getString(R.string.presence_offline); } } public interface SignInManager { public void signIn (long accountId); public void 
signOut (long accountId);
    }

    /**
     * Applies account state supplied directly by the adapter to this row's
     * views (provider name, sign-in switch, second row), without re-querying
     * the provider-settings database.
     */
    public void applyView( AccountAdapter.AccountSetting accountSetting )
    {
        // provide name
        String providerNameText = accountSetting.activeUserName;

        if (mShowLongName)
            providerNameText += '@' + accountSetting.domain;

        mProviderName.setText(providerNameText);

        // switch: on while signing in / suspended / signed in, off otherwise
        boolean switchOn = false;
        String secondRowText;
        switch (accountSetting.connectionStatus) {
        case ImConnection.LOGGING_IN:
        case ImConnection.SUSPENDING:
        case ImConnection.SUSPENDED:
            switchOn = true;
            secondRowText = getResources().getString(R.string.signing_in_wait);
            break;

        case ImConnection.LOGGED_IN:
            switchOn = true;
            secondRowText = computeSecondRowText(accountSetting, true);
            break;

        default:
            switchOn = false;
            secondRowText = computeSecondRowText(accountSetting, false);
            break;
        }

        // Detach the listener while setting the switch programmatically so the
        // change is not mistaken for a user-initiated sign-in/sign-out.
        if (mSignInSwitch != null && (!mUserChanged)) {
            mSignInSwitch.setOnCheckedChangeListener(null);
            mSignInSwitch.setChecked(switchOn);
            mSignInSwitch.setOnCheckedChangeListener(mCheckedChangeListner);
        }

        // login name
        if (mLoginName != null)
            mLoginName.setText(secondRowText);
    };

    /**
     * Maps an {@link Imps.Presence} status constant to its localized display
     * string; unknown values fall back to "offline".
     */
    private String getPresenceString( Context context, int presenceStatus) {
        switch (presenceStatus) {
        case Imps.Presence.AVAILABLE:
            return context.getString(R.string.presence_available);

        case Imps.Presence.IDLE:
            return context.getString(R.string.presence_idle);

        case Imps.Presence.AWAY:
            return context.getString(R.string.presence_away);

        case Imps.Presence.DO_NOT_DISTURB:
            return context.getString(R.string.presence_busy);

        case Imps.Presence.INVISIBLE:
            return context.getString(R.string.presence_invisible);

        default:
            return context.getString(R.string.presence_offline);
        }
    }

    /**
     * Builds the second-row text ("presence - host[:port][ - via Orbot]") from
     * adapter-supplied account settings.
     */
    private String computeSecondRowText( AccountAdapter.AccountSetting accountSetting, boolean showPresence ) {
        StringBuffer secondRowTextBuffer = new StringBuffer();

        if (showPresence) {
            // NOTE(review): accountSetting.connectionStatus is passed where a
            // presence status constant is expected (Imps.Presence.*, not
            // ImConnection states) -- confirm AccountSetting carries a presence
            // value here, otherwise the wrong string may be shown.
            secondRowTextBuffer.append( getPresenceString(mActivity, accountSetting.connectionStatus));
            secondRowTextBuffer.append(" - ");
        }

        if (accountSetting.host != null && accountSetting.host.length() > 0)
        {
            secondRowTextBuffer.append(accountSetting.host);
        }
        else
        {
            secondRowTextBuffer.append(accountSetting.domain);
        }

        // 5222 is the default XMPP port; only show a non-default, non-zero port.
        if (accountSetting.port != 5222 && accountSetting.port != 0)
            secondRowTextBuffer.append(':').append(accountSetting.port);

        if (accountSetting.isTor) {
            secondRowTextBuffer.append(" - ");
            secondRowTextBuffer.append(mActivity.getString(R.string._via_orbot));
        }

        return secondRowTextBuffer.toString();
    }
}
/* * Copyright (c) 1997 - 2016 * Actelion Pharmaceuticals Ltd. * Gewerbestrasse 16 * CH-4123 Allschwil, Switzerland * * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. Neither the name of the the copyright holder nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
* */ package com.actelion.research.util; import java.lang.reflect.Array; import java.text.DecimalFormat; import java.util.*; public class ArrayUtils { public static boolean equals(int [] a, int [] b){ boolean eq = true; if(a==null && b==null){ return true; } else if(a==null){ return false; } else if(b==null){ return false; } if(a.length!=b.length){ return false; } for (int i = 0; i < a.length; i++) { if(a[i]!=b[i]){ eq=false; break; } } return eq; } /** * Resize an array of Object */ public final static double[] cut(double a[], int off, int len) { double[] res = new double[a.length-len]; for(int i=0; i<off; i++) { res[i] = a[i]; } for(int i=off; i<res.length; i++) { res[i] = a[i+len]; } return res; } /** * Converts a List of Integer to an int[] * @param list * @return an array of int */ public final static int[] toIntArray(List<Integer> list) { int[] res = new int[list.size()]; int index = 0; Iterator iter = list.iterator(); while(iter.hasNext()) { Integer i = (Integer) iter.next(); res[index++] = i.intValue(); } return res; } public final static<T> List<T> toList(T [] arr) { List<T> li = new ArrayList<T>(arr.length); for (T t : arr) { li.add(t); } return li; } public final static List<Integer> toList(int [] arr) { List<Integer> li = new ArrayList<Integer>(arr.length); for (int t : arr) { li.add(t); } return li; } public final static void toList(int [] arr, List<Integer> li) { for (int t : arr) { li.add(t); } } public final static String[] toStringArray(List<String> list) { String[] res = new String[list.size()]; int index = 0; Iterator<String> iter = list.iterator(); while(iter.hasNext()) { res[index++] = iter.next(); } return res; } public final static int indexOf(Object[] array, Object obj) { for (int i = 0; i < array.length; i++) { if(array[i].equals(obj)) return i; } return -1; } public final static int indexOf(int[] array, int obj) { return indexOf(array, obj, 0); } public final static int indexOf(int[] array, int obj, int startIndex) { for (int i = 
startIndex; i < array.length; i++) { if(array[i] == obj) return i; } return -1; } public final static int indexOf(byte[] array, byte obj) { return indexOf(array, obj, 0); } public final static int indexOf(byte[] array, byte obj, int startIndex) { for (int i = startIndex; i < array.length; i++) { if(array[i] == obj) return i; } return -1; } public final static int sum(int[] array) { int res = 0; for(int i=0; i<array.length; i++) { res += array[i]; } return res; } public final static double sum(double[] array) { double res = 0; for(int i=0; i<array.length; i++) { res += array[i]; } return res; } public final static double min(double[] array) { if(array.length==0) return 0; double res = array[0]; for(int i=1; i<array.length; i++) { res = Math.min(res, array[i]); } return res; } public final static double max(double[] array) { if(array.length==0) return 0; double res = array[0]; for(int i=1; i<array.length; i++) { res = Math.max(res, array[i]); } return res; } public final static int max(int[] array) { if(array.length==0) return 0; int res = array[0]; for(int i=1; i<array.length; i++) { res = Math.max(res, array[i]); } return res; } public final static float max(float [] array) { if(array.length==0) return 0; float res = array[0]; for(int i=1; i<array.length; i++) { res = Math.max(res, array[i]); } return res; } public final static String toString(int[] v) { String res = "["; for(int i=0; i<v.length; i++) { res += (i>0?", ":"") + v[i] ; } return res + "]"; } public final static String toString(byte[] v) { String res = "["; for(int i=0; i<v.length; i++) { res += (i>0?", ":"") + v[i] ; } return res + "]"; } public final static String toString(double[] v) { String res = "["; for(int i=0; i<v.length; i++) { res += (i>0?", ":"") + v[i] ; } return res + "]"; } public final static String toString(float[] v) { String res = "["; for(int i=0; i<v.length; i++) { res += (i>0?", ":"") + v[i] ; } return res + "]"; } public final static String toString(double[] v, DecimalFormat df) { 
StringBuilder sb = new StringBuilder(); sb.append("["); for(int i=0; i<v.length; i++) { sb.append(df.format(v[i])); if(i<v.length-1){ sb.append(", "); } } sb.append("]"); return sb.toString(); } public final static String toString(Object[] v) { String res = "["; for(int i=0; i<v.length; i++) { res += (i>0?", ":"") + v[i] ; } return res + "]"; } public final static void shift(int[] v, int n) { int[] copy = new int[v.length]; for(int i=0; i<v.length; i++) copy[i] = v[(i+n+v.length)%v.length]; System.arraycopy(copy, 0, v, 0, v.length); } public static void shuffle(int [] arr, Random random){ int cycles = 7; int size = arr.length; for (int i = 0; i < cycles; i++) { for (int j = 0; j < size; j++) { int dest = random.nextInt(size); if(dest==j){ continue; } int v = arr[j]; arr[j] = arr[dest]; arr[dest] = v; } } } public static void shuffle(int [] arr){ Random rnd = new Random(); shuffle(arr, rnd); } public static <T> void shuffle(T [] arr){ Random rnd = new Random(); int cycles = 7; int size = arr.length; for (int i = 0; i < cycles; i++) { for (int j = 0; j < size; j++) { int dest = rnd.nextInt(size); if(dest==j){ continue; } T v = arr[j]; arr[j] = arr[dest]; arr[dest] = v; } } } public final static boolean contains(List<int[]> list, int[] arr) { for (int[] arr2: list ) { if(arr.length!=arr2.length) continue; for (int i = 0; i < arr2.length; i++) if(arr2[i]!=arr[i]) continue; return true; } return false; } public static void set(boolean [] a, boolean v){ for (int i = 0; i < a.length; i++) { a[i]=v; } } }
package ca.ualberta.lard; import ca.ualberta.lard.model.GeoLocation; import ca.ualberta.lard.model.GeoLocationMap; import android.os.Bundle; import android.app.Activity; import android.content.Intent; import android.graphics.Color; import android.view.View; import android.widget.AdapterView; import android.widget.AdapterView.OnItemSelectedListener; import android.widget.ArrayAdapter; import android.widget.EditText; import android.widget.RadioButton; import android.widget.Spinner; import android.widget.TextView; import android.widget.Toast; /** * Returns A GeoLocation through intents. Can create a GeoLocation with your current * gps location, a location based on a list of pre-created locations on campus or * a location created manually by inputing the desired latitude and longitude. * @author Thomas */ public class LocationSelectionActivity extends Activity { private boolean gpsLocationClicked; private boolean selectedLocationClicked; private boolean customLocationClicked; private Spinner spinner; private String selectedLocationString; private GeoLocation geoLocation; // For getting the location set by this activity public static final String LOCATION_REQUEST = "LOCATION"; /** * Gets the widgets and sets them to their default state. The Default state is that the RadioButton * for using your current gps location is clicked. 
It populates the spinner with some default * location across local campus (CSC, CAB, etc) and makes the spinner clickable */ @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_location_selection); //Makes the action bar title look cleaner getActionBar().setTitle("Location Settings"); // Default state is gps location is set to true gpsLocationClicked = true; selectedLocationClicked = false; customLocationClicked = false; RadioButton gpsLocationRadioButton = (RadioButton) findViewById(R.id.gpsRadioButton); RadioButton selectedLocationRadioButton = (RadioButton) findViewById(R.id.selectLocationRadioButton); RadioButton customLocationRadioButton = (RadioButton) findViewById(R.id.customLocationRadioButton); gpsLocationRadioButton.setChecked(true); // Locks gps RadioButton and unlocks Selected RadioButton gpsLocationRadioButton.setClickable(false); selectedLocationRadioButton.setClickable(true); customLocationRadioButton.setClickable(true); spinner = (Spinner) findViewById(R.id.locationSpinner); // Gets a list of locations from the GeoLocation map then converts it to a string array ArrayAdapter<CharSequence> adapter = ArrayAdapter.createFromResource(this, R.array.location_array, R.layout.spinner_item); //sets the spinner item as the custom one so you can have increased text size adapter.setDropDownViewResource(R.layout.spinner_item); spinner.setAdapter(adapter); spinner.setOnItemSelectedListener(new OnItemSelectedListener() { /** * Gets the name of the clicked item then stores the item's string in an attribute * so that when you click save it had a selected location to build a GeoLocation from. 
*/ @Override public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) { String string = (spinner.getItemAtPosition(position)).toString(); selectedLocationString = string; //sets text color of spinner to white TextView textView = (TextView) spinner.getSelectedView(); textView.setTextColor(Color.WHITE); } /** * does nothing, but needed for Listener */ @Override public void onNothingSelected(AdapterView<?> parentView) { } }); } /** * Gets RadioButton clicked then unchecks the other RadioButton, also sets the bools * @param view */ public void gpsLocationClick(View view) { gpsLocationClicked = true; selectedLocationClicked = false; customLocationClicked = false; radioButtonSelector(view); } /** * Gets the clicked RadioButton then unchecks the other RadioButton, also sets the bools * @param view */ public void selectedLocationClick(View view) { gpsLocationClicked = false; selectedLocationClicked = true; customLocationClicked = false; radioButtonSelector(view); } /** * Gets the clicked RadioButton then unchecks the other RadioButton, also sets the bools * @param view */ public void customLocationClick(View view) { gpsLocationClicked = false; selectedLocationClicked = false; customLocationClicked = true; radioButtonSelector(view); } /** * Makes sure you cant have both RadioButtons selected at the same time * @param view */ private void radioButtonSelector(View view) { RadioButton gpsLocationRadioButton = (RadioButton) findViewById(R.id.gpsRadioButton); RadioButton selectedLocationRadioButton = (RadioButton) findViewById(R.id.selectLocationRadioButton); RadioButton customLocationRadioButton = (RadioButton) findViewById(R.id.customLocationRadioButton); // swaps which RadioButton has been clicked gpsLocationRadioButton.setChecked(gpsLocationClicked); selectedLocationRadioButton.setChecked(selectedLocationClicked); customLocationRadioButton.setChecked(customLocationClicked); // swaps which RadioButton can be clicked 
gpsLocationRadioButton.setClickable(selectedLocationClicked || customLocationClicked); selectedLocationRadioButton.setClickable(gpsLocationClicked || customLocationClicked); customLocationRadioButton.setClickable(selectedLocationClicked || gpsLocationClicked); } /** * Will return a GeoLocation when you press the save button, what GeoLocation * You get will depend on what RadioButtons are selected and what Location * you have selected on the spinner or what value you have in the lat and lon fields. * @param view */ public void locationSaveClick(View view){ if(gpsLocationClicked == true) { // Create a gps location from current phone context geoLocation = new GeoLocation(getApplicationContext()); if (geoLocation != null) { // serializes string String geoString = geoLocation.toJSON(); // sends serialized string to parent activity Intent resultData = new Intent(); resultData.putExtra(LOCATION_REQUEST, geoString); setResult(Activity.RESULT_OK, resultData); } finish(); } else if (selectedLocationClicked == true) { if (selectedLocationString != null || selectedLocationString.isEmpty() == false) { //Creates a new GeoLocation based on what location was selected GeoLocationMap geoMap = new GeoLocationMap(); double lat = (geoMap.getMap()).get(selectedLocationString).first; double lon = (geoMap.getMap()).get(selectedLocationString).second; geoLocation = new GeoLocation(lat,lon); //serializes the GeoLocation String geoString = geoLocation.toJSON(); //Sends the serialized GeoLocation to the parent activity Intent resultData = new Intent(); resultData.putExtra(LOCATION_REQUEST, geoString); setResult(Activity.RESULT_OK, resultData); } finish(); } else if (customLocationClicked == true) { EditText latEditText = (EditText) findViewById(R.id.latEditText); EditText lonEditText = (EditText) findViewById(R.id.lonEditText); //checks if strings are empty, if are then make it 0 String latString = latEditText.getText().toString(); String lonString = lonEditText.getText().toString(); 
if(latString.matches("")) { latEditText.setText("0"); } if( lonString.matches("") ) { lonEditText.setText("0"); } double lat = Double.parseDouble(latEditText.getText().toString()); double lon = Double.parseDouble(lonEditText.getText().toString()); geoLocation = new GeoLocation(lat, lon); if (geoLocation != null ) { // serializes string String geoString = geoLocation.toJSON(); // sends serialized string to parent activity Intent resultData = new Intent(); resultData.putExtra(LOCATION_REQUEST, geoString); setResult(Activity.RESULT_OK, resultData); finish(); } } else { // We should not of got here Toast.makeText(getApplicationContext(), "Neither boolean set to true", Toast.LENGTH_SHORT).show(); finish(); } } }
/* * Copyright (c) 2015 by David Hardy. Licensed under the Apache License, Version 2.0. */ package nl.endran.scrumpoker; import android.content.Context; import android.content.Intent; import android.content.IntentSender; import android.os.Bundle; import android.os.Handler; import android.support.annotation.CallSuper; import android.support.annotation.Nullable; import android.support.design.widget.NavigationView; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.support.v7.app.ActionBarDrawerToggle; import android.support.v7.widget.Toolbar; import android.util.Log; import android.view.MenuItem; import android.widget.Toast; import com.crashlytics.android.answers.ContentViewEvent; import com.crashlytics.android.answers.CustomEvent; import com.crashlytics.android.answers.ShareEvent; import com.google.android.gms.common.api.Status; import nl.endran.scrumpoker.fragments.cardselection.AboutFragment; import nl.endran.scrumpoker.fragments.cardselection.CardDisplayFragment; import nl.endran.scrumpoker.fragments.cardselection.CardSelection; import nl.endran.scrumpoker.fragments.cardselection.CardSelectionFragment; import nl.endran.scrumpoker.fragments.cardselection.DeckType; import nl.endran.scrumpoker.fragments.cardselection.QuickSettingsFragment; import nl.endran.scrumpoker.fragments.cardselection.SettingsFragment; import nl.endran.scrumpoker.nearby.NearbyHelper; import nl.endran.scrumpoker.nearby.NearbyManager; import nl.endran.scrumpoker.nearby.PermissionCheckCallback; import uk.co.chrisjenx.calligraphy.CalligraphyContextWrapper; public class MainActivity extends BaseActivity { private static final int REQUEST_RESOLVE_ERROR = 892374; private CardDisplayFragment cardDisplayFragment; private CardSelectionFragment cardSelectionFragment; private QuickSettingsFragment 
quickSettingsFragment; private DrawerLayout drawer; private FragmentManager supportFragmentManager; private Preferences preferences; private NearbyHelper nearbyHelper; private NearbyManager nearbyManager; private CardSelection cardSelection; @Override @CallSuper protected void attachBaseContext(Context newBase) { super.attachBaseContext(CalligraphyContextWrapper.wrap(newBase)); } @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar); setSupportActionBar(toolbar); preferences = new Preferences(getApplicationContext()); supportFragmentManager = getSupportFragmentManager(); quickSettingsFragment = (QuickSettingsFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentQuickSettingsFragment); cardSelectionFragment = (CardSelectionFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentCardSelection); cardDisplayFragment = (CardDisplayFragment) getSupportFragmentManager().findFragmentById(R.id.fragmentCardDisplay); NavigationView navigationView = (NavigationView) findViewById(R.id.nav_view); navigationView.setNavigationItemSelectedListener(new NavigationView.OnNavigationItemSelectedListener() { @Override public boolean onNavigationItemSelected(final MenuItem item) { return handleNavigationItemSelected(item); } }); drawer = (DrawerLayout) findViewById(R.id.drawer_layout); ActionBarDrawerToggle toggle = new ActionBarDrawerToggle( this, drawer, toolbar, R.string.navigation_drawer_open, R.string.navigation_drawer_close); drawer.setDrawerListener(toggle); toggle.syncState(); nearbyHelper = new NearbyHelper(getApplicationContext(), preferences); nearbyManager = new NearbyManager(preferences, nearbyHelper, new Handler()); DeckType standard = preferences.getDeckType(); setCardsAndShow(standard); quickSettingsFragment.setPreferences(preferences); } private void setCardsAndShow(final DeckType deckType) { closeDrawer(); resetMenuScreens(); 
preferences.setDeckType(deckType); cardSelectionFragment.setCardValues(deckType.getValues()); showCardSelection(); } private void showCardSelection() { tracking.logContentView(new ContentViewEvent() .putContentName("Card selection") .putContentType(preferences.getDeckType().name())); nearbyManager.setState(NearbyManager.State.SELECTING); cardDisplayFragment.hide(); quickSettingsFragment.hide(); cardSelectionFragment.show(new CardSelectionFragment.Listener() { @Override public void onCardSelected(final CardSelection cardSelection) { showSelectionBackgroundFragment(cardSelection); } }); } private void showSelectionBackgroundFragment(final CardSelection cardSelection) { tracking.logContentView(new ContentViewEvent() .putContentName("Card selected")); this.cardSelection = cardSelection; nearbyManager.setState(NearbyManager.State.READY); cardDisplayFragment.hide(); cardSelectionFragment.hide(); quickSettingsFragment.show(new QuickSettingsFragment.Listener() { @Override public void onShowCardClicked() { showCardDisplay(); } @Override public void onNearbyPermissionRequested() { requestedNearbyPermission(); } @Override public void onStopNearby() { nearbyManager.stop(); nearbyHelper.stop(); } }); } private void requestedNearbyPermission() { nearbyHelper.start(new NearbyHelper.Listener() { @Override public void onReady() { nearbyHelper.requestPermission(new PermissionCheckCallback.Listener() { @Override public void onPermissionAllowed() { preferences.setNearbyAllowed(true); } @Override public void onPermissionNotAllowed(@Nullable final Status status) { preferences.setNearbyAllowed(false); if (status != null && status.hasResolution()) { try { status.startResolutionForResult(MainActivity.this, REQUEST_RESOLVE_ERROR); } catch (IntentSender.SendIntentException e) { Log.e("NearBy", "SendIntentException", e); Toast.makeText(getApplicationContext(), R.string.error_google_api, Toast.LENGTH_SHORT).show(); } } else { Toast.makeText(getApplicationContext(), R.string.error_google_api, 
Toast.LENGTH_SHORT).show(); } } }); nearbyManager.start(new NearbyManager.Listener() { @Override public void onEverybodyReady() { tracking.logCustom(new CustomEvent("Reveal") .putCustomAttribute("Type", "Nearby")); if (cardSelection != null) { showCardDisplay(); } } }); } }); } private void showCardDisplay() { tracking.logContentView(new ContentViewEvent() .putContentName("Card displayed")); nearbyManager.setState(NearbyManager.State.SHOWING); cardSelectionFragment.hide(); quickSettingsFragment.hide(); cardDisplayFragment.show(cardSelection); cardSelection = null; } @Override protected int getLayoutId() { return R.layout.activity_main; } @Override protected String getPageName() { return "MainActivity"; } @Override public void onBackPressed() { if (drawer.isDrawerOpen(GravityCompat.START)) { closeDrawer(); } else if (supportFragmentManager.getBackStackEntryCount() > 0) { resetMenuScreens(); } else if (!cardSelectionFragment.isShowing()) { showCardSelection(); } else { super.onBackPressed(); } } private void resetMenuScreens() { int backStackEntryCount = supportFragmentManager.getBackStackEntryCount(); for (int i = 0; i < backStackEntryCount; i++) { supportFragmentManager.popBackStack(); } } private void closeDrawer() { if (drawer.isDrawerOpen(GravityCompat.START)) { drawer.closeDrawer(GravityCompat.START); } } @Override protected void onResume() { super.onResume(); closeDrawer(); } public boolean handleNavigationItemSelected(MenuItem item) { int id = item.getItemId(); if (id == R.id.nav_standard) { setCardsAndShow(DeckType.STANDARD); } else if (id == R.id.nav_fibonacci) { setCardsAndShow(DeckType.FIBONACCI); } else if (id == R.id.nav_shirt) { setCardsAndShow(DeckType.SHIRT); } else if (id == R.id.nav_natural) { setCardsAndShow(DeckType.NATURAL); } else if (id == R.id.nav_share) { shareApp(); } else if (id == R.id.nav_about) { tracking.logContentView(new ContentViewEvent() .putContentName("About")); showFragment(new AboutFragment()); } else if (id == 
R.id.nav_settings) { tracking.logContentView(new ContentViewEvent() .putContentName("Settings")); SettingsFragment fragment = new SettingsFragment(); fragment.setListener(new SettingsFragment.Listener() { @Override public void onNearbyPermissionRequested() { requestedNearbyPermission(); } }); showFragment(fragment); } DrawerLayout drawer = (DrawerLayout) findViewById(R.id.drawer_layout); drawer.closeDrawer(GravityCompat.START); return true; } private void showFragment(final Fragment fragment) { resetMenuScreens(); FragmentTransaction transaction = supportFragmentManager.beginTransaction(); transaction.addToBackStack(fragment.getClass().getName()); transaction.setCustomAnimations(R.anim.fade_in, 0, 0, R.anim.fade_out); transaction.replace(R.id.contentFrame, fragment); transaction.commit(); } private void shareApp() { tracking.logShare(new ShareEvent()); Intent sendIntent = new Intent(); sendIntent.setAction(Intent.ACTION_SEND); sendIntent.putExtra(Intent.EXTRA_TITLE, getString(R.string.app_name)); sendIntent.putExtra(Intent.EXTRA_SUBJECT, getString(R.string.app_name)); sendIntent.putExtra(Intent.EXTRA_TEXT, "Hey check out this awesome Scrum Poker app at: https://play.google.com/store/apps/details?id=nl.endran.scrumpoker"); sendIntent.setType("text/plain"); startActivity(sendIntent); } @Override protected void onStart() { super.onStart(); if (preferences.shouldUseNearby()) { requestedNearbyPermission(); } } @Override protected void onStop() { super.onStop(); nearbyHelper.stop(); nearbyManager.stop(); } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); if (requestCode == REQUEST_RESOLVE_ERROR) { if (resultCode == RESULT_OK) { preferences.setNearbyAllowed(true); preferences.setUseNearby(true); } else { preferences.setNearbyAllowed(false); Toast.makeText(this, R.string.please_allow_nearby, Toast.LENGTH_SHORT).show(); } showCardSelection(); } } }
/*
 * Copyright (C) 2008 Esmertec AG. Copyright (C) 2008 The Android Open Source
 * Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */

package info.guardianproject.otr.app.im.app;

import info.guardianproject.otr.app.im.plugin.ImConfigNames;
import info.guardianproject.otr.app.im.provider.Imps;
import info.guardianproject.otr.app.im.ui.RoundedAvatarDrawable;

import java.io.ByteArrayOutputStream;
import java.util.Map;

import org.apache.commons.codec.DecoderException;
import org.apache.commons.codec.binary.Hex;

import android.content.ContentResolver;
import android.content.ContentUris;
import android.content.ContentValues;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.util.Log;

/**
 * Static helpers for reading and writing IM-provider database rows:
 * account queries, avatar blobs (stored hex-encoded), and plugin provider
 * metadata/settings.
 */
public class DatabaseUtils {

    private static final String TAG = ImApp.LOG_TAG;

    /** Utility class — not instantiable. */
    private DatabaseUtils() {
    }

    /**
     * Queries the active accounts belonging to the given provider.
     *
     * @return a cursor positioned before the first row, or {@code null} if the
     *         query failed or matched no rows (the empty cursor is closed here).
     */
    public static Cursor queryAccountsForProvider(ContentResolver cr, String[] projection,
            long providerId) {
        StringBuilder where = new StringBuilder(Imps.Account.ACTIVE);
        where.append("=1 AND ").append(Imps.Account.PROVIDER).append('=').append(providerId);
        Cursor c = cr.query(Imps.Account.CONTENT_URI, projection, where.toString(), null, null);
        if (c != null && !c.moveToFirst()) {
            c.close();
            return null;
        }
        return c;
    }

    /**
     * Decodes the avatar stored in the given cursor column.
     *
     * <p>The column holds a hex string of the form {@code X'....'} (see the
     * substring below, which strips the 2-char prefix and trailing quote), or
     * the literal text "NULL" when absent.
     *
     * @return the decoded avatar, or {@code null} if no data is present
     * @throws DecoderException if the hex payload is malformed
     */
    public static RoundedAvatarDrawable getAvatarFromCursor(Cursor cursor, int dataColumn,
            int width, int height) throws DecoderException {
        String hexData = cursor.getString(dataColumn);
        // Guard against both an SQL NULL (null string) and the literal "NULL"
        // marker; the original NPE'd on a null column value.
        if (hexData == null || hexData.equals("NULL")) {
            return null;
        }
        byte[] data = Hex.decodeHex(hexData.substring(2, hexData.length() - 1).toCharArray());
        return decodeAvatar(data, width, height);
    }

    /**
     * Looks up a contact's avatar by address (username LIKE match).
     *
     * @return the decoded avatar, or {@code null} if the contact or avatar is
     *         missing or the query failed
     * @throws DecoderException if the hex payload is malformed
     */
    public static RoundedAvatarDrawable getAvatarFromAddress(ContentResolver cr, String address,
            int width, int height) throws DecoderException {
        String[] projection = { Imps.Contacts.AVATAR_DATA };
        String[] args = { address };
        String query = Imps.Contacts.USERNAME + " LIKE ?";
        Cursor cursor = cr.query(Imps.Contacts.CONTENT_URI, projection, query, args,
                Imps.Contacts.DEFAULT_SORT_ORDER);
        // ContentResolver.query() may return null on failure.
        if (cursor == null) {
            return null;
        }
        try {
            if (!cursor.moveToFirst()) {
                return null;
            }
            String hexData = cursor.getString(0);
            if (hexData == null || hexData.equals("NULL")) {
                return null;
            }
            byte[] data = Hex.decodeHex(hexData.substring(2, hexData.length() - 1).toCharArray());
            return decodeAvatar(data, width, height);
        } finally {
            // Close on every path, including when decodeHex throws.
            cursor.close();
        }
    }

    /** Builds {@code baseUri/providerId/accountId} for avatar access. */
    public static Uri getAvatarUri(Uri baseUri, long providerId, long accountId) {
        Uri.Builder builder = baseUri.buildUpon();
        ContentUris.appendId(builder, providerId);
        ContentUris.appendId(builder, accountId);
        return builder.build();
    }

    /** Replaces the avatar blob for the given contact. */
    public static void updateAvatarBlob(ContentResolver resolver, Uri updateUri, byte[] data,
            String username) {
        ContentValues values = new ContentValues(1);
        values.put(Imps.Avatars.DATA, data);

        StringBuilder buf = new StringBuilder(Imps.Avatars.CONTACT);
        buf.append("=?");
        String[] selectionArgs = new String[] { username };

        resolver.update(updateUri, values, buf.toString(), selectionArgs);
    }

    /**
     * Checks whether an avatar row exists for the given contact.
     *
     * <p>NOTE(review): implemented as an UPDATE of the contact column to its own
     * value and testing the affected-row count — it mutates nothing observable
     * but is unusual; kept as-is to preserve behavior.
     */
    public static boolean hasAvatarContact(ContentResolver resolver, Uri updateUri,
            String username) {
        ContentValues values = new ContentValues(1);
        values.put(Imps.Avatars.CONTACT, username);

        StringBuilder buf = new StringBuilder(Imps.Avatars.CONTACT);
        buf.append("=?");
        String[] selectionArgs = new String[] { username };

        return resolver.update(updateUri, values, buf.toString(), selectionArgs) > 0;
    }

    /** Returns whether an avatar row with the given contact jid AND hash exists. */
    public static boolean doesAvatarHashExist(ContentResolver resolver, Uri queryUri, String jid,
            String hash) {
        StringBuilder buf = new StringBuilder(Imps.Avatars.CONTACT);
        buf.append("=?");
        buf.append(" AND ");
        buf.append(Imps.Avatars.HASH);
        buf.append("=?");

        String[] selectionArgs = new String[] { jid, hash };

        Cursor cursor = resolver.query(queryUri, null, buf.toString(), selectionArgs, null);
        if (cursor == null)
            return false;
        try {
            return cursor.getCount() > 0;
        } finally {
            cursor.close();
        }
    }

    /** Inserts a new avatar row (data, contact, provider, account, hash). */
    public static void insertAvatarBlob(ContentResolver resolver, Uri updateUri, long providerId,
            long accountId, byte[] data, String hash, String contact) {
        // Sized for the five values actually stored (was 3).
        ContentValues values = new ContentValues(5);
        values.put(Imps.Avatars.DATA, data);
        values.put(Imps.Avatars.CONTACT, contact);
        values.put(Imps.Avatars.PROVIDER, providerId);
        values.put(Imps.Avatars.ACCOUNT, accountId);
        values.put(Imps.Avatars.HASH, hash);
        resolver.insert(updateUri, values);
    }

    /**
     * Decodes an avatar bitmap from raw bytes, downsampling so both dimensions
     * are at least {@code width} x {@code height}.
     *
     * @return the avatar drawable, or {@code null} if the bytes do not decode
     */
    private static RoundedAvatarDrawable decodeAvatar(byte[] data, int width, int height) {
        // First pass: bounds only, to pick a sample size without allocating pixels.
        BitmapFactory.Options options = new BitmapFactory.Options();
        options.inJustDecodeBounds = true;
        BitmapFactory.decodeByteArray(data, 0, data.length, options);
        options.inSampleSize = calculateInSampleSize(options, width, height);
        options.inJustDecodeBounds = false;
        Bitmap b = BitmapFactory.decodeByteArray(data, 0, data.length, options);
        if (b == null) {
            return null;
        }
        return new RoundedAvatarDrawable(b);
    }

    /**
     * Computes a power-free inSampleSize such that the decoded image is at
     * least as large as the requested dimensions.
     */
    public static int calculateInSampleSize(BitmapFactory.Options options, int reqWidth,
            int reqHeight) {
        // Raw height and width of image
        final int height = options.outHeight;
        final int width = options.outWidth;
        int inSampleSize = 1;

        if (height > reqHeight || width > reqWidth) {
            // Calculate ratios of height and width to requested height and width
            final int heightRatio = Math.round((float) height / (float) reqHeight);
            final int widthRatio = Math.round((float) width / (float) reqWidth);
            // Choose the smallest ratio as inSampleSize value, this will guarantee
            // a final image with both dimensions larger than or equal to the
            // requested height and width.
            inSampleSize = heightRatio < widthRatio ? heightRatio : widthRatio;
        }
        return inSampleSize;
    }

    /**
     * Update IM provider database for a plugin using newly loaded information.
     *
     * @param cr the resolver
     * @param providerName the plugin provider name
     * @param providerFullName the full name
     * @param signUpUrl the plugin's service signup URL
     * @param config the plugin's settings
     * @return the provider ID of the plugin
     */
    public static long updateProviderDb(ContentResolver cr, String providerName,
            String providerFullName, String signUpUrl, Map<String, String> config) {
        // query provider data
        long providerId = Imps.Provider.getProviderIdForName(cr, providerName);
        if (providerId > 0) {
            // already loaded, check if version changed
            String pluginVersion = config.get(ImConfigNames.PLUGIN_VERSION);
            if (!isPluginVersionChanged(cr, providerId, pluginVersion)) {
                // no change, just return
                return providerId;
            }
            // changed, update provider meta data
            updateProviderRow(cr, providerId, providerFullName, signUpUrl);
            // clear branding resource map cache
            clearBrandingResourceMapCache(cr, providerId);
            Log.d(TAG, "Plugin " + providerName + "(" + providerId
                       + ") has a version change. Database updated.");
        } else {
            // new plugin, not loaded before, insert the provider data
            providerId = insertProviderRow(cr, providerName, providerFullName, signUpUrl);
            Log.d(TAG, "Plugin " + providerName + "(" + providerId
                       + ") is new. Provider added to IM db.");
        }
        // plugin provider has been inserted/updated, we need to update settings
        saveProviderSettings(cr, providerId, config);
        return providerId;
    }

    /** Clear the branding resource map cache. */
    private static int clearBrandingResourceMapCache(ContentResolver cr, long providerId) {
        StringBuilder where = new StringBuilder();
        where.append(Imps.BrandingResourceMapCache.PROVIDER_ID);
        where.append('=');
        where.append(providerId);
        return cr.delete(Imps.BrandingResourceMapCache.CONTENT_URI, where.toString(), null);
    }

    /** Insert the plugin settings into the database. */
    private static int saveProviderSettings(ContentResolver cr, long providerId,
            Map<String, String> config) {
        ContentValues[] settingValues = new ContentValues[config.size()];
        int index = 0;
        for (Map.Entry<String, String> entry : config.entrySet()) {
            ContentValues settingValue = new ContentValues();
            settingValue.put(Imps.ProviderSettings.PROVIDER, providerId);
            settingValue.put(Imps.ProviderSettings.NAME, entry.getKey());
            settingValue.put(Imps.ProviderSettings.VALUE, entry.getValue());
            settingValues[index++] = settingValue;
        }
        return cr.bulkInsert(Imps.ProviderSettings.CONTENT_URI, settingValues);
    }

    /** Insert a new plugin provider to the provider table. */
    private static long insertProviderRow(ContentResolver cr, String providerName,
            String providerFullName, String signUpUrl) {
        // Sized for the four values actually stored (was 3).
        ContentValues values = new ContentValues(4);
        values.put(Imps.Provider.NAME, providerName);
        values.put(Imps.Provider.FULLNAME, providerFullName);
        values.put(Imps.Provider.CATEGORY, ImApp.IMPS_CATEGORY);
        values.put(Imps.Provider.SIGNUP_URL, signUpUrl);
        Uri result = cr.insert(Imps.Provider.CONTENT_URI, values);
        return ContentUris.parseId(result);
    }

    /** Update the data of a plugin provider. */
    private static int updateProviderRow(ContentResolver cr, long providerId,
            String providerFullName, String signUpUrl) {
        // Update the full name, signup url and category each time when the plugin change
        // instead of specific version change because this is called only once.
        // It's ok to update them even the values are not changed.
        // Note that we don't update the provider name because it's used as
        // identifier at some place and the plugin should never change it.
        ContentValues values = new ContentValues(3);
        values.put(Imps.Provider.FULLNAME, providerFullName);
        values.put(Imps.Provider.SIGNUP_URL, signUpUrl);
        values.put(Imps.Provider.CATEGORY, ImApp.IMPS_CATEGORY);
        Uri uri = ContentUris.withAppendedId(Imps.Provider.CONTENT_URI, providerId);
        return cr.update(uri, values, null, null);
    }

    /**
     * Compare the saved version of a plugin provider with the newly loaded
     * version.
     */
    private static boolean isPluginVersionChanged(ContentResolver cr, long providerId,
            String newVersion) {
        String oldVersion = Imps.ProviderSettings.getStringValue(cr, providerId,
                ImConfigNames.PLUGIN_VERSION);
        if (oldVersion == null) {
            return true;
        }
        return !oldVersion.equals(newVersion);
    }
}
package com.github.jkschoen.jsma.model;

import java.util.Objects;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;

/**
 * Monthly transfer/hit statistics for an image or video, broken down by the
 * size variant that was served. All counters are nullable Integers so that
 * "not reported" is distinguishable from zero.
 */
@XmlAccessorType(XmlAccessType.FIELD)
public abstract class Stats {

    @XmlElement(name="Bytes")
    private Integer bytes;

    @XmlElement(name="Hits")
    private Integer hits;

    @XmlElement(name="Large")
    private Integer large;

    @XmlElement(name="Medium")
    private Integer medium;

    @XmlElement(name="Original")
    private Integer original;

    @XmlElement(name="Small")
    private Integer small;

    @XmlElement(name="Video1280")
    private Integer video1280;

    @XmlElement(name="Video1920")
    private Integer video1920;

    @XmlElement(name="Video320")
    private Integer video320;

    @XmlElement(name="Video640")
    private Integer video640;

    @XmlElement(name="Video960")
    private Integer video960;

    @XmlElement(name="X2Large")
    private Integer x2Large;

    @XmlElement(name="X3Large")
    private Integer x3Large;

    @XmlElement(name="XLarge")
    private Integer xLarge;

    public Stats(){}

    /**
     * Creates stats with the image size counters populated; the video
     * counters are left null (this constructor predates them).
     */
    public Stats(Integer bytes, Integer hits, Integer large, Integer medium,
            Integer original, Integer small, Integer x2Large, Integer x3Large,
            Integer xLarge) {
        this.bytes = bytes;
        this.hits = hits;
        this.large = large;
        this.medium = medium;
        this.original = original;
        this.small = small;
        this.x2Large = x2Large;
        this.x3Large = x3Large;
        this.xLarge = xLarge;
    }

    /**
     * The bytes transferred for this image (or video) for a given month.
     */
    public Integer getBytes() {
        return bytes;
    }

    /**
     * Sets the bytes transferred for this image (or video) for a given month.
     */
    public void setBytes(Integer bytes) {
        this.bytes = bytes;
    }

    /**
     * The total hits for this image (or video) for a given month.
     */
    public Integer getHits() {
        return hits;
    }

    /**
     * Set the total hits for this image (or video) for a given month.
     */
    public void setHits(Integer hits) {
        this.hits = hits;
    }

    /**
     * The number of Large hits for this image (or video) for a given month.
     */
    public Integer getLarge() {
        return large;
    }

    /**
     * Set the number of Large hits for this image (or video) for a given month.
     */
    public void setLarge(Integer large) {
        this.large = large;
    }

    /**
     * The number of Medium hits for this image (or video) for a given month.
     */
    public Integer getMedium() {
        return medium;
    }

    /**
     * Set the number of Medium hits for this image (or video) for a given month.
     */
    public void setMedium(Integer medium) {
        this.medium = medium;
    }

    /**
     * The number of Original hits for this image (or video) for a given month.
     */
    public Integer getOriginal() {
        return original;
    }

    /**
     * Set the number of Original hits for this image (or video) for a given month.
     */
    public void setOriginal(Integer original) {
        this.original = original;
    }

    /**
     * The number of Small hits for this image (or video) for a given month.
     */
    public Integer getSmall() {
        return small;
    }

    /**
     * Set the number of Small hits for this image (or video) for a given month.
     */
    public void setSmall(Integer small) {
        this.small = small;
    }

    /**
     * The number of Video1280 hits for this image (or video) for a given month.
     */
    public Integer getVideo1280() {
        return video1280;
    }

    /**
     * Set the number of Video1280 hits for this image (or video) for a given month.
     */
    public void setVideo1280(Integer video1280) {
        this.video1280 = video1280;
    }

    /**
     * The number of Video1920 hits for this image (or video) for a given month.
     */
    public Integer getVideo1920() {
        return video1920;
    }

    /**
     * Set the number of Video1920 hits for this image (or video) for a given month.
     */
    public void setVideo1920(Integer video1920) {
        this.video1920 = video1920;
    }

    /**
     * The number of Video320 hits for this image (or video) for a given month.
     */
    public Integer getVideo320() {
        return video320;
    }

    /**
     * Set the number of Video320 hits for this image (or video) for a given month.
     */
    public void setVideo320(Integer video320) {
        this.video320 = video320;
    }

    /**
     * The number of Video640 hits for this image (or video) for a given month.
     */
    public Integer getVideo640() {
        return video640;
    }

    /**
     * Set the number of Video640 hits for this image (or video) for a given month.
     */
    public void setVideo640(Integer video640) {
        this.video640 = video640;
    }

    /**
     * The number of Video960 hits for this image (or video) for a given month.
     */
    public Integer getVideo960() {
        return video960;
    }

    /**
     * Set the number of Video960 hits for this image (or video) for a given month.
     */
    public void setVideo960(Integer video960) {
        this.video960 = video960;
    }

    /**
     * The number of X2Large hits for this image (or video) for a given month.
     */
    public Integer getX2Large() {
        return x2Large;
    }

    /**
     * Set the number of X2Large hits for this image (or video) for a given month.
     */
    public void setX2Large(Integer x2Large) {
        this.x2Large = x2Large;
    }

    /**
     * The number of X3Large hits for this image (or video) for a given month.
     */
    public Integer getX3Large() {
        return x3Large;
    }

    /**
     * Set the number of X3Large hits for this image (or video) for a given month.
     */
    public void setX3Large(Integer x3Large) {
        this.x3Large = x3Large;
    }

    /**
     * The number of XLarge hits for this image (or video) for a given month.
     */
    public Integer getxLarge() {
        return xLarge;
    }

    /**
     * Set the number of XLarge hits for this image (or video) for a given month.
     */
    public void setxLarge(Integer xLarge) {
        this.xLarge = xLarge;
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation as the previous
        // hand-rolled version, so hash values are unchanged.
        return Objects.hash(bytes, hits, large, medium, original, small,
                video1280, video1920, video320, video640, video960,
                x2Large, x3Large, xLarge);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null || getClass() != obj.getClass())
            return false;
        Stats other = (Stats) obj;
        // Objects.equals is null-safe, matching the previous per-field
        // null-check chains exactly.
        return Objects.equals(bytes, other.bytes)
                && Objects.equals(hits, other.hits)
                && Objects.equals(large, other.large)
                && Objects.equals(medium, other.medium)
                && Objects.equals(original, other.original)
                && Objects.equals(small, other.small)
                && Objects.equals(video1280, other.video1280)
                && Objects.equals(video1920, other.video1920)
                && Objects.equals(video320, other.video320)
                && Objects.equals(video640, other.video640)
                && Objects.equals(video960, other.video960)
                && Objects.equals(x2Large, other.x2Large)
                && Objects.equals(x3Large, other.x3Large)
                && Objects.equals(xLarge, other.xLarge);
    }

    // Deliberately starts with ", " so concrete subclasses can prepend their
    // own fields when composing their toString output.
    @Override
    public String toString() {
        return ", bytes=" + bytes + ", hits=" + hits + ", large=" + large
                + ", medium=" + medium + ", original=" + original + ", small="
                + small + ", video1280=" + video1280 + ", video1920="
                + video1920 + ", video320=" + video320 + ", video640="
                + video640 + ", video960=" + video960 + ", x2Large=" + x2Large
                + ", x3Large=" + x3Large + ", xLarge=" + xLarge;
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.index; import java.lang.reflect.Constructor; import java.util.*; import java.util.concurrent.*; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; import com.google.common.base.Joiner; import com.google.common.base.Strings; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.common.collect.Sets; import com.google.common.primitives.Longs; import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.MoreExecutors; import org.apache.commons.lang3.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutor; import org.apache.cassandra.concurrent.NamedThreadFactory; import org.apache.cassandra.concurrent.StageManager; import org.apache.cassandra.config.ColumnDefinition; import org.apache.cassandra.config.DatabaseDescriptor; import org.apache.cassandra.cql3.statements.IndexTarget; import org.apache.cassandra.db.*; import org.apache.cassandra.db.compaction.CompactionManager; import org.apache.cassandra.db.filter.RowFilter; 
import org.apache.cassandra.db.lifecycle.SSTableSet; import org.apache.cassandra.db.lifecycle.View; import org.apache.cassandra.db.partitions.PartitionIterators; import org.apache.cassandra.db.partitions.PartitionUpdate; import org.apache.cassandra.db.rows.*; import org.apache.cassandra.exceptions.InvalidRequestException; import org.apache.cassandra.index.internal.CassandraIndex; import org.apache.cassandra.index.transactions.*; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.schema.IndexMetadata; import org.apache.cassandra.schema.Indexes; import org.apache.cassandra.service.pager.SinglePartitionPager; import org.apache.cassandra.tracing.Tracing; import org.apache.cassandra.transport.ProtocolVersion; import org.apache.cassandra.transport.Server; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.concurrent.OpOrder; import org.apache.cassandra.utils.concurrent.Refs; /** * Handles the core maintenance functionality associated with indexes: adding/removing them to or from * a table, (re)building during bootstrap or other streaming operations, flushing, reloading metadata * and so on. * * The Index interface defines a number of methods which return {@code Callable<?>}. These are primarily the * management tasks for an index implementation. Most of them are currently executed in a blocking * fashion via submission to SIM's blockingExecutor. This provides the desired behaviour in pretty * much all cases, as tasks like flushing an index needs to be executed synchronously to avoid potentially * deadlocking on the FlushWriter or PostFlusher. Several of these {@code Callable<?>} returning methods on Index could * then be defined with as void and called directly from SIM (rather than being run via the executor service). 
* Separating the task defintion from execution gives us greater flexibility though, so that in future, for example, * if the flush process allows it we leave open the possibility of executing more of these tasks asynchronously. * * The primary exception to the above is the Callable returned from Index#addIndexedColumn. This may * involve a significant effort, building a new index over any existing data. We perform this task asynchronously; * as it is called as part of a schema update, which we do not want to block for a long period. Building non-custom * indexes is performed on the CompactionManager. * * This class also provides instances of processors which listen to updates to the base table and forward to * registered Indexes the info required to keep those indexes up to date. * There are two variants of these processors, each with a factory method provided by SIM: * IndexTransaction: deals with updates generated on the regular write path. * CleanupTransaction: used when partitions are modified during compaction or cleanup operations. * Further details on their usage and lifecycles can be found in the interface definitions below. * * Finally, the bestIndexFor method is used at query time to identify the most selective index of those able * to satisfy any search predicates defined by a ReadCommand's RowFilter. It returns a thin IndexAccessor object * which enables the ReadCommand to access the appropriate functions of the Index at various stages in its lifecycle. * e.g. the getEstimatedResultRows is required when StorageProxy calculates the initial concurrency factor for * distributing requests to replicas, whereas a Searcher instance is needed when the ReadCommand is executed locally on * a target replica. 
*/ public class SecondaryIndexManager implements IndexRegistry { private static final Logger logger = LoggerFactory.getLogger(SecondaryIndexManager.class); // default page size (in rows) when rebuilding the index for a whole partition public static final int DEFAULT_PAGE_SIZE = 10000; private Map<String, Index> indexes = Maps.newConcurrentMap(); /** * The indexes that are ready to server requests. */ private Set<String> builtIndexes = Sets.newConcurrentHashSet(); // executes tasks returned by Indexer#addIndexColumn which may require index(es) to be (re)built private static final ExecutorService asyncExecutor = new JMXEnabledThreadPoolExecutor(1, StageManager.KEEPALIVE, TimeUnit.SECONDS, new LinkedBlockingQueue<>(), new NamedThreadFactory("SecondaryIndexManagement"), "internal"); // executes all blocking tasks produced by Indexers e.g. getFlushTask, getMetadataReloadTask etc private static final ExecutorService blockingExecutor = MoreExecutors.newDirectExecutorService(); /** * The underlying column family containing the source data for these indexes */ public final ColumnFamilyStore baseCfs; public SecondaryIndexManager(ColumnFamilyStore baseCfs) { this.baseCfs = baseCfs; } /** * Drops and adds new indexes associated with the underlying CF */ public void reload() { // figure out what needs to be added and dropped. Indexes tableIndexes = baseCfs.metadata.getIndexes(); indexes.keySet() .stream() .filter(indexName -> !tableIndexes.has(indexName)) .forEach(this::removeIndex); // we call add for every index definition in the collection as // some may not have been created here yet, only added to schema for (IndexMetadata tableIndex : tableIndexes) addIndex(tableIndex); } private Future<?> reloadIndex(IndexMetadata indexDef) { Index index = indexes.get(indexDef.name); Callable<?> reloadTask = index.getMetadataReloadTask(indexDef); return reloadTask == null ? 
Futures.immediateFuture(null) : blockingExecutor.submit(reloadTask); } private Future<?> createIndex(IndexMetadata indexDef) { Index index = createInstance(indexDef); index.register(this); // if the index didn't register itself, we can probably assume that no initialization needs to happen final Callable<?> initialBuildTask = indexes.containsKey(indexDef.name) ? index.getInitializationTask() : null; if (initialBuildTask == null) { // We need to make sure that the index is marked as built in the case where the initialBuildTask // does not need to be run (if the index didn't register itself or if the base table was empty). markIndexBuilt(indexDef.name); return Futures.immediateFuture(null); } return asyncExecutor.submit(index.getInitializationTask()); } /** * Adds and builds a index * @param indexDef the IndexMetadata describing the index */ public synchronized Future<?> addIndex(IndexMetadata indexDef) { if (indexes.containsKey(indexDef.name)) return reloadIndex(indexDef); else return createIndex(indexDef); } /** * Checks if the specified index is queryable. 
* * @param index the index * @return <code>true</code> if the specified index is queryable, <code>false</code> otherwise */ public boolean isIndexQueryable(Index index) { return builtIndexes.contains(index.getIndexMetadata().name); } public synchronized void removeIndex(String indexName) { Index index = unregisterIndex(indexName); if (null != index) { markIndexRemoved(indexName); executeBlocking(index.getInvalidateTask()); } } public Set<IndexMetadata> getDependentIndexes(ColumnDefinition column) { if (indexes.isEmpty()) return Collections.emptySet(); Set<IndexMetadata> dependentIndexes = new HashSet<>(); for (Index index : indexes.values()) if (index.dependsOn(column)) dependentIndexes.add(index.getIndexMetadata()); return dependentIndexes; } /** * Called when dropping a Table */ public void markAllIndexesRemoved() { getBuiltIndexNames().forEach(this::markIndexRemoved); } /** * Does a full, blocking rebuild of the indexes specified by columns from the sstables. * Caller must acquire and release references to the sstables used here. 
* Note also that only this method of (re)building indexes: * a) takes a set of index *names* rather than Indexers * b) marks exsiting indexes removed prior to rebuilding * * @param sstables the data to build from * @param indexNames the list of indexes to be rebuilt */ public void rebuildIndexesBlocking(Collection<SSTableReader> sstables, Set<String> indexNames) { Set<Index> toRebuild = indexes.values().stream() .filter(index -> indexNames.contains(index.getIndexMetadata().name)) .filter(Index::shouldBuildBlocking) .collect(Collectors.toSet()); if (toRebuild.isEmpty()) { logger.info("No defined indexes with the supplied names: {}", Joiner.on(',').join(indexNames)); return; } toRebuild.forEach(indexer -> markIndexRemoved(indexer.getIndexMetadata().name)); buildIndexesBlocking(sstables, toRebuild); toRebuild.forEach(indexer -> markIndexBuilt(indexer.getIndexMetadata().name)); } public void buildAllIndexesBlocking(Collection<SSTableReader> sstables) { buildIndexesBlocking(sstables, indexes.values() .stream() .filter(Index::shouldBuildBlocking) .collect(Collectors.toSet())); } // For convenience, may be called directly from Index impls public void buildIndexBlocking(Index index) { if (index.shouldBuildBlocking()) { try (ColumnFamilyStore.RefViewFragment viewFragment = baseCfs.selectAndReference(View.selectFunction(SSTableSet.CANONICAL)); Refs<SSTableReader> sstables = viewFragment.refs) { buildIndexesBlocking(sstables, Collections.singleton(index)); markIndexBuilt(index.getIndexMetadata().name); } } } /** * Checks if the specified {@link ColumnFamilyStore} is a secondary index. * * @param cfs the <code>ColumnFamilyStore</code> to check. * @return <code>true</code> if the specified <code>ColumnFamilyStore</code> is a secondary index, * <code>false</code> otherwise. */ public static boolean isIndexColumnFamilyStore(ColumnFamilyStore cfs) { return isIndexColumnFamily(cfs.name); } /** * Checks if the specified {@link ColumnFamilyStore} is the one secondary index. 
* * @param cfName the name of the <code>ColumnFamilyStore</code> to check. * @return <code>true</code> if the specified <code>ColumnFamilyStore</code> is a secondary index, * <code>false</code> otherwise. */ public static boolean isIndexColumnFamily(String cfName) { return cfName.contains(Directories.SECONDARY_INDEX_NAME_SEPARATOR); } /** * Returns the parent of the specified {@link ColumnFamilyStore}. * * @param cfs the <code>ColumnFamilyStore</code> * @return the parent of the specified <code>ColumnFamilyStore</code> */ public static ColumnFamilyStore getParentCfs(ColumnFamilyStore cfs) { String parentCfs = getParentCfsName(cfs.name); return cfs.keyspace.getColumnFamilyStore(parentCfs); } /** * Returns the parent name of the specified {@link ColumnFamilyStore}. * * @param cfName the <code>ColumnFamilyStore</code> name * @return the parent name of the specified <code>ColumnFamilyStore</code> */ public static String getParentCfsName(String cfName) { assert isIndexColumnFamily(cfName); return StringUtils.substringBefore(cfName, Directories.SECONDARY_INDEX_NAME_SEPARATOR); } /** * Returns the index name * * @param cfs the <code>ColumnFamilyStore</code> * @return the index name */ public static String getIndexName(ColumnFamilyStore cfs) { return getIndexName(cfs.name); } /** * Returns the index name * * @param cfName the <code>ColumnFamilyStore</code> name * @return the index name */ public static String getIndexName(String cfName) { assert isIndexColumnFamily(cfName); return StringUtils.substringAfter(cfName, Directories.SECONDARY_INDEX_NAME_SEPARATOR); } private void buildIndexesBlocking(Collection<SSTableReader> sstables, Set<Index> indexes) { if (indexes.isEmpty()) return; logger.info("Submitting index build of {} for data in {}", indexes.stream().map(i -> i.getIndexMetadata().name).collect(Collectors.joining(",")), sstables.stream().map(SSTableReader::toString).collect(Collectors.joining(","))); Map<Index.IndexBuildingSupport, Set<Index>> byType = new 
HashMap<>(); for (Index index : indexes) { Set<Index> stored = byType.computeIfAbsent(index.getBuildTaskSupport(), i -> new HashSet<>()); stored.add(index); } List<Future<?>> futures = byType.entrySet() .stream() .map((e) -> e.getKey().getIndexBuildTask(baseCfs, e.getValue(), sstables)) .map(CompactionManager.instance::submitIndexBuild) .collect(Collectors.toList()); FBUtilities.waitOnFutures(futures); flushIndexesBlocking(indexes); logger.info("Index build of {} complete", indexes.stream().map(i -> i.getIndexMetadata().name).collect(Collectors.joining(","))); } /** * Marks the specified index as build. * <p>This method is public as it need to be accessible from the {@link Index} implementations</p> * @param indexName the index name */ public void markIndexBuilt(String indexName) { builtIndexes.add(indexName); if (DatabaseDescriptor.isDaemonInitialized()) SystemKeyspace.setIndexBuilt(baseCfs.keyspace.getName(), indexName); } /** * Marks the specified index as removed. * <p>This method is public as it need to be accessible from the {@link Index} implementations</p> * @param indexName the index name */ public void markIndexRemoved(String indexName) { SystemKeyspace.setIndexRemoved(baseCfs.keyspace.getName(), indexName); } public Index getIndexByName(String indexName) { return indexes.get(indexName); } private Index createInstance(IndexMetadata indexDef) { Index newIndex; if (indexDef.isCustom()) { assert indexDef.options != null; String className = indexDef.options.get(IndexTarget.CUSTOM_INDEX_OPTION_NAME); assert ! Strings.isNullOrEmpty(className); try { Class<? extends Index> indexClass = FBUtilities.classForName(className, "Index"); Constructor<? 
extends Index> ctor = indexClass.getConstructor(ColumnFamilyStore.class, IndexMetadata.class); newIndex = (Index)ctor.newInstance(baseCfs, indexDef); } catch (Exception e) { throw new RuntimeException(e); } } else { newIndex = CassandraIndex.newIndex(baseCfs, indexDef); } return newIndex; } /** * Truncate all indexes */ public void truncateAllIndexesBlocking(final long truncatedAt) { executeAllBlocking(indexes.values().stream(), (index) -> index.getTruncateTask(truncatedAt)); } /** * Remove all indexes */ public void invalidateAllIndexesBlocking() { markAllIndexesRemoved(); executeAllBlocking(indexes.values().stream(), Index::getInvalidateTask); } /** * Perform a blocking flush all indexes */ public void flushAllIndexesBlocking() { flushIndexesBlocking(ImmutableSet.copyOf(indexes.values())); } /** * Perform a blocking flush of selected indexes */ public void flushIndexesBlocking(Set<Index> indexes) { if (indexes.isEmpty()) return; List<Future<?>> wait = new ArrayList<>(); List<Index> nonCfsIndexes = new ArrayList<>(); // for each CFS backed index, submit a flush task which we'll wait on for completion // for the non-CFS backed indexes, we'll flush those while we wait. synchronized (baseCfs.getTracker()) { indexes.forEach(index -> index.getBackingTable() .map(cfs -> wait.add(cfs.forceFlush())) .orElseGet(() -> nonCfsIndexes.add(index))); } executeAllBlocking(nonCfsIndexes.stream(), Index::getBlockingFlushTask); FBUtilities.waitOnFutures(wait); } /** * Performs a blocking flush of all custom indexes */ public void flushAllNonCFSBackedIndexesBlocking() { executeAllBlocking(indexes.values() .stream() .filter(index -> !index.getBackingTable().isPresent()), Index::getBlockingFlushTask); } /** * Performs a blocking execution of pre-join tasks of all indexes */ public void executePreJoinTasksBlocking(boolean hadBootstrap) { logger.info("Executing pre-join{} tasks for: {}", hadBootstrap ? 
" post-bootstrap" : "", this.baseCfs); executeAllBlocking(indexes.values().stream(), (index) -> { return index.getPreJoinTask(hadBootstrap); }); } /** * @return all indexes which are marked as built and ready to use */ public List<String> getBuiltIndexNames() { Set<String> allIndexNames = new HashSet<>(); indexes.values().stream() .map(i -> i.getIndexMetadata().name) .forEach(allIndexNames::add); return SystemKeyspace.getBuiltIndexes(baseCfs.keyspace.getName(), allIndexNames); } /** * @return all backing Tables used by registered indexes */ public Set<ColumnFamilyStore> getAllIndexColumnFamilyStores() { Set<ColumnFamilyStore> backingTables = new HashSet<>(); indexes.values().forEach(index -> index.getBackingTable().ifPresent(backingTables::add)); return backingTables; } /** * @return if there are ANY indexes registered for this table */ public boolean hasIndexes() { return !indexes.isEmpty(); } /** * When building an index against existing data in sstables, add the given partition to the index */ public void indexPartition(DecoratedKey key, Set<Index> indexes, int pageSize) { if (logger.isTraceEnabled()) logger.trace("Indexing partition {}", baseCfs.metadata.getKeyValidator().getString(key.getKey())); if (!indexes.isEmpty()) { SinglePartitionReadCommand cmd = SinglePartitionReadCommand.fullPartitionRead(baseCfs.metadata, FBUtilities.nowInSeconds(), key); int nowInSec = cmd.nowInSec(); boolean readStatic = false; SinglePartitionPager pager = new SinglePartitionPager(cmd, null, ProtocolVersion.CURRENT); while (!pager.isExhausted()) { try (ReadExecutionController controller = cmd.executionController(); OpOrder.Group writeGroup = Keyspace.writeOrder.start(); RowIterator partition = PartitionIterators.getOnlyElement(pager.fetchPageInternal(pageSize, controller), cmd)) { Set<Index.Indexer> indexers = indexes.stream() .map(index -> index.indexerFor(key, partition.columns(), nowInSec, writeGroup, IndexTransaction.Type.UPDATE)) .filter(Objects::nonNull) 
.collect(Collectors.toSet()); indexers.forEach(Index.Indexer::begin); // only process the static row once per partition if (!readStatic && !partition.staticRow().isEmpty()) { indexers.forEach(indexer -> indexer.insertRow(partition.staticRow())); readStatic = true; } while (partition.hasNext()) { Row row = partition.next(); indexers.forEach(indexer -> indexer.insertRow(row)); } indexers.forEach(Index.Indexer::finish); } } } } /** * Return the page size used when indexing an entire partition */ public int calculateIndexingPageSize() { if (Boolean.getBoolean("cassandra.force_default_indexing_page_size")) return DEFAULT_PAGE_SIZE; double targetPageSizeInBytes = 32 * 1024 * 1024; double meanPartitionSize = baseCfs.getMeanPartitionSize(); if (meanPartitionSize <= 0) return DEFAULT_PAGE_SIZE; int meanCellsPerPartition = baseCfs.getMeanColumns(); if (meanCellsPerPartition <= 0) return DEFAULT_PAGE_SIZE; int columnsPerRow = baseCfs.metadata.partitionColumns().regulars.size(); if (meanCellsPerPartition <= 0) return DEFAULT_PAGE_SIZE; int meanRowsPerPartition = meanCellsPerPartition / columnsPerRow; double meanRowSize = meanPartitionSize / meanRowsPerPartition; int pageSize = (int) Math.max(1, Math.min(DEFAULT_PAGE_SIZE, targetPageSizeInBytes / meanRowSize)); logger.trace("Calculated page size {} for indexing {}.{} ({}/{}/{}/{})", pageSize, baseCfs.metadata.ksName, baseCfs.metadata.cfName, meanPartitionSize, meanCellsPerPartition, meanRowsPerPartition, meanRowSize); return pageSize; } /** * Delete all data from all indexes for this partition. * For when cleanup rips a partition out entirely. * * TODO : improve cleanup transaction to batch updates and perform them async */ public void deletePartition(UnfilteredRowIterator partition, int nowInSec) { // we need to acquire memtable lock because secondary index deletion may // cause a race (see CASSANDRA-3712). 
This is done internally by the // index transaction when it commits CleanupTransaction indexTransaction = newCleanupTransaction(partition.partitionKey(), partition.columns(), nowInSec); indexTransaction.start(); indexTransaction.onPartitionDeletion(new DeletionTime(FBUtilities.timestampMicros(), nowInSec)); indexTransaction.commit(); while (partition.hasNext()) { Unfiltered unfiltered = partition.next(); if (unfiltered.kind() != Unfiltered.Kind.ROW) continue; indexTransaction = newCleanupTransaction(partition.partitionKey(), partition.columns(), nowInSec); indexTransaction.start(); indexTransaction.onRowDelete((Row)unfiltered); indexTransaction.commit(); } } /** * Called at query time to choose which (if any) of the registered index implementations to use for a given query. * * This is a two step processes, firstly compiling the set of searchable indexes then choosing the one which reduces * the search space the most. * * In the first phase, if the command's RowFilter contains any custom index expressions, the indexes that they * specify are automatically included. Following that, the registered indexes are filtered to include only those * which support the standard expressions in the RowFilter. * * The filtered set then sorted by selectivity, as reported by the Index implementations' getEstimatedResultRows * method. * * Implementation specific validation of the target expression, either custom or standard, by the selected * index should be performed in the searcherFor method to ensure that we pick the right index regardless of * the validity of the expression. * * This method is only called once during the lifecycle of a ReadCommand and the result is * cached for future use when obtaining a Searcher, getting the index's underlying CFS for * ReadOrderGroup, or an estimate of the result size from an average index query. 
* * @param command ReadCommand to be executed * @return an Index instance, ready to use during execution of the command, or null if none * of the registered indexes can support the command. */ public Index getBestIndexFor(ReadCommand command) { if (indexes.isEmpty() || command.rowFilter().isEmpty()) return null; Set<Index> searchableIndexes = new HashSet<>(); for (RowFilter.Expression expression : command.rowFilter()) { if (expression.isCustom()) { // Only a single custom expression is allowed per query and, if present, // we want to always favour the index specified in such an expression RowFilter.CustomExpression customExpression = (RowFilter.CustomExpression)expression; logger.trace("Command contains a custom index expression, using target index {}", customExpression.getTargetIndex().name); Tracing.trace("Command contains a custom index expression, using target index {}", customExpression.getTargetIndex().name); return indexes.get(customExpression.getTargetIndex().name); } else if (!expression.isUserDefined()) { indexes.values().stream() .filter(index -> index.supportsExpression(expression.column(), expression.operator())) .forEach(searchableIndexes::add); } } if (searchableIndexes.isEmpty()) { logger.trace("No applicable indexes found"); Tracing.trace("No applicable indexes found"); return null; } Index selected = searchableIndexes.size() == 1 ? Iterables.getOnlyElement(searchableIndexes) : searchableIndexes.stream() .min((a, b) -> Longs.compare(a.getEstimatedResultRows(), b.getEstimatedResultRows())) .orElseThrow(() -> new AssertionError("Could not select most selective index")); // pay for an additional threadlocal get() rather than build the strings unnecessarily if (Tracing.isTracing()) { Tracing.trace("Index mean cardinalities are {}. 
Scanning with {}.", searchableIndexes.stream().map(i -> i.getIndexMetadata().name + ':' + i.getEstimatedResultRows()) .collect(Collectors.joining(",")), selected.getIndexMetadata().name); } return selected; } public Optional<Index> getBestIndexFor(RowFilter.Expression expression) { return indexes.values().stream().filter((i) -> i.supportsExpression(expression.column(), expression.operator())).findFirst(); } /** * Called at write time to ensure that values present in the update * are valid according to the rules of all registered indexes which * will process it. The partition key as well as the clustering and * cell values for each row in the update may be checked by index * implementations * @param update PartitionUpdate containing the values to be validated by registered Index implementations * @throws InvalidRequestException */ public void validate(PartitionUpdate update) throws InvalidRequestException { for (Index index : indexes.values()) index.validate(update); } /** * IndexRegistry methods */ public void registerIndex(Index index) { String name = index.getIndexMetadata().name; indexes.put(name, index); logger.trace("Registered index {}", name); } public void unregisterIndex(Index index) { unregisterIndex(index.getIndexMetadata().name); } private Index unregisterIndex(String name) { Index removed = indexes.remove(name); builtIndexes.remove(name); logger.trace(removed == null ? "Index {} was not registered" : "Removed index {} from registry", name); return removed; } public Index getIndex(IndexMetadata metadata) { return indexes.get(metadata.name); } public Collection<Index> listIndexes() { return ImmutableSet.copyOf(indexes.values()); } /** * Handling of index updates. * Implementations of the various IndexTransaction interfaces, for keeping indexes in sync with base data * during updates, compaction and cleanup. Plus factory methods for obtaining transaction instances. */ /** * Transaction for updates on the write path. 
 */
public UpdateTransaction newUpdateTransaction(PartitionUpdate update, OpOrder.Group opGroup, int nowInSec)
{
    if (!hasIndexes())
        return UpdateTransaction.NO_OP;

    // collect an Indexer per registered index; indexes may opt out by returning null
    Index.Indexer[] indexers = indexes.values().stream()
                                      .map(i -> i.indexerFor(update.partitionKey(),
                                                             update.columns(),
                                                             nowInSec,
                                                             opGroup,
                                                             IndexTransaction.Type.UPDATE))
                                      .filter(Objects::nonNull)
                                      .toArray(Index.Indexer[]::new);

    return indexers.length == 0 ? UpdateTransaction.NO_OP : new WriteTimeTransaction(indexers);
}

/**
 * Transaction for use when merging rows during compaction
 */
public CompactionTransaction newCompactionTransaction(DecoratedKey key,
                                                      PartitionColumns partitionColumns,
                                                      int versions,
                                                      int nowInSec)
{
    // the check for whether there are any registered indexes is already done in CompactionIterator
    return new IndexGCTransaction(key, partitionColumns, versions, nowInSec, listIndexes());
}

/**
 * Transaction for use when removing partitions during cleanup
 */
public CleanupTransaction newCleanupTransaction(DecoratedKey key,
                                                PartitionColumns partitionColumns,
                                                int nowInSec)
{
    if (!hasIndexes())
        return CleanupTransaction.NO_OP;

    return new CleanupGCTransaction(key, partitionColumns, nowInSec, listIndexes());
}

/**
 * A single use transaction for processing a partition update on the regular write path
 */
private static final class WriteTimeTransaction implements UpdateTransaction
{
    private final Index.Indexer[] indexers;

    private WriteTimeTransaction(Index.Indexer...indexers)
    {
        // don't allow null indexers, if we don't need any use a NullUpdater object
        for (Index.Indexer indexer : indexers) assert indexer != null;
        this.indexers = indexers;
    }

    public void start()
    {
        for (Index.Indexer indexer : indexers)
            indexer.begin();
    }

    public void onPartitionDeletion(DeletionTime deletionTime)
    {
        for (Index.Indexer indexer : indexers)
            indexer.partitionDelete(deletionTime);
    }

    public void onRangeTombstone(RangeTombstone tombstone)
    {
        for (Index.Indexer indexer : indexers)
            indexer.rangeTombstone(tombstone);
    }

    public void onInserted(Row row)
    {
        for (Index.Indexer indexer : indexers)
            indexer.insertRow(row);
    }

    public void onUpdated(Row existing, Row updated)
    {
        // build two delta rows: the cells to remove from the index and the cells to insert
        final Row.Builder toRemove = BTreeRow.sortedBuilder();
        toRemove.newRow(existing.clustering());
        toRemove.addPrimaryKeyLivenessInfo(existing.primaryKeyLivenessInfo());
        toRemove.addRowDeletion(existing.deletion());
        final Row.Builder toInsert = BTreeRow.sortedBuilder();
        toInsert.newRow(updated.clustering());
        toInsert.addPrimaryKeyLivenessInfo(updated.primaryKeyLivenessInfo());
        toInsert.addRowDeletion(updated.deletion());
        // diff listener collates the columns to be added & removed from the indexes
        RowDiffListener diffListener = new RowDiffListener()
        {
            public void onPrimaryKeyLivenessInfo(int i, Clustering clustering, LivenessInfo merged, LivenessInfo original)
            {
            }

            public void onDeletion(int i, Clustering clustering, Row.Deletion merged, Row.Deletion original)
            {
            }

            public void onComplexDeletion(int i, Clustering clustering, ColumnDefinition column, DeletionTime merged, DeletionTime original)
            {
            }

            public void onCell(int i, Clustering clustering, Cell merged, Cell original)
            {
                if (merged != null && !merged.equals(original))
                    toInsert.addCell(merged);

                if (merged == null || (original != null && shouldCleanupOldValue(original, merged)))
                    toRemove.addCell(original);

            }
        };
        Rows.diff(diffListener, updated, existing);
        Row oldRow = toRemove.build();
        Row newRow = toInsert.build();
        for (Index.Indexer indexer : indexers)
            indexer.updateRow(oldRow, newRow);
    }

    public void commit()
    {
        for (Index.Indexer indexer : indexers)
            indexer.finish();
    }

    private boolean shouldCleanupOldValue(Cell oldCell, Cell newCell)
    {
        // If either the value or timestamp is different, then we
        // should delete from the index. If not, then we can infer that
        // at least one of the cells is an ExpiringColumn and that the
        // difference is in the expiry time. In this case, we don't want to
        // delete the old value from the index as the tombstone we insert
        // will just hide the inserted value.
        // Completely identical cells (including expiring columns with
        // identical ttl & localExpirationTime) will not get this far due
        // to the oldCell.equals(newCell) in StandardUpdater.update
        return !oldCell.value().equals(newCell.value()) || oldCell.timestamp() != newCell.timestamp();
    }
}

/**
 * A single-use transaction for updating indexes for a single partition during compaction where the only
 * operation is to merge rows
 * TODO : make this smarter at batching updates so we can use a single transaction to process multiple rows in
 * a single partition
 */
private static final class IndexGCTransaction implements CompactionTransaction
{
    private final DecoratedKey key;
    private final PartitionColumns columns;
    private final int versions;
    private final int nowInSec;
    private final Collection<Index> indexes;

    private Row[] rows;

    private IndexGCTransaction(DecoratedKey key,
                               PartitionColumns columns,
                               int versions,
                               int nowInSec,
                               Collection<Index> indexes)
    {
        this.key = key;
        this.columns = columns;
        this.versions = versions;
        this.indexes = indexes;
        this.nowInSec = nowInSec;
    }

    public void start()
    {
        if (versions > 0)
            rows = new Row[versions];
    }

    public void onRowMerge(Row merged, Row...versions)
    {
        // Diff listener constructs rows representing deltas between the merged and original versions
        // These delta rows are then passed to registered indexes for removal processing
        final Row.Builder[] builders = new Row.Builder[versions.length];
        RowDiffListener diffListener = new RowDiffListener()
        {
            public void onPrimaryKeyLivenessInfo(int i, Clustering clustering, LivenessInfo merged, LivenessInfo original)
            {
                if (original != null && (merged == null || !merged.isLive(nowInSec)))
                    getBuilder(i, clustering).addPrimaryKeyLivenessInfo(original);
            }

            public void onDeletion(int i, Clustering clustering, Row.Deletion merged, Row.Deletion original)
            {
            }

            public void onComplexDeletion(int i, Clustering clustering, ColumnDefinition column, DeletionTime merged, DeletionTime original)
            {
            }

            public void onCell(int i, Clustering clustering, Cell merged, Cell original)
            {
                if (original != null && (merged == null || !merged.isLive(nowInSec)))
                    getBuilder(i, clustering).addCell(original);
            }

            // lazily create a builder only for versions that actually have deltas
            private Row.Builder getBuilder(int index, Clustering clustering)
            {
                if (builders[index] == null)
                {
                    builders[index] = BTreeRow.sortedBuilder();
                    builders[index].newRow(clustering);
                }
                return builders[index];
            }
        };

        Rows.diff(diffListener, merged, versions);

        for(int i = 0; i < builders.length; i++)
            if (builders[i] != null)
                rows[i] = builders[i].build();
    }

    public void commit()
    {
        if (rows == null)
            return;

        try (OpOrder.Group opGroup = Keyspace.writeOrder.start())
        {
            for (Index index : indexes)
            {
                Index.Indexer indexer = index.indexerFor(key, columns, nowInSec, opGroup, Type.COMPACTION);
                if (indexer == null)
                    continue;

                indexer.begin();
                for (Row row : rows)
                    if (row != null)
                        indexer.removeRow(row);
                indexer.finish();
            }
        }
    }
}

/**
 * A single-use transaction for updating indexes for a single partition during cleanup, where
 * partitions and rows are only removed
 * TODO : make this smarter at batching updates so we can use a single transaction to process multiple rows in
 * a single partition
 */
private static final class CleanupGCTransaction implements CleanupTransaction
{
    private final DecoratedKey key;
    private final PartitionColumns columns;
    private final int nowInSec;
    private final Collection<Index> indexes;

    private Row row;
    private DeletionTime partitionDelete;

    private CleanupGCTransaction(DecoratedKey key,
                                 PartitionColumns columns,
                                 int nowInSec,
                                 Collection<Index> indexes)
    {
        this.key = key;
        this.columns = columns;
        this.indexes = indexes;
        this.nowInSec = nowInSec;
    }

    public void start()
    {
    }

    public void onPartitionDeletion(DeletionTime deletionTime)
    {
        partitionDelete = deletionTime;
    }

    public void onRowDelete(Row row)
    {
        this.row = row;
    }

    public void commit()
    {
        if (row == null && partitionDelete == null)
            return;

        try (OpOrder.Group opGroup = Keyspace.writeOrder.start())
        {
            for (Index index : indexes)
            {
                Index.Indexer indexer = index.indexerFor(key, columns, nowInSec, opGroup, Type.CLEANUP);
                if (indexer == null)
                    continue;

                indexer.begin();

                if (partitionDelete != null)
                    indexer.partitionDelete(partitionDelete);

                if (row != null)
                    indexer.removeRow(row);

                indexer.finish();
            }
        }
    }
}

// Submits a single task to the blocking executor and waits for it; null tasks are skipped.
private static void executeBlocking(Callable<?> task)
{
    if (null != task)
        FBUtilities.waitOnFuture(blockingExecutor.submit(task));
}

// Maps each index to a task via the supplied function, submits all non-null tasks to the
// blocking executor and waits for every one of them to complete.
private static void executeAllBlocking(Stream<Index> indexers, Function<Index, Callable<?>> function)
{
    if (function == null)
    {
        logger.error("failed to flush indexes: {} because flush task is missing.", indexers);
        return;
    }

    List<Future<?>> waitFor = new ArrayList<>();
    indexers.forEach(indexer -> {
        Callable<?> task = function.apply(indexer);
        if (null != task)
            waitFor.add(blockingExecutor.submit(task));
    });
    FBUtilities.waitOnFutures(waitFor);
}
}
/* $Id$ */ /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.manifoldcf.authorities.authorities.sharepoint; import org.apache.manifoldcf.core.interfaces.*; import org.apache.manifoldcf.agents.interfaces.*; import org.apache.manifoldcf.authorities.interfaces.*; import org.apache.manifoldcf.authorities.system.Logging; import org.apache.manifoldcf.authorities.system.ManifoldCF; import org.apache.manifoldcf.core.util.URLEncoder; import org.apache.manifoldcf.core.util.URLDecoder; import org.apache.manifoldcf.connectorcommon.interfaces.*; import java.io.*; import java.util.*; import java.net.*; import java.util.concurrent.TimeUnit; import javax.naming.*; import javax.naming.ldap.*; import javax.naming.directory.*; import org.apache.http.conn.HttpClientConnectionManager; import org.apache.http.client.HttpClient; import org.apache.http.impl.conn.PoolingHttpClientConnectionManager; import org.apache.http.impl.client.HttpClientBuilder; import org.apache.http.protocol.HttpRequestExecutor; import org.apache.http.impl.client.HttpClients; import org.apache.http.client.config.RequestConfig; import org.apache.http.client.CredentialsProvider; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.config.SocketConfig; 
import org.apache.http.conn.ssl.SSLConnectionSocketFactory;
import org.apache.http.conn.ssl.BrowserCompatHostnameVerifier;
import org.apache.http.HttpEntity;
import org.apache.http.HttpResponse;
import org.apache.http.auth.AuthScope;
import org.apache.http.auth.NTCredentials;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.impl.client.DefaultRedirectStrategy;
import org.apache.http.util.EntityUtils;
import org.apache.http.client.HttpRequestRetryHandler;
import org.apache.http.protocol.HttpContext;
import org.apache.http.HttpHost;

/** This is the native SharePoint implementation of the IAuthorityConnector interface.
*/
public class SharePointAuthority extends org.apache.manifoldcf.authorities.authorities.BaseAuthorityConnector
{
  public static final String _rcsid = "@(#)$Id$";

  // Data from the parameters

  /** Cache manager. */
  private ICacheManager cacheManager = null;

  // True once getSessionParameters() has validated/derived the session-level parameters
  private boolean hasSessionParameters = false;

  /** Length of time that a SharePoint session can remain idle */
  private static final long SharePointExpirationInterval = 300000L;

  // SharePoint server parameters
  // These are needed for caching, so they are set at connect() time
  private boolean isClaimSpace = false;
  private String serverProtocol = null;
  private String serverUrl = null;
  private String fileBaseUrl = null;
  private String serverUserName = null;
  private String password = null;
  private String ntlmDomain = null;
  private String serverName = null;
  private String serverPortString = null;
  private String serverLocation = null;
  private String strippedUserName = null;
  private String encodedServerLocation = null;
  private String keystoreData = null;
  private String proxyHost = null;
  private String proxyPortString = null;
  private String proxyUsername = null;
  private String proxyPassword = null;
  private String proxyDomain = null;
  private String cacheLRUsize = null;
  private String cacheLifetime = null;

  // These are calculated when the session is set up
  private int serverPort = -1;
  private SPSProxyHelper proxy = null;
  // Absolute time (ms) at which the idle SharePoint session expires
  private long sharepointSessionTimeout;

  // Cache tuning values derived from cacheLifetime/cacheLRUsize at session setup
  private long responseLifetime = -1L;
  private int LRUsize = -1;

  private IKeystoreManager keystoreManager = null;

  private HttpClientConnectionManager connectionManager = null;
  private HttpClient httpClient = null;

  // Current host name
  private static String currentHost = null;
  static
  {
    // Find the current host name
    try
    {
      java.net.InetAddress addr = java.net.InetAddress.getLocalHost();

      // Get hostname
      currentHost = addr.getHostName();
    }
    catch (UnknownHostException e)
    {
      // best-effort: currentHost simply stays null if the local host can't be resolved
    }
  }

  /** Constructor.
  */
  public SharePointAuthority()
  {
  }

  /** Set thread context.
  */
  @Override
  public void setThreadContext(IThreadContext tc)
    throws ManifoldCFException
  {
    super.setThreadContext(tc);
    cacheManager = CacheManagerFactory.make(tc);
  }

  /** Clear thread context.
  */
  @Override
  public void clearThreadContext()
  {
    super.clearThreadContext();
    cacheManager = null;
  }

  /** Connect.  The configuration parameters are included.
  *@param configParams are the configuration parameters for this connection.
  */
  @Override
  public void connect(ConfigParams configParams)
  {
    super.connect(configParams);

    // Pick up all the parameters that go into the cache key here
    cacheLifetime = configParams.getParameter(SharePointConfig.PARAM_CACHELIFETIME);
    if (cacheLifetime == null)
      cacheLifetime = "1";
    cacheLRUsize = configParams.getParameter(SharePointConfig.PARAM_CACHELRUSIZE);
    if (cacheLRUsize == null)
      cacheLRUsize = "1000";

    String serverVersion = configParams.getParameter( SharePointConfig.PARAM_SERVERVERSION );
    if (serverVersion == null)
      serverVersion = "4.0";
    // Authority needs to do nothing with SharePoint version right now.

    String serverClaimSpace = configParams.getParameter( SharePointConfig.PARAM_SERVERCLAIMSPACE);
    if (serverClaimSpace == null)
      serverClaimSpace = "false";
    isClaimSpace = serverClaimSpace.equals("true");

    serverProtocol = configParams.getParameter( SharePointConfig.PARAM_SERVERPROTOCOL );
    if (serverProtocol == null)
      serverProtocol = "http";

    serverName = configParams.getParameter( SharePointConfig.PARAM_SERVERNAME );
    serverPortString = configParams.getParameter( SharePointConfig.PARAM_SERVERPORT );

    // Normalize the server location: no trailing slash, leading slash when non-empty
    serverLocation = configParams.getParameter(SharePointConfig.PARAM_SERVERLOCATION);
    if (serverLocation == null)
      serverLocation = "";
    if (serverLocation.endsWith("/"))
      serverLocation = serverLocation.substring(0,serverLocation.length()-1);
    if (serverLocation.length() > 0 && !serverLocation.startsWith("/"))
      serverLocation = "/" + serverLocation;
    encodedServerLocation = serverLocation;
    serverLocation = decodePath(serverLocation);

    serverUserName = configParams.getParameter(SharePointConfig.PARAM_SERVERUSERNAME);
    password = configParams.getObfuscatedParameter(SharePointConfig.PARAM_SERVERPASSWORD);
    // NOTE(review): serverUserName is dereferenced without a null check here; presumably the
    // framework guarantees the parameter is present -- TODO confirm
    int index = serverUserName.indexOf("\\");
    if (index != -1)
    {
      // Split "DOMAIN\user" into NTLM domain and bare user name
      strippedUserName = serverUserName.substring(index+1);
      ntlmDomain = serverUserName.substring(0,index);
    }
    else
    {
      strippedUserName = null;
      ntlmDomain = null;
    }

    // NOTE(review): these reads use the inherited 'params' field while the ones above use the
    // 'configParams' argument; presumably both refer to the same object after super.connect()
    proxyHost = params.getParameter(SharePointConfig.PARAM_PROXYHOST);
    proxyPortString = params.getParameter(SharePointConfig.PARAM_PROXYPORT);
    proxyUsername = params.getParameter(SharePointConfig.PARAM_PROXYUSER);
    proxyPassword = params.getParameter(SharePointConfig.PARAM_PROXYPASSWORD);
    proxyDomain = params.getParameter(SharePointConfig.PARAM_PROXYDOMAIN);

    keystoreData = params.getParameter(SharePointConfig.PARAM_SERVERKEYSTORE);
  }

  // All methods below this line will ONLY be called if a connect() call succeeded
  // on this instance!

  /** Check connection for sanity.
*/ @Override public String check() throws ManifoldCFException { getSharePointSession(); try { URL urlServer = new URL( serverUrl ); } catch ( MalformedURLException e ) { return "Illegal SharePoint url: "+e.getMessage(); } try { proxy.checkConnection( "/" ); } catch (ManifoldCFException e) { return e.getMessage(); } return super.check(); } /** Poll. The connection should be closed if it has been idle for too long. */ @Override public void poll() throws ManifoldCFException { long currentTime = System.currentTimeMillis(); if (proxy != null && System.currentTimeMillis() >= sharepointSessionTimeout) expireSharePointSession(); if (connectionManager != null) connectionManager.closeIdleConnections(60000L,TimeUnit.MILLISECONDS); super.poll(); } /** This method is called to assess whether to count this connector instance should * actually be counted as being connected. *@return true if the connector instance is actually connected. */ @Override public boolean isConnected() { return connectionManager != null; } /** Close the connection. Call this before discarding the repository connector. */ @Override public void disconnect() throws ManifoldCFException { // Clean up caching parameters cacheLifetime = null; cacheLRUsize = null; // Clean up SharePoint parameters isClaimSpace = false; serverUrl = null; fileBaseUrl = null; serverUserName = null; strippedUserName = null; password = null; ntlmDomain = null; serverName = null; serverLocation = null; encodedServerLocation = null; serverPort = -1; proxyHost = null; proxyPortString = null; proxyUsername = null; proxyPassword = null; proxyDomain = null; keystoreData = null; keystoreManager = null; proxy = null; httpClient = null; if (connectionManager != null) connectionManager.shutdown(); connectionManager = null; hasSessionParameters = false; super.disconnect(); } /** Obtain the access tokens for a given user name. *@param userName is the user name or identifier. *@return the response tokens (according to the current authority). 
* (Should throws an exception only when a condition cannot be properly described within the authorization response object.) */ @Override public AuthorizationResponse getAuthorizationResponse(String userName) throws ManifoldCFException { getSessionParameters(); // Construct a cache description object ICacheDescription objectDescription = new AuthorizationResponseDescription(userName, serverName,serverPortString,serverLocation,serverProtocol,serverUserName,password, this.responseLifetime,this.LRUsize); // Enter the cache ICacheHandle ch = cacheManager.enterCache(new ICacheDescription[]{objectDescription},null,null); try { ICacheCreateHandle createHandle = cacheManager.enterCreateSection(ch); try { // Lookup the object AuthorizationResponse response = (AuthorizationResponse)cacheManager.lookupObject(createHandle,objectDescription); if (response != null) return response; // Create the object. response = getAuthorizationResponseUncached(userName); // Save it in the cache cacheManager.saveObject(createHandle,objectDescription,response); // And return it... return response; } finally { cacheManager.leaveCreateSection(createHandle); } } finally { cacheManager.leaveCache(ch); } } /** Obtain the access tokens for a given user name, uncached. *@param userName is the user name or identifier. *@return the response tokens (according to the current authority). * (Should throws an exception only when a condition cannot be properly described within the authorization response object.) */ protected AuthorizationResponse getAuthorizationResponseUncached(String userName) throws ManifoldCFException { //String searchBase = "CN=Administrator,CN=Users,DC=qa-ad-76,DC=metacarta,DC=com"; int index = userName.indexOf("@"); if (index == -1) throw new ManifoldCFException("Username is in unexpected form (no @): '"+userName+"'"); String userPart = userName.substring(0,index); String domainPart = userName.substring(index+1); // First, look up user in SharePoint. 
getSharePointSession(); List<String> sharePointTokens = proxy.getAccessTokens("/", domainPart + "\\" + userPart); if (sharePointTokens == null) return RESPONSE_USERNOTFOUND_ADDITIVE; return new AuthorizationResponse(sharePointTokens.toArray(new String[0]),AuthorizationResponse.RESPONSE_OK); } /** Obtain the default access tokens for a given user name. *@param userName is the user name or identifier. *@return the default response tokens, presuming that the connect method fails. */ @Override public AuthorizationResponse getDefaultAuthorizationResponse(String userName) { // The default response if the getConnection method fails return RESPONSE_UNREACHABLE_ADDITIVE; } // UI support methods. // // These support methods are involved in setting up authority connection configuration information. The configuration methods cannot assume that the // current authority object is connected. That is why they receive a thread context argument. /** Output the configuration header section. * This method is called in the head section of the connector's configuration page. Its purpose is to add the required tabs to the list, and to output any * javascript methods that might be needed by the configuration editing HTML. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@param tabsArray is an array of tab names. Add to this array any tab names that are specific to the connector. 
*/
@Override
public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray)
  throws ManifoldCFException, IOException
{
  // Register the two configuration tabs, then emit the shared edit javascript.
  tabsArray.add(Messages.getString(locale,"SharePointAuthority.Server"));
  tabsArray.add(Messages.getString(locale,"SharePointAuthority.Cache"));
  Messages.outputResourceWithVelocity(out,locale,"editConfiguration.js",null);
}

/** Output the configuration body section.
* This method is called in the body section of the authority connector's configuration page.  Its purpose is to present the required form elements for editing.
* The coder can presume that the HTML that is output from this configuration will be within appropriate <html>, <body>, and <form> tags.  The name of the
* form is "editconnection".
*@param threadContext is the local thread context.
*@param out is the output to which any HTML should be sent.
*@param parameters are the configuration parameters, as they currently exist, for this connection being configured.
*@param tabName is the current tab name.
*/ @Override public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName) throws ManifoldCFException, IOException { Map<String,Object> velocityContext = new HashMap<String,Object>(); velocityContext.put("TabName",tabName); fillInCacheTab(velocityContext,out,parameters); fillInServerTab(velocityContext,out,parameters); Messages.outputResourceWithVelocity(out,locale,"editConfiguration_Cache.html",velocityContext); Messages.outputResourceWithVelocity(out,locale,"editConfiguration_Server.html",velocityContext); } protected static void fillInServerTab(Map<String,Object> velocityContext, IHTTPOutput out, ConfigParams parameters) throws ManifoldCFException { String serverVersion = parameters.getParameter(SharePointConfig.PARAM_SERVERVERSION); if (serverVersion == null) serverVersion = "2.0"; String serverClaimSpace = parameters.getParameter(SharePointConfig.PARAM_SERVERCLAIMSPACE); if (serverClaimSpace == null) serverClaimSpace = "false"; String serverProtocol = parameters.getParameter(SharePointConfig.PARAM_SERVERPROTOCOL); if (serverProtocol == null) serverProtocol = "http"; String serverName = parameters.getParameter(SharePointConfig.PARAM_SERVERNAME); if (serverName == null) serverName = "localhost"; String serverPort = parameters.getParameter(SharePointConfig.PARAM_SERVERPORT); if (serverPort == null) serverPort = ""; String serverLocation = parameters.getParameter(SharePointConfig.PARAM_SERVERLOCATION); if (serverLocation == null) serverLocation = ""; String userName = parameters.getParameter(SharePointConfig.PARAM_SERVERUSERNAME); if (userName == null) userName = ""; String password = parameters.getObfuscatedParameter(SharePointConfig.PARAM_SERVERPASSWORD); if (password == null) password = ""; else password = out.mapPasswordToKey(password); String keystore = parameters.getParameter(SharePointConfig.PARAM_SERVERKEYSTORE); IKeystoreManager localKeystore; if (keystore == null) localKeystore 
= KeystoreManagerFactory.make(""); else localKeystore = KeystoreManagerFactory.make("",keystore); List<Map<String,String>> certificates = new ArrayList<Map<String,String>>(); String[] contents = localKeystore.getContents(); for (String alias : contents) { String description = localKeystore.getDescription(alias); if (description.length() > 128) description = description.substring(0,125) + "..."; Map<String,String> certificate = new HashMap<String,String>(); certificate.put("ALIAS", alias); certificate.put("DESCRIPTION", description); certificates.add(certificate); } String proxyHost = parameters.getParameter(SharePointConfig.PARAM_PROXYHOST); if (proxyHost == null) proxyHost = ""; String proxyPort = parameters.getParameter(SharePointConfig.PARAM_PROXYPORT); if (proxyPort == null) proxyPort = ""; String proxyUser = parameters.getParameter(SharePointConfig.PARAM_PROXYUSER); if (proxyUser == null) proxyUser = ""; String proxyPassword = parameters.getParameter(SharePointConfig.PARAM_PROXYPASSWORD); if (proxyPassword == null) proxyPassword = ""; else proxyPassword = out.mapPasswordToKey(proxyPassword); String proxyDomain = parameters.getParameter(SharePointConfig.PARAM_PROXYDOMAIN); if (proxyDomain == null) proxyDomain = ""; // Fill in context velocityContext.put("SERVERVERSION", serverVersion); velocityContext.put("SERVERCLAIMSPACE", serverClaimSpace); velocityContext.put("SERVERPROTOCOL", serverProtocol); velocityContext.put("SERVERNAME", serverName); velocityContext.put("SERVERPORT", serverPort); velocityContext.put("SERVERLOCATION", serverLocation); velocityContext.put("SERVERUSERNAME", userName); velocityContext.put("SERVERPASSWORD", password); if (keystore != null) velocityContext.put("KEYSTORE", keystore); velocityContext.put("CERTIFICATELIST", certificates); velocityContext.put("PROXYHOST", proxyHost); velocityContext.put("PROXYPORT", proxyPort); velocityContext.put("PROXYUSER", proxyUser); velocityContext.put("PROXYPASSWORD", proxyPassword); 
velocityContext.put("PROXYDOMAIN", proxyDomain); } protected static void fillInCacheTab(Map<String,Object> velocityContext, IPasswordMapperActivity mapper, ConfigParams parameters) { String cacheLifetime = parameters.getParameter(SharePointConfig.PARAM_CACHELIFETIME); if (cacheLifetime == null) cacheLifetime = "1"; velocityContext.put("CACHELIFETIME",cacheLifetime); String cacheLRUsize = parameters.getParameter(SharePointConfig.PARAM_CACHELRUSIZE); if (cacheLRUsize == null) cacheLRUsize = "1000"; velocityContext.put("CACHELRUSIZE",cacheLRUsize); } /** Process a configuration post. * This method is called at the start of the authority connector's configuration page, whenever there is a possibility that form data for a connection has been * posted. Its purpose is to gather form information and modify the configuration parameters accordingly. * The name of the posted form is "editconnection". *@param threadContext is the local thread context. *@param variableContext is the set of variables available from the post, including binary file post information. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. *@return null if all is well, or a string error message if there is an error that should prevent saving of the connection (and cause a redirection to an error page). 
*/ @Override public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters) throws ManifoldCFException { // Cache parameters String cacheLifetime = variableContext.getParameter("cachelifetime"); if (cacheLifetime != null) parameters.setParameter(SharePointConfig.PARAM_CACHELIFETIME,cacheLifetime); String cacheLRUsize = variableContext.getParameter("cachelrusize"); if (cacheLRUsize != null) parameters.setParameter(SharePointConfig.PARAM_CACHELRUSIZE,cacheLRUsize); // SharePoint server parameters String serverVersion = variableContext.getParameter("serverVersion"); if (serverVersion != null) parameters.setParameter(SharePointConfig.PARAM_SERVERVERSION,serverVersion); String serverClaimSpace = variableContext.getParameter("serverClaimSpace"); if (serverClaimSpace != null) parameters.setParameter(SharePointConfig.PARAM_SERVERCLAIMSPACE,serverClaimSpace); String serverProtocol = variableContext.getParameter("serverProtocol"); if (serverProtocol != null) parameters.setParameter(SharePointConfig.PARAM_SERVERPROTOCOL,serverProtocol); String serverName = variableContext.getParameter("serverName"); if (serverName != null) parameters.setParameter(SharePointConfig.PARAM_SERVERNAME,serverName); String serverPort = variableContext.getParameter("serverPort"); if (serverPort != null) parameters.setParameter(SharePointConfig.PARAM_SERVERPORT,serverPort); String serverLocation = variableContext.getParameter("serverLocation"); if (serverLocation != null) parameters.setParameter(SharePointConfig.PARAM_SERVERLOCATION,serverLocation); String userName = variableContext.getParameter("serverUserName"); if (userName != null) parameters.setParameter(SharePointConfig.PARAM_SERVERUSERNAME,userName); String password = variableContext.getParameter("serverPassword"); if (password != null) parameters.setObfuscatedParameter(SharePointConfig.PARAM_SERVERPASSWORD,variableContext.mapKeyToPassword(password)); String 
proxyHost = variableContext.getParameter("proxyhost"); if (proxyHost != null) parameters.setParameter(SharePointConfig.PARAM_PROXYHOST,proxyHost); String proxyPort = variableContext.getParameter("proxyport"); if (proxyPort != null) parameters.setParameter(SharePointConfig.PARAM_PROXYPORT,proxyPort); String proxyUser = variableContext.getParameter("proxyuser"); if (proxyUser != null) parameters.setParameter(SharePointConfig.PARAM_PROXYUSER,proxyUser); String proxyPassword = variableContext.getParameter("proxypassword"); if (proxyPassword != null) parameters.setObfuscatedParameter(SharePointConfig.PARAM_PROXYPASSWORD,variableContext.mapKeyToPassword(proxyPassword)); String proxyDomain = variableContext.getParameter("proxydomain"); if (proxyDomain != null) parameters.setParameter(SharePointConfig.PARAM_PROXYDOMAIN,proxyDomain); String keystoreValue = variableContext.getParameter("keystoredata"); if (keystoreValue != null) parameters.setParameter(SharePointConfig.PARAM_SERVERKEYSTORE,keystoreValue); String configOp = variableContext.getParameter("configop"); if (configOp != null) { if (configOp.equals("Delete")) { String alias = variableContext.getParameter("shpkeystorealias"); keystoreValue = parameters.getParameter(SharePointConfig.PARAM_SERVERKEYSTORE); IKeystoreManager mgr; if (keystoreValue != null) mgr = KeystoreManagerFactory.make("",keystoreValue); else mgr = KeystoreManagerFactory.make(""); mgr.remove(alias); parameters.setParameter(SharePointConfig.PARAM_SERVERKEYSTORE,mgr.getString()); } else if (configOp.equals("Add")) { String alias = IDFactory.make(threadContext); byte[] certificateValue = variableContext.getBinaryBytes("shpcertificate"); keystoreValue = parameters.getParameter(SharePointConfig.PARAM_SERVERKEYSTORE); IKeystoreManager mgr; if (keystoreValue != null) mgr = KeystoreManagerFactory.make("",keystoreValue); else mgr = KeystoreManagerFactory.make(""); java.io.InputStream is = new java.io.ByteArrayInputStream(certificateValue); String certError = 
null; try { mgr.importCertificate(alias,is); } catch (Throwable e) { certError = e.getMessage(); } finally { try { is.close(); } catch (IOException e) { // Don't report anything } } if (certError != null) { // Redirect to error page return "Illegal certificate: "+certError; } parameters.setParameter(SharePointConfig.PARAM_SERVERKEYSTORE,mgr.getString()); } } return null; } /** View configuration. * This method is called in the body section of the authority connector's view configuration page. Its purpose is to present the connection information to the user. * The coder can presume that the HTML that is output from this configuration will be within appropriate <html> and <body> tags. *@param threadContext is the local thread context. *@param out is the output to which any HTML should be sent. *@param parameters are the configuration parameters, as they currently exist, for this connection being configured. */ @Override public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters) throws ManifoldCFException, IOException { Map<String,Object> velocityContext = new HashMap<String,Object>(); fillInCacheTab(velocityContext,out,parameters); fillInServerTab(velocityContext,out,parameters); Messages.outputResourceWithVelocity(out,locale,"viewConfiguration.html",velocityContext); } // Protected methods /** Get parameters needed for caching. 
*/ protected void getSessionParameters() throws ManifoldCFException { if (!hasSessionParameters) { try { responseLifetime = Long.parseLong(this.cacheLifetime) * 60L * 1000L; LRUsize = Integer.parseInt(this.cacheLRUsize); } catch (NumberFormatException e) { throw new ManifoldCFException("Cache lifetime or Cache LRU size must be an integer: "+e.getMessage(),e); } hasSessionParameters = true; } } protected void getSharePointSession() throws ManifoldCFException { if (proxy == null) { // Set up server URL try { if (serverPortString == null || serverPortString.length() == 0) { if (serverProtocol.equals("https")) this.serverPort = 443; else this.serverPort = 80; } else this.serverPort = Integer.parseInt(serverPortString); } catch (NumberFormatException e) { throw new ManifoldCFException(e.getMessage(),e); } int proxyPort = 8080; if (proxyPortString != null && proxyPortString.length() > 0) { try { proxyPort = Integer.parseInt(proxyPortString); } catch (NumberFormatException e) { throw new ManifoldCFException(e.getMessage(),e); } } serverUrl = serverProtocol + "://" + serverName; if (serverProtocol.equals("https")) { if (serverPort != 443) serverUrl += ":" + Integer.toString(serverPort); } else { if (serverPort != 80) serverUrl += ":" + Integer.toString(serverPort); } fileBaseUrl = serverUrl + encodedServerLocation; int connectionTimeout = 60000; int socketTimeout = 900000; // Set up ssl if indicated connectionManager = new PoolingHttpClientConnectionManager(); CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); SSLConnectionSocketFactory myFactory = null; if (keystoreData != null) { keystoreManager = KeystoreManagerFactory.make("",keystoreData); myFactory = new SSLConnectionSocketFactory(keystoreManager.getSecureSocketFactory(), new BrowserCompatHostnameVerifier()); } if (strippedUserName != null) { credentialsProvider.setCredentials( new AuthScope(serverName,serverPort), new NTCredentials(strippedUserName, password, currentHost, ntlmDomain)); } 
RequestConfig.Builder requestBuilder = RequestConfig.custom() .setCircularRedirectsAllowed(true) .setSocketTimeout(socketTimeout) .setStaleConnectionCheckEnabled(true) .setExpectContinueEnabled(false) .setConnectTimeout(connectionTimeout) .setConnectionRequestTimeout(socketTimeout); // If there's a proxy, set that too. if (proxyHost != null && proxyHost.length() > 0) { // Configure proxy authentication if (proxyUsername != null && proxyUsername.length() > 0) { if (proxyPassword == null) proxyPassword = ""; if (proxyDomain == null) proxyDomain = ""; credentialsProvider.setCredentials( new AuthScope(proxyHost, proxyPort), new NTCredentials(proxyUsername, proxyPassword, currentHost, proxyDomain)); } HttpHost proxy = new HttpHost(proxyHost, proxyPort); requestBuilder.setProxy(proxy); } HttpClientBuilder builder = HttpClients.custom() .setConnectionManager(connectionManager) .setMaxConnTotal(1) .disableAutomaticRetries() .setDefaultRequestConfig(requestBuilder.build()) .setDefaultSocketConfig(SocketConfig.custom() .setTcpNoDelay(true) .setSoTimeout(socketTimeout) .build()) .setDefaultCredentialsProvider(credentialsProvider); if (myFactory != null) builder.setSSLSocketFactory(myFactory); builder.setRequestExecutor(new HttpRequestExecutor(socketTimeout)) .setRedirectStrategy(new DefaultRedirectStrategy()); httpClient = builder.build(); proxy = new SPSProxyHelper( serverUrl, encodedServerLocation, serverLocation, serverUserName, password, org.apache.manifoldcf.connectorcommon.common.CommonsHTTPSender.class, "client-config.wsdd", httpClient, isClaimSpace ); } sharepointSessionTimeout = System.currentTimeMillis() + SharePointExpirationInterval; } protected void expireSharePointSession() throws ManifoldCFException { serverPort = -1; serverUrl = null; fileBaseUrl = null; keystoreManager = null; proxy = null; httpClient = null; if (connectionManager != null) connectionManager.shutdown(); connectionManager = null; } /** Decode a path item. 
*/
public static String pathItemDecode(String pathItem)
{
  // NOTE(review): "%20" and "+" both decode to a space here; the pre-replace
  // looks redundant but is kept for fidelity with the original behavior.
  return URLDecoder.decode(pathItem.replace("%20","+"));
}

/** Encode a path item.
*/
public static String pathItemEncode(String pathItem)
{
  // URLEncoder emits "+" for spaces; convert to the "%20" form expected in paths.
  return URLEncoder.encode(pathItem).replace("+","%20");
}

/** Given a path that is /-separated, and otherwise encoded, decode properly to convert to
* unencoded form.
*/
public static String decodePath(String relPath)
{
  String[] pathEntries = relPath.split("/");
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < pathEntries.length; i++)
  {
    if (i > 0)
      sb.append("/");
    sb.append(pathItemDecode(pathEntries[i]));
  }
  return sb.toString();
}

/** Given a path that is /-separated, and otherwise unencoded, encode properly for an actual
* URI
*/
public static String encodePath(String relPath)
{
  String[] pathEntries = relPath.split("/");
  StringBuilder sb = new StringBuilder();
  for (int i = 0; i < pathEntries.length; i++)
  {
    if (i > 0)
      sb.append("/");
    sb.append(pathItemEncode(pathEntries[i]));
  }
  return sb.toString();
}

protected static StringSet emptyStringSet = new StringSet();

/** This is the cache object descriptor for cached access tokens from
* this connector.
*/
protected static class AuthorizationResponseDescription extends org.apache.manifoldcf.core.cachemanager.BaseDescription
{
  /** The user name */
  protected final String userName;
  /** The response lifetime */
  protected final long responseLifetime;
  /** The expiration time */
  protected long expirationTime = -1;

  // Parameters designed to guarantee cache key uniqueness
  protected final String serverName;
  protected final String serverPortString;
  protected final String serverLocation;
  protected final String serverProtocol;
  protected final String serverUserName;
  protected final String password;

  /** Constructor.
  */
  public AuthorizationResponseDescription(String userName, String serverName, String serverPortString,
    String serverLocation, String serverProtocol, String serverUserName, String password,
    long responseLifetime, int LRUsize)
  {
    super("SharePointAuthority",LRUsize);
    this.userName = userName;
    this.responseLifetime = responseLifetime;
    this.serverName = serverName;
    this.serverPortString = serverPortString;
    this.serverLocation = serverLocation;
    this.serverProtocol = serverProtocol;
    this.serverUserName = serverUserName;
    this.password = password;
  }

  /** Return the invalidation keys for this object.
  */
  public StringSet getObjectKeys()
  {
    return emptyStringSet;
  }

  /** Get the critical section name, used for synchronizing the creation of the object */
  public String getCriticalSectionName()
  {
    StringBuilder sb = new StringBuilder(getClass().getName());
    sb.append("-").append(userName);
    sb.append("-").append(serverName);
    sb.append("-").append(serverPortString);
    sb.append("-").append(serverLocation);
    sb.append("-").append(serverProtocol);
    sb.append("-").append(serverUserName);
    sb.append("-").append(password);
    return sb.toString();
  }

  /** Return the object expiration interval */
  public long getObjectExpirationTime(long currentTime)
  {
    // Deadline is computed lazily on first call, then pinned.
    if (expirationTime == -1)
      expirationTime = currentTime + responseLifetime;
    return expirationTime;
  }

  public int hashCode()
  {
    int rval = userName.hashCode();
    rval += serverName.hashCode();
    rval += serverPortString.hashCode();
    rval += serverLocation.hashCode();
    rval += serverProtocol.hashCode();
    rval += serverUserName.hashCode();
    rval += password.hashCode();
    return rval;
  }

  public boolean equals(Object o)
  {
    if (!(o instanceof AuthorizationResponseDescription))
      return false;
    AuthorizationResponseDescription other = (AuthorizationResponseDescription)o;
    return other.userName.equals(userName) &&
      other.serverName.equals(serverName) &&
      other.serverPortString.equals(serverPortString) &&
      other.serverLocation.equals(serverLocation) &&
      other.serverProtocol.equals(serverProtocol) &&
      other.serverUserName.equals(serverUserName) &&
      other.password.equals(password);
  }

}

}
/* * Copyright 2015 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package io.netty.channel.unix; import io.netty.channel.ChannelException; import io.netty.util.CharsetUtil; import io.netty.util.NetUtil; import java.io.IOException; import java.net.Inet6Address; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.PortUnreachableException; import java.net.SocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ClosedChannelException; import static io.netty.channel.unix.Errors.ERRNO_EAGAIN_NEGATIVE; import static io.netty.channel.unix.Errors.ERRNO_EINPROGRESS_NEGATIVE; import static io.netty.channel.unix.Errors.ERRNO_EWOULDBLOCK_NEGATIVE; import static io.netty.channel.unix.Errors.ERROR_ECONNREFUSED_NEGATIVE; import static io.netty.channel.unix.Errors.handleConnectErrno; import static io.netty.channel.unix.Errors.ioResult; import static io.netty.channel.unix.Errors.newIOException; import static io.netty.channel.unix.NativeInetAddress.address; import static io.netty.channel.unix.NativeInetAddress.ipv4MappedIpv6Address; /** * Provides a JNI bridge to native socket operations. 
* <strong>Internal usage only!</strong> */ public class Socket extends FileDescriptor { private static volatile boolean isIpv6Preferred; @Deprecated public static final int UDS_SUN_PATH_SIZE = 100; protected final boolean ipv6; public Socket(int fd) { super(fd); ipv6 = isIPv6(fd); } /** * Returns {@code true} if we should use IPv6 internally, {@code false} otherwise. */ private boolean useIpv6(InetAddress address) { return useIpv6(this, address); } /** * Returns {@code true} if the given socket and address combination should use IPv6 internally, * {@code false} otherwise. */ protected static boolean useIpv6(Socket socket, InetAddress address) { return socket.ipv6 || address instanceof Inet6Address; } public final void shutdown() throws IOException { shutdown(true, true); } public final void shutdown(boolean read, boolean write) throws IOException { for (;;) { // We need to only shutdown what has not been shutdown yet, and if there is no change we should not // shutdown anything. This is because if the underlying FD is reused and we still have an object which // represents the previous incarnation of the FD we need to be sure we don't inadvertently shutdown the // "new" FD without explicitly having a change. final int oldState = state; if (isClosed(oldState)) { throw new ClosedChannelException(); } int newState = oldState; if (read && !isInputShutdown(newState)) { newState = inputShutdown(newState); } if (write && !isOutputShutdown(newState)) { newState = outputShutdown(newState); } // If there is no change in state, then we should not take any action. 
if (newState == oldState) { return; } if (casState(oldState, newState)) { break; } } int res = shutdown(fd, read, write); if (res < 0) { ioResult("shutdown", res); } } public final boolean isShutdown() { int state = this.state; return isInputShutdown(state) && isOutputShutdown(state); } public final boolean isInputShutdown() { return isInputShutdown(state); } public final boolean isOutputShutdown() { return isOutputShutdown(state); } public final int sendTo(ByteBuffer buf, int pos, int limit, InetAddress addr, int port) throws IOException { return sendTo(buf, pos, limit, addr, port, false); } public final int sendTo(ByteBuffer buf, int pos, int limit, InetAddress addr, int port, boolean fastOpen) throws IOException { // just duplicate the toNativeInetAddress code here to minimize object creation as this method is expected // to be called frequently byte[] address; int scopeId; if (addr instanceof Inet6Address) { address = addr.getAddress(); scopeId = ((Inet6Address) addr).getScopeId(); } else { // convert to ipv4 mapped ipv6 address; scopeId = 0; address = ipv4MappedIpv6Address(addr.getAddress()); } int flags = fastOpen ? msgFastopen() : 0; int res = sendTo(fd, useIpv6(addr), buf, pos, limit, address, scopeId, port, flags); if (res >= 0) { return res; } if (res == ERRNO_EINPROGRESS_NEGATIVE && fastOpen) { // This happens when we (as a client) have no pre-existing cookie for doing a fast-open connection. // In this case, our TCP connection will be established normally, but no data was transmitted at this time. // We'll just transmit the data with normal writes later. 
return 0; } if (res == ERROR_ECONNREFUSED_NEGATIVE) { throw new PortUnreachableException("sendTo failed"); } return ioResult("sendTo", res); } public final int sendToDomainSocket(ByteBuffer buf, int pos, int limit, byte[] path) throws IOException { int res = sendToDomainSocket(fd, buf, pos, limit, path); if (res >= 0) { return res; } return ioResult("sendToDomainSocket", res); } public final int sendToAddress(long memoryAddress, int pos, int limit, InetAddress addr, int port) throws IOException { return sendToAddress(memoryAddress, pos, limit, addr, port, false); } public final int sendToAddress(long memoryAddress, int pos, int limit, InetAddress addr, int port, boolean fastOpen) throws IOException { // just duplicate the toNativeInetAddress code here to minimize object creation as this method is expected // to be called frequently byte[] address; int scopeId; if (addr instanceof Inet6Address) { address = addr.getAddress(); scopeId = ((Inet6Address) addr).getScopeId(); } else { // convert to ipv4 mapped ipv6 address; scopeId = 0; address = ipv4MappedIpv6Address(addr.getAddress()); } int flags = fastOpen ? msgFastopen() : 0; int res = sendToAddress(fd, useIpv6(addr), memoryAddress, pos, limit, address, scopeId, port, flags); if (res >= 0) { return res; } if (res == ERRNO_EINPROGRESS_NEGATIVE && fastOpen) { // This happens when we (as a client) have no pre-existing cookie for doing a fast-open connection. // In this case, our TCP connection will be established normally, but no data was transmitted at this time. // We'll just transmit the data with normal writes later. 
return 0; } if (res == ERROR_ECONNREFUSED_NEGATIVE) { throw new PortUnreachableException("sendToAddress failed"); } return ioResult("sendToAddress", res); } public final int sendToAddressDomainSocket(long memoryAddress, int pos, int limit, byte[] path) throws IOException { int res = sendToAddressDomainSocket(fd, memoryAddress, pos, limit, path); if (res >= 0) { return res; } return ioResult("sendToAddressDomainSocket", res); } public final int sendToAddresses(long memoryAddress, int length, InetAddress addr, int port) throws IOException { return sendToAddresses(memoryAddress, length, addr, port, false); } public final int sendToAddresses(long memoryAddress, int length, InetAddress addr, int port, boolean fastOpen) throws IOException { // just duplicate the toNativeInetAddress code here to minimize object creation as this method is expected // to be called frequently byte[] address; int scopeId; if (addr instanceof Inet6Address) { address = addr.getAddress(); scopeId = ((Inet6Address) addr).getScopeId(); } else { // convert to ipv4 mapped ipv6 address; scopeId = 0; address = ipv4MappedIpv6Address(addr.getAddress()); } int flags = fastOpen ? msgFastopen() : 0; int res = sendToAddresses(fd, useIpv6(addr), memoryAddress, length, address, scopeId, port, flags); if (res >= 0) { return res; } if (res == ERRNO_EINPROGRESS_NEGATIVE && fastOpen) { // This happens when we (as a client) have no pre-existing cookie for doing a fast-open connection. // In this case, our TCP connection will be established normally, but no data was transmitted at this time. // We'll just transmit the data with normal writes later. 
return 0; } if (res == ERROR_ECONNREFUSED_NEGATIVE) { throw new PortUnreachableException("sendToAddresses failed"); } return ioResult("sendToAddresses", res); } public final int sendToAddressesDomainSocket(long memoryAddress, int length, byte[] path) throws IOException { int res = sendToAddressesDomainSocket(fd, memoryAddress, length, path); if (res >= 0) { return res; } return ioResult("sendToAddressesDomainSocket", res); } public final DatagramSocketAddress recvFrom(ByteBuffer buf, int pos, int limit) throws IOException { return recvFrom(fd, buf, pos, limit); } public final DatagramSocketAddress recvFromAddress(long memoryAddress, int pos, int limit) throws IOException { return recvFromAddress(fd, memoryAddress, pos, limit); } public final DomainDatagramSocketAddress recvFromDomainSocket(ByteBuffer buf, int pos, int limit) throws IOException { return recvFromDomainSocket(fd, buf, pos, limit); } public final DomainDatagramSocketAddress recvFromAddressDomainSocket(long memoryAddress, int pos, int limit) throws IOException { return recvFromAddressDomainSocket(fd, memoryAddress, pos, limit); } public final int recvFd() throws IOException { int res = recvFd(fd); if (res > 0) { return res; } if (res == 0) { return -1; } if (res == ERRNO_EAGAIN_NEGATIVE || res == ERRNO_EWOULDBLOCK_NEGATIVE) { // Everything consumed so just return -1 here. return 0; } throw newIOException("recvFd", res); } public final int sendFd(int fdToSend) throws IOException { int res = sendFd(fd, fdToSend); if (res >= 0) { return res; } if (res == ERRNO_EAGAIN_NEGATIVE || res == ERRNO_EWOULDBLOCK_NEGATIVE) { // Everything consumed so just return -1 here. 
return -1; } throw newIOException("sendFd", res); } public final boolean connect(SocketAddress socketAddress) throws IOException { int res; if (socketAddress instanceof InetSocketAddress) { InetSocketAddress inetSocketAddress = (InetSocketAddress) socketAddress; InetAddress inetAddress = inetSocketAddress.getAddress(); NativeInetAddress address = NativeInetAddress.newInstance(inetAddress); res = connect(fd, useIpv6(inetAddress), address.address, address.scopeId, inetSocketAddress.getPort()); } else if (socketAddress instanceof DomainSocketAddress) { DomainSocketAddress unixDomainSocketAddress = (DomainSocketAddress) socketAddress; res = connectDomainSocket(fd, unixDomainSocketAddress.path().getBytes(CharsetUtil.UTF_8)); } else { throw new Error("Unexpected SocketAddress implementation " + socketAddress); } if (res < 0) { return handleConnectErrno("connect", res); } return true; } public final boolean finishConnect() throws IOException { int res = finishConnect(fd); if (res < 0) { return handleConnectErrno("finishConnect", res); } return true; } public final void disconnect() throws IOException { int res = disconnect(fd, ipv6); if (res < 0) { handleConnectErrno("disconnect", res); } } public final void bind(SocketAddress socketAddress) throws IOException { if (socketAddress instanceof InetSocketAddress) { InetSocketAddress addr = (InetSocketAddress) socketAddress; InetAddress inetAddress = addr.getAddress(); NativeInetAddress address = NativeInetAddress.newInstance(inetAddress); int res = bind(fd, useIpv6(inetAddress), address.address, address.scopeId, addr.getPort()); if (res < 0) { throw newIOException("bind", res); } } else if (socketAddress instanceof DomainSocketAddress) { DomainSocketAddress addr = (DomainSocketAddress) socketAddress; int res = bindDomainSocket(fd, addr.path().getBytes(CharsetUtil.UTF_8)); if (res < 0) { throw newIOException("bind", res); } } else { throw new Error("Unexpected SocketAddress implementation " + socketAddress); } } public final 
void listen(int backlog) throws IOException {
    int res = listen(fd, backlog);
    if (res < 0) {
        throw newIOException("listen", res);
    }
}

/**
 * Accepts a pending connection; the native call writes the peer address bytes into
 * {@code addr}.
 *
 * @return the accepted socket's fd, or -1 when no connection is pending (EAGAIN/EWOULDBLOCK)
 */
public final int accept(byte[] addr) throws IOException {
    int res = accept(fd, addr);
    if (res >= 0) {
        return res;
    }
    if (res == ERRNO_EAGAIN_NEGATIVE || res == ERRNO_EWOULDBLOCK_NEGATIVE) {
        // Everything consumed so just return -1 here.
        return -1;
    }
    throw newIOException("accept", res);
}

/** @return the remote peer's address, or null when it could not be determined */
public final InetSocketAddress remoteAddress() {
    byte[] addr = remoteAddress(fd);
    // addr may be null if getpeername failed.
    // See https://github.com/netty/netty/issues/3328
    return addr == null ? null : address(addr, 0, addr.length);
}

/** @return the local address, or null when it could not be determined */
public final InetSocketAddress localAddress() {
    byte[] addr = localAddress(fd);
    // addr may be null if the native address lookup failed (presumably getsockname here,
    // not getpeername as the copy-pasted comment in remoteAddress() says — verify natively).
    // See https://github.com/netty/netty/issues/3328
    return addr == null ? null : address(addr, 0, addr.length);
}

// --- Socket option getters: thin wrappers over the native accessors; the native
// int results for boolean options are compared against 0. ---

public final int getReceiveBufferSize() throws IOException {
    return getReceiveBufferSize(fd);
}

public final int getSendBufferSize() throws IOException {
    return getSendBufferSize(fd);
}

public final boolean isKeepAlive() throws IOException {
    return isKeepAlive(fd) != 0;
}

public final boolean isTcpNoDelay() throws IOException {
    return isTcpNoDelay(fd) != 0;
}

public final boolean isReuseAddress() throws IOException {
    return isReuseAddress(fd) != 0;
}

public final boolean isReusePort() throws IOException {
    return isReusePort(fd) != 0;
}

public final boolean isBroadcast() throws IOException {
    return isBroadcast(fd) != 0;
}

public final int getSoLinger() throws IOException {
    return getSoLinger(fd);
}

public final int getSoError() throws IOException {
    return getSoError(fd);
}

public final int getTrafficClass() throws IOException {
    return getTrafficClass(fd, ipv6);
}

// --- Socket option setters: booleans are marshalled to C-style 0/1 ints. ---

public final void setKeepAlive(boolean keepAlive) throws IOException {
    setKeepAlive(fd, keepAlive ? 1 : 0);
}

public final void setReceiveBufferSize(int receiveBufferSize) throws IOException {
    setReceiveBufferSize(fd, receiveBufferSize);
}

public final void setSendBufferSize(int sendBufferSize) throws IOException {
    setSendBufferSize(fd, sendBufferSize);
}

public final void setTcpNoDelay(boolean tcpNoDelay) throws IOException {
    setTcpNoDelay(fd, tcpNoDelay ? 1 : 0);
}

public final void setSoLinger(int soLinger) throws IOException {
    setSoLinger(fd, soLinger);
}

public final void setReuseAddress(boolean reuseAddress) throws IOException {
    setReuseAddress(fd, reuseAddress ? 1 : 0);
}

public final void setReusePort(boolean reusePort) throws IOException {
    setReusePort(fd, reusePort ? 1 : 0);
}

public final void setBroadcast(boolean broadcast) throws IOException {
    setBroadcast(fd, broadcast ? 1 : 0);
}

public final void setTrafficClass(int trafficClass) throws IOException {
    setTrafficClass(fd, ipv6, trafficClass);
}

/** @return whether IPv6 sockets are preferred on this platform (cached by initialize()). */
public static boolean isIPv6Preferred() {
    return isIpv6Preferred;
}

private static native boolean isIPv6Preferred0(boolean ipv4Preferred);

private static native boolean isIPv6(int fd);

@Override
public String toString() {
    return "Socket{" + "fd=" + fd + '}';
}

// --- Factory methods: each wraps a freshly created native fd in a Socket. ---

public static Socket newSocketStream() {
    return new Socket(newSocketStream0());
}

public static Socket newSocketDgram() {
    return new Socket(newSocketDgram0());
}

public static Socket newSocketDomain() {
    return new Socket(newSocketDomain0());
}

public static Socket newSocketDomainDgram() {
    return new Socket(newSocketDomainDgram0());
}

/** One-time native initialization; caches the platform's IPv6 preference. */
public static void initialize() {
    isIpv6Preferred = isIPv6Preferred0(NetUtil.isIpV4StackPreferred());
}

protected static int newSocketStream0() {
    return newSocketStream0(isIPv6Preferred());
}

protected static int newSocketStream0(boolean ipv6) {
    int res = newSocketStreamFd(ipv6);
    if (res < 0) {
        // Failure to create the fd is fatal for channel construction.
        throw new ChannelException(newIOException("newSocketStream", res));
    }
    return res;
}

protected static int newSocketDgram0() {
    return newSocketDgram0(isIPv6Preferred());
}

// (this signature continues in the next chunk)
protected
static int newSocketDgram0(boolean ipv6) {
    int res = newSocketDgramFd(ipv6);
    if (res < 0) {
        throw new ChannelException(newIOException("newSocketDgram", res));
    }
    return res;
}

protected static int newSocketDomain0() {
    int res = newSocketDomainFd();
    if (res < 0) {
        throw new ChannelException(newIOException("newSocketDomain", res));
    }
    return res;
}

protected static int newSocketDomainDgram0() {
    int res = newSocketDomainDgramFd();
    if (res < 0) {
        throw new ChannelException(newIOException("newSocketDomainDgram", res));
    }
    return res;
}

// --- JNI bindings. Judging by the ERRNO_*_NEGATIVE comparisons above, negative
// results presumably encode -errno from the native side — confirm in the C sources. ---

private static native int shutdown(int fd, boolean read, boolean write);
private static native int connect(int fd, boolean ipv6, byte[] address, int scopeId, int port);
private static native int connectDomainSocket(int fd, byte[] path);
private static native int finishConnect(int fd);
private static native int disconnect(int fd, boolean ipv6);
private static native int bind(int fd, boolean ipv6, byte[] address, int scopeId, int port);
private static native int bindDomainSocket(int fd, byte[] path);
private static native int listen(int fd, int backlog);
private static native int accept(int fd, byte[] addr);
private static native byte[] remoteAddress(int fd);
private static native byte[] localAddress(int fd);
private static native int sendTo(
        int fd, boolean ipv6, ByteBuffer buf, int pos, int limit, byte[] address, int scopeId, int port, int flags);
private static native int sendToAddress(
        int fd, boolean ipv6, long memoryAddress, int pos, int limit, byte[] address, int scopeId, int port, int flags);
private static native int sendToAddresses(
        int fd, boolean ipv6, long memoryAddress, int length, byte[] address, int scopeId, int port, int flags);
private static native int sendToDomainSocket(int fd, ByteBuffer buf, int pos, int limit, byte[] path);
private static native int sendToAddressDomainSocket(int fd, long memoryAddress, int pos, int limit, byte[] path);
private static native int sendToAddressesDomainSocket(int fd, long memoryAddress, int length, byte[] path);
private static native DatagramSocketAddress recvFrom(
        int fd, ByteBuffer buf, int pos, int limit) throws IOException;
private static native DatagramSocketAddress recvFromAddress(
        int fd, long memoryAddress, int pos, int limit) throws IOException;
private static native DomainDatagramSocketAddress recvFromDomainSocket(
        int fd, ByteBuffer buf, int pos, int limit) throws IOException;
private static native DomainDatagramSocketAddress recvFromAddressDomainSocket(
        int fd, long memoryAddress, int pos, int limit) throws IOException;
private static native int recvFd(int fd);
private static native int sendFd(int socketFd, int fd);
private static native int msgFastopen();
private static native int newSocketStreamFd(boolean ipv6);
private static native int newSocketDgramFd(boolean ipv6);
private static native int newSocketDomainFd();
private static native int newSocketDomainDgramFd();
private static native int isReuseAddress(int fd) throws IOException;
private static native int isReusePort(int fd) throws IOException;
private static native int getReceiveBufferSize(int fd) throws IOException;
private static native int getSendBufferSize(int fd) throws IOException;
private static native int isKeepAlive(int fd) throws IOException;
private static native int isTcpNoDelay(int fd) throws IOException;
private static native int isBroadcast(int fd) throws IOException;
private static native int getSoLinger(int fd) throws IOException;
private static native int getSoError(int fd) throws IOException;
private static native int getTrafficClass(int fd, boolean ipv6) throws IOException;
private static native void setReuseAddress(int fd, int reuseAddress) throws IOException;
// NOTE(review): parameter is named reuseAddress but this is the reuse-port setter —
// likely a copy-paste name; rename on the native-header side too if fixed.
private static native void setReusePort(int fd, int reuseAddress) throws IOException;
private static native void setKeepAlive(int fd, int keepAlive) throws IOException;
private static native void setReceiveBufferSize(int fd, int receiveBufferSize) throws IOException;
private static native void setSendBufferSize(int fd, int sendBufferSize) throws IOException;
private static native void setTcpNoDelay(int fd, int tcpNoDelay) throws IOException;
private static native void setSoLinger(int fd, int soLinger) throws IOException;
private static native void setBroadcast(int fd, int broadcast) throws IOException;
private static native void setTrafficClass(int fd, boolean ipv6, int trafficClass) throws IOException;
}
/* * Copyright 2009 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static com.google.javascript.jscomp.CompilerTestCase.lines; import static com.google.javascript.jscomp.testing.NodeSubject.assertNode; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import com.google.javascript.jscomp.ExpressionDecomposer.DecompositionType; import com.google.javascript.jscomp.type.SemanticReverseAbstractInterpreter; import com.google.javascript.rhino.IR; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; import com.google.javascript.rhino.jstype.JSType; import com.google.javascript.rhino.jstype.JSTypeRegistry; import java.util.HashSet; import java.util.Set; import java.util.function.Function; import javax.annotation.Nullable; import junit.framework.TestCase; /** * Unit tests for {@link ExpressionDecomposer} * * @author johnlenz@google.com (John Lenz) */ // Note: functions "foo" and "goo" are external functions in the helper. 
public final class ExpressionDecomposerTest extends TestCase {
  // Whether calls like o.m(foo()) may be decomposed into a .call(...) form so the
  // argument can be extracted (see the *4b/*5b/*6b test variants).
  private boolean allowMethodCallDecomposing;
  private final Set<String> knownConstants = new HashSet<>();

  // How many times to run `moveExpression` or `exposeExpression`.
  private int times;

  // Whether we should run type checking and test the type information in the output expression
  private boolean shouldTestTypes;

  @Override
  public void setUp() {
    allowMethodCallDecomposing = false;
    knownConstants.clear();
    times = 1;
    // Tests using ES6+ features not in the typechecker should set this option to false
    shouldTestTypes = true;
  }

  public void testCanExposeExpression1() {
    // Can't move or decompose some classes of expressions.
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "while(foo());", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "while(x = goo()&&foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "while(x += goo()&&foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "do{}while(foo());", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "for(;foo(););", "foo");
    // This case could be supported for loops without conditional continues
    // by moving the increment into the loop body.
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "for(;;foo());", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "for(foo();;);", "foo");
    // This is potentially doable but a bit too complex currently.
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "switch(1){case foo():;}", "foo");
  }

  public void testCanExposeExpression2() {
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "x = foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "var x = foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "const x = foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "let x = foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "if(foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "switch(foo()){}", "foo");
    // NOTE(review): duplicate of the previous assertion; kept as in the original.
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "switch(foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "function f(){ return foo();}", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "x = foo() && 1", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "x = foo() || 1", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "x = foo() ? 0 : 1", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "(function(a){b = a})(foo())", "foo");
    helperCanExposeExpression(
        DecompositionType.MOVABLE, "function f(){ throw foo();}", "foo");
  }

  public void testCanExposeExpression3() {
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "x = 0 && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "x = 1 || foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "var x = 1 ? foo() : 0", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "const x = 1 ? foo() : 0", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "let x = 1 ? foo() : 0", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "goo() && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "x = goo() && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "x += goo() && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "var x = goo() && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "const x = goo() && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "let x = goo() && foo()", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "if(goo() && foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "switch(goo() && foo()){}", "foo");
    // NOTE(review): duplicate of the previous assertion; kept as in the original.
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "switch(goo() && foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "switch(x = goo() && foo()){}", "foo");
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE, "function f(){ return goo() && foo();}", "foo");
  }

  public void testCanExposeExpression4a() {
    // 'this' must be preserved in call.
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "if (goo.a(1, foo()));", "foo");
  }

  public void testCanExposeExpression4b() {
    allowMethodCallDecomposing = true;
    helperCanExposeExpression(DecompositionType.DECOMPOSABLE, "if (goo.a(1, foo()));", "foo");
  }

  public void testCanExposeExpression5a() {
    // 'this' must be preserved in call.
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "if (goo['a'](foo()));", "foo");
  }

  public void testCanExposeExpression5b() {
    allowMethodCallDecomposing = true;
    helperCanExposeExpression(DecompositionType.DECOMPOSABLE, "if (goo['a'](foo()));", "foo");
  }

  public void testCanExposeExpression6a() {
    // 'this' must be preserved in call.
helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "z:if (goo.a(1, foo()));", "foo");
  }

  public void testCanExposeExpression6b() {
    allowMethodCallDecomposing = true;
    helperCanExposeExpression(DecompositionType.DECOMPOSABLE, "z:if (goo.a(1, foo()));", "foo");
  }

  public void testCanExposeExpression7() {
    // Verify calls to function expressions are movable.
    helperCanExposeFunctionExpression(
        DecompositionType.MOVABLE,
        lines(
            "(function(map){descriptions_=map})(",
            " function(){",
            " var ret={};",
            " ret[INIT]='a';",
            " ret[MIGRATION_BANNER_DISMISS]='b';",
            " return ret",
            " }());"),
        2);
  }

  public void testCanExposeExpression8() {
    // Can it be decomposed?
    helperCanExposeExpression(
        DecompositionType.DECOMPOSABLE,
        lines(
            "HangoutStarter.prototype.launchHangout = function() {",
            " var self = a.b;",
            " var myUrl = new goog.Uri(",
            " getDomServices_(self).getDomHelper().getWindow().location.href);",
            "};"),
        "getDomServices_");

    // Verify it properly exposes the target expression.
    helperExposeExpression(
        lines(
            "HangoutStarter.prototype.launchHangout = function() {",
            " var self = a.b;",
            " var myUrl =",
            " new goog.Uri(getDomServices_(self).getDomHelper().getWindow().location.href);",
            "};"),
        "getDomServices_",
        lines(
            "HangoutStarter.prototype.launchHangout = function() {",
            " var self = a.b;",
            " var temp_const$jscomp$0 = goog.Uri;",
            " var myUrl = new temp_const$jscomp$0(",
            " getDomServices_(self).getDomHelper().getWindow().location.href);",
            "}"));

    // Verify the results can be properly moved.
    helperMoveExpression(
        lines(
            "HangoutStarter.prototype.launchHangout = function() {",
            " var self = a.b;",
            " var temp_const$jscomp$0 = goog.Uri;",
            " var myUrl = new temp_const$jscomp$0(",
            " getDomServices_(self).getDomHelper().getWindow().location.href);",
            "}"),
        "getDomServices_",
        lines(
            "HangoutStarter.prototype.launchHangout = function() {",
            " var self=a.b;",
            " var temp_const$jscomp$0=goog.Uri;",
            " var result$jscomp$0=getDomServices_(self);",
            " var myUrl=new temp_const$jscomp$0(",
            " result$jscomp$0.getDomHelper().getWindow().location.href);",
            "}"));
  }

  public void testCanExposeExpression9() {
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "function *f() { for (let x of yield y) {} }", "yield");
  }

  public void testCanExposeExpression10() {
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "function *f() { for (let x in yield y) {} }", "yield");
  }

  public void testCanExposeExpression11() {
    // expressions in parameter lists
    helperCanExposeExpression(DecompositionType.UNDECOMPOSABLE, "function f(x = foo()) {}", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "function f({[foo()]: x}) {}", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "(function (x = foo()) {})()", "foo");
    helperCanExposeExpression(
        DecompositionType.UNDECOMPOSABLE, "(function ({[foo()]: x}) {})()", "foo");
  }

  public void testCanExposeExpression12() {
    // Test destructuring rhs is evaluated before the lhs
    shouldTestTypes = false;
    helperCanExposeExpression(DecompositionType.MOVABLE, "const {a, b = goo()} = foo();", "foo");
    helperCanExposeExpression(DecompositionType.MOVABLE, "const [a, b = goo()] = foo();", "foo");
    helperCanExposeExpression(DecompositionType.MOVABLE, "({a, b = goo()} = foo());", "foo");
    // TODO(b/73902507): We probably want to treat this as UNDECOMPOSABLE, since it's a lot of work
    // to handle default values correctly. See also testMoveExpression15.
    helperCanExposeExpression(DecompositionType.DECOMPOSABLE, "[{ [foo()]: a } = goo()] = arr;", "foo");
  }

  public void testMoveExpression1() {
    // There isn't a reason to do this, but it works.
    helperMoveExpression("foo()", "foo", "var result$jscomp$0 = foo(); result$jscomp$0;");
  }

  public void testMoveExpression2() {
    helperMoveExpression(
        "x = foo()", "foo", "var result$jscomp$0 = foo(); x = result$jscomp$0;");
  }

  public void testMoveExpression3() {
    helperMoveExpression(
        "var x = foo()", "foo", "var result$jscomp$0 = foo(); var x = result$jscomp$0;");
  }

  public void testMoveExpression4() {
    shouldTestTypes = false;
    helperMoveExpression(
        "const x = foo()", "foo", "var result$jscomp$0 = foo(); const x = result$jscomp$0;");
  }

  public void testMoveExpression5() {
    shouldTestTypes = false;
    helperMoveExpression(
        "let x = foo()", "foo", "var result$jscomp$0 = foo(); let x = result$jscomp$0;");
  }

  public void testMoveExpression6() {
    helperMoveExpression(
        "if(foo()){}", "foo", "var result$jscomp$0 = foo(); if (result$jscomp$0);");
  }

  public void testMoveExpression7() {
    helperMoveExpression(
        "switch(foo()){}", "foo", "var result$jscomp$0 = foo(); switch(result$jscomp$0){}");
  }

  public void testMoveExpression8() {
    helperMoveExpression(
        "switch(1 + foo()){}", "foo", "var result$jscomp$0 = foo(); switch(1 + result$jscomp$0){}");
  }

  public void testMoveExpression9() {
    helperMoveExpression(
        "function f(){ return foo();}",
        "foo",
        "function f(){ var result$jscomp$0 = foo(); return result$jscomp$0;}");
  }

  public void testMoveExpression10() {
    helperMoveExpression(
        "x = foo() && 1", "foo", "var result$jscomp$0 = foo(); x = result$jscomp$0 && 1");
  }

  public void testMoveExpression11() {
    helperMoveExpression(
        "x = foo() || 1", "foo", "var result$jscomp$0 = foo(); x = result$jscomp$0 || 1");
  }

  public void testMoveExpression12() {
    helperMoveExpression(
        "x = foo() ? 0 : 1", "foo", "var result$jscomp$0 = foo(); x = result$jscomp$0 ? 0 : 1");
  }

  public void testMoveExpression13() {
    shouldTestTypes = false;
    helperMoveExpression(
        "const {a, b} = foo();", "foo", "var result$jscomp$0 = foo(); const {a, b} = result$jscomp$0;");
  }

  public void testMoveExpression14() {
    shouldTestTypes = false;
    helperMoveExpression(
        "({a, b} = foo());", "foo", "var result$jscomp$0 = foo(); ({a, b} = result$jscomp$0);");
  }

  public void testMoveExpression15() {
    // TODO(b/73902507): fix this test. we can't just unilaterally call foo() before the
    // the destructuring, since foo() is conditionally evaluated.
    // We could do something like what happens in transpilation to correctly decompose this
    // expression. However, that would be a lot of work for probably little gain, and we
    // should just treat foo() as undecomposable for now.
    shouldTestTypes = false;
    helperMoveExpression(
        "const [a = foo()] = arr;", "foo", "var result$jscomp$0 = foo(); const [a = result$jscomp$0] = arr;");
  }

  /* Decomposition tests. */

  public void testExposeExpression1() {
    helperExposeExpression(
        "x = 0 && foo()",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = 0) temp$jscomp$0 = foo(); x = temp$jscomp$0;");
  }

  public void testExposeExpression2() {
    helperExposeExpression(
        "x = 1 || foo()",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = 1); else temp$jscomp$0=foo(); x = temp$jscomp$0;");
  }

  public void testExposeExpression3() {
    helperExposeExpression(
        "var x = 1 ? foo() : 0",
        "foo",
        "var temp$jscomp$0;"
            + " if (1) temp$jscomp$0 = foo(); else temp$jscomp$0 = 0;var x = temp$jscomp$0;");
  }

  public void testExposeExpression4() {
    shouldTestTypes = false;
    helperExposeExpression(
        "const x = 1 ? foo() : 0",
        "foo",
        "var temp$jscomp$0;"
            + " if (1) temp$jscomp$0 = foo(); else temp$jscomp$0 = 0;const x = temp$jscomp$0;");
  }

  public void testExposeExpression5() {
    shouldTestTypes = false;
    helperExposeExpression(
        "let x = 1 ? foo() : 0",
        "foo",
        "var temp$jscomp$0;"
            + " if (1) temp$jscomp$0 = foo(); else temp$jscomp$0 = 0;let x = temp$jscomp$0;");
  }

  public void testExposeExpression6() {
    helperExposeExpression(
        "goo() && foo()", "foo", "if (goo()) foo();");
  }

  public void testExposeExpression7() {
    helperExposeExpression(
        "x = goo() && foo()",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo(); x = temp$jscomp$0;");
  }

  public void testExposeExpression8() {
    helperExposeExpression(
        "var x = 1 + (goo() && foo())",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo();"
            + "var x = 1 + temp$jscomp$0;");
  }

  public void testExposeExpression9() {
    shouldTestTypes = false;
    helperExposeExpression(
        "const x = 1 + (goo() && foo())",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo();"
            + "const x = 1 + temp$jscomp$0;");
  }

  public void testExposeExpression10() {
    shouldTestTypes = false;
    helperExposeExpression(
        "let x = 1 + (goo() && foo())",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo();"
            + "let x = 1 + temp$jscomp$0;");
  }

  public void testExposeExpression11() {
    helperExposeExpression(
        "if(goo() && foo());",
        "foo",
        lines(
            "var temp$jscomp$0;",
            "if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo();",
            "if(temp$jscomp$0);"));
  }

  public void testExposeExpression12() {
    helperExposeExpression(
        "switch(goo() && foo()){}",
        "foo",
        lines(
            "var temp$jscomp$0;",
            "if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo();",
            "switch(temp$jscomp$0){}"));
  }

  public void testExposeExpression13() {
    helperExposeExpression(
        "switch(1 + goo() + foo()){}",
        "foo",
        "var temp_const$jscomp$0 = 1 + goo(); switch(temp_const$jscomp$0 + foo()){}");
  }

  public void testExposeExpression14() {
    helperExposeExpression(
        "function f(){ return goo() && foo();}",
        "foo",
        lines(
            "function f() {",
            " var temp$jscomp$0; if (temp$jscomp$0 = goo()) temp$jscomp$0 = foo();",
            " return temp$jscomp$0;",
            "}"));
  }

  public void testExposeExpression15() {
    // TODO(johnlenz): We really want a constant marking pass.
    // The value "goo" should be constant, but it isn't known to be so.
    helperExposeExpression(
        "if (goo(1, goo(2), (1 ? foo() : 0)));",
        "foo",
        lines(
            "var temp_const$jscomp$1 = goo;",
            "var temp_const$jscomp$0 = goo(2);",
            "var temp$jscomp$2;",
            "if (1) temp$jscomp$2 = foo(); else temp$jscomp$2 = 0;",
            "if (temp_const$jscomp$1(1, temp_const$jscomp$0, temp$jscomp$2));"));
  }

  public void testExposeExpression16() {
    helperExposeExpression(
        "throw bar() && foo();",
        "foo",
        "var temp$jscomp$0; if (temp$jscomp$0 = bar()) temp$jscomp$0=foo(); throw temp$jscomp$0;");
  }

  public void testExposeExpression17() {
    allowMethodCallDecomposing = true;
    helperExposeExpression(
        "x.foo(y())",
        "y",
        lines(
            "var temp_const$jscomp$1 = x;",
            "var temp_const$jscomp$0 = temp_const$jscomp$1.foo;",
            "temp_const$jscomp$0.call(temp_const$jscomp$1, y());"));
  }

  public void testExposeExpression18() {
    allowMethodCallDecomposing = true;
    shouldTestTypes = false;
    helperExposeExpression(
        lines(
            "const {a, b, c} = condition ?",
            " y() :",
            " {a: 0, b: 0, c: 1};"),
        "y",
        lines(
            "var temp$jscomp$0;",
            "if (condition) {",
            " temp$jscomp$0 = y();",
            "} else {",
            " temp$jscomp$0 = {a: 0, b: 0, c: 1};",
            "}",
            "const {a, b, c} = temp$jscomp$0;"));
  }

  public void testMoveClass1() {
    shouldTestTypes = false;
    helperMoveExpression(
        "alert(class X {});",
        ExpressionDecomposerTest::findClass,
        "var result$jscomp$0 = class X {}; alert(result$jscomp$0);");
  }

  public void testMoveClass2() {
    shouldTestTypes = false;
    helperMoveExpression(
        "console.log(1, 2, class X {});",
        ExpressionDecomposerTest::findClass,
        "var result$jscomp$0 = class X {}; console.log(1, 2, result$jscomp$0);");
  }

  public void testMoveYieldExpression1() {
    helperMoveExpression(
        "function *f() { return { a: yield 1, c: foo(yield 2, yield 3) }; }",
        "yield",
        lines(
            "function *f() {",
            " var result$jscomp$0 = yield 1;",
            " return { a: result$jscomp$0, c: foo(yield 2, yield 3) };",
            "}"));
    helperMoveExpression(
        "function *f() { return { a: 0, c: foo(yield 2, yield 3) }; }",
        "yield",
        lines(
            "function *f() {",
            " var result$jscomp$0 = yield 2;",
            " return { a: 0, c: foo(result$jscomp$0, yield 3) };",
            "}"));
    helperMoveExpression(
        "function *f() { return { a: 0, c: foo(1, yield 3) }; }",
        "yield",
        lines(
            "function *f() {",
            " var result$jscomp$0 = yield 3;",
            " return { a: 0, c: foo(1, result$jscomp$0) };",
            "}"));
  }

  public void testMoveYieldExpression2() {
    helperMoveExpression(
        "function *f() { return (yield 1) || (yield 2); }",
        "yield",
        lines(
            "function *f() {",
            " var result$jscomp$0 = yield 1;",
            " return result$jscomp$0 || (yield 2);",
            "}"));
  }

  public void testMoveYieldExpression3() {
    helperMoveExpression(
        "function *f() { return x.y(yield 1); }",
        "yield",
        lines(
            "function *f() {",
            " var result$jscomp$0 = yield 1;",
            " return x.y(result$jscomp$0);",
            "}"));
  }

  public void testExposeYieldExpression1() {
    helperExposeExpression(
        "function *f(x) { return x || (yield 2); }",
        "yield",
        lines(
            "function *f(x) {",
            " var temp$jscomp$0;",
            " if (temp$jscomp$0=x); else temp$jscomp$0 = yield 2;",
            " return temp$jscomp$0",
            "}"));
  }

  public void testExposeYieldExpression2() {
    allowMethodCallDecomposing = true;
    helperExposeExpression(
        "function *f() { return x.y(yield 1); }",
        "yield",
        lines(
            "function *f() {",
            " var temp_const$jscomp$1 = x;",
            " var temp_const$jscomp$0 = temp_const$jscomp$1.y;",
            " return temp_const$jscomp$0.call(temp_const$jscomp$1, yield 1);",
            "}"));
  }

  public void testExposeYieldExpression3() {
    allowMethodCallDecomposing = true;
    String before = "function *f() { return g.call(yield 1); }";
    String after =
        lines(
            "function *f() {",
            " var temp_const$jscomp$1 = g;",
            " var temp_const$jscomp$0 = temp_const$jscomp$1.call;",
            " return temp_const$jscomp$0.call(temp_const$jscomp$1, yield 1);",
            "}");
    helperExposeExpression(before, "yield", after);
    // Check that we don't decompose again, which would result in an infinite loop when inlining
    // functions.
times = 2;
    helperExposeExpression(before, "yield", after);
  }

  public void testExposeYieldExpression4() {
    allowMethodCallDecomposing = true;
    helperExposeExpression(
        "function *f() { return g.apply([yield 1, yield 2]); }",
        "yield",
        lines(
            "function *f() {",
            " var temp_const$jscomp$1 = g;",
            " var temp_const$jscomp$0 = temp_const$jscomp$1.apply;",
            " return temp_const$jscomp$0.call(temp_const$jscomp$1, [yield 1, yield 2]);",
            "}"));
  }

  // Simple name on LHS of assignment-op.
  public void testExposePlusEquals1() {
    helperExposeExpression(
        "var x = 0; x += foo() + 1",
        "foo",
        "var x = 0; var temp_const$jscomp$0 = x; x = temp_const$jscomp$0 + (foo() + 1);");
    helperExposeExpression(
        "var x = 0; y = (x += foo()) + x",
        "foo",
        "var x = 0; var temp_const$jscomp$0 = x; y = (x = temp_const$jscomp$0 + foo()) + x");
  }

  // Structure on LHS of assignment-op.
  public void testExposePlusEquals2() {
    helperExposeExpression(
        "var x = {}; x.a += foo() + 1",
        "foo",
        lines(
            "var x = {}; var temp_const$jscomp$0 = x;",
            "var temp_const$jscomp$1 = temp_const$jscomp$0.a;",
            "temp_const$jscomp$0.a = temp_const$jscomp$1 + (foo() + 1);"));
    helperExposeExpression(
        "var x = {}; y = (x.a += foo()) + x.a",
        "foo",
        lines(
            "var x = {}; var temp_const$jscomp$0 = x;",
            "var temp_const$jscomp$1 = temp_const$jscomp$0.a;",
            "y = (temp_const$jscomp$0.a = temp_const$jscomp$1 + foo()) + x.a"));
  }

  // Constant object on LHS of assignment-op.
  public void testExposePlusEquals3() {
    helperExposeExpression(
        "/** @const */ var XX = {}; XX.a += foo() + 1",
        "foo",
        "var XX = {}; var temp_const$jscomp$0 = XX.a;"
            + "XX.a = temp_const$jscomp$0 + (foo() + 1);");
    helperExposeExpression(
        "var XX = {}; y = (XX.a += foo()) + XX.a",
        "foo",
        "var XX = {}; var temp_const$jscomp$0 = XX.a;"
            + "y = (XX.a = temp_const$jscomp$0 + foo()) + XX.a");
  }

  // Function call on LHS of assignment-op.
  public void testExposePlusEquals4() {
    helperExposeExpression(
        "var x = {}; goo().a += foo() + 1",
        "foo",
        lines(
            "var x = {};",
            "var temp_const$jscomp$0 = goo();",
            "var temp_const$jscomp$1 = temp_const$jscomp$0.a;",
            "temp_const$jscomp$0.a = temp_const$jscomp$1 + (foo() + 1);"));
    helperExposeExpression(
        "var x = {}; y = (goo().a += foo()) + goo().a",
        "foo",
        lines(
            "var x = {};",
            "var temp_const$jscomp$0 = goo();",
            "var temp_const$jscomp$1 = temp_const$jscomp$0.a;",
            "y = (temp_const$jscomp$0.a = temp_const$jscomp$1 + foo()) + goo().a"));
  }

  // Test multiple levels
  public void testExposePlusEquals5() {
    helperExposeExpression(
        "var x = {}; goo().a.b += foo() + 1",
        "foo",
        lines(
            "var x = {};",
            "var temp_const$jscomp$0 = goo().a;",
            "var temp_const$jscomp$1 = temp_const$jscomp$0.b;",
            "temp_const$jscomp$0.b = temp_const$jscomp$1 + (foo() + 1);"));
    helperExposeExpression(
        "var x = {}; y = (goo().a.b += foo()) + goo().a",
        "foo",
        lines(
            "var x = {};",
            "var temp_const$jscomp$0 = goo().a;",
            "var temp_const$jscomp$1 = temp_const$jscomp$0.b;",
            "y = (temp_const$jscomp$0.b = temp_const$jscomp$1 + foo()) + goo().a"));
  }

  public void testExposeObjectLit1() {
    // Validate that getter and setters methods are seen as side-effect
    // free and that values can move past them. We don't need to be
    // concerned with exposing the getter or setter here but the
    // decomposer does not have a method of exposing properties, only variables.
    helperMoveExpression(
        "var x = {get a() {}, b: foo()};",
        "foo",
        "var result$jscomp$0=foo();var x = {get a() {}, b: result$jscomp$0};");
    helperMoveExpression(
        "var x = {set a(p) {}, b: foo()};",
        "foo",
        "var result$jscomp$0=foo();var x = {set a(p) {}, b: result$jscomp$0};");
  }

  public void testFindExpressionRoot1() {
    assertNode(findExpressionRoot("var x = f()", "f")).hasType(Token.VAR);
  }

  public void testFindExpressionRoot2() {
    assertNode(findExpressionRoot("foo(bar(f()));", "f")).hasType(Token.EXPR_RESULT);
  }

  public void testFindExpressionRoot3() {
    assertThat(findExpressionRoot("for (let x of f()) {}", "f")).isNull();
  }

  public void testFindExpressionRoot4() {
    assertThat(findExpressionRoot("for (let x in f()) {}", "f")).isNull();
  }

  public void testFindExpressionRoot5() {
    assertNode(findExpressionRoot("for (let x = f();;) {}", "f")).hasType(Token.FOR);
  }

  /** Test case helpers. */

  /**
   * @return The result of calling {@link ExpressionDecomposer#findExpressionRoot} on the CALL
   * node in {@code js} whose callee is a NAME matching {@code name}.
*/ @Nullable private Node findExpressionRoot(String js, String name) { Compiler compiler = getCompiler(); Node tree = parse(compiler, js); Node call = findCall(tree, name); checkNotNull(call); Node root = ExpressionDecomposer.findExpressionRoot(call); if (root != null) { checkState(NodeUtil.isStatement(root), root); } return root; } private void helperCanExposeFunctionExpression( DecompositionType expectedResult, String code, int call) { Compiler compiler = getCompiler(); ExpressionDecomposer decomposer = new ExpressionDecomposer( compiler, compiler.getUniqueNameIdSupplier(), knownConstants, newScope(), allowMethodCallDecomposing); Node tree = parse(compiler, code); assertNotNull(tree); Node externsRoot = parse(compiler, "function goo() {} function foo() {}"); assertNotNull(externsRoot); Node callSite = findCall(tree, null, call); assertNotNull("Call " + call + " was not found.", callSite); compiler.resetUniqueNameId(); DecompositionType result = decomposer.canExposeExpression(callSite); assertEquals(expectedResult, result); } private void helperCanExposeExpression( DecompositionType expectedResult, String code, String fnName) { Compiler compiler = getCompiler(); ExpressionDecomposer decomposer = new ExpressionDecomposer( compiler, compiler.getUniqueNameIdSupplier(), knownConstants, newScope(), allowMethodCallDecomposing); Node tree = parse(compiler, code); assertNotNull(tree); Node externsRoot = parse(compiler, "function goo() {} function foo() {}"); assertNotNull(externsRoot); Node callSite = findCall(tree, fnName); assertNotNull("Call to " + fnName + " was not found.", callSite); compiler.resetUniqueNameId(); DecompositionType result = decomposer.canExposeExpression(callSite); assertEquals(expectedResult, result); } private void helperExposeExpression( String code, String fnName, String expectedResult) { helperExposeExpression(code, tree -> findCall(tree, fnName), expectedResult); } private void helperExposeExpression( String code, Function<Node, Node> 
nodeFinder, String expectedResult) { Compiler compiler = getCompiler(); ExpressionDecomposer decomposer = new ExpressionDecomposer( compiler, compiler.getUniqueNameIdSupplier(), knownConstants, newScope(), allowMethodCallDecomposing); decomposer.setTempNamePrefix("temp"); decomposer.setResultNamePrefix("result"); Node expectedRoot = parse(compiler, expectedResult); Node tree = parse(compiler, code); Node originalTree = tree.cloneTree(); assertNotNull(tree); if (shouldTestTypes) { processForTypecheck(compiler, tree); } Node expr = nodeFinder.apply(tree); assertWithMessage("Expected node was not found.").that(expr).isNotNull(); DecompositionType result = decomposer.canExposeExpression(expr); assertEquals(DecompositionType.DECOMPOSABLE, result); compiler.resetUniqueNameId(); for (int i = 0; i < times; i++) { decomposer.exposeExpression(expr); } validateSourceInfo(compiler, tree); String explanation = expectedRoot.checkTreeEquals(tree); assertNull("\nExpected: " + compiler.toSource(expectedRoot) + "\nResult: " + compiler.toSource(tree) + "\n" + explanation, explanation); if (shouldTestTypes) { Node trueExpr = nodeFinder.apply(originalTree); compiler.resetUniqueNameId(); for (int i = 0; i < times; i++) { decomposer.exposeExpression(trueExpr); } processForTypecheck(compiler, originalTree); checkTypeStringsEqualAsTree(originalTree, tree); } } private void helperMoveExpression( String code, String fnName, String expectedResult) { helperMoveExpression(code, tree -> findCall(tree, fnName), expectedResult); } private void helperMoveExpression( String code, Function<Node, Node> nodeFinder, String expectedResult) { Compiler compiler = getCompiler(); ExpressionDecomposer decomposer = new ExpressionDecomposer( compiler, compiler.getUniqueNameIdSupplier(), knownConstants, newScope(), allowMethodCallDecomposing); decomposer.setTempNamePrefix("temp"); decomposer.setResultNamePrefix("result"); Node expectedRoot = parse(compiler, expectedResult); Node tree = parse(compiler, code); 
Node originalTree = tree.cloneTree(); assertNotNull(tree); if (shouldTestTypes) { processForTypecheck(compiler, tree); } Node expr = nodeFinder.apply(tree); assertWithMessage("Expected node was not found.").that(expr).isNotNull(); compiler.resetUniqueNameId(); for (int i = 0; i < times; i++) { decomposer.moveExpression(expr); } validateSourceInfo(compiler, tree); String explanation = expectedRoot.checkTreeEquals(tree); assertNull("\nExpected: " + compiler.toSource(expectedRoot) + "\nResult: " + compiler.toSource(tree) + "\n" + explanation, explanation); if (shouldTestTypes) { // find a basis for comparison: Node originalExpr = nodeFinder.apply(originalTree); compiler.resetUniqueNameId(); for (int i = 0; i < times; i++) { decomposer.moveExpression(originalExpr); } processForTypecheck(compiler, originalTree); checkTypeStringsEqualAsTree(originalTree, tree); } } private void checkTypeStringsEqualAsTree(Node rootExpected, Node rootActual) { JSType expectedType = rootExpected.getJSType(); JSType actualType = rootActual.getJSType(); if (expectedType == null || actualType == null) { assertEquals("Expected " + rootExpected + " but got " + rootActual, expectedType, actualType); } else if (expectedType.isUnknownType() && actualType.isUnknownType()) { // continue } else { // we can't compare actual equality because the types are from different runs of the // type inference, so we just compare the strings. 
assertEquals( "Expected " + rootExpected + " but got " + rootActual, expectedType.toAnnotationString(JSType.Nullability.EXPLICIT), actualType.toAnnotationString(JSType.Nullability.EXPLICIT)); } Node child1 = rootExpected.getFirstChild(); Node child2 = rootActual.getFirstChild(); while (child1 != null) { checkTypeStringsEqualAsTree(child1, child2); child1 = child1.getNext(); child2 = child2.getNext(); } } private Compiler getCompiler() { Compiler compiler = new Compiler(); CompilerOptions options = new CompilerOptions(); options.setLanguage(LanguageMode.ECMASCRIPT_2015); options.setCodingConvention(new GoogleCodingConvention()); options.setAllowMethodCallDecomposing(allowMethodCallDecomposing); compiler.initOptions(options); return compiler; } private void processForTypecheck(AbstractCompiler compiler, Node jsRoot) { Node scriptRoot = IR.root(jsRoot); compiler.setTypeCheckingHasRun(true); JSTypeRegistry registry = compiler.getTypeRegistry(); (new TypeCheck(compiler, new SemanticReverseAbstractInterpreter(registry), registry)) .processForTesting(null, scriptRoot.getFirstChild()); } @Nullable private static Node findClass(Node n) { if (n.isClass()) { return n; } for (Node child : n.children()) { Node maybeClass = findClass(child); if (maybeClass != null) { return maybeClass; } } return null; } private static Node findCall(Node n, String name) { return findCall(n, name, 1); } /** * @param name The name to look for. If name is null, look for a yield expression instead. * @param call The call to look for. * @return The return the Nth instance of the CALL/YIELD node * matching name found in a pre-order traversal. 
*/ private static Node findCall( Node root, @Nullable final String name, final int call) { class Find { int found = 0; Node find(Node n) { if (n.isCall() || n.isYield()) { if (name == null || (n.isYield() && "yield".equals(name)) || (n.isCall() && n.getFirstChild().matchesQualifiedName(name))) { found++; if (found == call) { return n; } } } for (Node c : n.children()) { Node result = find(c); if (result != null) { return result; } } return null; } } return (new Find()).find(root); } private void validateSourceInfo(Compiler compiler, Node subtree) { (new LineNumberCheck(compiler)).setCheckSubTree(subtree); // Source information problems are reported as compiler errors. if (compiler.getErrorCount() != 0) { String msg = "Error encountered: "; for (JSError err : compiler.getErrors()) { msg += err + "\n"; } assertEquals(msg, 0, compiler.getErrorCount()); } } private static Node parse(Compiler compiler, String js) { Node n = Normalize.parseAndNormalizeTestCode(compiler, js); assertThat(compiler.getErrors()).isEmpty(); return n; } private Scope newScope() { return Scope.createGlobalScope(new Node(Token.ROOT)); } }
/*
 * Copyright 2013 Google Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.example.android.monthcalendarwidget;

import android.annotation.TargetApi;
import android.app.PendingIntent;
import android.appwidget.AppWidgetManager;
import android.appwidget.AppWidgetProvider;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.content.res.Resources;
import android.os.Build;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.text.format.DateFormat;
import android.view.View;
import android.widget.RemoteViews;

import java.text.DateFormatSymbols;
import java.util.Calendar;

/**
 * App-widget provider that renders a month-at-a-glance calendar grid.
 *
 * <p>The currently displayed month/year is persisted in default shared preferences
 * ({@link #PREF_MONTH}/{@link #PREF_YEAR}); prev/next/reset broadcasts adjust or clear
 * that state and redraw every widget instance.
 */
public class MonthCalendarWidget extends AppWidgetProvider {
    private static final String ACTION_PREVIOUS_MONTH
            = "com.example.android.monthcalendarwidget.action.PREVIOUS_MONTH";
    private static final String ACTION_NEXT_MONTH
            = "com.example.android.monthcalendarwidget.action.NEXT_MONTH";
    private static final String ACTION_RESET_MONTH
            = "com.example.android.monthcalendarwidget.action.RESET_MONTH";

    // Preference keys holding the month/year currently shown (absent = today's month).
    private static final String PREF_MONTH = "month";
    private static final String PREF_YEAR = "year";

    @Override
    public void onUpdate(Context context, AppWidgetManager appWidgetManager, int[] appWidgetIds) {
        super.onUpdate(context, appWidgetManager, appWidgetIds);
        for (int appWidgetId : appWidgetIds) {
            drawWidget(context, appWidgetId);
        }
    }

    /** Redraws every live instance of this widget. */
    private void redrawWidgets(Context context) {
        int[] appWidgetIds = AppWidgetManager.getInstance(context).getAppWidgetIds(
                new ComponentName(context, MonthCalendarWidget.class));
        for (int appWidgetId : appWidgetIds) {
            drawWidget(context, appWidgetId);
        }
    }

    /**
     * Shifts the persisted month by {@code delta} months (carrying into the year as needed)
     * and redraws all widgets. Extracted from onReceive, which previously duplicated this
     * sequence for the previous/next actions.
     */
    private void changeMonthBy(Context context, int delta) {
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
        Calendar cal = Calendar.getInstance();
        int thisMonth = sp.getInt(PREF_MONTH, cal.get(Calendar.MONTH));
        int thisYear = sp.getInt(PREF_YEAR, cal.get(Calendar.YEAR));
        cal.set(Calendar.MONTH, thisMonth);
        cal.set(Calendar.YEAR, thisYear);
        cal.add(Calendar.MONTH, delta);
        sp.edit()
                .putInt(PREF_MONTH, cal.get(Calendar.MONTH))
                .putInt(PREF_YEAR, cal.get(Calendar.YEAR))
                .apply();
        redrawWidgets(context);
    }

    /** Builds the broadcast PendingIntent that delivers {@code action} back to this provider. */
    private static PendingIntent actionPendingIntent(Context context, String action) {
        return PendingIntent.getBroadcast(context, 0,
                new Intent(context, MonthCalendarWidget.class).setAction(action),
                PendingIntent.FLAG_UPDATE_CURRENT);
    }

    @Override
    public void onReceive(Context context, Intent intent) {
        super.onReceive(context, intent);
        String action = intent.getAction();
        if (ACTION_PREVIOUS_MONTH.equals(action)) {
            changeMonthBy(context, -1);
        } else if (ACTION_NEXT_MONTH.equals(action)) {
            changeMonthBy(context, 1);
        } else if (ACTION_RESET_MONTH.equals(action)) {
            // Clearing the prefs makes drawWidget fall back to the current month.
            SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
            sp.edit().remove(PREF_MONTH).remove(PREF_YEAR).apply();
            redrawWidgets(context);
        }
    }

    @Override
    @TargetApi(Build.VERSION_CODES.JELLY_BEAN)
    public void onAppWidgetOptionsChanged(Context context, AppWidgetManager appWidgetManager,
            int appWidgetId, Bundle newOptions) {
        super.onAppWidgetOptionsChanged(context, appWidgetManager, appWidgetId, newOptions);
        drawWidget(context, appWidgetId);
    }

    /**
     * Renders one widget instance: picks a layout density from the widget's current size,
     * builds the weekday header plus {@code numWeeks} rows of day cells, and wires the
     * prev/next/reset click intents.
     */
    private void drawWidget(Context context, int appWidgetId) {
        AppWidgetManager appWidgetManager = AppWidgetManager.getInstance(context);
        Resources res = context.getResources();
        Bundle widgetOptions = appWidgetManager.getAppWidgetOptions(appWidgetId);

        boolean shortMonthName = false;
        boolean mini = false;
        int numWeeks = 6;
        if (widgetOptions != null) {
            int minWidthDp = widgetOptions.getInt(AppWidgetManager.OPTION_APPWIDGET_MIN_WIDTH);
            int minHeightDp = widgetOptions.getInt(AppWidgetManager.OPTION_APPWIDGET_MIN_HEIGHT);
            shortMonthName = minWidthDp <= res.getInteger(R.integer.max_width_short_month_label_dp);
            // "mini" mode shows only the current 1-2 weeks instead of the full month.
            mini = minHeightDp <= res.getInteger(R.integer.max_height_mini_view_dp);
            if (mini) {
                numWeeks = minHeightDp <= res.getInteger(R.integer.max_height_mini_view_1_row_dp)
                        ? 1 : 2;
            }
        }

        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(context);
        RemoteViews rv = new RemoteViews(context.getPackageName(), R.layout.widget);
        Calendar cal = Calendar.getInstance();
        int today = cal.get(Calendar.DAY_OF_YEAR);
        int todayYear = cal.get(Calendar.YEAR);
        int thisMonth;
        if (!mini) {
            // Full view honors the persisted month; mini view always shows today's month.
            thisMonth = sp.getInt(PREF_MONTH, cal.get(Calendar.MONTH));
            int thisYear = sp.getInt(PREF_YEAR, cal.get(Calendar.YEAR));
            cal.set(Calendar.MONTH, thisMonth);
            cal.set(Calendar.YEAR, thisYear);
        } else {
            thisMonth = cal.get(Calendar.MONTH);
        }
        rv.setTextViewText(R.id.month_label,
                DateFormat.format(shortMonthName ? "MMM yy" : "MMMM yyyy", cal));

        if (!mini) {
            // Rewind to the first visible cell: the Sunday on/before the 1st of the month.
            cal.set(Calendar.DAY_OF_MONTH, 1);
            int monthStartDayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
            cal.add(Calendar.DAY_OF_MONTH, 1 - monthStartDayOfWeek);
        } else {
            // Mini view starts at the Sunday on/before today.
            int todayDayOfWeek = cal.get(Calendar.DAY_OF_WEEK);
            cal.add(Calendar.DAY_OF_MONTH, 1 - todayDayOfWeek);
        }

        rv.removeAllViews(R.id.calendar);

        RemoteViews headerRowRv = new RemoteViews(context.getPackageName(), R.layout.row_header);
        DateFormatSymbols dfs = DateFormatSymbols.getInstance();
        String[] weekdays = dfs.getShortWeekdays();
        for (int day = Calendar.SUNDAY; day <= Calendar.SATURDAY; day++) {
            RemoteViews dayRv = new RemoteViews(context.getPackageName(), R.layout.cell_header);
            dayRv.setTextViewText(android.R.id.text1, weekdays[day]);
            headerRowRv.addView(R.id.row_container, dayRv);
        }
        rv.addView(R.id.calendar, headerRowRv);

        for (int week = 0; week < numWeeks; week++) {
            RemoteViews rowRv = new RemoteViews(context.getPackageName(), R.layout.row_week);
            for (int day = 0; day < 7; day++) {
                boolean inMonth = cal.get(Calendar.MONTH) == thisMonth;
                boolean inYear = cal.get(Calendar.YEAR) == todayYear;
                boolean isToday = inYear && inMonth && (cal.get(Calendar.DAY_OF_YEAR) == today);
                boolean isFirstOfMonth = cal.get(Calendar.DAY_OF_MONTH) == 1;
                int cellLayoutResId = R.layout.cell_day;
                if (isToday) {
                    cellLayoutResId = R.layout.cell_today;
                } else if (inMonth) {
                    cellLayoutResId = R.layout.cell_day_this_month;
                }
                RemoteViews cellRv = new RemoteViews(context.getPackageName(), cellLayoutResId);
                cellRv.setTextViewText(android.R.id.text1,
                        Integer.toString(cal.get(Calendar.DAY_OF_MONTH)));
                if (isFirstOfMonth) {
                    cellRv.setTextViewText(R.id.month_label, DateFormat.format("MMM", cal));
                }
                rowRv.addView(R.id.row_container, cellRv);
                cal.add(Calendar.DAY_OF_MONTH, 1);
            }
            rv.addView(R.id.calendar, rowRv);
        }

        rv.setViewVisibility(R.id.prev_month_button, mini ? View.GONE : View.VISIBLE);
        rv.setOnClickPendingIntent(R.id.prev_month_button,
                actionPendingIntent(context, ACTION_PREVIOUS_MONTH));
        rv.setViewVisibility(R.id.next_month_button, mini ? View.GONE : View.VISIBLE);
        rv.setOnClickPendingIntent(R.id.next_month_button,
                actionPendingIntent(context, ACTION_NEXT_MONTH));
        rv.setOnClickPendingIntent(R.id.month_label,
                actionPendingIntent(context, ACTION_RESET_MONTH));
        rv.setViewVisibility(R.id.month_bar, numWeeks <= 1 ? View.GONE : View.VISIBLE);

        appWidgetManager.updateAppWidget(appWidgetId, rv);
    }
}
package org.ethereum.db;

import org.ethereum.core.AccountState;
import org.ethereum.core.Block;
import org.ethereum.core.Repository;
import org.ethereum.vm.DataWord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spongycastle.util.encoders.Hex;

import java.math.BigInteger;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import static org.ethereum.crypto.HashUtil.sha3;
import static org.ethereum.util.ByteUtil.wrap;

/**
 * In-memory, non-persistent {@link Repository} used for testing: world state and
 * contract details live in two plain HashMaps. Trie/flush/commit operations that
 * only make sense for a persistent store throw {@link UnsupportedOperationException}.
 *
 * @author Roman Mandeleil
 * @since 17.11.2014
 */
public class RepositoryDummy extends RepositoryImpl {

    private static final Logger logger = LoggerFactory.getLogger("repository");
    // address -> account state
    private Map<ByteArrayWrapper, AccountState> worldState = new HashMap<>();
    // address -> contract storage/code details
    private Map<ByteArrayWrapper, ContractDetails> detailsDB = new HashMap<>();

    @Override
    public void reset() {
        worldState.clear();
        detailsDB.clear();
    }

    @Override
    public void close() {
        worldState.clear();
        detailsDB.clear();
    }

    @Override
    public boolean isClosed() {
        throw new UnsupportedOperationException();
    }

    /**
     * Applies a batch of cached account/detail changes to the in-memory state,
     * then clears both caches. Deleted accounts are removed; dirty accounts get
     * their state root and code hash refreshed from the contract details.
     */
    @Override
    public void updateBatch(HashMap<ByteArrayWrapper, AccountState> stateCache,
                            HashMap<ByteArrayWrapper, ContractDetails> detailsCache) {

        // Iterate entries directly instead of keySet() + get() per key.
        for (Map.Entry<ByteArrayWrapper, AccountState> entry : stateCache.entrySet()) {

            ByteArrayWrapper hash = entry.getKey();
            AccountState accountState = entry.getValue();
            // NOTE(review): assumes detailsCache has an entry for every stateCache key;
            // a missing entry would NPE on contractDetails.isDirty() below — TODO confirm.
            ContractDetails contractDetails = detailsCache.get(hash);

            if (accountState.isDeleted()) {
                worldState.remove(hash);
                detailsDB.remove(hash);

                logger.debug("delete: [{}]",
                        Hex.toHexString(hash.getData()));

            } else {

                if (accountState.isDirty() || contractDetails.isDirty()) {
                    detailsDB.put(hash, contractDetails);
                    accountState.setStateRoot(contractDetails.getStorageHash());
                    accountState.setCodeHash(sha3(contractDetails.getCode()));
                    worldState.put(hash, accountState);
                    if (logger.isDebugEnabled()) {
                        logger.debug("update: [{}],nonce: [{}] balance: [{}] \n [{}]",
                                Hex.toHexString(hash.getData()),
                                accountState.getNonce(),
                                accountState.getBalance(),
                                contractDetails.getStorage());
                    }

                }

            }
        }

        stateCache.clear();
        detailsCache.clear();
    }

    @Override
    public void flush() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void rollback() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void commit() {
        throw new UnsupportedOperationException();
    }

    @Override
    public void syncToRoot(byte[] root) {
        throw new UnsupportedOperationException();
    }

    @Override
    public Repository startTracking() {
        return new RepositoryTrack(this);
    }

    @Override
    public void dumpState(Block block, long gasUsed, int txNumber, byte[] txHash) {
        // No-op: dummy repository does not dump state.
    }

    @Override
    public Set<byte[]> getAccountsKeys() {
        // NOTE(review): returns null rather than an empty set; callers appear to
        // depend on this, so behavior is preserved here.
        return null;
    }

    public Set<ByteArrayWrapper> getFullAddressSet() {
        return worldState.keySet();
    }

    /** Adds {@code value} to the account's balance, creating the account if absent. */
    @Override
    public BigInteger addBalance(byte[] addr, BigInteger value) {

        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);

        BigInteger result = account.addToBalance(value);
        worldState.put(wrap(addr), account);

        return result;
    }

    @Override
    public BigInteger getBalance(byte[] addr) {
        AccountState account = getAccountState(addr);

        if (account == null)
            return BigInteger.ZERO;

        return account.getBalance();
    }

    @Override
    public DataWord getStorageValue(byte[] addr, DataWord key) {
        ContractDetails details = getContractDetails(addr);

        if (details == null)
            return null;

        return details.get(key);
    }

    @Override
    public void addStorageRow(byte[] addr, DataWord key, DataWord value) {
        ContractDetails details = getContractDetails(addr);

        if (details == null) {
            createAccount(addr);
            details = getContractDetails(addr);
        }
        details.put(key, value);
        detailsDB.put(wrap(addr), details);
    }

    @Override
    public byte[] getCode(byte[] addr) {
        ContractDetails details = getContractDetails(addr);

        if (details == null)
            return null;

        return details.getCode();
    }

    @Override
    public void saveCode(byte[] addr, byte[] code) {
        ContractDetails details = getContractDetails(addr);

        if (details == null) {
            createAccount(addr);
            details = getContractDetails(addr);
        }

        details.setCode(code);
        detailsDB.put(wrap(addr), details);
    }

    /** Returns the account's nonce, creating the account if absent. */
    @Override
    public BigInteger getNonce(byte[] addr) {
        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);

        return account.getNonce();
    }

    @Override
    public BigInteger increaseNonce(byte[] addr) {
        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);

        account.incrementNonce();
        worldState.put(wrap(addr), account);

        return account.getNonce();
    }

    public BigInteger setNonce(byte[] addr, BigInteger nonce) {
        AccountState account = getAccountState(addr);

        if (account == null)
            account = createAccount(addr);

        account.setNonce(nonce);
        worldState.put(wrap(addr), account);

        return account.getNonce();
    }

    @Override
    public void delete(byte[] addr) {
        // Wrap the address once instead of once per map.
        ByteArrayWrapper key = wrap(addr);
        worldState.remove(key);
        detailsDB.remove(key);
    }

    @Override
    public ContractDetails getContractDetails(byte[] addr) {
        return detailsDB.get(wrap(addr));
    }

    @Override
    public AccountState getAccountState(byte[] addr) {
        return worldState.get(wrap(addr));
    }

    /** Creates and registers a fresh account state plus empty contract details. */
    @Override
    public AccountState createAccount(byte[] addr) {
        AccountState accountState = new AccountState(config());
        worldState.put(wrap(addr), accountState);

        ContractDetails contractDetails = commonConfig.contractDetailsImpl();
        detailsDB.put(wrap(addr), contractDetails);

        return accountState;
    }

    @Override
    public boolean isExist(byte[] addr) {
        return getAccountState(addr) != null;
    }

    @Override
    public byte[] getRoot() {
        throw new UnsupportedOperationException();
    }

    /**
     * Copies (clones) the account state and contract details for {@code addr} into the
     * given caches, creating empty defaults when the account does not exist yet.
     */
    @Override
    public void loadAccount(byte[] addr,
                            HashMap<ByteArrayWrapper, AccountState> cacheAccounts,
                            HashMap<ByteArrayWrapper, ContractDetails> cacheDetails) {

        AccountState account = getAccountState(addr);
        ContractDetails details = getContractDetails(addr);

        if (account == null)
            account = new AccountState(config());
        else
            account = account.clone();

        if (details == null)
            details = commonConfig.contractDetailsImpl();
        else
            details = details.clone();

        cacheAccounts.put(wrap(addr), account);
        cacheDetails.put(wrap(addr), details);
    }
}
package com.lessvoid.coregl.lwjgl.input;

import org.lwjgl.input.Keyboard;
import org.lwjgl.input.Mouse;

import com.lessvoid.coregl.input.spi.CoreMouseEvent;

/**
 * Immutable snapshot of a single mouse event, backed by the LWJGL
 * {@link Mouse}/{@link Keyboard} static event state. Only {@code isConsumed}
 * is mutable (set by event dispatch).
 */
public final class CoreMouseEventLwjgl implements CoreMouseEvent {

  // Event-type codes reported by getEventType()/isType().
  public static final short
      EVENT_MOUSE_CLICKED = 0xE00,
      EVENT_MOUSE_MOVED = 0xE10,
      EVENT_MOUSE_DRAGGED = 0xE20,
      EVENT_MOUSE_ENTERED = 0xE30,
      EVENT_MOUSE_EXITED = 0xE40,
      EVENT_MOUSE_PRESSED = 0xE50,
      EVENT_MOUSE_RELEASED = 0xE60,
      EVENT_MOUSE_WHEEL_MOVED = 0xE70;

  private final short eventType, btn;
  private final int mouseX, mouseY;
  private final float wheelRot;
  private final long timestamp;           // milliseconds, derived from nanoTime
  private final boolean isRepeated, isCtrlDown, isShiftDown, isMetaDown, isAltDown, isRightAltDown;
  private boolean isConsumed;             // only mutable field; see setConsumed()

  public CoreMouseEventLwjgl(final short eventType,
                             final short btn,
                             final long nanoTime,
                             final int mouseX,
                             final int mouseY,
                             final float wheelRot,
                             final boolean isRepeated,
                             final boolean isCtrlDown,
                             final boolean isShiftDown,
                             final boolean isMetaDown,
                             final boolean isAltDown,
                             final boolean isRightAltDown) {
    this.eventType = eventType;
    this.btn = btn;
    this.mouseX = mouseX;
    this.mouseY = mouseY;
    this.wheelRot = wheelRot;
    this.isRepeated = isRepeated;
    this.isCtrlDown = isCtrlDown;
    this.isShiftDown = isShiftDown;
    this.isMetaDown = isMetaDown;
    this.isAltDown = isAltDown;
    this.isRightAltDown = isRightAltDown;
    // Convert nanoseconds to milliseconds for the timestamp contract.
    timestamp = nanoTime / 1000000L;
  }

  /**
   * Creates a new CoreMouseEvent using the data collected from the last call to
   * <code>Mouse.poll()</code>. It is the responsibility of the caller to call
   * <code>Mouse.next()</code> to advance to the next buffered event. <br>
   * <br>
   * Mouse event type resolution requires information about the previous state
   * of the mouse that is outside of the scope of this class. It may sometimes
   * even be necessary to create more than one event from the data polled via
   * the LWJGL Mouse class. It is the responsibility of the caller to determine
   * the appropriate type for the event being created; this method will then
   * pull the rest of the static event data from the Mouse's current event
   * state.
   *
   * @param mouseEventType
   *          the type of the mouse event, as determined by the caller
   * @return the CoreMouseEventLwjgl representing this event
   * @throws IllegalStateException
   *           if the Keyboard has not yet been initialized via
   *           <code>Keyboard.create()</code>.
   */
  public static CoreMouseEventLwjgl createMouseEventFromCurrentState(final short mouseEventType) {
    if (!Keyboard.isCreated())
      throw new IllegalStateException("Keyboard has not yet been initialized.");
    if (!Mouse.isCreated())
      throw new IllegalStateException("Mouse has not yet been initialized.");
    // Modifier state is read from the Keyboard; left/right Alt are tracked separately
    // so isAltGraphDown() can report the right-Alt key.
    final boolean isAltDown = Keyboard.isKeyDown(Keyboard.KEY_LMENU),
        isRightAltDown = Keyboard.isKeyDown(Keyboard.KEY_RMENU);
    final boolean isCtrlDown = Keyboard.isKeyDown(Keyboard.KEY_LCONTROL)
        || Keyboard.isKeyDown(Keyboard.KEY_RCONTROL);
    final boolean isShiftDown = Keyboard.isKeyDown(Keyboard.KEY_LSHIFT)
        || Keyboard.isKeyDown(Keyboard.KEY_RSHIFT);
    final boolean isMetaDown = Keyboard.isKeyDown(Keyboard.KEY_LMETA)
        || Keyboard.isKeyDown(Keyboard.KEY_RMETA);
    return new CoreMouseEventLwjgl(mouseEventType,
                                   (short) Mouse.getEventButton(),
                                   Keyboard.getEventNanoseconds(),
                                   Mouse.getEventX(),
                                   Mouse.getEventY(),
                                   Mouse.getEventDWheel(),
                                   Keyboard.isRepeatEvent(),
                                   isCtrlDown,
                                   isShiftDown,
                                   isMetaDown,
                                   isAltDown,
                                   isRightAltDown);
  }

  @Override
  public short BUTTON_COUNT() {
    return (short) Mouse.getButtonCount();
  }

  // Button indices are zero-based in the LWJGL implementation.
  @Override
  public short BUTTON1() {
    return 0;
  }

  @Override
  public short BUTTON2() {
    return 1;
  }

  @Override
  public short BUTTON3() {
    return 2;
  }

  @Override
  public short BUTTON4() {
    return 3;
  }

  @Override
  public short BUTTON5() {
    return 4;
  }

  @Override
  public short BUTTON6() {
    return 5;
  }

  @Override
  public short BUTTON7() {
    return 6;
  }

  @Override
  public short BUTTON8() {
    return 7;
  }

  @Override
  public short BUTTON9() {
    return 8;
  }

  @Override
  public short EVENT_MOUSE_CLICKED() {
    return EVENT_MOUSE_CLICKED;
  }

  @Override
  public short EVENT_MOUSE_DRAGGED() {
    return EVENT_MOUSE_DRAGGED;
  }

  @Override
  public short EVENT_MOUSE_ENTERED() {
    return EVENT_MOUSE_ENTERED;
  }

  @Override
  public short EVENT_MOUSE_EXITED() {
    return EVENT_MOUSE_EXITED;
  }

  @Override
  public short EVENT_MOUSE_MOVED() {
    return EVENT_MOUSE_MOVED;
  }

  @Override
  public short EVENT_MOUSE_PRESSED() {
    return EVENT_MOUSE_PRESSED;
  }

  @Override
  public short EVENT_MOUSE_RELEASED() {
    return EVENT_MOUSE_RELEASED;
  }

  @Override
  public short EVENT_MOUSE_WHEEL_MOVED() {
    return EVENT_MOUSE_WHEEL_MOVED;
  }

  @Override
  public boolean isAutoRepeat() {
    return isRepeated;
  }

  @Override
  public boolean isConsumed() {
    return isConsumed;
  }

  /** Event timestamp in milliseconds (converted from nanoseconds in the constructor). */
  @Override
  public long getTimestamp() {
    return timestamp;
  }

  @Override
  public int getEventType() {
    return eventType;
  }

  @Override
  public boolean isType(final int type) {
    return eventType == type;
  }

  @Override
  public void setConsumed(final boolean isConsumed) {
    this.isConsumed = isConsumed;
  }

  @Override
  public short getButton() {
    return btn;
  }

  /**
   * Always returns 1 for LWJGL implementation. LWJGL then presumably registers
   * two separate events in the queue for double/triple clicks.
   */
  @Override
  public short getClickCount() {
    return 1;
  }

  @Override
  public float getWheelRotation() {
    return wheelRot;
  }

  @Override
  public int getX() {
    return mouseX;
  }

  @Override
  public int getY() {
    return mouseY;
  }

  /**
   * Always returns -1 for LWJGL implementation.
   */
  @Override
  public int getButtonDownCount() {
    return -1;
  }

  @Override
  public boolean isAltDown() {
    return isAltDown;
  }

  // AltGraph is mapped to the right Alt key in this implementation.
  @Override
  public boolean isAltGraphDown() {
    return isRightAltDown;
  }

  // A button is considered down during press and drag events.
  @Override
  public boolean isAnyButtonDown() {
    return eventType == EVENT_MOUSE_PRESSED || eventType == EVENT_MOUSE_DRAGGED;
  }

  @Override
  public boolean isButtonDown(final int btn) {
    return this.btn == btn && (eventType == EVENT_MOUSE_PRESSED || eventType == EVENT_MOUSE_DRAGGED);
  }

  /**
   * Always returns false for LWJGL implementation. LWJGL does not provide any
   * definition for "confined" events.
   */
  @Override
  public boolean isConfined() {
    return false;
  }

  @Override
  public boolean isControlDown() {
    return isCtrlDown;
  }

  @Override
  public boolean isMetaDown() {
    return isMetaDown;
  }

  @Override
  public boolean isShiftDown() {
    return isShiftDown;
  }
}
/*
 * Copyright 2016-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at:
 *
 *    http://aws.amazon.com/apache2.0
 *
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
 * OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and
 * limitations under the License.
 */
package com.amazonaws.services.dynamodbv2.datamodeling;

import com.amazonaws.annotation.SdkInternalApi;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperFieldModel.DynamoDBAttributeType;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperFieldModel.Reflect;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperModelFactory.TableFactory;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTypeConverter.AbstractConverter;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTypeConverter.DelegateConverter;
import com.amazonaws.services.dynamodbv2.datamodeling.StandardBeanProperties.Bean;
import com.amazonaws.services.dynamodbv2.datamodeling.StandardBeanProperties.Beans;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;

import java.nio.ByteBuffer;
import java.util.Calendar;
import java.util.Collection;
import java.util.Date;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

import static com.amazonaws.services.dynamodbv2.datamodeling.StandardTypeConverters.Scalar.BOOLEAN;
import static com.amazonaws.services.dynamodbv2.datamodeling.StandardTypeConverters.Scalar.DEFAULT;
import static com.amazonaws.services.dynamodbv2.datamodeling.StandardTypeConverters.Scalar.STRING;
import static com.amazonaws.services.dynamodbv2.datamodeling.StandardTypeConverters.Vector.LIST;
import static com.amazonaws.services.dynamodbv2.datamodeling.StandardTypeConverters.Vector.MAP;
import static com.amazonaws.services.dynamodbv2.datamodeling.StandardTypeConverters.Vector.SET;
import static com.amazonaws.services.dynamodbv2.model.ScalarAttributeType.B;
import static com.amazonaws.services.dynamodbv2.model.ScalarAttributeType.N;
import static com.amazonaws.services.dynamodbv2.model.ScalarAttributeType.S;

/**
 * Pre-defined strategies for mapping between Java types and DynamoDB types.
 *
 * Structure: a {@link StandardModelFactory} caches one {@link TableFactory}
 * per {@link ConversionSchema}; each table factory caches one
 * {@link DynamoDBMapperTableModel} per mapped class; each field of a class is
 * converted by the first matching {@link Rule} from the ordered set built in
 * {@link #rulesOf}.
 */
@SdkInternalApi
final class StandardModelFactories {

    private static final Log LOG = LogFactory.getLog(StandardModelFactories.class);

    /**
     * Creates the standard {@link DynamoDBMapperModelFactory} factory.
     */
    static final DynamoDBMapperModelFactory of(S3Link.Factory s3Links) {
        return new StandardModelFactory(s3Links);
    }

    /**
     * {@link TableFactory} mapped by {@link ConversionSchema}.
     */
    private static final class StandardModelFactory implements DynamoDBMapperModelFactory {
        private final ConcurrentMap<ConversionSchema,TableFactory> cache;
        private final S3Link.Factory s3Links;

        private StandardModelFactory(S3Link.Factory s3Links) {
            this.cache = new ConcurrentHashMap<ConversionSchema,TableFactory>();
            this.s3Links = s3Links;
        }

        @Override
        public TableFactory getTableFactory(DynamoDBMapperConfig config) {
            final ConversionSchema schema = config.getConversionSchema();
            // check-then-putIfAbsent: rule construction may run more than once
            // under contention, but only the first result is cached and returned.
            if (!cache.containsKey(schema)) {
                RuleFactory<Object> rules = rulesOf(config, s3Links, this);
                // legacy ItemConverter support wraps the standard rules
                rules = new ConversionSchemas.ItemConverterRuleFactory<Object>(config, s3Links, rules);
                cache.putIfAbsent(schema, new StandardTableFactory(rules));
            }
            return cache.get(schema);
        }
    }

    /**
     * {@link DynamoDBMapperTableModel} mapped by the clazz.
     */
    private static final class StandardTableFactory implements TableFactory {
        private final ConcurrentMap<Class<?>,DynamoDBMapperTableModel<?>> cache;
        private final RuleFactory<Object> rules;

        private StandardTableFactory(RuleFactory<Object> rules) {
            this.cache = new ConcurrentHashMap<Class<?>,DynamoDBMapperTableModel<?>>();
            this.rules = rules;
        }

        @Override
        @SuppressWarnings("unchecked")
        public <T> DynamoDBMapperTableModel<T> getTable(Class<T> clazz) {
            // same check-then-putIfAbsent idiom as above; duplicate builds
            // are possible but harmless, the cache stays consistent.
            if (!this.cache.containsKey(clazz)) {
                this.cache.putIfAbsent(clazz, new TableBuilder<T>(clazz, rules).build());
            }
            return (DynamoDBMapperTableModel<T>)this.cache.get(clazz);
        }
    }

    /**
     * {@link DynamoDBMapperTableModel} builder.
     */
    private static final class TableBuilder<T> extends DynamoDBMapperTableModel.Builder<T> {
        private TableBuilder(Class<T> clazz, Beans<T> beans, RuleFactory<Object> rules) {
            super(clazz, beans.properties());
            // build one field model per bean property; wrap any failure with
            // the offending class/attribute for diagnosis
            for (final Bean<T,Object> bean : beans.map().values()) {
                try {
                    with(new FieldBuilder<T,Object>(clazz, bean, rules.getRule(bean.type())).build());
                } catch (final RuntimeException e) {
                    throw new DynamoDBMappingException(String.format(
                        "%s[%s] could not be mapped for type %s",
                        clazz.getSimpleName(), bean.properties().attributeName(), bean.type()
                    ), e);
                }
            }
        }

        private TableBuilder(Class<T> clazz, RuleFactory<Object> rules) {
            this(clazz, StandardBeanProperties.<T>of(clazz), rules);
        }
    }

    /**
     * {@link DynamoDBMapperFieldModel} builder.
     */
    private static final class FieldBuilder<T,V> extends DynamoDBMapperFieldModel.Builder<T,V> {
        private FieldBuilder(Class<T> clazz, Bean<T,V> bean, Rule<V> rule) {
            super(clazz, bean.properties());
            // an explicit attribute type on the bean (e.g. @DynamoDBTyped)
            // takes precedence over the rule's derived type
            if (bean.type().attributeType() != null) {
                with(bean.type().attributeType());
            } else {
                with(rule.getAttributeType());
            }
            with(rule.newConverter(bean.type()));
            with(bean.reflect());
        }
    }

    /**
     * Creates a new set of conversion rules based on the configuration.
     * Rule order matters: the first rule whose {@code isAssignableFrom}
     * accepts a type wins.
     */
    private static final <T> RuleFactory<T> rulesOf(DynamoDBMapperConfig config, S3Link.Factory s3Links,
                                                    DynamoDBMapperModelFactory models) {
        final boolean ver1 = (config.getConversionSchema() == ConversionSchemas.V1);
        final boolean ver2 = (config.getConversionSchema() == ConversionSchemas.V2);
        final boolean v2Compatible = (config.getConversionSchema() == ConversionSchemas.V2_COMPATIBLE);

        // scalar converter factory, extended to marshal S3Link as a String
        final DynamoDBTypeConverterFactory.Builder scalars = config.getTypeConverterFactory().override();
        scalars.with(String.class, S3Link.class, s3Links);

        final Rules<T> factory = new Rules<T>(scalars.build());
        factory.add(factory.new NativeType(!ver1));
        factory.add(factory.new V2CompatibleBool(v2Compatible));
        factory.add(factory.new NativeBool(ver2));
        factory.add(factory.new StringScalar(true));
        factory.add(factory.new DateToEpochRule(true));
        factory.add(factory.new NumberScalar(true));
        factory.add(factory.new BinaryScalar(true));
        factory.add(factory.new NativeBoolSet(ver2));
        factory.add(factory.new StringScalarSet(true));
        factory.add(factory.new NumberScalarSet(true));
        factory.add(factory.new BinaryScalarSet(true));
        factory.add(factory.new ObjectSet(ver2));
        factory.add(factory.new ObjectStringSet(!ver2));
        factory.add(factory.new ObjectList(!ver1));
        factory.add(factory.new ObjectMap(!ver1));
        factory.add(factory.new ObjectDocumentMap(!ver1, models, config));
        return factory;
    }

    /**
     * Groups the conversion rules to be evaluated.
     */
    private static final class Rules<T> implements RuleFactory<T> {
        // insertion-ordered so rule priority from rulesOf() is preserved
        private final Set<Rule<T>> rules = new LinkedHashSet<Rule<T>>();
        private final DynamoDBTypeConverterFactory scalars;

        private Rules(DynamoDBTypeConverterFactory scalars) {
            this.scalars = scalars;
        }

        @SuppressWarnings("unchecked")
        private void add(Rule<?> rule) {
            this.rules.add((Rule<T>)rule);
        }

        /** Returns the first matching rule, or {@link NotSupported}. */
        @Override
        public Rule<T> getRule(ConvertibleType<T> type) {
            for (final Rule<T> rule : rules) {
                if (rule.isAssignableFrom(type)) {
                    return rule;
                }
            }
            return new NotSupported();
        }

        /**
         * Native {@link AttributeValue} conversion; pass-through for fields
         * already declared as {@code AttributeValue}.
         */
        private class NativeType extends AbstractRule<AttributeValue,T> {
            private NativeType(boolean supported) {
                super(DynamoDBAttributeType.NULL, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.supported && type.is(AttributeValue.class);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return joinAll(type.<AttributeValue>typeConverter());
            }
            @Override
            public AttributeValue get(AttributeValue o) {
                return o;
            }
            @Override
            public void set(AttributeValue value, AttributeValue o) {
                // copy every field of the source value into the target
                value.withS(o.getS()).withN(o.getN()).withB(o.getB())
                    .withSS(o.getSS()).withNS(o.getNS()).withBS(o.getBS())
                    .withBOOL(o.getBOOL()).withL(o.getL()).withM(o.getM())
                    .withNULL(o.getNULL());
            }
        }

        /**
         * {@code S} conversion
         */
        private class StringScalar extends AbstractRule<String,T> {
            private StringScalar(boolean supported) {
                super(DynamoDBAttributeType.S, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && (type.attributeType() != null || type.is(S));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return joinAll(getConverter(String.class, type), type.<String>typeConverter());
            }
            @Override
            public String get(AttributeValue value) {
                return value.getS();
            }
            @Override
            public void set(AttributeValue value, String o) {
                value.setS(o);
            }
            @Override
            public AttributeValue convert(String o) {
                // DynamoDB rejects empty strings; marshal them as absent
                return o.length() == 0 ? null : super.convert(o);
            }
        }

        /**
         * {@code N} conversion
         */
        private class NumberScalar extends AbstractRule<String,T> {
            private NumberScalar(boolean supported) {
                super(DynamoDBAttributeType.N, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && (type.attributeType() != null || type.is(N));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return joinAll(getConverter(String.class, type), type.<String>typeConverter());
            }
            @Override
            public String get(AttributeValue value) {
                return value.getN();
            }
            @Override
            public void set(AttributeValue value, String o) {
                value.setN(o);
            }
        }

        /**
         * {@code N} conversion for date types (Date/Calendar/Joda DateTime),
         * stored as epoch milliseconds.
         */
        private class DateToEpochRule extends AbstractRule<Long,T> {
            private DateToEpochRule(boolean supported) {
                super(DynamoDBAttributeType.N, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return (type.is(Date.class) || type.is(Calendar.class) || type.is(DateTime.class))
                    && super.isAssignableFrom(type)
                    && (type.attributeType() != null || type.is(N));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return joinAll(getConverter(Long.class, type), type.<Long>typeConverter());
            }
            @Override
            public Long get(AttributeValue value) {
                return Long.valueOf(value.getN());
            }
            @Override
            public void set(AttributeValue value, Long o) {
                value.setN(String.valueOf(o));
            }
        }

        /**
         * {@code B} conversion
         */
        private class BinaryScalar extends AbstractRule<ByteBuffer,T> {
            private BinaryScalar(boolean supported) {
                super(DynamoDBAttributeType.B, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && (type.attributeType() != null || type.is(B));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return joinAll(getConverter(ByteBuffer.class, type), type.<ByteBuffer>typeConverter());
            }
            @Override
            public ByteBuffer get(AttributeValue value) {
                return value.getB();
            }
            @Override
            public void set(AttributeValue value, ByteBuffer o) {
                value.setB(o);
            }
        }

        /**
         * {@code SS} conversion
         */
        private class StringScalarSet extends AbstractRule<List<String>,Collection<T>> {
            private StringScalarSet(boolean supported) {
                super(DynamoDBAttributeType.SS, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && (type.attributeType() != null || type.is(S, SET));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,Collection<T>> newConverter(ConvertibleType<Collection<T>> type) {
                // convert each element to String, then the whole set to SS
                return joinAll(SET.join(getConverter(String.class, type.<T>param(0))), type.<List<String>>typeConverter());
            }
            @Override
            public List<String> get(AttributeValue value) {
                return value.getSS();
            }
            @Override
            public void set(AttributeValue value, List<String> o) {
                value.setSS(o);
            }
        }

        /**
         * {@code NS} conversion
         */
        private class NumberScalarSet extends AbstractRule<List<String>,Collection<T>> {
            private NumberScalarSet(boolean supported) {
                super(DynamoDBAttributeType.NS, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && (type.attributeType() != null || type.is(N, SET));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,Collection<T>> newConverter(ConvertibleType<Collection<T>> type) {
                return joinAll(SET.join(getConverter(String.class, type.<T>param(0))), type.<List<String>>typeConverter());
            }
            @Override
            public List<String> get(AttributeValue value) {
                return value.getNS();
            }
            @Override
            public void set(AttributeValue value, List<String> o) {
                value.setNS(o);
            }
        }

        /**
         * {@code BS} conversion
         */
        private class BinaryScalarSet extends AbstractRule<List<ByteBuffer>,Collection<T>> {
            private BinaryScalarSet(boolean supported) {
                super(DynamoDBAttributeType.BS, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && (type.attributeType() != null || type.is(B, SET));
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,Collection<T>> newConverter(ConvertibleType<Collection<T>> type) {
                return joinAll(SET.join(getConverter(ByteBuffer.class, type.<T>param(0))), type.<List<ByteBuffer>>typeConverter());
            }
            @Override
            public List<ByteBuffer> get(AttributeValue value) {
                return value.getBS();
            }
            @Override
            public void set(AttributeValue value, List<ByteBuffer> o) {
                value.setBS(o);
            }
        }

        /**
         * {@code SS} conversion: V1 fallback that marshals an arbitrary object
         * set into a string set. Not round-trippable in general, hence the
         * warning at converter-creation time.
         */
        private class ObjectStringSet extends StringScalarSet {
            private ObjectStringSet(boolean supported) {
                super(supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return type.attributeType() == null && super.supported && type.is(SET);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,Collection<T>> newConverter(ConvertibleType<Collection<T>> type) {
                LOG.warn("Marshaling a set of non-String objects to a DynamoDB "
                    + "StringSet. You won't be able to read these objects back "
                    + "out of DynamoDB unless you REALLY know what you're doing: "
                    + "it's probably a bug. If you DO know what you're doing feel"
                    + "free to ignore this warning, but consider using a custom "
                    + "marshaler for this instead.");
                return joinAll(SET.join(scalars.getConverter(String.class, DEFAULT.<T>type())), type.<List<String>>typeConverter());
            }
        }

        /**
         * Native boolean conversion.
         */
        private class NativeBool extends AbstractRule<Boolean,T> {
            private NativeBool(boolean supported) {
                super(DynamoDBAttributeType.BOOL, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && type.is(BOOLEAN);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return joinAll(getConverter(Boolean.class, type), type.<Boolean>typeConverter());
            }
            @Override
            public Boolean get(AttributeValue o) {
                return o.getBOOL();
            }
            @Override
            public void set(AttributeValue o, Boolean value) {
                o.setBOOL(value);
            }
            @Override
            public Boolean unconvert(AttributeValue o) {
                // tolerate legacy numeric booleans ("0"/"1") on read
                if (o.getBOOL() == null && o.getN() != null) {
                    return BOOLEAN.<Boolean>convert(o.getN());
                }
                return super.unconvert(o);
            }
        }

        /**
         * Native boolean conversion.
         */
        private class V2CompatibleBool extends AbstractRule<String, T> {
            private V2CompatibleBool(boolean supported) {
                super(DynamoDBAttributeType.N, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && type.is(BOOLEAN);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue, T> newConverter(ConvertibleType<T> type) {
                return joinAll(getConverter(String.class, type), type.<String>typeConverter());
            }
            /**
             * For V2 Compatible schema we support loading booleans from a numeric attribute value (0/1) or the native boolean
             * type.
             */
            @Override
            public String get(AttributeValue o) {
                if(o.getBOOL() != null) {
                    // Handle native bools, transform to expected numeric representation.
                    return o.getBOOL() ? "1" : "0";
                }
                return o.getN();
            }
            /**
             * For the V2 compatible schema we save as a numeric attribute value unless overridden by {@link
             * DynamoDBNativeBoolean} or {@link DynamoDBTyped}.
             */
            @Override
            public void set(AttributeValue o, String value) {
                o.setN(value);
            }
        }

        /**
         * Any {@link Set} conversions.
         */
        private class ObjectSet extends AbstractRule<List<AttributeValue>,Collection<T>> {
            private ObjectSet(boolean supported) {
                super(DynamoDBAttributeType.L, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && type.param(0) != null && type.is(SET);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,Collection<T>> newConverter(ConvertibleType<Collection<T>> type) {
                return joinAll(SET.join(getConverter(type.<T>param(0))), type.<List<AttributeValue>>typeConverter());
            }
            @Override
            public List<AttributeValue> get(AttributeValue value) {
                return value.getL();
            }
            @Override
            public void set(AttributeValue value, List<AttributeValue> o) {
                value.setL(o);
            }
        }

        /**
         * Native bool {@link Set} conversions.
         */
        private class NativeBoolSet extends ObjectSet {
            private NativeBoolSet(boolean supported) {
                super(supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && type.param(0).is(BOOLEAN);
            }
            @Override
            public List<AttributeValue> unconvert(AttributeValue o) {
                // tolerate legacy NS-encoded boolean sets on read
                if (o.getL() == null && o.getNS() != null) {
                    return LIST.convert(o.getNS(), new NativeBool(true).join(scalars.getConverter(Boolean.class, String.class)));
                }
                return super.unconvert(o);
            }
        }

        /**
         * Any {@link List} conversions.
         */
        private class ObjectList extends AbstractRule<List<AttributeValue>,List<T>> {
            private ObjectList(boolean supported) {
                super(DynamoDBAttributeType.L, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && type.param(0) != null && type.is(LIST);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,List<T>> newConverter(ConvertibleType<List<T>> type) {
                return joinAll(LIST.join(getConverter(type.<T>param(0))), type.<List<AttributeValue>>typeConverter());
            }
            @Override
            public List<AttributeValue> get(AttributeValue value) {
                return value.getL();
            }
            @Override
            public void set(AttributeValue value, List<AttributeValue> o) {
                value.setL(o);
            }
        }

        /**
         * Any {@link Map} conversions; requires String keys.
         */
        private class ObjectMap extends AbstractRule<Map<String,AttributeValue>,Map<String,T>> {
            private ObjectMap(boolean supported) {
                super(DynamoDBAttributeType.M, supported);
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                return super.isAssignableFrom(type) && type.param(1) != null && type.is(MAP) && type.param(0).is(STRING);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,Map<String,T>> newConverter(ConvertibleType<Map<String,T>> type) {
                return joinAll(
                    MAP.<String,AttributeValue,T>join(getConverter(type.<T>param(1))),
                    type.<Map<String,AttributeValue>>typeConverter()
                );
            }
            @Override
            public Map<String,AttributeValue> get(AttributeValue value) {
                return value.getM();
            }
            @Override
            public void set(AttributeValue value, Map<String,AttributeValue> o) {
                value.setM(o);
            }
        }

        /**
         * All object conversions: maps an arbitrary mapped class to a DynamoDB
         * document ({@code M}) via its own table model, resolved lazily.
         */
        private class ObjectDocumentMap extends AbstractRule<Map<String,AttributeValue>,T> {
            private final DynamoDBMapperModelFactory models;
            private final DynamoDBMapperConfig config;

            private ObjectDocumentMap(boolean supported, DynamoDBMapperModelFactory models, DynamoDBMapperConfig config) {
                super(DynamoDBAttributeType.M, supported);
                this.models = models;
                this.config = config;
            }
            @Override
            public boolean isAssignableFrom(ConvertibleType<?> type) {
                // only for explicitly M-typed fields that are not plain maps
                return type.attributeType() == getAttributeType() && super.supported && !type.is(MAP);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(final ConvertibleType<T> type) {
                return joinAll(new DynamoDBTypeConverter<Map<String,AttributeValue>,T>() {
                    public final Map<String,AttributeValue> convert(final T o) {
                        return models.getTableFactory(config).getTable(type.targetType()).convert(o);
                    }
                    public final T unconvert(final Map<String,AttributeValue> o) {
                        return models.getTableFactory(config).getTable(type.targetType()).unconvert(o);
                    }
                }, type.<Map<String,AttributeValue>>typeConverter());
            }
            @Override
            public Map<String,AttributeValue> get(AttributeValue value) {
                return value.getM();
            }
            @Override
            public void set(AttributeValue value, Map<String,AttributeValue> o) {
                value.setM(o);
            }
        }

        /**
         * Default conversion when no match could be determined.
         */
        private class NotSupported extends AbstractRule<T,T> {
            private NotSupported() {
                super(DynamoDBAttributeType.NULL, false);
            }
            @Override
            public DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type) {
                return this;
            }
            @Override
            public T get(AttributeValue value) {
                throw new DynamoDBMappingException("not supported; requires @DynamoDBTyped or @DynamoDBTypeConverted");
            }
            @Override
            public void set(AttributeValue value, T o) {
                throw new DynamoDBMappingException("not supported; requires @DynamoDBTyped or @DynamoDBTypeConverted");
            }
        }

        /**
         * Gets the scalar converter for the given source and target types.
         */
        private <S> DynamoDBTypeConverter<S,T> getConverter(Class<S> sourceType, ConvertibleType<T> type) {
            return scalars.getConverter(sourceType, type.targetType());
        }

        /**
         * Gets the nested converter for the given conversion type.
         * Also wraps the resulting converter with a nullable converter.
         */
        private DynamoDBTypeConverter<AttributeValue,T> getConverter(ConvertibleType<T> type) {
            return new DelegateConverter<AttributeValue,T>(getRule(type).newConverter(type)) {
                public final AttributeValue convert(T o) {
                    // null elements become the explicit DynamoDB NULL value
                    return o == null ? new AttributeValue().withNULL(true) : super.convert(o);
                }
            };
        }
    }

    /**
     * Basic attribute value conversion functions.
     */
    private static abstract class AbstractRule<S,T> extends AbstractConverter<AttributeValue,S>
            implements Reflect<AttributeValue,S>, Rule<T> {
        protected final DynamoDBAttributeType attributeType;
        protected final boolean supported;

        protected AbstractRule(DynamoDBAttributeType attributeType, boolean supported) {
            this.attributeType = attributeType;
            this.supported = supported;
        }

        // Default match: an explicit attribute type must equal this rule's
        // type; otherwise the rule applies whenever it is enabled.
        @Override
        public boolean isAssignableFrom(ConvertibleType<?> type) {
            return type.attributeType() == null ? supported : type.attributeType() == attributeType;
        }

        @Override
        public DynamoDBAttributeType getAttributeType() {
            return this.attributeType;
        }

        @Override
        public AttributeValue convert(final S o) {
            final AttributeValue value = new AttributeValue();
            set(value, o);
            return value;
        }

        @Override
        public S unconvert(final AttributeValue o) {
            final S value = get(o);
            // a missing value is only acceptable when the attribute is an
            // explicit NULL; anything else indicates a type mismatch
            if (value == null && o.isNULL() == null) {
                throw new DynamoDBMappingException("expected " + attributeType + " in value " + o);
            }
            return value;
        }
    }

    /**
     * Attribute value conversion.
     */
    static interface Rule<T> {
        boolean isAssignableFrom(ConvertibleType<?> type);
        DynamoDBTypeConverter<AttributeValue,T> newConverter(ConvertibleType<T> type);
        DynamoDBAttributeType getAttributeType();
    }

    /**
     * Attribute value conversion factory.
     */
    static interface RuleFactory<T> {
        Rule<T> getRule(ConvertibleType<T> type);
    }
}
/*
 * Copyright (c) 2017, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * The Universal Permissive License (UPL), Version 1.0
 *
 * Subject to the condition set forth below, permission is hereby granted to any
 * person obtaining a copy of this software, associated documentation and/or
 * data (collectively the "Software"), free of charge and under any and all
 * copyright rights in the Software, and any and all patent rights owned or
 * freely licensable by each licensor hereunder covering either (i) the
 * unmodified Software as contributed to or provided by such licensor, or (ii)
 * the Larger Works (as defined below), to deal in both
 *
 * (a) the Software, and
 *
 * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if
 * one is included with the Software each a "Larger Work" to which the Software
 * is contributed by such licensors),
 *
 * without restriction, including without limitation the rights to copy, create
 * derivative works of, display, perform, and distribute the Software and make,
 * use, sell, offer for sale, import, export, have made, and have sold the
 * Software and the Larger Work(s), and to sublicense the foregoing rights on
 * either these or other terms.
 *
 * This license is subject to the following condition:
 *
 * The above copyright notice and either this complete permission notice or at a
 * minimum a reference to the UPL must be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.oracle.truffle.sl.runtime;

import java.math.BigInteger;

import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary;
import com.oracle.truffle.api.interop.InteropLibrary;
import com.oracle.truffle.api.interop.TruffleObject;
import com.oracle.truffle.api.interop.UnsupportedMessageException;
import com.oracle.truffle.api.library.ExportLibrary;
import com.oracle.truffle.api.library.ExportMessage;

/**
 * Immutable arbitrary-precision integer wrapper used by SimpleLanguage and
 * exposed to other Truffle languages through the interop number messages.
 * BigInteger operations are not PE-safe, so every method touching the wrapped
 * value is marked {@link TruffleBoundary}.
 */
@ExportLibrary(InteropLibrary.class)
public final class SLBigNumber implements TruffleObject, Comparable<SLBigNumber> {

    /** Largest long magnitude that a double represents exactly: 2 ** 53 - 1. */
    private static final long LONG_MAX_SAFE_DOUBLE = 9007199254740991L;
    /** Largest int magnitude that a float represents exactly: 2 ** 24 - 1. */
    private static final int INT_MAX_SAFE_FLOAT = 16777215;

    /** True iff {@code l} converts to double without losing precision. */
    private static boolean inSafeDoubleRange(long l) {
        return -LONG_MAX_SAFE_DOUBLE <= l && l <= LONG_MAX_SAFE_DOUBLE;
    }

    /** True iff {@code i} converts to float without losing precision. */
    private static boolean inSafeFloatRange(int i) {
        return -INT_MAX_SAFE_FLOAT <= i && i <= INT_MAX_SAFE_FLOAT;
    }

    private final BigInteger value;

    public SLBigNumber(BigInteger value) {
        this.value = value;
    }

    public SLBigNumber(long value) {
        this.value = BigInteger.valueOf(value);
    }

    /** Returns the wrapped arbitrary-precision value. */
    public BigInteger getValue() {
        return value;
    }

    @TruffleBoundary
    public int compareTo(SLBigNumber o) {
        return getValue().compareTo(o.getValue());
    }

    @Override
    @TruffleBoundary
    public String toString() {
        return value.toString();
    }

    @Override
    @TruffleBoundary
    public boolean equals(Object obj) {
        return (obj instanceof SLBigNumber) && value.equals(((SLBigNumber) obj).getValue());
    }

    @Override
    public int hashCode() {
        return value.hashCode();
    }

    // Interop: the value advertises itself as a number only while it fits a
    // long, mirroring the original implementation's contract.
    @SuppressWarnings("static-method")
    @ExportMessage
    boolean isNumber() {
        return fitsInLong();
    }

    // A value of bit length n needs n + 1 bits including the sign, so a
    // strict "< width" check on bitLength() means the type can hold it.
    @ExportMessage
    @TruffleBoundary
    boolean fitsInByte() {
        return value.bitLength() <= 7;
    }

    @ExportMessage
    @TruffleBoundary
    boolean fitsInShort() {
        return value.bitLength() <= 15;
    }

    @ExportMessage
    @TruffleBoundary
    boolean fitsInFloat() {
        return fitsInInt() && inSafeFloatRange(value.intValue());
    }

    @ExportMessage
    @TruffleBoundary
    boolean fitsInLong() {
        return value.bitLength() <= 63;
    }

    @ExportMessage
    @TruffleBoundary
    boolean fitsInInt() {
        return value.bitLength() <= 31;
    }

    @ExportMessage
    @TruffleBoundary
    boolean fitsInDouble() {
        return fitsInLong() && inSafeDoubleRange(value.longValue());
    }

    @ExportMessage
    @TruffleBoundary
    double asDouble() throws UnsupportedMessageException {
        if (!fitsInDouble()) {
            throw UnsupportedMessageException.create();
        }
        return value.doubleValue();
    }

    @ExportMessage
    @TruffleBoundary
    long asLong() throws UnsupportedMessageException {
        if (!fitsInLong()) {
            throw UnsupportedMessageException.create();
        }
        return value.longValue();
    }

    @ExportMessage
    @TruffleBoundary
    byte asByte() throws UnsupportedMessageException {
        if (!fitsInByte()) {
            throw UnsupportedMessageException.create();
        }
        return value.byteValue();
    }

    @ExportMessage
    @TruffleBoundary
    int asInt() throws UnsupportedMessageException {
        if (!fitsInInt()) {
            throw UnsupportedMessageException.create();
        }
        return value.intValue();
    }

    @ExportMessage
    @TruffleBoundary
    float asFloat() throws UnsupportedMessageException {
        if (!fitsInFloat()) {
            throw UnsupportedMessageException.create();
        }
        return value.floatValue();
    }

    @ExportMessage
    @TruffleBoundary
    short asShort() throws UnsupportedMessageException {
        if (!fitsInShort()) {
            throw UnsupportedMessageException.create();
        }
        return value.shortValue();
    }
}
package net.java.sen.util;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Vector;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * Double-array trie (darts-style) used for fast exact-match and
 * common-prefix dictionary lookup.
 *
 * Layout: {@code array} interleaves the classic BASE/CHECK arrays —
 * BASE values live at even indices {@code (i << 1)} and CHECK values at odd
 * indices {@code ((i << 1) + 1)}. Negative BASE values at leaves encode the
 * stored value as {@code -(v + 1)}.
 */
public class DoubleArrayTrie {

    private final static int BUF_SIZE = 500000;

    private static Log log = LogFactory.getLog(DoubleArrayTrie.class);

    // Interleaved BASE/CHECK storage; logical capacity is alloc_size units,
    // physical length is alloc_size << 1 ints.
    private int array[];

    // used[b] != 0 marks BASE offset b as taken during construction.
    private int used[];

    // Highest used unit index + 1 (logical size of the built trie).
    private int size;

    // Logical capacity in units.
    private int alloc_size;

    // Build-time inputs: sorted keys, optional per-key lengths and values.
    private char str[][];

    private int str_size;

    private int len[];

    private int val[];

    // Heuristic scan start for the next free BASE slot.
    private int next_check_pos;

    @SuppressWarnings("unused")
    private int no_delete;

    /** A node of the virtual trie walked during construction. */
    private class Node {
        int code;   // transition character code + 1 (0 = end-of-key)
        int depth;  // depth in the trie (== prefix length)
        int left;   // first key index sharing this prefix (inclusive)
        int right;  // last key index sharing this prefix (exclusive)
    };

    public DoubleArrayTrie() {
        array = null;
        used = null;
        size = 0;
        alloc_size = 0;
        no_delete = 0;
    }

    /**
     * Loads a previously saved trie image (a flat sequence of big-endian ints).
     *
     * @param fileName path of the saved trie
     * @throws IOException if the file cannot be read
     */
    public void load(String fileName) throws IOException {
        log.info("loading double array trie dict = " + fileName);
        long start = System.currentTimeMillis();
        File file = new File(fileName);
        array = new int[(int) (file.length() / 4)];
        DataInputStream is = new DataInputStream(new BufferedInputStream(
                new FileInputStream(file), BUF_SIZE));
        // FIX: the stream was never closed; always release it, even on error.
        try {
            for (int i = 0; i < array.length; i++) {
                array[i] = is.readInt();
            }
        } finally {
            is.close();
        }
        // FIX: the elapsed value is divided by 1000, i.e. seconds, not ms.
        log.info("loaded time = "
                + (((double) (System.currentTimeMillis() - start)) / 1000)
                + "[s]");
    }

    /**
     * Returns a copy of {@code ptr} grown to {@code l} elements; newly added
     * slots are filled with {@code v}.
     *
     * @param ptr the old array (may be null)
     * @param n   unused legacy parameter, kept for signature compatibility
     * @param l   the new length
     * @param v   fill value for the newly added tail
     * @return the grown array
     */
    int[] _resize(int ptr[], int n, int l, int v) {
        int tmp[] = new int[l];
        int copied = 0;
        if (ptr != null) {
            copied = Math.min(ptr.length, l);
            System.arraycopy(ptr, 0, tmp, 0, copied);
        }
        // FIX: the original tail-fill loop was "for (i = l; i < l; ...)" and
        // never executed; fill the newly allocated slots with v as intended.
        // (Harmless so far only because every caller passes v == 0.)
        for (int i = copied; i < l; i++) {
            tmp[i] = v;
        }
        return tmp;
    }

    /** Grows the BASE/CHECK and used arrays to {@code new_size} units. */
    int resize(int new_size) {
        array = _resize(array, alloc_size << 1, new_size << 1, (int) 0);
        used = _resize(used, alloc_size, new_size, (int) 0);
        alloc_size = new_size;
        return new_size;
    }

    /**
     * Collects the children of {@code parent} (the distinct next characters of
     * all keys in [parent.left, parent.right)) into {@code siblings}.
     *
     * @return the number of children found
     * @throws RuntimeException if the input keys are not sorted
     */
    int fetch(Node parent, Vector siblings) {
        int prev = 0;

        if (log.isTraceEnabled()) {
            log.trace("parent.left=" + parent.left);
            log.trace("parent.right=" + parent.right);
            log.trace("parent.depth=" + parent.depth);
        }

        for (int i = parent.left; i < parent.right; i++) {
            // Keys shorter than the current depth contribute nothing here.
            if (((len != null) ? len[i] : str[i].length) < parent.depth)
                continue;

            char tmp[] = str[i];

            int cur = 0;
            // code 0 is reserved for end-of-key; real characters map to c + 1.
            if (((len != null) ? len[i] : str[i].length) != parent.depth) {
                if (log.isTraceEnabled())
                    log.trace("tmp[" + parent.depth + "]=" + tmp[parent.depth]);
                cur = (int) tmp[parent.depth] + 1;
            }

            if (prev > cur) {
                log.error("given strings are not sorted.\n");
                throw new RuntimeException(
                        "Fatal: given strings are not sorted.\n");
            }

            // Open a new sibling whenever the character changes.
            if (cur != prev || siblings.size() == 0) {
                Node tmp_node = new Node();
                tmp_node.depth = parent.depth + 1;
                tmp_node.code = cur;
                tmp_node.left = i;
                if (siblings.size() != 0)
                    ((Node) siblings.get(siblings.size() - 1)).right = i;
                siblings.add(tmp_node);
            }
            prev = cur;
        }

        if (siblings.size() != 0)
            ((Node) siblings.get(siblings.size() - 1)).right = parent.right;

        return siblings.size();
    }

    /**
     * Finds a free BASE offset for {@code siblings}, writes their CHECK/BASE
     * entries, and recurses into each child.
     *
     * @return the BASE offset chosen for this sibling group
     */
    int insert(Vector siblings) {
        int begin = 0;
        int pos = (((((Node) siblings.get(0)).code + 1) > ((int) next_check_pos))
                ? (((Node) siblings.get(0)).code + 1)
                : ((int) next_check_pos)) - 1;
        int nonzero_num = 0;
        int first = 0;

        while (true) {
            pos++;

            // FIX: grow *before* indexing. The original test "t > alloc_size"
            // let pos == alloc_size through, overrunning array[(pos << 1) + 1].
            if (pos >= alloc_size) {
                resize(Math.max(pos + 1, (int) (pos * 1.05)));
            }

            if (array[(pos << 1) + 1] != 0) {
                nonzero_num++;
                continue;
            } else if (first == 0) {
                // Remember the first free slot seen on this scan.
                next_check_pos = pos;
                first = 1;
            }

            begin = pos - ((Node) siblings.get(0)).code;

            // Same off-by-one fix for the highest slot this group would use.
            int last = begin + ((Node) siblings.get(siblings.size() - 1)).code;
            if (last >= alloc_size) {
                resize(Math.max(last + 1, (int) (last * 1.05)));
            }

            if (used[begin] != 0)
                continue;

            // begin is usable only if every sibling's slot is still free.
            boolean flag = false;
            for (int i = 1; i < siblings.size(); i++) {
                if (array[((begin + ((Node) siblings.get(i)).code) << 1) + 1] != 0) {
                    flag = true;
                    break;
                }
            }
            if (!flag)
                break;
        }

        // If the scanned region is nearly full, start future scans at pos.
        if (1.0 * nonzero_num / (pos - next_check_pos + 1) >= 0.95)
            next_check_pos = pos;

        used[begin] = 1;
        size = Math.max(size,
                begin + ((Node) siblings.get(siblings.size() - 1)).code + 1);

        for (int i = 0; i < siblings.size(); i++) {
            array[((begin + ((Node) siblings.get(i)).code) << 1) + 1] = begin;
        }

        for (int i = 0; i < siblings.size(); i++) {
            Vector new_siblings = new Vector();

            if (fetch(((Node) siblings.get(i)), new_siblings) == 0) {
                // Leaf: store the value negated as -(v + 1) in the BASE slot.
                array[(begin + ((Node) siblings.get(i)).code) << 1] = (val != null)
                        ? (-val[((Node) siblings.get(i)).left] - 1)
                        : (-((Node) siblings.get(i)).left - 1);

                // FIX: corrected the misspelled error message ("assgined").
                if ((val != null)
                        && ((-val[((Node) siblings.get(i)).left] - 1) >= 0)) {
                    log.error("negative value is assigned.");
                    throw new RuntimeException(
                            "Fatal: negative value is assigned.");
                }
            } else {
                int ins = insert(new_siblings);
                array[(begin + ((Node) siblings.get(i)).code) << 1] = ins;
            }
        }

        return begin;
    }

    /** Releases all build/lookup state. */
    void clear() {
        array = null;
        used = null;
        alloc_size = 0;
        size = 0;
        no_delete = 0;
    }

    /** Bytes per logical unit (one BASE int + one CHECK int). */
    int get_unit_size() {
        return 8;
    };

    int get_size() {
        return size;
    };

    /** Number of occupied units (non-zero CHECK entries). */
    int get_nonzero_size() {
        int result = 0;
        for (int i = 0; i < size; i++)
            if (array[(i << 1) + 1] != 0)
                result++;
        return result;
    }

    /** Builds the trie from all keys in {@code _str}. See the 4-arg overload. */
    public int build(char _str[][], int _len[], int _val[]) {
        return build(_str, _len, _val, _str.length);
    }

    /**
     * Builds the trie from {@code size} sorted keys.
     *
     * @param _str sorted key array
     * @param _len optional per-key lengths (null = use str[i].length)
     * @param _val per-key values to store; must parallel {@code _str}
     * @param size number of keys to use
     * @return the logical size of the built trie, or 0 on invalid input
     */
    public int build(char _str[][], int _len[], int _val[], int size) {
        if (_str == null)
            return 0;
        if (_str.length != _val.length) {
            log.warn("index and text should be same size.");
            return 0;
        }

        str = _str;
        len = _len;
        str_size = size;
        val = _val;

        resize(1024 * 10);

        // Root BASE is fixed at 1.
        array[0 << 1] = 1;
        next_check_pos = 0;

        Node root_node = new Node();
        root_node.left = 0;
        root_node.right = str_size;
        root_node.depth = 0;

        Vector siblings = new Vector();
        log.trace("---fetch---");
        fetch(root_node, siblings);
        log.trace("---insert---");
        insert(siblings);

        used = null;

        return size;
    }

    /**
     * Exact-match lookup of {@code key[pos..len)}.
     *
     * @return the stored value, or -1 if the key is not in the trie
     */
    public int search(char key[], int pos, int len) {
        if (len == 0)
            len = key.length;

        int b = array[0 << 1];
        int p;

        for (int i = pos; i < len; i++) {
            p = b + (char) (key[i]) + 1;
            if (b == array[(p << 1) + 1])
                b = array[p << 1];
            else
                return -1;
        }

        // Follow the end-of-key (code 0) transition; a negative BASE is a leaf.
        p = b;
        int n = array[p << 1];
        if (b == array[(p << 1) + 1] && n < 0)
            return (-n - 1);
        return -1;
    }

    /**
     * Finds all keys that are prefixes of {@code key[pos..len)}.
     *
     * @param result receives the stored values of matching prefixes
     * @return the number of matches found (may exceed result.length)
     */
    public int commonPrefixSearch(char key[], int result[], int pos, int len) {
        if (len == 0)
            len = key.length;

        int b = array[0 << 1];
        int num = 0;
        int n;
        int p;

        for (int i = pos; i < len; i++) {
            // A leaf reachable via end-of-key from here is a matching prefix.
            p = b;
            n = array[p << 1];
            if (b == array[(p << 1) + 1] && n < 0) {
                if (log.isTraceEnabled())
                    log.trace("result[" + num + "]=" + (-n - 1));
                if (num < result.length) {
                    result[num] = -n - 1;
                } else {
                    log.warn("result array size may not enough");
                }
                num++;
            }

            p = b + (char) (key[i]) + 1;
            // FIX: ">=" — the old ">" let (p << 1) == array.length through and
            // overran the CHECK read at (p << 1) + 1 below.
            if ((p << 1) >= array.length) {
                log.warn("p range is over.");
                log.warn("(p<<1,array.length)=(" + (p << 1) + ","
                        + array.length + ")");
                return num;
            }
            if (b == array[(p << 1) + 1]) {
                b = array[p << 1];
            } else {
                return num;
            }
        }

        // Check for a full-key match after consuming all characters.
        p = b;
        n = array[p << 1];
        if (b == array[(p << 1) + 1] && n < 0) {
            if (log.isTraceEnabled())
                log.trace("result[" + num + "]=" + (-n - 1));
            if (num < result.length) {
                result[num] = -n - 1;
            } else {
                log.warn("result array size may not enough");
            }
            num++;
        }

        return num;
    }

    /**
     * Saves the trie image as a flat sequence of big-endian ints.
     *
     * @param file destination path
     * @throws IOException if the file cannot be written
     */
    public void save(String file) throws IOException {
        long start = System.currentTimeMillis();
        DataOutputStream out = new DataOutputStream(new BufferedOutputStream(
                new FileOutputStream(file)));
        // FIX: close in finally so the descriptor is released on write errors.
        try {
            int dsize = alloc_size << 1;
            for (int i = 0; i < dsize; i++) {
                out.writeInt(array[i]);
            }
        } finally {
            out.close();
        }
        log.info("save time = "
                + (((double) (System.currentTimeMillis() - start)) / 1000)
                + "[s]");
    }

    /** Debug helper: dumps a char array to stderr. */
    public static void dumpChar(char c[], String message) {
        System.err.println("message=" + message);
        for (int i = 0; i < c.length; i++) {
            System.err.print(c[i] + ",");
        }
        System.err.println();
    }

    public static void main(String args[]) {
    }
}
package com.oracle.pts.salesparty.wsclient.generated;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;

/**
 * JAXB-generated request wrapper for the
 * {@code findSalesAccountMyFavoriteSalesAccountsVCAsync} web-service operation.
 *
 * <p>Generated binding code — do not hand-edit the fields or annotations;
 * JAXB binds by field (see {@code XmlAccessType.FIELD}), so the field names,
 * the {@code propOrder}, and the {@code @XmlElement} names must stay in sync
 * with the service schema.
 *
 * <p>Schema content (all elements required, in order): {@code findCriteria}
 * ({@code FindCriteria}), the {@code Bind*} string parameters
 * (PartyUniqueName, Owner, PartyNumber, Address1, Address2, City, State,
 * PostalCode, Country, ResourcePartyName, Territory), and
 * {@code findControl} ({@code FindControl}).
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "", propOrder = {
    "findCriteria",
    "bindPartyUniqueName",
    "bindOwner",
    "bindPartyNumber",
    "bindAddress1",
    "bindAddress2",
    "bindCity",
    "bindState",
    "bindPostalCode",
    "bindCountry",
    "bindResourcePartyName",
    "bindTerritory",
    "findControl"
})
@XmlRootElement(name = "findSalesAccountMyFavoriteSalesAccountsVCAsync")
public class FindSalesAccountMyFavoriteSalesAccountsVCAsync {

    // Query criteria (filters, fetch attributes) for the find operation.
    @XmlElement(required = true)
    protected FindCriteria findCriteria;
    @XmlElement(name = "BindPartyUniqueName", required = true)
    protected String bindPartyUniqueName;
    @XmlElement(name = "BindOwner", required = true)
    protected String bindOwner;
    @XmlElement(name = "BindPartyNumber", required = true)
    protected String bindPartyNumber;
    @XmlElement(name = "BindAddress1", required = true)
    protected String bindAddress1;
    @XmlElement(name = "BindAddress2", required = true)
    protected String bindAddress2;
    @XmlElement(name = "BindCity", required = true)
    protected String bindCity;
    @XmlElement(name = "BindState", required = true)
    protected String bindState;
    @XmlElement(name = "BindPostalCode", required = true)
    protected String bindPostalCode;
    @XmlElement(name = "BindCountry", required = true)
    protected String bindCountry;
    @XmlElement(name = "BindResourcePartyName", required = true)
    protected String bindResourcePartyName;
    @XmlElement(name = "BindTerritory", required = true)
    protected String bindTerritory;
    // Paging/control options for the find operation.
    @XmlElement(required = true)
    protected FindControl findControl;

    /** Returns the find criteria; possibly null until set. */
    public FindCriteria getFindCriteria() {
        return findCriteria;
    }

    /** Sets the find criteria ({@link FindCriteria}). */
    public void setFindCriteria(FindCriteria value) {
        this.findCriteria = value;
    }

    /** Returns the BindPartyUniqueName bind parameter. */
    public String getBindPartyUniqueName() {
        return bindPartyUniqueName;
    }

    /** Sets the BindPartyUniqueName bind parameter. */
    public void setBindPartyUniqueName(String value) {
        this.bindPartyUniqueName = value;
    }

    /** Returns the BindOwner bind parameter. */
    public String getBindOwner() {
        return bindOwner;
    }

    /** Sets the BindOwner bind parameter. */
    public void setBindOwner(String value) {
        this.bindOwner = value;
    }

    /** Returns the BindPartyNumber bind parameter. */
    public String getBindPartyNumber() {
        return bindPartyNumber;
    }

    /** Sets the BindPartyNumber bind parameter. */
    public void setBindPartyNumber(String value) {
        this.bindPartyNumber = value;
    }

    /** Returns the BindAddress1 bind parameter. */
    public String getBindAddress1() {
        return bindAddress1;
    }

    /** Sets the BindAddress1 bind parameter. */
    public void setBindAddress1(String value) {
        this.bindAddress1 = value;
    }

    /** Returns the BindAddress2 bind parameter. */
    public String getBindAddress2() {
        return bindAddress2;
    }

    /** Sets the BindAddress2 bind parameter. */
    public void setBindAddress2(String value) {
        this.bindAddress2 = value;
    }

    /** Returns the BindCity bind parameter. */
    public String getBindCity() {
        return bindCity;
    }

    /** Sets the BindCity bind parameter. */
    public void setBindCity(String value) {
        this.bindCity = value;
    }

    /** Returns the BindState bind parameter. */
    public String getBindState() {
        return bindState;
    }

    /** Sets the BindState bind parameter. */
    public void setBindState(String value) {
        this.bindState = value;
    }

    /** Returns the BindPostalCode bind parameter. */
    public String getBindPostalCode() {
        return bindPostalCode;
    }

    /** Sets the BindPostalCode bind parameter. */
    public void setBindPostalCode(String value) {
        this.bindPostalCode = value;
    }

    /** Returns the BindCountry bind parameter. */
    public String getBindCountry() {
        return bindCountry;
    }

    /** Sets the BindCountry bind parameter. */
    public void setBindCountry(String value) {
        this.bindCountry = value;
    }

    /** Returns the BindResourcePartyName bind parameter. */
    public String getBindResourcePartyName() {
        return bindResourcePartyName;
    }

    /** Sets the BindResourcePartyName bind parameter. */
    public void setBindResourcePartyName(String value) {
        this.bindResourcePartyName = value;
    }

    /** Returns the BindTerritory bind parameter. */
    public String getBindTerritory() {
        return bindTerritory;
    }

    /** Sets the BindTerritory bind parameter. */
    public void setBindTerritory(String value) {
        this.bindTerritory = value;
    }

    /** Returns the find control; possibly null until set. */
    public FindControl getFindControl() {
        return findControl;
    }

    /** Sets the find control ({@link FindControl}). */
    public void setFindControl(FindControl value) {
        this.findControl = value;
    }

}
package me.ykrank.s1next;

import android.content.Context;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.github.ykrank.androidtools.widget.EditorDiskCache;
import com.github.ykrank.androidtools.widget.RxBus;

import java.io.File;
import java.util.concurrent.TimeUnit;

import dagger.Module;
import dagger.Provides;
import io.rx_cache2.internal.RxCache;
import io.victoralbertos.jolyglot.JacksonSpeaker;
import me.ykrank.s1next.data.User;
import me.ykrank.s1next.data.api.Api;
import me.ykrank.s1next.data.api.ApiCacheProvider;
import me.ykrank.s1next.data.api.ApiVersionInterceptor;
import me.ykrank.s1next.data.api.S1Service;
import me.ykrank.s1next.data.api.UserValidator;
import me.ykrank.s1next.data.api.app.AppApi;
import me.ykrank.s1next.data.api.app.AppService;
import me.ykrank.s1next.data.api.app.AppTokenInterceptor;
import me.ykrank.s1next.data.pref.AppDataPreferencesManager;
import me.ykrank.s1next.data.pref.DownloadPreferencesManager;
import me.ykrank.s1next.data.pref.NetworkPreferencesManager;
import me.ykrank.s1next.task.AutoSignTask;
import me.ykrank.s1next.viewmodel.UserViewModel;
import me.ykrank.s1next.widget.RawJsonConverterFactory;
import me.ykrank.s1next.widget.download.ImageDownloadManager;
import me.ykrank.s1next.widget.glide.AvatarUrlsCache;
import me.ykrank.s1next.widget.glide.OkHttpNoAvatarInterceptor;
import me.ykrank.s1next.widget.hostcheck.AppHostUrl;
import me.ykrank.s1next.widget.hostcheck.AppMultiHostInterceptor;
import me.ykrank.s1next.widget.hostcheck.NoticeCheckTask;
import me.ykrank.s1next.widget.net.AppData;
import me.ykrank.s1next.widget.net.AppDns;
import me.ykrank.s1next.widget.net.Data;
import me.ykrank.s1next.widget.net.Image;
import okhttp3.CookieJar;
import okhttp3.Dns;
import okhttp3.OkHttpClient;
import retrofit2.Retrofit;
import retrofit2.adapter.rxjava2.RxJava2CallAdapterFactory;
import retrofit2.converter.jackson.JacksonConverterFactory;
import retrofit2.converter.scalars.ScalarsConverterFactory;

/**
 * Dagger module that wires up the application-scoped singletons: HTTP
 * clients (forum data, app API, images), Retrofit services, caches, and
 * user/session helpers.
 */
@Module(includes = BuildTypeModule.class)
public final class AppModule {

    AppModule() {
    }

    @Provides
    @AppLife
    AppHostUrl provideBaseHostUrl(NetworkPreferencesManager networkPreferencesManager) {
        // Base host resolution backed by the user's network preferences.
        return new AppHostUrl(networkPreferencesManager);
    }

    @Provides
    @AppLife
    Dns provideHttpDns(Context context, AppHostUrl baseHostUrl) {
        return new AppDns(context, baseHostUrl);
    }

    /** OkHttp builder for forum-data requests (cookies, host switching, API version header). */
    @Data
    @Provides
    @AppLife
    OkHttpClient.Builder providerDataOkHttpClientBuilder(CookieJar cookieJar, AppHostUrl baseHostUrl, Dns dns) {
        return new OkHttpClient.Builder()
                .dns(dns)
                .connectTimeout(10, TimeUnit.SECONDS)
                .writeTimeout(20, TimeUnit.SECONDS)
                .readTimeout(10, TimeUnit.SECONDS)
                .retryOnConnectionFailure(true)
                .cookieJar(cookieJar)
                .addInterceptor(new ApiVersionInterceptor())
                .addInterceptor(new AppMultiHostInterceptor(baseHostUrl));
    }

    /** OkHttp builder for the app API (adds the per-user token at the network layer). */
    @AppData
    @Provides
    @AppLife
    OkHttpClient.Builder providerAppDataOkHttpClientBuilder(CookieJar cookieJar, User user) {
        return new OkHttpClient.Builder()
                .connectTimeout(10, TimeUnit.SECONDS)
                .writeTimeout(20, TimeUnit.SECONDS)
                .readTimeout(10, TimeUnit.SECONDS)
                .retryOnConnectionFailure(true)
                .cookieJar(cookieJar)
                .addNetworkInterceptor(new AppTokenInterceptor(user));
    }

    /** OkHttp builder for image downloads (longer read timeout, avatar filtering). */
    @Image
    @Provides
    @AppLife
    OkHttpClient.Builder providerImageOkHttpClientBuilder(CookieJar cookieJar, AppHostUrl baseHostUrl, Dns dns) {
        return new OkHttpClient.Builder()
                .dns(dns)
                .connectTimeout(17, TimeUnit.SECONDS)
                .writeTimeout(17, TimeUnit.SECONDS)
                .readTimeout(77, TimeUnit.SECONDS)
                .retryOnConnectionFailure(true)
                .cookieJar(cookieJar)
                .addNetworkInterceptor(new OkHttpNoAvatarInterceptor())
                .addInterceptor(new AppMultiHostInterceptor(baseHostUrl));
    }

    /** Retrofit service for the forum API. */
    @Provides
    @AppLife
    S1Service providerRetrofit(@Data OkHttpClient okHttpClient, ObjectMapper mapper) {
        Retrofit retrofit = new Retrofit.Builder()
                .client(okHttpClient)
                .baseUrl(Api.BASE_API_URL)
                .addConverterFactory(RawJsonConverterFactory.Companion.create())
                .addConverterFactory(ScalarsConverterFactory.create())
                .addConverterFactory(JacksonConverterFactory.create(mapper))
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();
        return retrofit.create(S1Service.class);
    }

    /** Retrofit service for the companion app API. */
    @Provides
    @AppLife
    AppService providerAppRetrofit(@AppData OkHttpClient okHttpClient) {
        Retrofit retrofit = new Retrofit.Builder()
                .client(okHttpClient)
                .baseUrl(AppApi.BASE_URL)
                .addConverterFactory(ScalarsConverterFactory.create())
                .addConverterFactory(JacksonConverterFactory.create())
                .addCallAdapterFactory(RxJava2CallAdapterFactory.create())
                .build();
        return retrofit.create(AppService.class);
    }

    /** Disk-backed RxCache provider for API responses, stored under cache/rx_cache. */
    @Provides
    @AppLife
    ApiCacheProvider providerApiCacheProvider(Context context, DownloadPreferencesManager downloadPreferencesManager) {
        File cacheDir = new File(context.getCacheDir().getAbsolutePath() + "/rx_cache");
        if (!cacheDir.exists()) {
            cacheDir.mkdirs();
        }
        return new RxCache.Builder()
                .useExpiredDataIfLoaderNotAvailable(true)
                .setMaxMBPersistenceCache(downloadPreferencesManager.getTotalDataCacheSize())
                .persistence(cacheDir, new JacksonSpeaker())
                .using(ApiCacheProvider.class);
    }

    @Provides
    @AppLife
    User providerUser(UserViewModel userViewModel) {
        return userViewModel.getUser();
    }

    @Provides
    @AppLife
    AutoSignTask provideAutoSignTask(S1Service s1Service, User user) {
        return new AutoSignTask(s1Service, user);
    }

    @Provides
    @AppLife
    UserValidator providerUserValidator(User user, AutoSignTask autoSignTask) {
        return new UserValidator(user, autoSignTask);
    }

    @Provides
    @AppLife
    UserViewModel providerUserViewModel(AppDataPreferencesManager appDataPreferencesManager) {
        return new UserViewModel(appDataPreferencesManager);
    }

    @Provides
    @AppLife
    NoticeCheckTask provideNoticeCheckTask(RxBus rxBus, S1Service s1Service, User user) {
        return new NoticeCheckTask(rxBus, s1Service, user);
    }

    /** Disk cache for unsent editor drafts, under cache/editor_disk_cache. */
    @Provides
    @AppLife
    EditorDiskCache provideEditorDiskCache(Context context) {
        return new EditorDiskCache(context.getCacheDir().getPath() + File.separator + "editor_disk_cache");
    }

    @Provides
    @AppLife
    AvatarUrlsCache provideAvatarUrlsCache() {
        return new AvatarUrlsCache();
    }

    @Provides
    @AppLife
    ImageDownloadManager provideImageDownloadManager(@Image OkHttpClient.Builder okHttpClientBuilder) {
        return new ImageDownloadManager(okHttpClientBuilder);
    }
}
package com.robotium.solo;

import java.lang.ref.WeakReference;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Stack;
import java.util.Timer;
import com.robotium.solo.Solo.Config;
import junit.framework.Assert;
import android.app.Activity;
import android.app.Instrumentation;
import android.app.Instrumentation.ActivityMonitor;
import android.content.IntentFilter;
import android.os.SystemClock;
import android.util.Log;
import android.view.KeyEvent;


/**
 * Contains activity related methods. Examples are:
 * getCurrentActivity(), getActivityMonitor(), setActivityOrientation(int orientation).
 *
 * Activities are tracked on a background thread via an {@link ActivityMonitor};
 * the stack holds only {@link WeakReference}s so tracked activities can still
 * be garbage collected.
 *
 * @author Renas Reda, renas.reda@robotium.com
 *
 */

class ActivityUtils {

	private final Config config;
	private final Instrumentation inst;
	// The monitor that observes activity starts; null when tracking is disabled.
	private ActivityMonitor activityMonitor;
	// The most recently resolved current activity (also the start activity).
	private Activity activity;
	private final Sleeper sleeper;
	private final String LOG_TAG = "Robotium";
	private final int MINISLEEP = 100;
	// Weakly-referenced stack of opened activities, most recent on top.
	private Stack<WeakReference<Activity>> activityStack;
	private WeakReference<Activity> weakActivityReference;
	// String keys (activity.toString()) mirroring activityStack, used for
	// membership tests without holding strong references.
	private Stack<String> activitiesStoredInActivityStack;
	private Timer activitySyncTimer;
	// Flag read by the monitor thread; volatile so stop requests are seen.
	private volatile boolean registerActivities;
	Thread activityThread;

	/**
	 * Constructs this object.
	 *
	 * @param config the {@code Config} instance
	 * @param inst the {@code Instrumentation} instance.
	 * @param activity the start {@code Activity}
	 * @param sleeper the {@code Sleeper} instance
	 */

	public ActivityUtils(Config config, Instrumentation inst, Activity activity, Sleeper sleeper) {
		this.config = config;
		this.inst = inst;
		this.activity = activity;
		this.sleeper = sleeper;
		createStackAndPushStartActivity();
		activitySyncTimer = new Timer();
		activitiesStoredInActivityStack = new Stack<String>();
		setupActivityMonitor();
		setupActivityStackListener();
	}

	/**
	 * Creates a new activity stack and pushes the start activity.
	 */

	private void createStackAndPushStartActivity(){
		activityStack = new Stack<WeakReference<Activity>>();
		if (activity != null && config.trackActivities){
			WeakReference<Activity> weakReference = new WeakReference<Activity>(activity);
			// Drop the strong field reference on purpose: only the weak
			// reference on the stack should keep the activity reachable.
			activity = null;
			activityStack.push(weakReference);
		}
	}

	/**
	 * Returns a {@code List} of all the opened/active activities.
	 *
	 * @return a {@code List} of all the opened/active activities
	 */

	public ArrayList<Activity> getAllOpenedActivities()
	{
		ArrayList<Activity> activities = new ArrayList<Activity>();
		Iterator<WeakReference<Activity>> activityStackIterator = activityStack.iterator();

		while(activityStackIterator.hasNext()){
			Activity activity = activityStackIterator.next().get();
			// Skip entries whose activity has already been garbage collected.
			if(activity!=null)
				activities.add(activity);
		}
		return activities;
	}

	/**
	 * This is where the activityMonitor is set up. The monitor will keep check
	 * for the currently active activity.
	 */

	private void setupActivityMonitor() {

		if(config.trackActivities){
			try {
				// A null IntentFilter matches every started activity.
				IntentFilter filter = null;
				activityMonitor = inst.addMonitor(filter, null, false);
			} catch (Exception e) {
				e.printStackTrace();
			}
		}
	}

	/**
	 * Returns true if registration of Activities should be performed
	 *
	 * @return true if registration of Activities should be performed
	 */

	public boolean shouldRegisterActivities() {
		return registerActivities;
	}


	/**
	 * Set true if registration of Activities should be performed
	 * @param registerActivities true if registration of Activities should be performed
	 *
	 */

	public void setRegisterActivities(boolean registerActivities) {
		this.registerActivities = registerActivities;
	}

	/**
	 * This is where the activityStack listener is set up. The listener will keep track of the
	 * opened activities and their positions.
	 */

	private void setupActivityStackListener() {
		if(activityMonitor == null){
			return;
		}

		// Start the background thread that polls the monitor for new activities.
		setRegisterActivities(true);
		activityThread = new RegisterActivitiesThread(this);
		activityThread.start();
	}

	/**
	 * Polls the activity monitor once (with a 2 s timeout) and records any
	 * newly started activity on the stack. Called repeatedly from the
	 * {@link RegisterActivitiesThread}.
	 */

	void monitorActivities() {
		if(activityMonitor != null){
			Activity activity = activityMonitor.waitForActivityWithTimeout(2000L);

			if(activity != null){
				// Re-push a re-started activity: remove the stale entry first
				// so the activity ends up on top of the stack again.
				if (activitiesStoredInActivityStack.remove(activity.toString())){
					removeActivityFromStack(activity);
				}
				if(!activity.isFinishing()){
					addActivityToStack(activity);
				}
			}
		}
	}

	/**
	 * Removes a given activity from the activity stack
	 *
	 * @param activity the activity to remove
	 */

	private void removeActivityFromStack(Activity activity){

		Iterator<WeakReference<Activity>> activityStackIterator = activityStack.iterator();
		while(activityStackIterator.hasNext()){
			Activity activityFromWeakReference = activityStackIterator.next().get();

			// Also prune entries that have been garbage collected.
			if(activityFromWeakReference == null){
				activityStackIterator.remove();
			}

			if(activity != null && activityFromWeakReference != null && activityFromWeakReference.equals(activity)){
				activityStackIterator.remove();
			}
		}
	}

	/**
	 * Returns the ActivityMonitor used by Robotium.
	 *
	 * @return the ActivityMonitor used by Robotium
	 */

	public ActivityMonitor getActivityMonitor(){
		return activityMonitor;
	}

	/**
	 * Sets the Orientation (Landscape/Portrait) for the current activity.
	 *
	 * @param orientation An orientation constant such as {@link android.content.pm.ActivityInfo#SCREEN_ORIENTATION_LANDSCAPE} or {@link android.content.pm.ActivityInfo#SCREEN_ORIENTATION_PORTRAIT}
	 */

	public void setActivityOrientation(int orientation)
	{
		Activity activity = getCurrentActivity();
		if(activity != null){
			activity.setRequestedOrientation(orientation);
		}
	}

	/**
	 * Returns the current {@code Activity}, after sleeping a default pause length.
	 *
	 * @param shouldSleepFirst whether to sleep a default pause first
	 * @return the current {@code Activity}
	 */

	public Activity getCurrentActivity(boolean shouldSleepFirst) {
		return getCurrentActivity(shouldSleepFirst, true);
	}

	/**
	 * Returns the current {@code Activity}, after sleeping a default pause length.
	 *
	 * @return the current {@code Activity}
	 */

	public Activity getCurrentActivity() {
		return getCurrentActivity(true, true);
	}

	/**
	 * Adds an activity to the stack
	 *
	 * @param activity the activity to add
	 */

	private void addActivityToStack(Activity activity){
		activitiesStoredInActivityStack.push(activity.toString());
		weakActivityReference = new WeakReference<Activity>(activity);
		// Nulls only the parameter; presumably meant to avoid keeping a strong
		// reference in this frame — TODO confirm intent.
		activity = null;
		activityStack.push(weakActivityReference);
	}

	/**
	 * Waits for an activity to be started if one is not provided
	 * by the constructor.
	 */

	private final void waitForActivityIfNotAvailable(){
		if(activityStack.isEmpty() || activityStack.peek().get() == null){

			if (activityMonitor != null) {
				Activity activity = activityMonitor.getLastActivity();
				// Busy-wait (with mini sleeps) until the monitor has seen one.
				while (activity == null){
					sleeper.sleepMini();
					activity = activityMonitor.getLastActivity();
				}
				addActivityToStack(activity);
			} else if(config.trackActivities){
				// Monitor was lost; recreate it and retry recursively.
				sleeper.sleepMini();
				setupActivityMonitor();
				waitForActivityIfNotAvailable();
			}
		}
	}

	/**
	 * Returns the name of the most recent Activity
	 *
	 * @return the name of the current {@code Activity}
	 */

	public String getCurrentActivityName(){
		if(!activitiesStoredInActivityStack.isEmpty()){
			return activitiesStoredInActivityStack.peek();
		}
		return "";
	}

	/**
	 * Returns the current {@code Activity}.
	 *
	 * @param shouldSleepFirst whether to sleep a default pause first
	 * @param waitForActivity whether to wait for the activity
	 * @return the current {@code Activity}
	 */

	public Activity getCurrentActivity(boolean shouldSleepFirst, boolean waitForActivity) {
		if(shouldSleepFirst){
			sleeper.sleep();
		}
		// When tracking is off, fall back to the activity given at construction.
		if(!config.trackActivities){
			return activity;
		}

		if(waitForActivity){
			waitForActivityIfNotAvailable();
		}

		if(!activityStack.isEmpty()){
			activity=activityStack.peek().get();
		}

		return activity;
	}

	/**
	 * Check if activity stack is empty.
	 *
	 * @return true if activity stack is empty
	 */

	public boolean isActivityStackEmpty() {
		return activityStack.isEmpty();
	}

	/**
	 * Returns to the given {@link Activity}.
	 *
	 * @param name the name of the {@code Activity} to return to, e.g. {@code "MyActivity"}
	 */

	public void goBackToActivity(String name)
	{
		ArrayList<Activity> activitiesOpened = getAllOpenedActivities();
		boolean found = false;
		for(int i = 0; i < activitiesOpened.size(); i++){
			if(activitiesOpened.get(i).getClass().getSimpleName().equals(name)){
				found = true;
				break;
			}
		}
		if(found){
			// Press Back until the requested activity is on top.
			while(!getCurrentActivity().getClass().getSimpleName().equals(name))
			{
				try{
					inst.sendKeyDownUpSync(KeyEvent.KEYCODE_BACK);
				}catch(SecurityException ignored){}	// Thrown when Back targets another app's window.
			}
		}
		else{
			for (int i = 0; i < activitiesOpened.size(); i++){
				Log.d(LOG_TAG, "Activity priorly opened: "+ activitiesOpened.get(i).getClass().getSimpleName());
			}
			Assert.fail("No Activity named: '" + name + "' has been priorly opened");
		}
	}

	/**
	 * Returns a localized string.
	 *
	 * @param resId the resource ID for the string
	 * @return the localized string
	 */

	public String getString(int resId)
	{
		Activity activity = getCurrentActivity(false);
		if(activity == null){
			return "";
		}
		return activity.getString(resId);
	}

	/**
	 * Finalizes the solo object. Best-effort cleanup of the timer and monitor.
	 */

	@Override
	public void finalize() throws Throwable {
		activitySyncTimer.cancel();
		stopActivityMonitor();
		super.finalize();
	}

	/**
	 * Removes the ActivityMonitor
	 */

	private void stopActivityMonitor(){
		try {
			// Remove the monitor added during startup
			if (activityMonitor != null) {
				inst.removeMonitor(activityMonitor);
				activityMonitor = null;
			}
		} catch (Exception ignored) {}
	}

	/**
	 * All activities that have been opened are finished.
	 */

	public void finishOpenedActivities(){
		// Stops the activityStack listener
		activitySyncTimer.cancel();
		// Without tracking there is no stack; just back out a few times.
		if(!config.trackActivities){
			useGoBack(3);
			return;
		}
		ArrayList<Activity> activitiesOpened = getAllOpenedActivities();
		// Finish all opened activities, newest first
		for (int i = activitiesOpened.size()-1; i >= 0; i--) {
			sleeper.sleep(MINISLEEP);
			finishActivity(activitiesOpened.get(i));
		}
		activitiesOpened = null;
		sleeper.sleep(MINISLEEP);
		// Finish the initial activity, pressing Back for good measure
		finishActivity(getCurrentActivity(true, false));
		stopActivityMonitor();
		// Signals the monitor thread to terminate.
		setRegisterActivities(false);
		this.activity = null;
		sleeper.sleepMini();
		useGoBack(1);
		clearActivityStack();
	}

	/**
	 * Sends the back button command a given number of times
	 *
	 * @param numberOfTimes the number of times to press "back"
	 */

	private void useGoBack(int numberOfTimes){
		for(int i = 0; i < numberOfTimes; i++){
			try {
				// NOTE(review): Back is sent twice per iteration with a pause
				// in between — looks deliberate, but confirm against upstream.
				inst.sendKeyDownUpSync(KeyEvent.KEYCODE_BACK);
				sleeper.sleep(MINISLEEP);
				inst.sendKeyDownUpSync(KeyEvent.KEYCODE_BACK);
			} catch (Throwable ignored) {
				// Guard against lack of INJECT_EVENT permission
			}
		}
	}

	/**
	 * Clears the activity stack.
	 */

	private void clearActivityStack(){
		activityStack.clear();
		activitiesStoredInActivityStack.clear();
	}

	/**
	 * Finishes an activity.
	 *
	 * @param activity the activity to finish
	 */

	private void finishActivity(Activity activity){
		if(activity != null) {
			try{
				activity.finish();
			}catch(Throwable e){
				e.printStackTrace();
			}
		}
	}

	/**
	 * Background thread that polls the {@link ActivityMonitor}. Holds only a
	 * weak reference to its {@link ActivityUtils} so the thread cannot keep
	 * the test fixture alive; it exits once the owner is collected or
	 * registration is switched off.
	 */

	private static final class RegisterActivitiesThread extends Thread {

		// Delay between monitor polls (~one frame at 60 fps).
		public static final long REGISTER_ACTIVITY_THREAD_SLEEP_MS = 16L;

		private final WeakReference<ActivityUtils> activityUtilsWR;

		RegisterActivitiesThread(ActivityUtils activityUtils) {
			super("activityMonitorThread");
			activityUtilsWR = new WeakReference<ActivityUtils>(activityUtils);
			// Low priority: tracking must not starve the instrumented app.
			setPriority(Thread.MIN_PRIORITY);
		}

		@Override
		public void run() {
			while (shouldMonitor()) {
				monitorActivities();
				SystemClock.sleep(REGISTER_ACTIVITY_THREAD_SLEEP_MS);
			}
		}

		private boolean shouldMonitor() {
			ActivityUtils activityUtils = activityUtilsWR.get();
			return activityUtils != null && activityUtils.shouldRegisterActivities();
		}

		private void monitorActivities() {
			ActivityUtils activityUtils = activityUtilsWR.get();
			if (activityUtils != null) {
				activityUtils.monitorActivities();
			}
		}
	}
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.webmonitor.handlers;

import org.apache.flink.api.common.time.Time;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.dispatcher.DispatcherGateway;
import org.apache.flink.runtime.messages.Acknowledge;
import org.apache.flink.runtime.rest.handler.HandlerRequest;
import org.apache.flink.runtime.rest.messages.EmptyMessageParameters;
import org.apache.flink.runtime.rest.messages.EmptyRequestBody;
import org.apache.flink.runtime.rest.messages.JobPlanInfo;
import org.apache.flink.runtime.testingUtils.TestingUtils;
import org.apache.flink.runtime.util.BlobServerResource;
import org.apache.flink.runtime.webmonitor.RestfulGateway;
import org.apache.flink.runtime.webmonitor.TestingDispatcherGateway;
import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever;
import org.apache.flink.util.OperatingSystem;
import org.apache.flink.util.TestLogger;

import org.junit.Assert;
import org.junit.Assume;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Collections;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executor;

import static org.hamcrest.Matchers.containsString;

/**
 * Tests the entire lifecycle of a jar submission:
 * upload, list, show plan, run, delete, list again.
 */
public class JarSubmissionITCase extends TestLogger {

	@Rule
	public final TemporaryFolder temporaryFolder = new TemporaryFolder();

	@Rule
	public final BlobServerResource blobServerResource = new BlobServerResource();

	@BeforeClass
	public static void checkOS() {
		Assume.assumeFalse("This test fails on Windows due to unclosed JarFiles, see FLINK-9844.",
			OperatingSystem.isWindows());
	}

	/**
	 * Exercises the full upload → list → plan → run → delete → list sequence
	 * against a stubbed dispatcher gateway that accepts every job submission.
	 */
	@Test
	public void testJarSubmission() throws Exception {
		// Stub gateway: real blob-server port, submission always acknowledged.
		final TestingDispatcherGateway restfulGateway = new TestingDispatcherGateway.Builder()
			.setBlobServerPort(blobServerResource.getBlobServerPort())
			.setSubmitFunction(jobGraph -> CompletableFuture.completedFuture(Acknowledge.get()))
			.build();
		final JarHandlers handlers = new JarHandlers(temporaryFolder.newFolder().toPath(), restfulGateway);
		final JarUploadHandler uploadHandler = handlers.uploadHandler;
		final JarListHandler listHandler = handlers.listHandler;
		final JarPlanHandler planHandler = handlers.planHandler;
		final JarRunHandler runHandler = handlers.runHandler;
		final JarDeleteHandler deleteHandler = handlers.deleteHandler;

		// targetDir property is set via surefire configuration
		final Path originalJar = Paths.get(System.getProperty("targetDir")).resolve("test-program.jar");
		// Copy so the handlers can move/delete it without touching the build output.
		final Path jar = Files.copy(originalJar, temporaryFolder.getRoot().toPath().resolve("test-program.jar"));

		final String storedJarPath = uploadJar(uploadHandler, jar, restfulGateway);
		final String storedJarName = Paths.get(storedJarPath).getFileName().toString();

		final JarListInfo postUploadListResponse = listJars(listHandler, restfulGateway);
		Assert.assertEquals(1, postUploadListResponse.jarFileList.size());
		final JarListInfo.JarFileInfo listEntry = postUploadListResponse.jarFileList.iterator().next();
		// The listing shows the original file name but identifies jars by the stored name.
		Assert.assertEquals(jar.getFileName().toString(), listEntry.name);
		Assert.assertEquals(storedJarName, listEntry.id);

		final JobPlanInfo planResponse = showPlan(planHandler, storedJarName, restfulGateway);
		// we're only interested in the core functionality so checking for a small detail is sufficient
		Assert.assertThat(planResponse.getJsonPlan(), containsString("TestProgram.java:29"));

		runJar(runHandler, storedJarName, restfulGateway);

		deleteJar(deleteHandler, storedJarName, restfulGateway);

		final JarListInfo postDeleteListResponse = listJars(listHandler, restfulGateway);
		Assert.assertEquals(0, postDeleteListResponse.jarFileList.size());
	}

	/** Uploads the given jar and returns the path under which it was stored. */
	private static String uploadJar(JarUploadHandler handler, Path jar, RestfulGateway restfulGateway) throws Exception {
		HandlerRequest<EmptyRequestBody, EmptyMessageParameters> uploadRequest = new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			EmptyMessageParameters.getInstance(),
			Collections.emptyMap(),
			Collections.emptyMap(),
			Collections.singletonList(jar.toFile()));
		final JarUploadResponseBody uploadResponse = handler.handleRequest(uploadRequest, restfulGateway)
			.get();
		return uploadResponse.getFilename();
	}

	/** Lists all uploaded jars via the list handler. */
	private static JarListInfo listJars(JarListHandler handler, RestfulGateway restfulGateway) throws Exception {
		HandlerRequest<EmptyRequestBody, EmptyMessageParameters> listRequest = new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			EmptyMessageParameters.getInstance());
		return handler.handleRequest(listRequest, restfulGateway)
			.get();
	}

	/** Requests the JSON plan for the stored jar identified by {@code jarName}. */
	private static JobPlanInfo showPlan(JarPlanHandler handler, String jarName, RestfulGateway restfulGateway) throws Exception {
		JarPlanMessageParameters planParameters = JarPlanGetHeaders.getInstance().getUnresolvedMessageParameters();
		HandlerRequest<JarPlanRequestBody, JarPlanMessageParameters> planRequest = new HandlerRequest<>(
			new JarPlanRequestBody(),
			planParameters,
			Collections.singletonMap(planParameters.jarIdPathParameter.getKey(), jarName),
			Collections.emptyMap(),
			Collections.emptyList());
		return handler.handleRequest(planRequest, restfulGateway)
			.get();
	}

	/** Submits the stored jar identified by {@code jarName} for execution. */
	private static JarRunResponseBody runJar(JarRunHandler handler, String jarName, DispatcherGateway restfulGateway) throws Exception {
		final JarRunMessageParameters runParameters = JarRunHeaders.getInstance().getUnresolvedMessageParameters();
		HandlerRequest<JarRunRequestBody, JarRunMessageParameters> runRequest = new HandlerRequest<>(
			new JarRunRequestBody(),
			runParameters,
			Collections.singletonMap(runParameters.jarIdPathParameter.getKey(), jarName),
			Collections.emptyMap(),
			Collections.emptyList());
		return handler.handleRequest(runRequest, restfulGateway)
			.get();
	}

	/** Deletes the stored jar identified by {@code jarName}. */
	private static void deleteJar(JarDeleteHandler handler, String jarName, RestfulGateway restfulGateway) throws Exception {
		JarDeleteMessageParameters deleteParameters = JarDeleteHeaders.getInstance().getUnresolvedMessageParameters();
		HandlerRequest<EmptyRequestBody, JarDeleteMessageParameters> deleteRequest = new HandlerRequest<>(
			EmptyRequestBody.getInstance(),
			deleteParameters,
			Collections.singletonMap(deleteParameters.jarIdPathParameter.getKey(), jarName),
			Collections.emptyMap(),
			Collections.emptyList());
		handler.handleRequest(deleteRequest, restfulGateway)
			.get();
	}

	/**
	 * Bundles all jar handlers under test, wired against the given gateway and
	 * sharing one jar directory, timeout, response-header map and executor.
	 */
	private static class JarHandlers {
		final JarUploadHandler uploadHandler;
		final JarListHandler listHandler;
		final JarPlanHandler planHandler;
		final JarRunHandler runHandler;
		final JarDeleteHandler deleteHandler;

		JarHandlers(final Path jarDir, final TestingDispatcherGateway restfulGateway) {
			final GatewayRetriever<TestingDispatcherGateway> gatewayRetriever = () -> CompletableFuture.completedFuture(restfulGateway);
			final Time timeout = Time.seconds(10);
			final Map<String, String> responseHeaders = Collections.emptyMap();
			final Executor executor = TestingUtils.defaultExecutor();

			uploadHandler = new JarUploadHandler(
				gatewayRetriever,
				timeout,
				responseHeaders,
				JarUploadHeaders.getInstance(),
				jarDir,
				executor);

			listHandler = new JarListHandler(
				gatewayRetriever,
				timeout,
				responseHeaders,
				JarListHeaders.getInstance(),
				CompletableFuture.completedFuture("shazam://localhost:12345"),
				jarDir.toFile(),
				executor);

			planHandler = new JarPlanHandler(
				gatewayRetriever,
				timeout,
				responseHeaders,
				JarPlanGetHeaders.getInstance(),
				jarDir,
				new Configuration(),
				executor);

			runHandler = new JarRunHandler(
				gatewayRetriever,
				timeout,
				responseHeaders,
				JarRunHeaders.getInstance(),
				jarDir,
				new Configuration(),
				executor);

			deleteHandler = new JarDeleteHandler(
				gatewayRetriever,
				timeout,
				responseHeaders,
				JarDeleteHeaders.getInstance(),
				jarDir,
				executor);
		}
	}
}
/*
 * Copyright 2015 Hemant Padmanabhan
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package net.lizalab.util.jasypt.h4.ext.connectionprovider;

import static org.junit.Assert.assertEquals;

import java.util.HashMap;
import java.util.Map;

import org.hibernate.cfg.AvailableSettings;
import org.hibernate.service.classloading.spi.ClassLoaderService;
import org.hibernate.service.spi.ServiceRegistryImplementor;
import org.jasypt.encryption.pbe.PBEStringEncryptor;
import org.jasypt.exceptions.EncryptionInitializationException;
import org.jasypt.hibernate4.connectionprovider.ParameterNaming;
import org.jasypt.hibernate4.encryptor.HibernatePBEEncryptorRegistry;
import org.jmock.Expectations;
import org.jmock.Sequence;
import org.jmock.auto.Mock;
import org.jmock.integration.junit4.JUnitRuleMockery;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

/**
 * Unit tests for {@link EncryptedC3P0ConnectionProvider}.
 *
 * @author Hemant Padmanabhan
 * @since 1.0.0
 */
public class EncryptedC3P0ConnectionProviderTest {

	@Rule
	public JUnitRuleMockery context = new JUnitRuleMockery();

	@Mock
	private PBEStringEncryptor mockEncryptor;

	@Mock
	private ClassLoaderService mockCLService;

	@Mock
	private ServiceRegistryImplementor mockSRImpl;

	// Encrypted driver config value in ENC(...) form, the bare ciphertext, and the expected plaintext.
	private String testEncryptedCfgDriver = "ENC(0D347740A70CA9E28117EA25D4692D538C053B57E5125331CB9F7238C59BD967)";
	private String testEncryptedDriverText = "0D347740A70CA9E28117EA25D4692D538C053B57E5125331CB9F7238C59BD967";
	private String testDriver = "org.hsqldb.jdbcDriver";

	// Encrypted JDBC URL config value, bare ciphertext, and expected plaintext.
	private String testEncryptedCfgUrl = "ENC(FC3970FF1E22528BD9B3FEF4F103C1A388E9B212D158766F5A989C79B1FF364F9116342E9A41413D4A605EB819C41E35AE8A6FCF7AB7FFEFEDF671C758ECBDCD)";
	private String testEncryptedUrlText = "FC3970FF1E22528BD9B3FEF4F103C1A388E9B212D158766F5A989C79B1FF364F9116342E9A41413D4A605EB819C41E35AE8A6FCF7AB7FFEFEDF671C758ECBDCD";
	private String testUrl = "jdbc:hsqldb:file:target/data/testdb;shutdown=true";

	// Encrypted user config value, bare ciphertext, and expected plaintext.
	private String testEncryptedCfgUser = "ENC(4BB1376E8D44E1DFE39F253F5390E7A0D68C1D7FA36E876A)";
	private String testEncryptedUserText = "4BB1376E8D44E1DFE39F253F5390E7A0D68C1D7FA36E876A";
	private String testUser = "TESTUSER";

	// Encrypted password config value, bare ciphertext, and expected plaintext.
	private String testEncryptedCfgPass = "ENC(05E8878D8EACD8B63F1E4AEC09649BAE7E9ED3093A8A4998)";
	private String testEncryptedPassText = "05E8878D8EACD8B63F1E4AEC09649BAE7E9ED3093A8A4998";
	private String testPassword = "Test$Passwo73";

	// Name under which the mock encryptor is registered with the jasypt registry.
	private String testEncryptorCfgName = "configHibernateEncrypter";

	private EncryptedC3P0ConnectionProvider testCPImpl;

	/**
	 * Registers mock encryptor.
	 * NOTE(review): HibernatePBEEncryptorRegistry is a singleton, so the
	 * registration persists across tests in the JVM — presumably intended.
	 */
	@Before
	public final void initialize() {
		HibernatePBEEncryptorRegistry encryptorRegistry = HibernatePBEEncryptorRegistry
				.getInstance();
		encryptorRegistry.registerPBEStringEncryptor(testEncryptorCfgName, mockEncryptor);
	}

	/**
	 * Verifies requirement for configuration of name encryptor is registered
	 * with is enforced.
	 */
	@Test(expected=EncryptionInitializationException.class)
	public final void testEncryptorRegisteredNameReqd() {
		// Initialize config values without encryptor registered name.
		Map<String, String> testConfigValues = new HashMap<String, String>();
		testConfigValues.put(AvailableSettings.DRIVER, testEncryptedCfgDriver);
		testConfigValues.put(AvailableSettings.URL, testEncryptedCfgUrl);
		testConfigValues.put(AvailableSettings.USER, testEncryptedCfgUser);
		testConfigValues.put(AvailableSettings.PASS, testEncryptedCfgPass);
		// Initialize test instance and invoke target method which should
		// fail with the expected exception.
		testCPImpl = new EncryptedC3P0ConnectionProvider();
		testCPImpl.configure(testConfigValues);
	}

	/**
	 * Verifies decryption of encrypted configuration values for supported
	 * configuration parameters and confirms there are no other side effects.
	 */
	@Test
	public final void testConfigureDecryption() {
		// Set mock expectations to be auto-validated. The sequence pins the
		// decryption order: driver, url, user, pass, then driver-class loading.
		final Sequence decryptSequence = context.sequence("decryptSequence");
		context.checking(new Expectations(){{
			oneOf(mockEncryptor).decrypt(testEncryptedDriverText);
			inSequence(decryptSequence);
			will(returnValue(testDriver));
			oneOf(mockEncryptor).decrypt(testEncryptedUrlText);
			inSequence(decryptSequence);
			will(returnValue(testUrl));
			oneOf(mockEncryptor).decrypt(testEncryptedUserText);
			inSequence(decryptSequence);
			will(returnValue(testUser));
			oneOf(mockEncryptor).decrypt(testEncryptedPassText);
			inSequence(decryptSequence);
			will(returnValue(testPassword));
			oneOf(mockSRImpl).getService(ClassLoaderService.class);
			inSequence(decryptSequence);
			will(returnValue(mockCLService));
			oneOf(mockCLService).classForName(testDriver);
			inSequence(decryptSequence);
		}});
		// Setup test configuration.
		Map<String, String> testConfigValues = new HashMap<String, String>();
		testConfigValues.put(ParameterNaming.ENCRYPTOR_REGISTERED_NAME, testEncryptorCfgName);
		testConfigValues.put(AvailableSettings.DRIVER, testEncryptedCfgDriver);
		testConfigValues.put(AvailableSettings.URL, testEncryptedCfgUrl);
		testConfigValues.put(AvailableSettings.USER, testEncryptedCfgUser);
		testConfigValues.put(AvailableSettings.PASS, testEncryptedCfgPass);
		// Track values to be asserted later in the test.
		int cfgsCt = testConfigValues.size();
		// Initialize the test instance and invoke target method to trigger mock expectation assertion.
		testCPImpl = new EncryptedC3P0ConnectionProvider();
		testCPImpl.injectServices(mockSRImpl);
		testCPImpl.configure(testConfigValues);
		// Mock expectations should have already verified decryption, now proceed
		// with manual assertions.
		// First, verify that the decrypted values were set in the cfg values map
		// since it gets passed to super to complete actual configuration.
		assertEquals("Cfg values map not updated with decrypted driver!", testDriver, testConfigValues.get(AvailableSettings.DRIVER));
		assertEquals("Cfg values map not updated with decrypted url!", testUrl, testConfigValues.get(AvailableSettings.URL));
		assertEquals("Cfg values map not updated with decrypted user!", testUser, testConfigValues.get(AvailableSettings.USER));
		assertEquals("Cfg values map not updated with decrypted password!", testPassword, testConfigValues.get(AvailableSettings.PASS));
		// Now verify there were no other modifications to config values.
		assertEquals("Unexpected no. of config values!", cfgsCt, testConfigValues.size());
		assertEquals("Config value other than encryptable set modified!", testEncryptorCfgName, testConfigValues.get(ParameterNaming.ENCRYPTOR_REGISTERED_NAME));
	}

	/**
	 * Verifies that unencrypted values for supported configuration parameters
	 * are passed through unchanged.
	 */
	@Test
	public final void testConfigurePassThrough() {
		// Set mock expectations to be auto-validated. No encryptor expectations:
		// plaintext values must never reach the encryptor.
		final Sequence passThroughSequence = context.sequence("passThroughSequence");
		context.checking(new Expectations(){{
			oneOf(mockSRImpl).getService(ClassLoaderService.class);
			inSequence(passThroughSequence);
			will(returnValue(mockCLService));
			oneOf(mockCLService).classForName(testDriver);
			inSequence(passThroughSequence);
		}});
		// Setup test configuration.
		Map<String, String> testConfigValues = new HashMap<String, String>();
		testConfigValues.put(ParameterNaming.ENCRYPTOR_REGISTERED_NAME, testEncryptorCfgName);
		testConfigValues.put(AvailableSettings.DRIVER, testDriver);
		testConfigValues.put(AvailableSettings.URL, testUrl);
		testConfigValues.put(AvailableSettings.USER, testUser);
		testConfigValues.put(AvailableSettings.PASS, testPassword);
		// Track values to be asserted later in the test.
		int cfgsCt = testConfigValues.size();
		// Initialize the test instance and invoke target method. Mock
		// expectations aren't set for encryptor since encryptor should
		// never be invoked in this case.
		testCPImpl = new EncryptedC3P0ConnectionProvider();
		testCPImpl.injectServices(mockSRImpl);
		testCPImpl.configure(testConfigValues);
		// Verify that the specified values are unchanged.
		assertEquals("Cfg values map for driver modified!", testDriver, testConfigValues.get(AvailableSettings.DRIVER));
		assertEquals("Cfg values map for url modified!", testUrl, testConfigValues.get(AvailableSettings.URL));
		assertEquals("Cfg values map for user modified!", testUser, testConfigValues.get(AvailableSettings.USER));
		assertEquals("Cfg values map for password modified!", testPassword, testConfigValues.get(AvailableSettings.PASS));
		// Now verify there were no other modifications to config values.
		assertEquals("Unexpected no. of config values!", cfgsCt, testConfigValues.size());
		assertEquals("Config value other than encryptable set modified!", testEncryptorCfgName, testConfigValues.get(ParameterNaming.ENCRYPTOR_REGISTERED_NAME));
	}
}
package cn.shiyanjun.running.platform.component.worker;

import java.util.Optional;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.alibaba.fastjson.JSONObject;

import cn.shiyanjun.platform.api.utils.NamedThreadFactory;
import cn.shiyanjun.platform.network.common.AbstractMessageDispatcher;
import cn.shiyanjun.platform.network.common.PeerMessage;
import cn.shiyanjun.platform.network.common.RpcMessage;
import cn.shiyanjun.platform.network.common.RunnableMessageListener;
import cn.shiyanjun.running.platform.api.TaskLauncher;
import cn.shiyanjun.running.platform.constants.JsonKeys;
import cn.shiyanjun.running.platform.constants.MessageType;
import cn.shiyanjun.running.platform.constants.RunpConfigKeys;
import cn.shiyanjun.running.platform.constants.Status;
import cn.shiyanjun.running.platform.utils.Time;
import cn.shiyanjun.running.platform.utils.Utils;

/**
 * Worker-side message dispatcher. Registers listeners for registration/heartbeat
 * sending, task-progress reporting and remote message handling, starts the
 * configured task launchers, and periodically sends heartbeats to the master.
 */
public class WorkerMessageDispatcher extends AbstractMessageDispatcher {

	private static final Log LOG = LogFactory.getLog(WorkerMessageDispatcher.class);
	private final HeartbeatReporter heartbeatReporter;
	private final RunnableMessageListener<PeerMessage> taskProgressReporter;
	private final RemoteMessageReceiver remoteMessageReceiver;
	private final WorkerContext workerContext;
	private final String workerId;
	// Updated from the registration ACK; volatile because the heartbeat thread reads it.
	private volatile String masterId;
	private final String workerHost;
	private final int heartbeatIntervalMillis;
	private ScheduledExecutorService scheduledExecutorService;
	private ClientConnectionManager clientConnectionManager;

	/**
	 * Creates the dispatcher, registers all message listeners and registers a
	 * task-launcher class per configured resource/task type.
	 *
	 * @param workerContext the shared worker-side context
	 */
	public WorkerMessageDispatcher(WorkerContext workerContext) {
		super(workerContext.getContext());
		this.workerContext = workerContext;
		masterId = workerContext.getMasterId();
		workerId = workerContext.getPeerId();
		workerHost = workerContext.getContext().get(RunpConfigKeys.WORKER_HOST);
		// Default heartbeat interval: 60 seconds.
		heartbeatIntervalMillis = workerContext.getContext().getInt(RunpConfigKeys.WORKER_HEARTBEAT_INTERVALMILLIS, 60000);

		// create & register message listener
		heartbeatReporter = new HeartbeatReporter(
				MessageType.WORKER_REGISTRATION, MessageType.HEART_BEAT);
		taskProgressReporter = new TaskProgressReporter(MessageType.TASK_PROGRESS);
		remoteMessageReceiver = new RemoteMessageReceiver(
				MessageType.TASK_ASSIGNMENT, MessageType.ACK_WORKER_REGISTRATION);
		register(taskProgressReporter);
		register(heartbeatReporter);
		register(remoteMessageReceiver);

		// create task launcher instances for each configured task type
		workerContext.getResourceTypes().keySet().forEach(taskType -> {
			Optional<String> c = workerContext.getTaskLauncherClass(taskType);
			c.ifPresent(clazz -> {
				workerContext.getTaskLauncherFactory().registerObject(context, taskType.getCode(), clazz);
			});
		});
	}

	/**
	 * Starts the dispatcher: registers with the master, starts one launcher per
	 * task type, and schedules the periodic heartbeat.
	 */
	@Override
	public void start() {
		super.start();
		clientConnectionManager = workerContext.getClientConnectionManager();
		// try to register to master
		clientConnectionManager.registerToMaster();

		// start task launchers
		workerContext.getResourceTypes().keySet().forEach(taskType -> {
			TaskLauncher launcher = workerContext.getTaskLauncherFactory().getObject(taskType.getCode());
			launcher.setTaskProgressReporter(taskProgressReporter);
			launcher.setType(taskType.getCode());
			launcher.start();
		});

		// send heartbeat to master periodically (initial delay 3s)
		scheduledExecutorService = Executors.newScheduledThreadPool(1, new NamedThreadFactory("HEARTBEAT"));
		scheduledExecutorService.scheduleAtFixedRate(() -> {
			PeerMessage message = prepareHeartbeatMessage();
			heartbeatReporter.addMessage(message);
			LOG.debug("Heartbeat prepared: id=" + message.getRpcMessage().getId() + ", body=" + message.getRpcMessage().getBody());
		}, 3000, heartbeatIntervalMillis, TimeUnit.MILLISECONDS);
	}

	/**
	 * Builds a one-way heartbeat message addressed to the current master,
	 * carrying this worker's id and host.
	 */
	private PeerMessage prepareHeartbeatMessage() {
		RpcMessage hb = new RpcMessage(workerContext.getMessageidGenerator().incrementAndGet(), MessageType.HEART_BEAT.getCode());
		// Fire-and-forget: the master does not reply to heartbeats.
		hb.setNeedReply(false);
		JSONObject body = new JSONObject();
		body.put(JsonKeys.WORKER_ID, workerId);
		body.put(JsonKeys.WORKER_HOST, workerHost);
		hb.setBody(body.toJSONString());
		hb.setTimestamp(Time.now());

		PeerMessage message = new PeerMessage();
		message.setRpcMessage(hb);
		message.setFromEndpointId(workerId);
		message.setToEndpointId(masterId);
		message.setChannel(workerContext.getChannel(masterId));
		return message;
	}

	/**
	 * Handles messages arriving from the master: task assignments and
	 * registration acknowledgements.
	 */
	final class RemoteMessageReceiver extends RunnableMessageListener<PeerMessage> {

		public RemoteMessageReceiver(MessageType... messageTypes) {
			super(Utils.toIntegerArray(messageTypes));
		}

		@Override
		public void handle(PeerMessage message) {
			RpcMessage rpcMessage = message.getRpcMessage();
			JSONObject body = JSONObject.parseObject(message.getRpcMessage().getBody());
			Optional<MessageType> messageType = MessageType.fromCode(rpcMessage.getType());
			messageType.ifPresent(mt -> {
				switch(mt) {
					case TASK_ASSIGNMENT:
						LOG.info("Assigned task received: message=" + rpcMessage);
						int taskTypeCode = body.getIntValue(JsonKeys.TASK_TYPE);
						TaskLauncher launcher = workerContext.getTaskLauncherFactory().getObject(taskTypeCode);
						// The RPC message id doubles as the internal task id.
						long internalTaskId = rpcMessage.getId();
						launcher.launchTask(internalTaskId, body.getJSONObject(JsonKeys.TASK_PARAMS));
						break;
					case ACK_WORKER_REGISTRATION:
						String status = body.getString(JsonKeys.STATUS);
						if(Status.SUCCEES.toString().equals(status)) {
							// Adopt the master id from the ACK before confirming registration.
							masterId = body.getString(JsonKeys.MASTER_ID);
							clientConnectionManager.notifyRegistrationSucceeded();
							LOG.debug("Succeeded to notify dispatcher.");
							LOG.info("Worker registered: id=" + rpcMessage.getId() + ", type=" + rpcMessage.getType() + ", body=" + rpcMessage.getBody());
						} else {
							LOG.info("Worker registration failed: id=" + rpcMessage.getId() + ", type=" + rpcMessage.getType() + ", body=" + rpcMessage.getBody());
							clientConnectionManager.notifyRegistrationFailed();
						}
						break;
					default:
						// Other message types are not routed to this listener.
				}
			});
		}

	}

	/**
	 * Send task progress report messages to remote <code>Master</code>.
	 *
	 * @author yanjun
	 */
	final class TaskProgressReporter extends RunnableMessageListener<PeerMessage> {

		public TaskProgressReporter(MessageType messageType) {
			super(messageType.getCode());
		}

		@Override
		public void handle(PeerMessage message) {
			workerContext.getRpcService().send(message);
		}

	}

	/**
	 * Send heartbeat messages to remote <code>Master</code>. Registration
	 * messages expect a reply ({@code ask}); heartbeats are one-way ({@code send}).
	 *
	 * @author yanjun
	 */
	final class HeartbeatReporter extends RunnableMessageListener<PeerMessage> {

		public HeartbeatReporter(MessageType... messageTypes) {
			super(Utils.toIntegerArray(messageTypes));
		}

		@Override
		public void handle(PeerMessage message) {
			RpcMessage m = message.getRpcMessage();
			try {
				Optional<MessageType> messageType = MessageType.fromCode(m.getType());
				if(messageType.isPresent()) {
					switch(messageType.get()) {
						case WORKER_REGISTRATION:
							workerContext.getRpcService().ask(message);
							LOG.info("Registration message sent: message=" + m);
							break;
						case HEART_BEAT:
							workerContext.getRpcService().send(message);
							LOG.debug("Heartbeat sent: message=" + m);
							break;
						default:
							LOG.warn("Unknown received message: messageType=" + messageType);
					}
				}
			} catch (Exception e) {
				LOG.warn("Fail to handle received message: rpcMessage=" + m, e);
			}
		}

	}

}
/**
 * Apache Licence Version 2.0
 * Please read the LICENCE file
 */
package org.hypothesis.slide.ui;

import com.vaadin.ui.Component;
import com.vaadin.ui.HasComponents;
import com.vaadin.ui.UI;

import java.io.Serializable;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.List;

/**
 * @author Kamil Morong, Tilioteo Ltd
 *
 *         Hypothesis
 *
 * A {@link com.vaadin.ui.Window} that additionally fires an {@link InitEvent}
 * the first time it is opened and an {@link OpenEvent} on every open, and that
 * can remember the UI it should be attached to before it is actually shown.
 */
@SuppressWarnings("serial")
public class Window extends com.vaadin.ui.Window {

	// True once the first open has fired InitEvent.
	private boolean initialized = false;
	// True while the window is attached/open.
	private boolean opened = false;

	// Close listeners tracked locally so they can all be removed at once.
	private final List<CloseListener> closeListeners = new ArrayList<>();

	// UI the window will be added to when open() is called.
	private UI futureUI = null;

	public Window() {
		super();
	}

	/**
	 * Sets the UI this window will be added to on the next {@link #open()}.
	 *
	 * @param ui the target UI
	 */
	public void setFutureUI(UI ui) {
		this.futureUI = ui;
	}

	/**
	 * Fires {@link InitEvent} on the very first open, then {@link OpenEvent}.
	 */
	protected void fireOpen() {
		if (!initialized) {
			initialized = true;
			fireEvent(new InitEvent(this));
		}

		fireEvent(new OpenEvent(this));
	}

	@Override
	public void setParent(HasComponents parent) {
		super.setParent(parent);
		// Being given a parent means the window is attached, i.e. open.
		if (getParent() != null) {
			opened = true;
		}
	}

	/**
	 * Adds this window to the previously set future UI and fires the open
	 * (and possibly init) events. Does nothing if already opened or if no
	 * future UI has been set.
	 */
	public void open() {
		if (!opened && futureUI != null) {
			futureUI.addWindow(this);
			fireOpen();
		}
	}

	@Override
	public void close() {
		super.close();
		opened = false;
	}

	/**
	 * @return true while the window is open
	 */
	public boolean isOpened() {
		return opened;
	}

	/**
	 * Add an init listener to the component. The listener is called when the
	 * window is opened for the first time.
	 *
	 * Use {@link #removeInitListener(InitListener)} to remove the listener.
	 *
	 * @param listener
	 *            The listener to add
	 */
	public void addInitListener(InitListener listener) {
		addListener(InitEvent.class, listener, WINDOW_INIT_METHOD);
	}

	/**
	 * Remove an init listener from the component. The listener should earlier
	 * have been added using {@link #addInitListener(InitListener)}.
	 *
	 * @param listener
	 *            The listener to remove
	 */
	public void removeInitListener(InitListener listener) {
		removeListener(InitEvent.class, listener, WINDOW_INIT_METHOD);
	}

	/**
	 * Add an open listener to the component. The listener is called whenever
	 * the window is opened.
	 *
	 * Use {@link #removeOpenListener(OpenListener)} to remove the listener.
	 *
	 * @param listener
	 *            The listener to add
	 */
	public void addOpenListener(OpenListener listener) {
		addListener(OpenEvent.class, listener, WINDOW_OPEN_METHOD);
	}

	/**
	 * Remove an open listener from the component. The listener should earlier
	 * have been added using {@link #addOpenListener(OpenListener)}.
	 *
	 * @param listener
	 *            The listener to remove
	 */
	public void removeOpenListener(OpenListener listener) {
		removeListener(OpenEvent.class, listener, WINDOW_OPEN_METHOD);
	}

	@Override
	public void addCloseListener(CloseListener listener) {
		super.addCloseListener(listener);
		// Track locally so removeAllCloseListeners() can undo every registration.
		closeListeners.add(listener);
	}

	/**
	 * Removes every close listener that was added through
	 * {@link #addCloseListener(CloseListener)}.
	 */
	public void removeAllCloseListeners() {
		for (CloseListener listener : closeListeners) {
			removeCloseListener(listener);
		}
		closeListeners.clear();
	}

	private static final Method WINDOW_INIT_METHOD;

	static {
		try {
			WINDOW_INIT_METHOD = InitListener.class.getDeclaredMethod("initWindow", InitEvent.class);
		} catch (final java.lang.NoSuchMethodException e) {
			// This should never happen; keep the cause so a misbuilt classpath is diagnosable.
			throw new java.lang.RuntimeException("Internal error, window init method not found", e);
		}
	}

	/**
	 * Class for holding information about a window init event. An
	 * {@link InitEvent} is fired when the <code>Window</code> is opened for the
	 * first time.
	 *
	 * @author kamil.
	 * @see InitListener
	 */
	public class InitEvent extends Component.Event {

		public InitEvent(Component source) {
			super(source);
		}

		/**
		 * Gets the Window.
		 *
		 * @return the window.
		 */
		public Window getWindow() {
			return (Window) getSource();
		}
	}

	/**
	 * Interface for listening for a {@link InitEvent} fired by a {@link Window}
	 * when user opens the window for the first time.
	 *
	 * @see InitEvent
	 * @author kamil
	 */
	public interface InitListener extends Serializable {

		/**
		 * Called when the user opens a window for first time. A reference to
		 * the window is given by {@link InitEvent#getWindow()}.
		 *
		 * @param event
		 *            An event containing information about the window.
		 */
		void initWindow(InitEvent event);
	}

	private static final Method WINDOW_OPEN_METHOD;

	static {
		try {
			WINDOW_OPEN_METHOD = OpenListener.class.getDeclaredMethod("openWindow", OpenEvent.class);
		} catch (final java.lang.NoSuchMethodException e) {
			// This should never happen; keep the cause so a misbuilt classpath is diagnosable.
			throw new java.lang.RuntimeException("Internal error, window open method not found", e);
		}
	}

	/**
	 * Class for holding information about a window open event. An
	 * {@link OpenEvent} is fired whenever the <code>Window</code> is opened.
	 *
	 * @author kamil.
	 * @see OpenListener
	 */
	public class OpenEvent extends Component.Event {

		public OpenEvent(Component source) {
			super(source);
		}

		/**
		 * Gets the Window.
		 *
		 * @return the window.
		 */
		public Window getWindow() {
			return (Window) getSource();
		}
	}

	/**
	 * Interface for listening for a {@link OpenEvent} fired by a {@link Window}
	 * whenever the user opens the window.
	 *
	 * @see OpenEvent
	 * @author kamil
	 */
	public interface OpenListener extends Serializable {

		/**
		 * Called whenever the user opens a window. A reference to the window is
		 * given by {@link OpenEvent#getWindow()}.
		 *
		 * @param event
		 *            An event containing information about the window.
		 */
		void openWindow(OpenEvent event);
	}
}
/*
 * Copyright 2016-2020 Crown Copyright
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package uk.gov.gchq.gaffer.commonutil;

import com.google.common.collect.Sets;
import org.apache.commons.lang3.StringUtils;
import org.reflections.Reflections;
import org.reflections.scanners.ResourcesScanner;
import org.reflections.util.ClasspathHelper;
import org.reflections.util.ConfigurationBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.HashSet;
import java.util.regex.Pattern;

/**
 * Utility methods for opening {@link InputStream}s.
 */
public final class StreamUtil {
    public static final String VIEW = "/view.json";
    public static final String SCHEMA_FOLDER = "/schema/";
    public static final String SCHEMA = SCHEMA_FOLDER + "schema.json";
    public static final String ELEMENTS_SCHEMA = SCHEMA_FOLDER + "elements.json";
    public static final String TYPES_SCHEMA = SCHEMA_FOLDER + "types.json";
    public static final String STORE_PROPERTIES = "/store.properties";
    public static final String GRAPH_CONFIG = "/graphConfig.json";

    public static final String FAILED_TO_CREATE_INPUT_STREAM_FOR_PATH = "Failed to create input stream for path: ";
    public static final String LOG_FAILED_TO_CREATE_INPUT_STREAM_FOR_PATH = FAILED_TO_CREATE_INPUT_STREAM_FOR_PATH + "{}";
    private static final Logger LOGGER = LoggerFactory.getLogger(StreamUtil.class);

    private StreamUtil() {
        // Private constructor to prevent instantiation.
    }

    /**
     * Open the graph config file located at the same location as the provided
     * class.
     *
     * @param clazz the class determining the location of the graph config file
     * @return an {@link InputStream} representing the graph config file
     */
    public static InputStream graphConfig(final Class clazz) {
        return openStream(clazz, GRAPH_CONFIG);
    }

    /**
     * Open the view file located at the same location as the provided class.
     *
     * @param clazz the class determining the location of the view file
     * @return an {@link InputStream} representing the view file
     */
    public static InputStream view(final Class clazz) {
        return openStream(clazz, VIEW);
    }

    /**
     * Open the schema files located at the same location as the provided
     * class.
     *
     * @param clazz the class determining the location of the schema files
     * @return an array of {@link InputStream}s representing the schema files
     */
    public static InputStream[] schemas(final Class clazz) {
        return openStreams(clazz, SCHEMA_FOLDER);
    }

    /**
     * Open the schema file located at the same location as the provided
     * class.
     *
     * @param clazz the class determining the location of the schema file
     * @return an {@link InputStream} representing the schema file
     */
    public static InputStream schema(final Class clazz) {
        return openStream(clazz, SCHEMA);
    }

    /**
     * Open the elements schema file located at the same location as the provided
     * class.
     *
     * @param clazz the class determining the location of the elements schema file
     * @return an {@link InputStream} representing the elements schema file
     */
    public static InputStream elementsSchema(final Class clazz) {
        return openStream(clazz, ELEMENTS_SCHEMA);
    }

    /**
     * Open the types schema file located at the same location as the provided
     * class.
     *
     * @param clazz the class determining the location of the types schema file
     * @return an {@link InputStream} representing the types schema file
     */
    public static InputStream typesSchema(final Class clazz) {
        return openStream(clazz, TYPES_SCHEMA);
    }

    /**
     * Open the store properties file located at the same location as the provided
     * class.
     *
     * @param clazz the class determining the location of the store properties file
     * @return an {@link InputStream} representing the store properties file
     */
    public static InputStream storeProps(final Class clazz) {
        return openStream(clazz, STORE_PROPERTIES);
    }

    /**
     * Open all of the files found in the specified subdirectory of the provided
     * class.
     *
     * @param clazz      the class location
     * @param folderPath the subdirectory in the class location
     * @return an array of {@link InputStream}s representing the files found
     * @throws IllegalArgumentException if no file could be found in the path, or
     *                                  if one of the files could not be opened
     */
    public static InputStream[] openStreams(final Class clazz, final String folderPath) {
        if (null == folderPath) {
            return new InputStream[0];
        }

        final String folderPathChecked = getFormattedPath(folderPath);

        final HashSet<InputStream> inputStreams = Sets.newHashSet();
        new Reflections(new ConfigurationBuilder()
                .setScanners(new ResourcesScanner())
                .setUrls(ClasspathHelper.forClass(clazz)))
                .getResources(Pattern.compile(".*"))
                .stream()
                .filter(file -> file.startsWith(folderPathChecked))
                .forEach(file -> {
                            try {
                                inputStreams.add(openStream(clazz, file));
                            } catch (final RuntimeException e) {
                                // Close everything opened so far, then fail fast.
                                // (Previously the exception was swallowed here, so the
                                // method carried on and could return already-closed
                                // streams. Rethrowing matches openStreams(URI...).)
                                final int closedStreamsCount = closeStreams(inputStreams.toArray(new InputStream[inputStreams.size()]));
                                LOGGER.info("Closed {} input streams", closedStreamsCount);
                                throw e;
                            }
                        }
                );

        if (inputStreams.isEmpty()) {
            throw new IllegalArgumentException("No file could be found in path: " + folderPath);
        }

        return inputStreams.toArray(new InputStream[inputStreams.size()]);
    }

    // Normalise a folder path so it ends with '/' and has no leading '/',
    // matching the resource names returned by the classpath scanner.
    private static String getFormattedPath(final String folderPath) {
        String folderPathChecked = folderPath.endsWith("/") ? folderPath : folderPath + "/";
        if (folderPathChecked.startsWith("/")) {
            folderPathChecked = folderPathChecked.substring(1);
        }
        return folderPathChecked;
    }

    /**
     * Create an array of {@link InputStream}s from the provided list of {@link URI}s.
     * If any stream fails to open, all streams opened so far are closed before the
     * exception is rethrown.
     *
     * @param uris the URIs to open as input streams
     * @return an array of input streams
     * @throws IOException if there was an error opening the streams
     */
    public static InputStream[] openStreams(final URI... uris) throws IOException {
        final InputStream[] schemas = new InputStream[uris.length];
        for (int pos = 0; pos < uris.length; pos++) {
            try {
                schemas[pos] = openStream(uris[pos]);
            } catch (final Exception e) {
                final int closedStreamsCount = closeStreams(schemas);
                LOGGER.info("Closed {} input streams", closedStreamsCount);
                throw e;
            }
        }
        return schemas;
    }

    /**
     * Create an {@link InputStream} from the provided {@link URI}.
     *
     * @param uri the URI to open as an input stream
     * @return an input stream
     * @throws IOException if there was an error opening the stream
     */
    public static InputStream openStream(final URI uri) throws IOException {
        try {
            return uri.toURL().openStream();
        } catch (final IOException e) {
            LOGGER.error("Failed to create input stream: {}", uri, e);
            throw e;
        }
    }

    /**
     * Safely close the supplied list of {@link InputStream}s. Failures to close
     * individual streams are logged at debug level and do not stop the loop.
     *
     * @param inputStreams the input streams to close
     * @return an integer indicating the number of streams which were successfully closed.
     */
    public static int closeStreams(final InputStream... inputStreams) {
        int closedStreamsCount = 0;
        for (final InputStream stream : inputStreams) {
            try {
                stream.close();
            } catch (final Exception e) {
                LOGGER.debug("Exception while closing input streams", e);
            }
            closedStreamsCount++;
        }
        return closedStreamsCount;
    }

    /**
     * Open the file found at the the specified path under the location of the given
     * class.
     *
     * @param clazz the class location
     * @param path  the path in the class location
     * @return an input stream representating the requested file
     * @throws IllegalArgumentException if there was an error opening the stream
     */
    public static InputStream openStream(final Class clazz, final String path) throws IllegalArgumentException {
        final String checkedPath = formatPathForOpenStream(path);
        final InputStream resourceAsStream = clazz.getResourceAsStream(checkedPath);
        return (null != resourceAsStream) ? resourceAsStream : processException(path);
    }

    // Logs the failure and throws; declared to return InputStream only so callers
    // can use it in a ternary expression.
    private static InputStream processException(final String path) throws IllegalArgumentException {
        LOGGER.error(LOG_FAILED_TO_CREATE_INPUT_STREAM_FOR_PATH, path);
        throw new IllegalArgumentException(FAILED_TO_CREATE_INPUT_STREAM_FOR_PATH + path);
    }

    /**
     * Format a path to ensure that it begins with a '/' character.
     *
     * @param path the path to format
     * @return a correctly formatted path string
     * @throws IllegalArgumentException if the path is null or empty
     */
    public static String formatPathForOpenStream(final String path) {
        if (StringUtils.isEmpty(path)) {
            processException(path);
        }
        return path.startsWith("/") ? path : "/" + path;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.spark.examples.sql;

// NOTE: the "$example on/off$" marker comments below are extracted verbatim into
// the Spark SQL documentation; do not remove or reorder them.
// $example on:schema_merging$
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
// $example off:schema_merging$
import java.util.Properties;
// $example on:basic_parquet_example$
import org.apache.spark.api.java.function.MapFunction;
import org.apache.spark.sql.Encoders;
// $example on:schema_merging$
// $example on:json_dataset$
// $example on:csv_dataset$
// $example on:text_dataset$
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
// $example off:text_dataset$
// $example off:csv_dataset$
// $example off:json_dataset$
// $example off:schema_merging$
// $example off:basic_parquet_example$
import org.apache.spark.sql.SparkSession;

/**
 * Example program demonstrating the Spark SQL data source API: generic
 * load/save, Parquet (including schema merging), JSON, CSV, text and JDBC
 * data sets. Each method is a self-contained documentation snippet.
 */
public class JavaSQLDataSourceExample {

  // Simple JavaBean used to build a DataFrame for the schema-merging example.
  // $example on:schema_merging$
  public static class Square implements Serializable {
    private int value;
    private int square;

    // Getters and setters...
    // $example off:schema_merging$
    public int getValue() {
      return value;
    }

    public void setValue(int value) {
      this.value = value;
    }

    public int getSquare() {
      return square;
    }

    public void setSquare(int square) {
      this.square = square;
    }
    // $example on:schema_merging$
  }
  // $example off:schema_merging$

  // Second JavaBean with a different column set ("cube" instead of "square"),
  // written to a separate partition so mergeSchema can combine the schemas.
  // $example on:schema_merging$
  public static class Cube implements Serializable {
    private int value;
    private int cube;

    // Getters and setters...
    // $example off:schema_merging$
    public int getValue() {
      return value;
    }

    public void setValue(int value) {
      this.value = value;
    }

    public int getCube() {
      return cube;
    }

    public void setCube(int cube) {
      this.cube = cube;
    }
    // $example on:schema_merging$
  }
  // $example off:schema_merging$

  public static void main(String[] args) {
    SparkSession spark = SparkSession
      .builder()
      .appName("Java Spark SQL data sources example")
      .config("spark.some.config.option", "some-value")
      .getOrCreate();

    runBasicDataSourceExample(spark);
    runGenericFileSourceOptionsExample(spark);
    runBasicParquetExample(spark);
    runParquetSchemaMergingExample(spark);
    runJsonDatasetExample(spark);
    runCsvDatasetExample(spark);
    runTextDatasetExample(spark);
    runJdbcDatasetExample(spark);

    spark.stop();
  }

  /** Demonstrates generic file-source options: corrupt-file handling, recursive
   *  lookup, path glob filters and modification-time filters. */
  private static void runGenericFileSourceOptionsExample(SparkSession spark) {
    // $example on:ignore_corrupt_files$
    // enable ignore corrupt files
    spark.sql("set spark.sql.files.ignoreCorruptFiles=true");
    // dir1/file3.json is corrupt from parquet's view
    Dataset<Row> testCorruptDF = spark.read().parquet(
        "examples/src/main/resources/dir1/",
        "examples/src/main/resources/dir1/dir2/");
    testCorruptDF.show();
    // +-------------+
    // |         file|
    // +-------------+
    // |file1.parquet|
    // |file2.parquet|
    // +-------------+
    // $example off:ignore_corrupt_files$
    // $example on:recursive_file_lookup$
    Dataset<Row> recursiveLoadedDF = spark.read().format("parquet")
        .option("recursiveFileLookup", "true")
        .load("examples/src/main/resources/dir1");
    recursiveLoadedDF.show();
    // +-------------+
    // |         file|
    // +-------------+
    // |file1.parquet|
    // |file2.parquet|
    // +-------------+
    // $example off:recursive_file_lookup$
    spark.sql("set spark.sql.files.ignoreCorruptFiles=false");
    // $example on:load_with_path_glob_filter$
    Dataset<Row> testGlobFilterDF = spark.read().format("parquet")
        .option("pathGlobFilter", "*.parquet") // json file should be filtered out
        .load("examples/src/main/resources/dir1");
    testGlobFilterDF.show();
    // +-------------+
    // |         file|
    // +-------------+
    // |file1.parquet|
    // +-------------+
    // $example off:load_with_path_glob_filter$
    // $example on:load_with_modified_time_filter$
    Dataset<Row> beforeFilterDF = spark.read().format("parquet")
        // Only load files modified before 7/1/2020 at 05:30
        .option("modifiedBefore", "2020-07-01T05:30:00")
        // Only load files modified after 6/1/2020 at 05:30
        .option("modifiedAfter", "2020-06-01T05:30:00")
        // Interpret both times above relative to CST timezone
        .option("timeZone", "CST")
        .load("examples/src/main/resources/dir1");
    beforeFilterDF.show();
    // +-------------+
    // |         file|
    // +-------------+
    // |file1.parquet|
    // +-------------+
    // $example off:load_with_modified_time_filter$
  }

  /** Demonstrates generic load/save, per-format options, direct SQL on files,
   *  and bucketing/partitioning on write. */
  private static void runBasicDataSourceExample(SparkSession spark) {
    // $example on:generic_load_save_functions$
    Dataset<Row> usersDF = spark.read().load("examples/src/main/resources/users.parquet");
    usersDF.select("name", "favorite_color").write().save("namesAndFavColors.parquet");
    // $example off:generic_load_save_functions$
    // $example on:manual_load_options$
    Dataset<Row> peopleDF =
      spark.read().format("json").load("examples/src/main/resources/people.json");
    peopleDF.select("name", "age").write().format("parquet").save("namesAndAges.parquet");
    // $example off:manual_load_options$
    // $example on:manual_load_options_csv$
    Dataset<Row> peopleDFCsv = spark.read().format("csv")
      .option("sep", ";")
      .option("inferSchema", "true")
      .option("header", "true")
      .load("examples/src/main/resources/people.csv");
    // $example off:manual_load_options_csv$
    // $example on:manual_save_options_orc$
    usersDF.write().format("orc")
      .option("orc.bloom.filter.columns", "favorite_color")
      .option("orc.dictionary.key.threshold", "1.0")
      .option("orc.column.encoding.direct", "name")
      .save("users_with_options.orc");
    // $example off:manual_save_options_orc$
    // $example on:manual_save_options_parquet$
    usersDF.write().format("parquet")
        .option("parquet.bloom.filter.enabled#favorite_color", "true")
        .option("parquet.bloom.filter.expected.ndv#favorite_color", "1000000")
        .option("parquet.enable.dictionary", "true")
        .option("parquet.page.write-checksum.enabled", "false")
        .save("users_with_options.parquet");
    // $example off:manual_save_options_parquet$
    // $example on:direct_sql$
    Dataset<Row> sqlDF =
      spark.sql("SELECT * FROM parquet.`examples/src/main/resources/users.parquet`");
    // $example off:direct_sql$
    // $example on:write_sorting_and_bucketing$
    peopleDF.write().bucketBy(42, "name").sortBy("age").saveAsTable("people_bucketed");
    // $example off:write_sorting_and_bucketing$
    // $example on:write_partitioning$
    usersDF
      .write()
      .partitionBy("favorite_color")
      .format("parquet")
      .save("namesPartByColor.parquet");
    // $example off:write_partitioning$
    // $example on:write_partition_and_bucket$
    usersDF
      .write()
      .partitionBy("favorite_color")
      .bucketBy(42, "name")
      .saveAsTable("users_partitioned_bucketed");
    // $example off:write_partition_and_bucket$

    spark.sql("DROP TABLE IF EXISTS people_bucketed");
    spark.sql("DROP TABLE IF EXISTS users_partitioned_bucketed");
  }

  /** Demonstrates round-tripping a DataFrame through Parquet and querying it
   *  via a temporary view. */
  private static void runBasicParquetExample(SparkSession spark) {
    // $example on:basic_parquet_example$
    Dataset<Row> peopleDF = spark.read().json("examples/src/main/resources/people.json");

    // DataFrames can be saved as Parquet files, maintaining the schema information
    peopleDF.write().parquet("people.parquet");

    // Read in the Parquet file created above.
    // Parquet files are self-describing so the schema is preserved
    // The result of loading a parquet file is also a DataFrame
    Dataset<Row> parquetFileDF = spark.read().parquet("people.parquet");

    // Parquet files can also be used to create a temporary view and then used in SQL statements
    parquetFileDF.createOrReplaceTempView("parquetFile");
    Dataset<Row> namesDF = spark.sql("SELECT name FROM parquetFile WHERE age BETWEEN 13 AND 19");
    Dataset<String> namesDS = namesDF.map(
        (MapFunction<Row, String>) row -> "Name: " + row.getString(0),
        Encoders.STRING());
    namesDS.show();
    // +------------+
    // |       value|
    // +------------+
    // |Name: Justin|
    // +------------+
    // $example off:basic_parquet_example$
  }

  /** Demonstrates merging two Parquet partitions with different but compatible
   *  schemas via the mergeSchema option. */
  private static void runParquetSchemaMergingExample(SparkSession spark) {
    // $example on:schema_merging$
    List<Square> squares = new ArrayList<>();
    for (int value = 1; value <= 5; value++) {
      Square square = new Square();
      square.setValue(value);
      square.setSquare(value * value);
      squares.add(square);
    }

    // Create a simple DataFrame, store into a partition directory
    Dataset<Row> squaresDF = spark.createDataFrame(squares, Square.class);
    squaresDF.write().parquet("data/test_table/key=1");

    List<Cube> cubes = new ArrayList<>();
    for (int value = 6; value <= 10; value++) {
      Cube cube = new Cube();
      cube.setValue(value);
      cube.setCube(value * value * value);
      cubes.add(cube);
    }

    // Create another DataFrame in a new partition directory,
    // adding a new column and dropping an existing column
    Dataset<Row> cubesDF = spark.createDataFrame(cubes, Cube.class);
    cubesDF.write().parquet("data/test_table/key=2");

    // Read the partitioned table
    Dataset<Row> mergedDF = spark.read().option("mergeSchema", true).parquet("data/test_table");
    mergedDF.printSchema();

    // The final schema consists of all 3 columns in the Parquet files together
    // with the partitioning column appeared in the partition directory paths
    // root
    //  |-- value: int (nullable = true)
    //  |-- square: int (nullable = true)
    //  |-- cube: int (nullable = true)
    //  |-- key: int (nullable = true)
    // $example off:schema_merging$
  }

  /** Demonstrates reading JSON from files and from an in-memory Dataset of
   *  JSON strings. */
  private static void runJsonDatasetExample(SparkSession spark) {
    // $example on:json_dataset$
    // A JSON dataset is pointed to by path.
    // The path can be either a single text file or a directory storing text files
    Dataset<Row> people = spark.read().json("examples/src/main/resources/people.json");

    // The inferred schema can be visualized using the printSchema() method
    people.printSchema();
    // root
    //  |-- age: long (nullable = true)
    //  |-- name: string (nullable = true)

    // Creates a temporary view using the DataFrame
    people.createOrReplaceTempView("people");

    // SQL statements can be run by using the sql methods provided by spark
    Dataset<Row> namesDF = spark.sql("SELECT name FROM people WHERE age BETWEEN 13 AND 19");
    namesDF.show();
    // +------+
    // |  name|
    // +------+
    // |Justin|
    // +------+

    // Alternatively, a DataFrame can be created for a JSON dataset represented by
    // a Dataset<String> storing one JSON object per string.
    List<String> jsonData = Arrays.asList(
            "{\"name\":\"Yin\",\"address\":{\"city\":\"Columbus\",\"state\":\"Ohio\"}}");
    Dataset<String> anotherPeopleDataset = spark.createDataset(jsonData, Encoders.STRING());
    Dataset<Row> anotherPeople = spark.read().json(anotherPeopleDataset);
    anotherPeople.show();
    // +---------------+----+
    // |        address|name|
    // +---------------+----+
    // |[Columbus,Ohio]| Yin|
    // +---------------+----+
    // $example off:json_dataset$
  }

  /** Demonstrates reading CSV with various delimiter/header options and writing
   *  CSV output. */
  private static void runCsvDatasetExample(SparkSession spark) {
    // $example on:csv_dataset$
    // A CSV dataset is pointed to by path.
    // The path can be either a single CSV file or a directory of CSV files
    String path = "examples/src/main/resources/people.csv";

    Dataset<Row> df = spark.read().csv(path);
    df.show();
    // +------------------+
    // |               _c0|
    // +------------------+
    // |      name;age;job|
    // |Jorge;30;Developer|
    // |  Bob;32;Developer|
    // +------------------+

    // Read a csv with delimiter, the default delimiter is ","
    Dataset<Row> df2 = spark.read().option("delimiter", ";").csv(path);
    df2.show();
    // +-----+---+---------+
    // |  _c0|_c1|      _c2|
    // +-----+---+---------+
    // | name|age|      job|
    // |Jorge| 30|Developer|
    // |  Bob| 32|Developer|
    // +-----+---+---------+

    // Read a csv with delimiter and a header
    Dataset<Row> df3 = spark.read().option("delimiter", ";").option("header", "true").csv(path);
    df3.show();
    // +-----+---+---------+
    // | name|age|      job|
    // +-----+---+---------+
    // |Jorge| 30|Developer|
    // |  Bob| 32|Developer|
    // +-----+---+---------+

    // You can also use options() to use multiple options
    java.util.Map<String, String> optionsMap = new java.util.HashMap<String, String>();
    optionsMap.put("delimiter",";");
    optionsMap.put("header","true");
    Dataset<Row> df4 = spark.read().options(optionsMap).csv(path);

    // "output" is a folder which contains multiple csv files and a _SUCCESS file.
    df3.write().csv("output");

    // Read all files in a folder, please make sure only CSV files should present in the folder.
    String folderPath = "examples/src/main/resources";
    Dataset<Row> df5 = spark.read().csv(folderPath);
    df5.show();
    // Wrong schema because non-CSV files are read
    // +-----------+
    // |        _c0|
    // +-----------+
    // |238val_238|
    // |  86val_86|
    // |311val_311|
    // |  27val_27|
    // |165val_165|
    // +-----------+
    // $example off:csv_dataset$
  }

  /** Demonstrates reading text files with lineSep/wholetext options and
   *  writing (optionally compressed) text output. */
  private static void runTextDatasetExample(SparkSession spark) {
    // $example on:text_dataset$
    // A text dataset is pointed to by path.
    // The path can be either a single text file or a directory of text files
    String path = "examples/src/main/resources/people.txt";

    Dataset<Row> df1 = spark.read().text(path);
    df1.show();
    // +-----------+
    // |      value|
    // +-----------+
    // |Michael, 29|
    // |   Andy, 30|
    // | Justin, 19|
    // +-----------+

    // You can use 'lineSep' option to define the line separator.
    // The line separator handles all `\r`, `\r\n` and `\n` by default.
    Dataset<Row> df2 = spark.read().option("lineSep", ",").text(path);
    df2.show();
    // +-----------+
    // |      value|
    // +-----------+
    // |    Michael|
    // |   29\nAndy|
    // | 30\nJustin|
    // |       19\n|
    // +-----------+

    // You can also use 'wholetext' option to read each input file as a single row.
    Dataset<Row> df3 = spark.read().option("wholetext", "true").text(path);
    df3.show();
    // +--------------------+
    // |               value|
    // +--------------------+
    // |Michael, 29\nAndy...|
    // +--------------------+

    // "output" is a folder which contains multiple text files and a _SUCCESS file.
    df1.write().text("output");

    // You can specify the compression format using the 'compression' option.
    df1.write().option("compression", "gzip").text("output_compressed");
    // $example off:text_dataset$
  }

  /** Demonstrates reading from and writing to a JDBC source via both the
   *  load/save API and the jdbc() convenience methods. */
  private static void runJdbcDatasetExample(SparkSession spark) {
    // $example on:jdbc_dataset$
    // Note: JDBC loading and saving can be achieved via either the load/save or jdbc methods
    // Loading data from a JDBC source
    Dataset<Row> jdbcDF = spark.read()
      .format("jdbc")
      .option("url", "jdbc:postgresql:dbserver")
      .option("dbtable", "schema.tablename")
      .option("user", "username")
      .option("password", "password")
      .load();

    Properties connectionProperties = new Properties();
    connectionProperties.put("user", "username");
    connectionProperties.put("password", "password");
    Dataset<Row> jdbcDF2 = spark.read()
      .jdbc("jdbc:postgresql:dbserver", "schema.tablename", connectionProperties);

    // Saving data to a JDBC source
    jdbcDF.write()
      .format("jdbc")
      .option("url", "jdbc:postgresql:dbserver")
      .option("dbtable", "schema.tablename")
      .option("user", "username")
      .option("password", "password")
      .save();

    jdbcDF2.write()
      .jdbc("jdbc:postgresql:dbserver", "schema.tablename", connectionProperties);

    // Specifying create table column data types on write
    jdbcDF.write()
      .option("createTableColumnTypes", "name CHAR(64), comments VARCHAR(1024)")
      .jdbc("jdbc:postgresql:dbserver", "schema.tablename", connectionProperties);
    // $example off:jdbc_dataset$
  }
}
package com.cordova.plugin.localNotification; import org.apache.cordova.CallbackContext; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.CordovaWebView; import org.json.JSONArray; import org.json.JSONException; import android.content.Context; import android.util.Log; import java.lang.Exception; /** * This plugin utilizes the Android AlarmManager in combination with StatusBar * notifications. When a local notification is scheduled the alarm manager takes * care of firing the event. When the event is processed, a notification is put * in the Android status bar. * * @author Daniel van 't Oever * @author Updated By Ally Ogilvie */ public class LocalNotification extends CordovaPlugin { public static final String TAG = "LocalNotification"; public static CordovaWebView _webview; private AlarmHelper alarm = null; @Override public void initialize(org.apache.cordova.CordovaInterface cordova, org.apache.cordova.CordovaWebView webView) { // Keep a pointer to the WebView so we can emit JS Event when getting a notification _webview = webView; super.initialize(cordova, webView); // If we received notification when the app was cold send them to JS now String notificationTapped = cordova.getActivity().getApplicationContext() .getSharedPreferences(LocalNotification.TAG, Context.MODE_PRIVATE) .getString("notificationTapped", null); if (notificationTapped != null) { LocalNotification.getCordovaWebView().sendJavascript("cordova.fireDocumentEvent('receivedLocalNotification', { active : false, notificationId : " + notificationTapped + " })"); cordova.getActivity().getApplicationContext() .getSharedPreferences(LocalNotification.TAG, Context.MODE_PRIVATE) .edit() .clear() .commit(); } } @Override public boolean execute(String action, JSONArray args, CallbackContext callbackContext) throws JSONException { String alarmId; this.alarm = new AlarmHelper(); this.alarm.setContext(cordova.getActivity().getApplicationContext()); try { alarmId = args.getString(0); } catch 
(Exception e) { Log.d(TAG, "Unable to process alarm with string id: " + args.getString(0)); callbackContext.error("Cannot use string for notification id."); return true; } if (action.equalsIgnoreCase("addNotification")) { try { long seconds = System.currentTimeMillis() + (args.getJSONObject(1).getLong("seconds") * 1000); String title, ticker, message, smallIcon, largeIcon, className; smallIcon = largeIcon = title = ticker = message = className = ""; if (args.getJSONObject(1).has("title")) { title = args.getJSONObject(1).getString("title"); } if (args.getJSONObject(1).has("ticker")) { ticker = args.getJSONObject(1).getString("ticker"); } if (args.getJSONObject(1).has("iconSmall")) { smallIcon = args.getJSONObject(1).getString("iconSmall"); } if (args.getJSONObject(1).has("iconLarge")) { largeIcon = args.getJSONObject(1).getString("iconLarge"); } if (args.getJSONObject(1).has("message")) { message = args.getJSONObject(1).getString("message"); } if (args.getJSONObject(1).has("className")) { className = args.getJSONObject(1).getString("className"); } persistAlarm(alarmId, args); return this.add(callbackContext, title.isEmpty() ? "Notification" : title, message, ticker.isEmpty() ? 
message : ticker, alarmId, smallIcon, largeIcon, seconds, className); } catch (Exception e) { Log.e(TAG, "Exception: " + e); } } else if (action.equalsIgnoreCase("cancelNotification")) { unpersistAlarm(alarmId); return this.cancelNotification(callbackContext, alarmId); } else if (action.equalsIgnoreCase("cancelAllNotifications")) { unpersistAlarmAll(); return this.cancelAllNotifications(callbackContext); } return false; } /** * Set an alarm * * @param callbackContext * Callback context of the request from Cordova * @param alarmTitle * The title of the alarm as shown in the Android notification * panel * @param alarmSubTitle * The subtitle of the alarm * @param alarmId * The unique ID of the notification * @param seconds * A calendar object that represents the time at which the alarm * should first be started */ public Boolean add(CallbackContext callbackContext, String alarmTitle, String alarmSubTitle, String alarmTicker, String alarmId, String smallIcon, String largeIcon, long seconds, String className) { boolean result = alarm.addAlarm(alarmTitle, alarmSubTitle, alarmTicker, alarmId, smallIcon, largeIcon, seconds, className); if (result) { callbackContext.success(); return true; } else { callbackContext.error("Add notification failed."); return false; } } /** * Cancel a specific notification that was previously registered. 
* * @param callbackContext * Callback context of the request from Cordova * @param notificationId * The original ID of the notification that was used when it was * registered using addNotification() */ public Boolean cancelNotification(CallbackContext callbackContext, String notificationId) { Log.d(TAG, "cancelNotification: Canceling event with id: " + notificationId); boolean result = alarm.cancelAlarm(notificationId); if (result) { callbackContext.success(); return true; } else { callbackContext.error("Cancel notification failed."); return false; } } /** * @param callbackContext * Callback context of the request from Cordova * Cancel all notifications that were created by this plugin. */ public Boolean cancelAllNotifications(CallbackContext callbackContext) { Log.d(TAG, "cancelAllNotifications: cancelling all events for this application"); /* * Android can only unregister a specific alarm. There is no such thing * as cancelAll. Therefore we rely on the Shared Preferences which holds * all our alarms to loop through these alarms and unregister them one * by one. */ boolean result = alarm.cancelAll(cordova.getActivity() .getApplicationContext() .getSharedPreferences(TAG, Context.MODE_PRIVATE)); if (result) { callbackContext.success(); return true; } else { callbackContext.error("Cancel all notifications failed."); return false; } } public static CordovaWebView getCordovaWebView() { return _webview; } /** * Persist the information of this alarm to the Android Shared Preferences. * This will allow the application to restore the alarm upon device reboot. * Also this is used by the cancelAllNotifications method. * * @see #cancelAllNotifications() * * @param optionsArr * The assumption is that parseOptions has been called already. 
* * @return true when successful, otherwise false */ private boolean persistAlarm(String alarmId, JSONArray optionsArr) { return cordova.getActivity().getApplicationContext() .getSharedPreferences(TAG, Context.MODE_PRIVATE) .edit() .putString(alarmId, optionsArr.toString()) .commit(); } /** * Remove a specific alarm from the Android shared Preferences * * @param alarmId * The Id of the notification that must be removed. * * @return true when successful, otherwise false */ private boolean unpersistAlarm(String alarmId) { return cordova.getActivity().getApplicationContext() .getSharedPreferences(TAG, Context.MODE_PRIVATE) .edit() .remove(alarmId) .commit(); } /** * Clear all alarms from the Android shared Preferences * * @return true when successful, otherwise false */ private boolean unpersistAlarmAll() { return cordova.getActivity().getApplicationContext() .getSharedPreferences(TAG, Context.MODE_PRIVATE) .edit() .clear() .commit(); } }
/*
 * Copyright 2014 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License, version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
package io.netty.handler.codec.http;

import static io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION;
import static io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_LENGTH;
import static io.netty.handler.codec.http.HttpHeaders.Names.UPGRADE;
import static io.netty.handler.codec.http.HttpResponseStatus.SWITCHING_PROTOCOLS;
import static io.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.AsciiString;
import io.netty.util.ReferenceCountUtil;
import io.netty.util.ReferenceCounted;

import java.util.Collection;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;

/**
 * A server-side handler that receives HTTP requests and optionally performs a protocol switch if
 * the requested protocol is supported. Once an upgrade is performed, this handler removes itself
 * from the pipeline.
 *
 * <p>Extends {@link HttpObjectAggregator} so a chunked upgrade request can be
 * aggregated into a single {@link FullHttpRequest} before the upgrade decision is made.
 */
public class HttpServerUpgradeHandler extends HttpObjectAggregator {

    /**
     * The source codec that is used in the pipeline initially.
     */
    public interface SourceCodec {
        /**
         * Removes this codec (i.e. all associated handlers) from the pipeline.
         */
        void upgradeFrom(ChannelHandlerContext ctx);
    }

    /**
     * A codec that the source can be upgraded to.
     */
    public interface UpgradeCodec {
        /**
         * Returns the name of the protocol supported by this codec, as indicated by the
         * {@link HttpHeaders.Names#UPGRADE} header.
         */
        String protocol();

        /**
         * Gets all protocol-specific headers required by this protocol for a successful upgrade.
         * Any supplied header will be required to appear in the {@link HttpHeaders.Names#CONNECTION} header as well.
         */
        Collection<String> requiredUpgradeHeaders();

        /**
         * Adds any headers to the 101 Switching protocols response that are appropriate for this protocol.
         */
        void prepareUpgradeResponse(ChannelHandlerContext ctx, FullHttpRequest upgradeRequest,
                FullHttpResponse upgradeResponse);

        /**
         * Performs an HTTP protocol upgrade from the source codec. This method is responsible for
         * adding all handlers required for the new protocol.
         *
         * @param ctx the context for the current handler.
         * @param upgradeRequest the request that triggered the upgrade to this protocol. The
         *            upgraded protocol is responsible for sending the response.
         * @param upgradeResponse a 101 Switching Protocols response that is populated with the
         *            {@link HttpHeaders.Names#CONNECTION} and {@link HttpHeaders.Names#UPGRADE} headers.
         *            The protocol is required to send this before sending any other frames back to the client.
         *            The headers may be augmented as necessary by the protocol before sending.
         */
        void upgradeTo(ChannelHandlerContext ctx, FullHttpRequest upgradeRequest,
                FullHttpResponse upgradeResponse);
    }

    /**
     * User event that is fired to notify about the completion of an HTTP upgrade
     * to another protocol. Contains the original upgrade request so that the response
     * (if required) can be sent using the new protocol.
*/ public static final class UpgradeEvent implements ReferenceCounted { private final String protocol; private final FullHttpRequest upgradeRequest; private UpgradeEvent(String protocol, FullHttpRequest upgradeRequest) { this.protocol = protocol; this.upgradeRequest = upgradeRequest; } /** * The protocol that the channel has been upgraded to. */ public String protocol() { return protocol; } /** * Gets the request that triggered the protocol upgrade. */ public FullHttpRequest upgradeRequest() { return upgradeRequest; } @Override public int refCnt() { return upgradeRequest.refCnt(); } @Override public UpgradeEvent retain() { upgradeRequest.retain(); return this; } @Override public UpgradeEvent retain(int increment) { upgradeRequest.retain(increment); return this; } @Override public UpgradeEvent touch() { upgradeRequest.touch(); return this; } @Override public UpgradeEvent touch(Object hint) { upgradeRequest.touch(hint); return this; } @Override public boolean release() { return upgradeRequest.release(); } @Override public boolean release(int decrement) { return upgradeRequest.release(); } @Override public String toString() { return "UpgradeEvent [protocol=" + protocol + ", upgradeRequest=" + upgradeRequest + ']'; } } private final Map<String, UpgradeCodec> upgradeCodecMap; private final SourceCodec sourceCodec; private boolean handlingUpgrade; /** * Constructs the upgrader with the supported codecs. * * @param sourceCodec the codec that is being used initially. * @param upgradeCodecs the codecs (in order of preference) that this server supports * upgrading to from the source codec. * @param maxContentLength the maximum length of the aggregated content. 
     */
    public HttpServerUpgradeHandler(SourceCodec sourceCodec,
            Collection<UpgradeCodec> upgradeCodecs, int maxContentLength) {
        super(maxContentLength);
        if (sourceCodec == null) {
            throw new NullPointerException("sourceCodec");
        }
        if (upgradeCodecs == null) {
            throw new NullPointerException("upgradeCodecs");
        }
        this.sourceCodec = sourceCodec;
        upgradeCodecMap = new LinkedHashMap<String, UpgradeCodec>(upgradeCodecs.size());
        for (UpgradeCodec upgradeCodec : upgradeCodecs) {
            // Keys are upper-cased (Locale.US) so protocol lookups are case-insensitive.
            String name = upgradeCodec.protocol().toUpperCase(Locale.US);
            upgradeCodecMap.put(name, upgradeCodec);
        }
    }

    @Override
    protected void decode(ChannelHandlerContext ctx, HttpObject msg, List<Object> out)
            throws Exception {
        // Determine if we're already handling an upgrade request or just starting a new one.
        handlingUpgrade |= isUpgradeRequest(msg);
        if (!handlingUpgrade) {
            // Not handling an upgrade request, just pass it to the next handler.
            ReferenceCountUtil.retain(msg);
            out.add(msg);
            return;
        }

        FullHttpRequest fullRequest;
        if (msg instanceof FullHttpRequest) {
            // Already a complete request: no aggregation needed.
            fullRequest = (FullHttpRequest) msg;
            ReferenceCountUtil.retain(msg);
            out.add(msg);
        } else {
            // Call the base class (HttpObjectAggregator) to handle the aggregation
            // of the full request from the individual HttpObject chunks.
            super.decode(ctx, msg, out);
            if (out.isEmpty()) {
                // The full request hasn't been created yet, still awaiting more data.
                return;
            }

            // Finished aggregating the full request, get it from the output list.
            assert out.size() == 1;
            handlingUpgrade = false;
            fullRequest = (FullHttpRequest) out.get(0);
        }

        if (upgrade(ctx, fullRequest)) {
            // The upgrade was successful, remove the message from the output list
            // so that it's not propagated to the next handler. This request will
            // be propagated as a user event instead.
            out.clear();
        }

        // The upgrade did not succeed, just allow the full request to propagate to the
        // next handler.
    }

    /**
     * Determines whether or not the message is an HTTP upgrade request.
     */
    private static boolean isUpgradeRequest(HttpObject msg) {
        return msg instanceof HttpRequest && ((HttpRequest) msg).headers().get(UPGRADE) != null;
    }

    /**
     * Attempts to upgrade to the protocol(s) identified by the {@link HttpHeaders.Names#UPGRADE} header (if provided
     * in the request).
     *
     * @param ctx the context for this handler.
     * @param request the HTTP request.
     * @return {@code true} if the upgrade occurred, otherwise {@code false}.
     */
    private boolean upgrade(final ChannelHandlerContext ctx, final FullHttpRequest request) {
        // Select the best protocol based on those requested in the UPGRADE header.
        CharSequence upgradeHeader = request.headers().get(UPGRADE);
        final UpgradeCodec upgradeCodec = selectUpgradeCodec(upgradeHeader);
        if (upgradeCodec == null) {
            // None of the requested protocols are supported, don't upgrade.
            return false;
        }

        // Make sure the CONNECTION header is present.
        CharSequence connectionHeader = request.headers().get(CONNECTION);
        if (connectionHeader == null) {
            return false;
        }

        // Make sure the CONNECTION header contains UPGRADE as well as all protocol-specific headers.
        // splitHeader() yields a case-insensitive set, so the comparisons below ignore case.
        Collection<String> requiredHeaders = upgradeCodec.requiredUpgradeHeaders();
        Set<CharSequence> values = splitHeader(connectionHeader);
        if (!values.contains(UPGRADE) || !values.containsAll(requiredHeaders)) {
            return false;
        }

        // Ensure that all required protocol-specific headers are found in the request.
        for (String requiredHeader : requiredHeaders) {
            if (!request.headers().contains(requiredHeader)) {
                return false;
            }
        }

        // Create the user event to be fired once the upgrade completes.
        // It wraps the request, so its ref-count is the request's ref-count.
        final UpgradeEvent event = new UpgradeEvent(upgradeCodec.protocol(), request);

        // Prepare and send the upgrade response. Wait for this write to complete before upgrading,
        // since we need the old codec in-place to properly encode the response.
        final FullHttpResponse upgradeResponse = createUpgradeResponse(upgradeCodec);
        upgradeCodec.prepareUpgradeResponse(ctx, request, upgradeResponse);
        ctx.writeAndFlush(upgradeResponse).addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                try {
                    if (future.isSuccess()) {
                        // Perform the upgrade to the new protocol.
                        // Ordering matters: the old codec must be removed before the
                        // new protocol's handlers are installed.
                        sourceCodec.upgradeFrom(ctx);
                        upgradeCodec.upgradeTo(ctx, request, upgradeResponse);

                        // Notify that the upgrade has occurred. Retain the event to offset
                        // the release() in the finally block.
                        ctx.fireUserEventTriggered(event.retain());

                        // Remove this handler from the pipeline.
                        ctx.pipeline().remove(HttpServerUpgradeHandler.this);
                    } else {
                        // Failed to write the 101 response: the connection is unusable.
                        future.channel().close();
                    }
                } finally {
                    // Release the event if the upgrade event wasn't fired.
                    event.release();
                }
            }
        });
        return true;
    }

    /**
     * Looks up the most desirable supported upgrade codec from the list of choices in the UPGRADE
     * header. If no suitable codec was found, returns {@code null}.
     */
    private UpgradeCodec selectUpgradeCodec(CharSequence upgradeHeader) {
        Set<CharSequence> requestedProtocols = splitHeader(upgradeHeader);

        // Retain only the protocols that are in the protocol map. Maintain the original insertion
        // order into the protocolMap, so that the first one in the remaining set is the most
        // desirable protocol for the server. (retainAll delegates contains() to the
        // case-insensitive TreeSet returned by splitHeader.)
        Set<String> supportedProtocols = new LinkedHashSet<String>(upgradeCodecMap.keySet());
        supportedProtocols.retainAll(requestedProtocols);

        if (!supportedProtocols.isEmpty()) {
            // Map keys are already upper-cased; toUpperCase here is a defensive no-op.
            String protocol = supportedProtocols.iterator().next().toUpperCase(Locale.US);
            return upgradeCodecMap.get(protocol);
        }
        return null;
    }

    /**
     * Creates the 101 Switching Protocols response message.
*/ private static FullHttpResponse createUpgradeResponse(UpgradeCodec upgradeCodec) { DefaultFullHttpResponse res = new DefaultFullHttpResponse(HTTP_1_1, SWITCHING_PROTOCOLS); res.headers().add(CONNECTION, UPGRADE); res.headers().add(UPGRADE, upgradeCodec.protocol()); res.headers().add(CONTENT_LENGTH, "0"); return res; } /** * Splits a comma-separated header value. The returned set is case-insensitive and contains each * part with whitespace removed. */ private static Set<CharSequence> splitHeader(CharSequence header) { StringBuilder builder = new StringBuilder(header.length()); Set<CharSequence> protocols = new TreeSet<CharSequence>(AsciiString.CHARSEQUENCE_CASE_INSENSITIVE_ORDER); for (int i = 0; i < header.length(); ++i) { char c = header.charAt(i); if (Character.isWhitespace(c)) { // Don't include any whitespace. continue; } if (c == ',') { // Add the string and reset the builder for the next protocol. protocols.add(builder.toString()); builder.setLength(0); } else { builder.append(c); } } // Add the last protocol if (builder.length() > 0) { protocols.add(builder.toString()); } return protocols; } }
/*
 * Copyright 2021 Red Hat, Inc. and/or its affiliates
 * and other contributors as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.keycloak.models.map.storage.tree;

import org.keycloak.models.map.storage.tree.TreeNode.PathOrientation;
import java.util.Date;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import org.junit.Test;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;

/**
 * Unit tests for {@link DefaultTreeNode}: parent/child wiring, node/edge/tree
 * properties, traversals (DFS/BFS) and path queries.
 *
 * @author hmlnarik
 */
public class DefaultTreeNodeTest {

    // Minimal concrete node. Deliberately a non-static inner class: every Node
    // shares this test instance's treeProperties map via the super constructor.
    private class Node extends DefaultTreeNode<Node> {
        public Node() {
            super(treeProperties);
        }
        public Node(String id) {
            super(treeProperties);
            setId(id);
        }
        public Node(Node parent, String id) {
            super(treeProperties);
            setId(id);
            setParent(parent);
        }
        @Override
        public String getLabel() {
            // Fall back to an identity-based label when no id has been set.
            return this.getId() == null ? "Node:" + System.identityHashCode(this) : this.getId();
        }
    }

    private static final String KEY_1 = "key1";
    private static final String VALUE_1 = "value";
    private static final String KEY_2 = "key2";
    private static final Date VALUE_2 = new Date();
    private static final String KEY_3 = "key3";
    private static final Integer VALUE_3 = 12345;

    // Shared, mutable tree-wide property map; seeded with KEY_1/KEY_2 per test instance.
    public Map<String, Object> treeProperties = new HashMap<>();
    {
        treeProperties.put(KEY_1, VALUE_1);
        treeProperties.put(KEY_2, VALUE_2);
    }

    @Test
    public void testSingleNodeTree() {
        Node root = new Node();
        root.setNodeProperty(KEY_1, VALUE_1);
        root.setEdgeProperty(KEY_2, VALUE_2);
        assertThat(root.getParent(), is(Optional.empty()));
        assertThat(root.getChildren(), empty());
        // Node and edge property stores must be independent of each other.
        assertNodeProperty(root, KEY_1, VALUE_1);
        assertNodeProperty(root, KEY_2, null);
        assertEdgeProperty(root, KEY_1, null);
        assertEdgeProperty(root, KEY_2, VALUE_2);
        assertTreeProperties(root);
    }

    @Test
    public void testSimpleTwoNodeTree() {
        Node root = new Node();
        Node child = new Node();
        root.setNodeProperty(KEY_1, VALUE_1);
        child.setParent(root);
        child.setId("my-id");
        child.setEdgeProperty(KEY_2, VALUE_2);

        // check parent-child relationships
        assertThat(root.getParent(), is(Optional.empty()));
        assertThat(root.getChildren(), hasSize(1));
        assertThat(child.getParent(), is(Optional.of(root)));
        assertThat(child.getChildren(), empty());

        // check properties
        assertThat(root.getNodeProperties().keySet(), hasSize(1));
        assertThat(root.getEdgeProperties().keySet(), empty());
        assertThat(child.getNodeProperties().keySet(), empty());
        assertThat(child.getEdgeProperties().keySet(), hasSize(1));
        assertTreeProperties(root);
        assertTreeProperties(child);
    }

    @Test
    public void testSimpleTwoNodeTreeSwapped() {
        Node root = new Node();
        Node child = new Node();
        child.setParent(root);
        child.setId("my-id");

        // Now swap the roles: reparenting the former root must detach it first.
        root.setParent(child);

        // check parent-child relationships
        assertThat(child.getParent(), is(Optional.empty()));
        assertThat(child.getChildren(), hasSize(1));
        assertThat(root.getParent(), is(Optional.of(child)));
        assertThat(root.getChildren(), empty());

        // check properties have not changed
        root.setNodeProperty(KEY_1, VALUE_1);
        child.setEdgeProperty(KEY_2, VALUE_2);
        assertThat(root.getNodeProperties().keySet(), hasSize(1));
        assertThat(root.getEdgeProperties().keySet(), empty());
        assertThat(child.getNodeProperties().keySet(), empty());
        assertThat(child.getEdgeProperties().keySet(), hasSize(1));
        assertTreeProperties(root);
        assertTreeProperties(child);
    }

    // Linear chain 1 -> 2 -> 3, then reparent the root under the leaf.
    @Test
    public void testStructureLinearThreeNodeSwapped() {
        Node level1 = new Node();
        Node level2 = new Node();
        Node level3 = new Node();
        level2.setParent(level1);
        level3.setParent(level2);
        // check parent-child relationships
        assertThat(level1.getParent(), is(Optional.empty()));
        assertThat(level1.getChildren(), containsInAnyOrder(level2));
        assertThat(level2.getParent(), is(Optional.of(level1)));
        assertThat(level2.getChildren(), containsInAnyOrder(level3));
        assertThat(level3.getParent(), is(Optional.of(level2)));
        assertThat(level3.getChildren(), empty());

        // Swap nodes
        level1.setParent(level3);
        // check parent-child relationships
        assertThat(level3.getParent(), is(Optional.empty()));
        assertThat(level3.getChildren(), containsInAnyOrder(level1));
        assertThat(level1.getParent(), is(Optional.of(level3)));
        assertThat(level1.getChildren(), containsInAnyOrder(level2));
        assertThat(level2.getParent(), is(Optional.of(level1)));
        assertThat(level2.getChildren(), empty());
    }

    // Star: 1 with children 2.1/2.2/2.3; exercises several successive reparentings.
    @Test
    public void testStructureAThreeNodeSwapped() {
        Node level1 = new Node();
        Node level21 = new Node();
        Node level22 = new Node();
        Node level23 = new Node();
        level21.setParent(level1);
        level22.setParent(level1);
        level23.setParent(level1);
        // check parent-child relationships
        assertThat(level1.getParent(), is(Optional.empty()));
        assertThat(level1.getChildren(), containsInAnyOrder(level21, level22, level23));
        assertThat(level21.getParent(), is(Optional.of(level1)));
        assertThat(level21.getChildren(), empty());
        assertThat(level22.getParent(), is(Optional.of(level1)));
        assertThat(level22.getChildren(), empty());
        assertThat(level23.getParent(), is(Optional.of(level1)));
        assertThat(level23.getChildren(), empty());

        // Change parents
        level1.setParent(level22);
        // check parent-child relationships
        assertThat(level22.getParent(), is(Optional.empty()));
        assertThat(level22.getChildren(), containsInAnyOrder(level1));
        assertThat(level1.getParent(), is(Optional.of(level22)));
        assertThat(level1.getChildren(), containsInAnyOrder(level21, level23));
        assertThat(level21.getParent(), is(Optional.of(level1)));
        assertThat(level21.getChildren(), empty());
        assertThat(level23.getParent(), is(Optional.of(level1)));
        assertThat(level23.getChildren(), empty());

        // Change parents
        level21.setParent(level22);
        // check parent-child relationships
        assertThat(level22.getParent(), is(Optional.empty()));
        assertThat(level22.getChildren(), containsInAnyOrder(level1, level21));
        assertThat(level1.getParent(), is(Optional.of(level22)));
        assertThat(level1.getChildren(), containsInAnyOrder(level23));
        assertThat(level21.getParent(), is(Optional.of(level22)));
        assertThat(level21.getChildren(), empty());
        assertThat(level23.getParent(), is(Optional.of(level1)));
        assertThat(level23.getChildren(), empty());

        // Change parents: a null parent detaches the node entirely.
        level21.setParent(null);
        // check parent-child relationships
        assertThat(level22.getParent(), is(Optional.empty()));
        assertThat(level22.getChildren(), containsInAnyOrder(level1));
        assertThat(level1.getParent(), is(Optional.of(level22)));
        assertThat(level1.getChildren(), containsInAnyOrder(level23));
        assertThat(level21.getParent(), is(Optional.empty()));
        assertThat(level21.getChildren(), empty());
        assertThat(level23.getParent(), is(Optional.of(level1)));
        assertThat(level23.getChildren(), empty());
    }

    // Renaming a child must update the parent's id-based child lookup.
    @Test
    public void testChangeId() {
        Node root = new Node();
        Node child1 = new Node();
        child1.setParent(root);
        child1.setId("my-id1");
        Node child2 = new Node();
        child2.setParent(root);
        child2.setId("my-id2");
        // check parent-child relationships
        assertThat(root.getChild("my-id1"), is(Optional.of(child1)));
        assertThat(root.getChild("my-id2"), is(Optional.of(child2)));

        child1.setId("my-id3");
        // The old id must no longer resolve, the new one must.
        assertThat(root.getChild("my-id1"), is(Optional.empty()));
        assertThat(root.getChild("my-id3"), is(Optional.of(child1)));
    }

    @Test
    public void testRemoveChildDirectly() {
        Node root = new Node();
        Node child1 = new Node();
        child1.setParent(root);
        child1.setId("my-id1");
        Node child2 = new Node();
        child2.setParent(root);
        child2.setId("my-id2");

        // check parent-child relationships
        assertThat(root.getChild("my-id1"), is(Optional.of(child1)));
        assertThat(root.getChild("my-id2"), is(Optional.of(child2)));

        // Removal returns the removed node and clears its parent link.
        assertThat(root.removeChild(child1), is(Optional.of(child1)));
        assertThat(root.getChildren(), containsInAnyOrder(child2));
        assertThat(child1.getParent(), is(Optional.empty()));

        assertThat(root.removeChild(child1), is(Optional.empty())); // try to remove it once again
        assertThat(root.getChildren(), containsInAnyOrder(child2));
        assertThat(child1.getParent(), is(Optional.empty()));
    }

    @Test
    public void testRemoveChildViaPredicate() {
        Node root = new Node();
        Node child1 = new Node();
        child1.setParent(root);
        child1.setId("my-id1");
        Node child2 = new Node();
        child2.setParent(root);
        child2.setId("my-id2");
        Node child3 = new Node();
        child3.setParent(root);
        child3.setId("my-id3");

        // check removals: the predicate variant returns the number of nodes removed.
        assertThat(root.removeChild(node -> "my-id1".equals(node.getId())), is(1));
        assertThat(root.getChildren(), containsInAnyOrder(child2, child3));
        assertThat(child1.getParent(), is(Optional.empty()));
        assertThat(child2.getParent(), is(Optional.of(root)));
        assertThat(child3.getParent(), is(Optional.of(root)));

        assertThat(root.removeChild(node -> true), is(2));
        assertThat(root.getChildren(), empty());
        assertThat(child1.getParent(), is(Optional.empty()));
        assertThat(child2.getParent(), is(Optional.empty()));
        assertThat(child3.getParent(), is(Optional.empty()));
    }

    @Test
    public void testRemoveChild() {
        Node root = new Node();
        Node child1 = new Node();
        child1.setParent(root);
        child1.setId("my-id1");
        Node child2 = new Node();
        child2.setParent(root);
        child2.setId("my-id2");

        // check parent-child relationships
        assertThat(root.getChild("my-id1"), is(Optional.of(child1)));
        assertThat(root.getChild("my-id2"), is(Optional.of(child2)));

        root.removeChild(child1);
        assertThat(root.getChildren(), containsInAnyOrder(child2));
        assertThat(child1.getParent(), is(Optional.empty()));
    }

    // Depth-first search: children are visited before siblings; the visitor's
    // boolean result stops the search when true.
    @Test
    public void testDfs() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");

        List<Node> res = new LinkedList<>();
        assertThat(root.findFirstDfs(n -> { res.add(n); return false; }), is(Optional.empty()));
        assertThat(res, contains(root, child11, child111, child112, child1121, child12, child121, child122, child123));

        res.clear();
        assertThat(root.findFirstDfs(n -> { res.add(n); return n == child12; }), is(Optional.of(child12)));
        assertThat(res, contains(root, child11, child111, child112, child1121, child12));
    }

    // "Bottommost" DFS: once a node matches, the search continues within its
    // subtree to return the deepest match on that branch.
    @Test
    public void testDfsBottommost() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child13 = new Node(root, "1.3");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");
        Node child131 = new Node(child13, "1.3.1");
        Node child132 = new Node(child13, "1.3.2");

        List<Node> res = new LinkedList<>();
        assertThat(root.findFirstBottommostDfs(n -> { res.add(n); return false; }), is(Optional.empty()));
        assertThat(res,
          contains(root, child11, child111, child112, child1121, child12, child121, child122, child123, child13, child131, child132));

        res.clear();
        assertThat(root.findFirstBottommostDfs(n -> { res.add(n); return n == child12; }), is(Optional.of(child12)));
        assertThat(res, contains(root, child11, child111, child112, child1121, child12, child121, child122, child123));

        res.clear();
        // Matches 1.1.2 first, then descends to the deepest match 1.1.2.1.
        assertThat(root.findFirstBottommostDfs(n -> { res.add(n); return n.getId().startsWith("1.1.2"); }), is(Optional.of(child1121)));
        assertThat(res, contains(root, child11, child111, child112, child1121));
    }

    // Breadth-first search: all siblings are visited before any grandchildren.
    @Test
    public void testBfs() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");

        List<Node> res = new LinkedList<>();
        assertThat(root.findFirstBfs(n -> { res.add(n); return false; }), is(Optional.empty()));
        assertThat(res, contains(root, child11, child12, child111, child112, child121, child122, child123, child1121));

        res.clear();
        assertThat(root.findFirstBfs(n -> { res.add(n); return n == child12; }), is(Optional.of(child12)));
        assertThat(res, contains(root, child11, child12));
    }

    @Test
    public void testWalkBfs() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");

        List<Node> res = new LinkedList<>();
        root.walkBfs(res::add);
        assertThat(res, contains(root, child11, child12, child111, child112, child121, child122, child123, child1121));
    }

    // walkDfs takes two visitors: one fired on entering a node (pre-order),
    // one after all of its children have been processed (post-order).
    @Test
    public void testWalkDfs() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");

        List<Node> uponEntry = new LinkedList<>();
        List<Node> afterChildren = new LinkedList<>();
        root.walkDfs(uponEntry::add, afterChildren::add);
        assertThat(uponEntry, contains(root, child11, child111, child112, child1121, child12, child121, child122, child123));
        assertThat(afterChildren, contains(child111, child1121, child112, child11, child121, child122, child123, child12, root));
    }

    // Visits ancestors from the immediate parent up to (and including) the root.
    @Test
    public void testForEachParent() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");

        List<Node> res = new LinkedList<>();
        res.clear();
        root.forEachParent(res::add);
        assertThat(res, empty());

        res.clear();
        child1121.forEachParent(res::add);
        assertThat(res, contains(child112, child11, root));

        res.clear();
        child123.forEachParent(res::add);
        assertThat(res, contains(child12, root));
    }

    // Path includes both endpoints; orientation controls the element order.
    @Test
    public void testPathToRoot() {
        Node root = new Node("1");
        Node child11 = new Node(root, "1.1");
        Node child12 = new Node(root, "1.2");
        Node child111 = new Node(child11, "1.1.1");
        Node child112 = new Node(child11, "1.1.2");
        Node child121 = new Node(child12, "1.2.1");
        Node child122 = new Node(child12, "1.2.2");
        Node child123 = new Node(child12, "1.2.3");
        Node child1121 = new Node(child112, "1.1.2.1");

        assertThat(child1121.getPathToRoot(PathOrientation.TOP_FIRST), contains(root, child11, child112, child1121));
        assertThat(child123.getPathToRoot(PathOrientation.TOP_FIRST), contains(root, child12, child123));
        assertThat(root.getPathToRoot(PathOrientation.TOP_FIRST), contains(root));
        assertThat(child1121.getPathToRoot(PathOrientation.BOTTOM_FIRST), contains(child1121, child112, child11, root));
        assertThat(child123.getPathToRoot(PathOrientation.BOTTOM_FIRST), contains(child123, child12, root));
        assertThat(root.getPathToRoot(PathOrientation.BOTTOM_FIRST), contains(root));
    }

    // A self-referencing node property must not make toString() recurse forever;
    // the bound also keeps the rendered output short.
    @Test
    public void testToStringStackOverflow() {
        Node n = new Node("1");
        n.setNodeProperty("prop", n);
        assertThat(n.toString().length(), lessThan(255));
    }

    // Verifies typed access to the shared tree-wide properties: a lookup only
    // succeeds when the requested type matches the stored value's type, and
    // changes to the shared map are visible through every node.
    private void assertTreeProperties(Node node) {
        assertThat(node.getTreeProperty(KEY_1, String.class), notNullValue());
        assertThat(node.getTreeProperty(KEY_1, Date.class), notNullValue());
        assertThat(node.getTreeProperty(KEY_1, String.class), is(Optional.of(VALUE_1)));
        assertThat(node.getTreeProperty(KEY_1, Date.class), is(Optional.empty()));
        assertThat(node.getTreeProperty(KEY_2, String.class), is(Optional.empty()));
        assertThat(node.getTreeProperty(KEY_2, Date.class), is(Optional.of(VALUE_2)));
        assertThat(node.getTreeProperties().size(), is(2));

        // Mutations of the shared map are reflected immediately.
        treeProperties.put(KEY_3, VALUE_3);
        assertThat(node.getTreeProperties().size(), is(3));
        assertThat(node.getTreeProperty(KEY_3, String.class), is(Optional.empty()));
        assertThat(node.getTreeProperty(KEY_3, Integer.class), is(Optional.of(VALUE_3)));
        treeProperties.remove(KEY_3);
        assertThat(node.getTreeProperties().size(), is(2));
        assertThat(node.getTreeProperties(), not(hasKey(KEY_3)));
    }

    // Asserts typed node-property lookup; value == null means "expected absent".
    private void assertNodeProperty(Node node, String key, Object value) {
        if (value != null) {
            assertThat(node.getNodeProperty(key, value.getClass()), is(Optional.of(value)));
            assertThat(node.getNodeProperty(key, Object.class), is(Optional.of(value)));
            assertThat(node.getNodeProperty(key, Throwable.class), is(Optional.empty()));
        } else {
            assertThat(node.getNodeProperty(key, Object.class), is(Optional.empty()));
        }
    }

    // Asserts typed edge-property lookup; value == null means "expected absent".
    private void assertEdgeProperty(Node node, String key, Object value) {
        if (value != null) {
            assertThat(node.getEdgeProperty(key, value.getClass()), is(Optional.of(value)));
            assertThat(node.getEdgeProperty(key, Object.class), is(Optional.of(value)));
            assertThat(node.getEdgeProperty(key, Throwable.class), is(Optional.empty()));
        } else {
            assertThat(node.getEdgeProperty(key, Object.class), is(Optional.empty()));
        }
    }
}
package at.ac.tuwien.oz.definitions.ozclass; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.stream.Collectors; import org.antlr.v4.runtime.tree.ParseTree; import org.antlr.v4.runtime.tree.ParseTreeProperty; import org.stringtemplate.v4.ST; import at.ac.tuwien.oz.datatypes.Declarations; import at.ac.tuwien.oz.datatypes.ExpressionType; import at.ac.tuwien.oz.datatypes.Ident; import at.ac.tuwien.oz.datatypes.Idents; import at.ac.tuwien.oz.datatypes.Variable; import at.ac.tuwien.oz.datatypes.axioms.AxiomReferences; import at.ac.tuwien.oz.definitions.BaseDefinition; import at.ac.tuwien.oz.definitions.DefinitionTable; import at.ac.tuwien.oz.definitions.IDefinition; import at.ac.tuwien.oz.definitions.IScope; import at.ac.tuwien.oz.definitions.ObjectZDefinition; import at.ac.tuwien.oz.definitions.local.Axiomatic; import at.ac.tuwien.oz.definitions.local.LocalDefinition; import at.ac.tuwien.oz.definitions.local.LocalDefinitions; import at.ac.tuwien.oz.definitions.operation.Operation; import at.ac.tuwien.oz.definitions.operation.OperationExpression; import at.ac.tuwien.oz.definitions.operation.interfaces.IComposableOperation; import at.ac.tuwien.oz.translator.templates.PerfectTemplateProvider; public class ObjectZClass extends BaseDefinition implements IScope{ private ObjectZDefinition enclosingScope; private DefinitionTable<IDefinition> classMembers; private List<Ident> formalParameters; private List<Ident> visibleFeatures; private List<ClassDescriptor> inheritedClasses; private LocalDefinitions localDefinitions; private Axiomatic localAxiomatic; private InitialState initialState; private StateSchema stateSchema; private Operations operations; private boolean allFeaturesVisible; private ST classTemplate; public ObjectZClass(String name, ObjectZDefinition globalScope, ParseTree ctx){ super(name, ctx); this.enclosingScope = globalScope; this.classMembers = new 
DefinitionTable<IDefinition>(); this.formalParameters = new ArrayList<Ident>(); this.visibleFeatures = new ArrayList<Ident>(); this.inheritedClasses = new ArrayList<ClassDescriptor>(); this.localDefinitions = new LocalDefinitions(); this.operations = new Operations(); this.localAxiomatic = new Axiomatic(); } @Override public IScope getEnclosingScope(){ return this.enclosingScope; } private void define(IDefinition definition) { this.classMembers.add(definition.getId(), definition); } @Override public IDefinition resolve(Ident id) { if (id == null){ return null; } return this.classMembers.get(id); } public void addFormalParameter(Ident i){ this.formalParameters.add(i); } public void addVisibleFeature(Ident i){ this.visibleFeatures.add(i); } public void addInheritedClass(ClassDescriptor descriptor){ this.inheritedClasses.add(descriptor); } public void addLocalDefinition(LocalDefinition localDefinition){ this.define(localDefinition); this.localDefinitions.add(localDefinition); } public void addStateSchema(StateSchema stateSchema) { this.stateSchema = stateSchema; for (Variable primary: stateSchema.getPrimaryDeclarations()){ this.define(primary); } for (Variable secondary: stateSchema.getSecondaryDeclarations()){ this.define(secondary); } } public void addInitialState(InitialState init){ this.initialState = init; } public void addOperation(Operation op){ this.define(op); this.operations.add(op); } public Operation resolveOperation(Ident id) { Operation operation = operations.resolve(id); return operation; } public Variable resolveVariable(Ident id){ return this.stateSchema.resolve(id); } public Variable resolveConstant(Ident id){ return this.localAxiomatic.resolve(id); } public boolean isStateVariable(Ident usedId) { if (usedId == null || this.classMembers.get(usedId) == null){ return false; } for(Variable primary: stateSchema.getPrimaryDeclarations()){ if (usedId.equals(primary.getId())){ return true; } } for (Variable secondary: 
stateSchema.getSecondaryDeclarations()){ if (usedId.equals(secondary.getId())){ return true; } } // TODO look into super classes return false; } public void setAllFeaturesVisible() { this.allFeaturesVisible = true; } public List<Ident> getVisibleFeatures() { if (allFeaturesVisible && visibleFeatures.isEmpty()){ calculateVisibleFeatures(); } return visibleFeatures; } public boolean isVisible(Ident id){ return this.getVisibleFeatures().contains(id); } private void calculateVisibleFeatures() { if (initialState != null){ visibleFeatures.add(new Ident("INIT")); } // add all class members; for (Ident i: this.classMembers.identifiers()){ visibleFeatures.add(i); } } public Declarations getPrimaryDeclarations() { if (this.stateSchema == null){ return Declarations.empty(); } return this.stateSchema.getPrimaryDeclarations(); } public Declarations getSecondaryDeclarations() { if (this.stateSchema == null){ return Declarations.empty(); } return this.stateSchema.getSecondaryDeclarations(); } public Declarations getAxiomaticDeclarations() { return this.localAxiomatic.getDeclarations(); } public void addAxiomaticDeclarations(Declarations axiomaticDeclarations) { this.localAxiomatic.addAxiomaticDeclarations(axiomaticDeclarations); for (Variable axiomVar: axiomaticDeclarations){ this.define(axiomVar); } } public AxiomReferences getAxiomaticReferences() { return this.localAxiomatic.getAxiomReferences(); } public AxiomReferences getAxiomaticReferences(Variable axiomVar) { return this.localAxiomatic.getAxiomReferences(axiomVar); } public void addAxiomaticReferences(AxiomReferences axiomaticReferences) { this.localAxiomatic.addAxiomaticReferences(axiomaticReferences); } public AxiomReferences getInitialAxiomReferences() { if (initialState == null){ return new AxiomReferences(); } return this.initialState.getAxiomReferences(); } public AxiomReferences getStateAxiomReferences() { return this.stateSchema.getPrimaryAxiomReferences(); } public AxiomReferences getStateAxiomReferences(Variable 
stateVar) { AxiomReferences primaryReferences = this.stateSchema.getPrimaryAxiomReferences(stateVar); if (primaryReferences.isEmpty()){ return this.stateSchema.getSecondaryAxiomReferences(stateVar); } else { return primaryReferences; } } public Operations getOperations() { return this.operations; } public Operations getSimpleOperations() { Operations simpleOperations = new Operations(); Collection<Operation> simpleOperationCollection = this.operations.asList().stream() .filter(op -> op.isSimpleOperation()) .collect(Collectors.toList()); simpleOperations.addAll(simpleOperationCollection); return simpleOperations; } public List<IComposableOperation> getOperationExpressions() { List<IComposableOperation> operationExpressions = new ArrayList<IComposableOperation>(); Collection<IComposableOperation> operationExpressionCollection = this.operations.asList().stream() .filter(op -> op.isOperationExpression()) .map(op -> (OperationExpression)op) .collect(Collectors.toList()); operationExpressions.addAll(operationExpressionCollection); return operationExpressions; } public boolean isSubTypeOf(Ident id) { Idents inheritedClassIds = new Idents(); for (ClassDescriptor des: this.inheritedClasses){ inheritedClassIds.add(des.getId()); if (des.getId().equals(id)){ return true; } } for (Ident inheritedClassId: inheritedClassIds){ ObjectZClass superClass = enclosingScope.resolveClass(inheritedClassId); if (superClass.isSubTypeOf(id)){ return true; } } return false; } @Override public ExpressionType getExpressionType() { // TODO think about it if we should include formal parameters here or even generic ones?? 
-> leave it for now return ExpressionType.getUserDefinedType(ctx, this.name); } public ObjectZDefinition getDefinition(){ return this.enclosingScope; } public ST getTemplate(){ return this.classTemplate; } public void translate(ParseTreeProperty<ST> templateTree) { for (LocalDefinition localDef: this.localDefinitions){ localDef.translate(templateTree); } this.localAxiomatic.translate(templateTree); if (this.initialState != null){ this.initialState.translate(templateTree); } if (this.stateSchema != null){ this.stateSchema.translate(templateTree); } this.inheritedClasses.stream().forEach(c -> c.translate(templateTree)); this.classTemplate = PerfectTemplateProvider.getInstance().getClassDef(this); } public List<Ident> getFormalParameters() { return this.formalParameters; } public List<ClassDescriptor> getInheritedClasses(){ return this.inheritedClasses; } public Map<ClassDescriptor, Declarations> getInheritedFields(){ Map<ClassDescriptor, Declarations> inheritedFields = new HashMap<>(); if (inheritedClasses == null || inheritedClasses.isEmpty()){ return inheritedFields; } for (ClassDescriptor inheritedClass: inheritedClasses){ ObjectZClass superClass = this.enclosingScope.resolveClass(inheritedClass.getId()); Declarations inheritedMembers = new Declarations(); inheritedMembers.addAll(superClass.getPrimaryDeclarations()); inheritedMembers.addAll(superClass.getAxiomaticDeclarations()); inheritedFields.put(inheritedClass, inheritedMembers); } return inheritedFields; } public LocalDefinitions getLocalDefinitions() { return this.localDefinitions; } public AxiomReferences getInvariants() { return this.stateSchema.getInvariants(); } }
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.signin;

import static org.mockito.Mockito.when;

import android.content.res.Configuration;
import android.support.test.runner.lifecycle.Stage;

import androidx.test.filters.MediumTest;

import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnit;
import org.mockito.junit.MockitoRule;

import org.chromium.base.test.params.ParameterAnnotations;
import org.chromium.base.test.params.ParameterProvider;
import org.chromium.base.test.params.ParameterSet;
import org.chromium.base.test.params.ParameterizedRunner;
import org.chromium.base.test.util.ApplicationTestUtils;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.firstrun.FirstRunPageDelegate;
import org.chromium.chrome.browser.firstrun.PolicyLoadListener;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.night_mode.ChromeNightModeTestUtils;
import org.chromium.chrome.browser.privacy.settings.PrivacyPreferencesManagerImpl;
import org.chromium.chrome.browser.profiles.Profile;
import org.chromium.chrome.browser.signin.SigninFirstRunFragmentTest.CustomSigninFirstRunFragment;
import org.chromium.chrome.browser.signin.services.FREMobileIdentityConsistencyFieldTrial;
import org.chromium.chrome.browser.signin.services.FREMobileIdentityConsistencyFieldTrial.VariationsGroup;
import org.chromium.chrome.browser.signin.services.IdentityServicesProvider;
import org.chromium.chrome.browser.signin.services.SigninManager;
import org.chromium.chrome.test.ChromeJUnit4RunnerDelegate;
import org.chromium.chrome.test.ChromeTabbedActivityTestRule;
import org.chromium.chrome.test.util.ActivityTestUtils;
import org.chromium.chrome.test.util.browser.signin.AccountManagerTestRule;
import org.chromium.components.externalauth.ExternalAuthUtils;
import org.chromium.components.signin.identitymanager.IdentityManager;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.ui.test.util.RenderTestRule;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;

/** Render tests for the class {@link SigninFirstRunFragment}. */
@RunWith(ParameterizedRunner.class)
@ParameterAnnotations.UseRunnerDelegate(ChromeJUnit4RunnerDelegate.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
public class SigninFirstRunFragmentRenderTest {
    /** Parameter provider for night mode state and device orientation. */
    public static class NightModeAndOrientationParameterProvider implements ParameterProvider {
        // Cartesian product of {night mode off/on} x {portrait/landscape};
        // each render test annotated with this provider runs once per combination.
        private static List<ParameterSet> sParams = Arrays.asList(
                new ParameterSet()
                        .value(/*nightModeEnabled=*/false, Configuration.ORIENTATION_PORTRAIT)
                        .name("NightModeDisabled_Portrait"),
                new ParameterSet()
                        .value(/*nightModeEnabled=*/false, Configuration.ORIENTATION_LANDSCAPE)
                        .name("NightModeDisabled_Landscape"),
                new ParameterSet()
                        .value(/*nightModeEnabled=*/true, Configuration.ORIENTATION_PORTRAIT)
                        .name("NightModeEnabled_Portrait"),
                new ParameterSet()
                        .value(/*nightModeEnabled=*/true, Configuration.ORIENTATION_LANDSCAPE)
                        .name("NightModeEnabled_Landscape"));
        @Override
        public Iterable<ParameterSet> getParameters() {
            return sParams;
        }
    }

    private static final String TEST_EMAIL1 = "test.account1@gmail.com";
    private static final String CHILD_ACCOUNT_NAME =
            AccountManagerTestRule.generateChildEmail("account@gmail.com");

    @Rule
    public final MockitoRule mMockitoRule = MockitoJUnit.rule();

    @Rule
    public final RenderTestRule mRenderTestRule = RenderTestRule.Builder.withPublicCorpus().build();

    @Rule
    public final AccountManagerTestRule mAccountManagerTestRule = new AccountManagerTestRule();

    @Rule
    public final ChromeTabbedActivityTestRule mChromeActivityTestRule =
            new ChromeTabbedActivityTestRule();

    @Mock
    private ExternalAuthUtils mExternalAuthUtilsMock;
    @Mock
    private FirstRunPageDelegate mFirstRunPageDelegateMock;
    @Mock
    private PolicyLoadListener mPolicyLoadListenerMock;
    @Mock
    private SigninManager mSigninManagerMock;
    @Mock
    private SigninChecker mSigninCheckerMock;
    @Mock
    private IdentityManager mIdentityManagerMock;
    @Mock
    private IdentityServicesProvider mIdentityServicesProviderMock;
    @Mock
    private PrivacyPreferencesManagerImpl mPrivacyPreferencesManagerMock;

    // Fragment under test; created fresh in setUp() for each test.
    private CustomSigninFirstRunFragment mFragment;

    /**
     * Applies the night mode / orientation parameters before each parameterized
     * test and encodes them in the render test variant prefix so golden images
     * are keyed per combination.
     */
    @ParameterAnnotations.UseMethodParameterBefore(NightModeAndOrientationParameterProvider.class)
    public void setupNightModeAndDeviceOrientation(boolean nightModeEnabled, int orientation) {
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            ChromeNightModeTestUtils.setUpNightModeForChromeActivity(nightModeEnabled);
        });
        mRenderTestRule.setNightModeEnabled(nightModeEnabled);
        mRenderTestRule.setVariantPrefix(
                orientation == Configuration.ORIENTATION_PORTRAIT ? "Portrait" : "Landscape");
    }

    @BeforeClass
    public static void setUpBeforeActivityLaunched() {
        ChromeNightModeTestUtils.setUpNightModeBeforeChromeActivityLaunched();
    }

    /**
     * Installs the mocks (Play Services available, no policy by default),
     * starts the activity, and creates the fragment with native initialized.
     */
    @Before
    public void setUp() {
        when(mExternalAuthUtilsMock.canUseGooglePlayServices()).thenReturn(true);
        ExternalAuthUtils.setInstanceForTesting(mExternalAuthUtilsMock);
        SigninCheckerProvider.setForTests(mSigninCheckerMock);
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.DEFAULT);
        // Default: device is unmanaged; individual tests flip this to true.
        when(mPolicyLoadListenerMock.get()).thenReturn(false);
        when(mFirstRunPageDelegateMock.getPolicyLoadListener()).thenReturn(mPolicyLoadListenerMock);
        mChromeActivityTestRule.startMainActivityOnBlankPage();
        mFragment = new CustomSigninFirstRunFragment();
        TestThreadUtils.runOnUiThreadBlocking(() -> { mFragment.onNativeInitialized(); });
        mFragment.setPageDelegate(mFirstRunPageDelegateMock);
    }

    @AfterClass
    public static void tearDownAfterActivityDestroyed() {
        ChromeNightModeTestUtils.tearDownNightModeAfterChromeActivityDestroyed();
    }

    /** Renders the FRE after rotating portrait -> landscape with an account present. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    public void testFragmentRotationToLandscapeWithAccount() throws IOException {
        mAccountManagerTestRule.addAccount(TEST_EMAIL1);
        launchActivityWithFragment(Configuration.ORIENTATION_PORTRAIT);
        ActivityTestUtils.rotateActivityToOrientation(
                mChromeActivityTestRule.getActivity(), Configuration.ORIENTATION_LANDSCAPE);
        // Wait until the account row is fully bound (secondary text visible).
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_with_account_landscape");
    }

    /** Renders the FRE after rotating landscape -> portrait with an account present. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    public void testFragmentRotationToPortraitWithAccount() throws IOException {
        mAccountManagerTestRule.addAccount(TEST_EMAIL1);
        launchActivityWithFragment(Configuration.ORIENTATION_LANDSCAPE);
        ActivityTestUtils.rotateActivityToOrientation(
                mChromeActivityTestRule.getActivity(), Configuration.ORIENTATION_PORTRAIT);
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_with_account_portrait");
    }

    /** Renders the FRE with a signed-in-capable account present. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWithAccount(boolean nightModeEnabled, int orientation)
            throws IOException {
        mAccountManagerTestRule.addAccount(TEST_EMAIL1);
        launchActivityWithFragment(orientation);
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(mFragment.getView(), "signin_first_run_fragment_with_account");
    }

    /** Renders the FRE with an account on a managed (policy-controlled) device. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWithAccountOnManagedDevice(boolean nightModeEnabled, int orientation)
            throws IOException {
        when(mPolicyLoadListenerMock.get()).thenReturn(true);
        mAccountManagerTestRule.addAccount(TEST_EMAIL1);
        launchActivityWithFragment(orientation);
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_with_account_managed");
    }

    /** Renders the FRE when enterprise policy disables sign-in entirely. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWithAccountWhenSigninIsDisabledByPolicy(
            boolean nightModeEnabled, int orientation) throws IOException {
        IdentityServicesProvider.setInstanceForTests(mIdentityServicesProviderMock);
        // Stubbing on the UI thread because these getters are queried there.
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            when(IdentityServicesProvider.get().getSigninManager(
                         Profile.getLastUsedRegularProfile()))
                    .thenReturn(mSigninManagerMock);
            when(IdentityServicesProvider.get().getIdentityManager(
                         Profile.getLastUsedRegularProfile()))
                    .thenReturn(mIdentityManagerMock);
        });
        when(mSigninManagerMock.isSigninDisabledByPolicy()).thenReturn(true);
        when(mPolicyLoadListenerMock.get()).thenReturn(true);
        mAccountManagerTestRule.addAccount(TEST_EMAIL1);
        launchActivityWithFragment(orientation);
        // The account picker must be hidden when sign-in is policy-disabled.
        CriteriaHelper.pollUiThread(() -> {
            return !mFragment.getView().findViewById(R.id.signin_fre_selected_account).isShown();
        });
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_when_signin_disabled_by_policy");
    }

    /** Renders the FRE when the device has no accounts. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWithoutAccount(boolean nightModeEnabled, int orientation)
            throws IOException {
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(), "signin_first_run_fragment_without_account");
    }

    /** Renders the FRE with no accounts on a managed device. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWithoutAccountOnManagedDevice(boolean nightModeEnabled, int orientation)
            throws IOException {
        when(mPolicyLoadListenerMock.get()).thenReturn(true);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_without_account_managed");
    }

    /** Renders the FRE with a supervised (child) account. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWithChildAccount(boolean nightModeEnabled, int orientation)
            throws IOException {
        mAccountManagerTestRule.addAccount(CHILD_ACCOUNT_NAME);
        launchActivityWithFragment(orientation);
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(mFragment.getView(), "signin_first_run_fragment_with_child_account");
    }

    /** Renders the FRE when Google Play Services is unavailable. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWhenCannotUseGooglePlayService(
            boolean nightModeEnabled, int orientation) throws IOException {
        when(mExternalAuthUtilsMock.canUseGooglePlayServices()).thenReturn(false);
        // Re-notify native init so the fragment re-evaluates Play Services availability.
        TestThreadUtils.runOnUiThreadBlocking(() -> { mFragment.onNativeInitialized(); });
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_signin_not_supported");
    }

    /** Renders the FRE when policy forbids usage/crash metrics reporting. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWhenMetricsReportingIsDisabledByPolicy(
            boolean nightModeEnabled, int orientation) throws IOException {
        when(mPolicyLoadListenerMock.get()).thenReturn(true);
        when(mPrivacyPreferencesManagerMock.isUsageAndCrashReportingPermittedByPolicy())
                .thenReturn(false);
        PrivacyPreferencesManagerImpl.setInstanceForTesting(mPrivacyPreferencesManagerMock);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(),
                "signin_first_run_fragment_when_metrics_reporting_is_disabled_by_policy");
    }

    /** Same as above but with an account present. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWhenMetricsReportingIsDisabledByPolicyWithAccount(
            boolean nightModeEnabled, int orientation) throws IOException {
        when(mPolicyLoadListenerMock.get()).thenReturn(true);
        when(mPrivacyPreferencesManagerMock.isUsageAndCrashReportingPermittedByPolicy())
                .thenReturn(false);
        PrivacyPreferencesManagerImpl.setInstanceForTesting(mPrivacyPreferencesManagerMock);
        mAccountManagerTestRule.addAccount(TEST_EMAIL1);
        launchActivityWithFragment(orientation);
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(mFragment.getView(),
                "signin_first_run_fragment_when_metrics_reporting_is_disabled_by_policy_with_account");
    }

    /** Same as above but with a supervised (child) account. */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragmentWhenMetricsReportingIsDisabledByPolicyWithChildAccount(
            boolean nightModeEnabled, int orientation) throws IOException {
        when(mPolicyLoadListenerMock.get()).thenReturn(true);
        when(mPrivacyPreferencesManagerMock.isUsageAndCrashReportingPermittedByPolicy())
                .thenReturn(false);
        PrivacyPreferencesManagerImpl.setInstanceForTesting(mPrivacyPreferencesManagerMock);
        mAccountManagerTestRule.addAccount(CHILD_ACCOUNT_NAME);
        launchActivityWithFragment(orientation);
        CriteriaHelper.pollUiThread(() -> {
            return mFragment.getView().findViewById(R.id.account_text_secondary).isShown();
        });
        mRenderTestRule.render(mFragment.getView(),
                "signin_first_run_fragment_when_metrics_reporting_is_disabled_by_policy_with_child_account");
    }

    /** Renders the FRE title variation "Welcome to Chrome". */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragment_WelcomeToChrome(boolean nightModeEnabled, int orientation)
            throws IOException {
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.WELCOME_TO_CHROME);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(), "signin_first_run_fragment_welcome_to_chrome");
    }

    /** Renders the title/subtitle variation "Welcome to Chrome" + "most out of Chrome". */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragment_WelcomeToChrome_MostOutOfChrome(
            boolean nightModeEnabled, int orientation) throws IOException {
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.WELCOME_TO_CHROME_MOST_OUT_OF_CHROME);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(),
                "signin_first_run_fragment_welcome_to_chrome_most_out_of_chrome");
    }

    /** Renders the title/subtitle variation "Welcome to Chrome" + "strongest security". */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragment_WelcomeToChrome_StrongestSecurity(
            boolean nightModeEnabled, int orientation) throws IOException {
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.WELCOME_TO_CHROME_STRONGEST_SECURITY);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(),
                "signin_first_run_fragment_welcome_to_chrome_strongest_security");
    }

    /** Renders the title/subtitle variation "Welcome to Chrome" + "easier across devices". */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragment_WelcomeToChrome_EasierAcrossDevices(
            boolean nightModeEnabled, int orientation) throws IOException {
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.WELCOME_TO_CHROME_EASIER_ACROSS_DEVICES);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(),
                "signin_first_run_fragment_welcome_to_chrome_easier_across_devices");
    }

    /** Renders the FRE title variation "most out of Chrome". */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragment_MostOutOfChrome(boolean nightModeEnabled, int orientation)
            throws IOException {
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.MOST_OUT_OF_CHROME);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(mFragment.getView(), "signin_first_run_fragment_most_out_chrome");
    }

    /** Renders the FRE title variation "make Chrome your own". */
    @Test
    @MediumTest
    @Feature("RenderTest")
    @ParameterAnnotations.UseMethodParameter(NightModeAndOrientationParameterProvider.class)
    public void testFragment_MakeChromeYourOwn(boolean nightModeEnabled, int orientation)
            throws IOException {
        FREMobileIdentityConsistencyFieldTrial.setFirstRunVariationsTrialGroupForTesting(
                VariationsGroup.MAKE_CHROME_YOUR_OWN);
        launchActivityWithFragment(orientation);
        mRenderTestRule.render(
                mFragment.getView(), "signin_first_run_fragment_make_chrome_your_own");
    }

    /**
     * Rotates the activity to {@code orientation}, attaches the fragment to
     * the activity's content view, and waits for the activity to be RESUMED.
     */
    private void launchActivityWithFragment(int orientation) {
        ActivityTestUtils.rotateActivityToOrientation(
                mChromeActivityTestRule.getActivity(), orientation);
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            mChromeActivityTestRule.getActivity()
                    .getSupportFragmentManager()
                    .beginTransaction()
                    .add(android.R.id.content, mFragment)
                    .commit();
        });
        ApplicationTestUtils.waitForActivityState(
                mChromeActivityTestRule.getActivity(), Stage.RESUMED);
    }
}
package com.serenegiant.encoder;
/*
 * AudioVideoRecordingSample
 * Sample project to capture audio and video from internal mic/camera and save as MPEG4 file.
 *
 * Copyright (c) 2014-2015 saki t_saki@serenegiant.com
 *
 * File name: MediaVideoEncoder.java
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 * All files in the folder are under this Apache License, Version 2.0.
 */

import java.io.IOException;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.util.Log;
import android.view.Surface;

import com.serenegiant.glutils.RenderHandler;

/**
 * H.264 video encoder that receives frames through a MediaCodec input
 * Surface drawn by a {@link RenderHandler}, and writes the encoded stream
 * via the muxer managed by the {@code MediaEncoder} base class.
 */
public class MediaVideoEncoder extends MediaEncoder {
	private static final boolean DEBUG = false;	// TODO set false on release
	private static final String TAG = "MediaVideoEncoder";

	private static final String MIME_TYPE = "video/avc";
	// parameters for recording
	private static final int FRAME_RATE = 25;
	// bits-per-pixel factor used by calcBitRate() to derive the target bit rate
	private static final float BPP = 0.25f;

	private final int mWidth;
	private final int mHeight;
	// draws incoming GL textures onto the codec's input Surface
	private RenderHandler mRenderHandler;
	// input Surface obtained from the codec; valid between prepare() and release()
	private Surface mSurface;

	/**
	 * @param muxer muxer wrapper shared with the audio encoder
	 * @param listener callbacks for prepare/stop events
	 * @param width output video width in pixels
	 * @param height output video height in pixels
	 */
	public MediaVideoEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener, final int width, final int height) {
		super(muxer, listener);
		if (DEBUG) Log.i(TAG, "MediaVideoEncoder: ");
		mWidth = width;
		mHeight = height;
		mRenderHandler = RenderHandler.createHandler(TAG);
	}

	/**
	 * Signals that a frame will arrive soon and draws it with the given
	 * texture transform matrix. Only draws when the base class accepts
	 * the frame (i.e. the encoder is still running).
	 */
	public boolean frameAvailableSoon(final float[] tex_matrix) {
		boolean result;
		// intentional assignment-in-condition: draw only when accepted
		if (result = super.frameAvailableSoon())
			mRenderHandler.draw(tex_matrix);
		return result;
	}

	/** Same as {@link #frameAvailableSoon(float[])} but with the identity transform. */
	@Override
	public boolean frameAvailableSoon() {
		boolean result;
		if (result = super.frameAvailableSoon())
			mRenderHandler.draw(null);
		return result;
	}

	/**
	 * Creates and starts the AVC encoder in Surface-input mode.
	 * Call order matters: configure -> createInputSurface -> start
	 * (createInputSurface is only valid between configure and start).
	 * Silently returns (with an error log) if no suitable codec exists.
	 */
	@Override
	protected void prepare() throws IOException {
		if (DEBUG) Log.i(TAG, "prepare: ");
		mTrackIndex = -1;
		mMuxerStarted = mIsEOS = false;

		final MediaCodecInfo videoCodecInfo = selectVideoCodec(MIME_TYPE);
		if (videoCodecInfo == null) {
			Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
			return;
		}
		if (DEBUG) Log.i(TAG, "selected codec: " + videoCodecInfo.getName());

		final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
		format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);	// API >= 18
		format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate());
		format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
		format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 10);
		if (DEBUG) Log.i(TAG, "format: " + format);

		mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
		mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
		// get Surface for encoder input
		// this method only can call between #configure and #start
		mSurface = mMediaCodec.createInputSurface();	// API >= 18
		mMediaCodec.start();
		if (DEBUG) Log.i(TAG, "prepare finishing");
		if (mListener != null) {
			try {
				mListener.onPrepared(this);
			} catch (final Exception e) {
				Log.e(TAG, "prepare:", e);
			}
		}
	}

	/**
	 * Hands the shared EGL context, source texture id and the codec input
	 * Surface to the render handler. Must be called after {@link #prepare()}
	 * (mSurface must exist).
	 */
	public void setEglContext(final EGLContext shared_context, final int tex_id) {
		mRenderHandler.setEglContext(shared_context, tex_id, mSurface, true);
	}

	/** Releases the input Surface and render handler before the base-class release. */
	@Override
	protected void release() {
		if (DEBUG) Log.i(TAG, "release:");
		if (mSurface != null) {
			mSurface.release();
			mSurface = null;
		}
		if (mRenderHandler != null) {
			mRenderHandler.release();
			mRenderHandler = null;
		}
		super.release();
	}

	/** Derives the target bit rate (bits/sec) from BPP * fps * resolution. */
	private int calcBitRate() {
		final int bitrate = (int)(BPP * FRAME_RATE * mWidth * mHeight);
		Log.i(TAG, String.format("bitrate=%5.2f[Mbps]", bitrate / 1024f / 1024f));
		return bitrate;
	}

	/**
	 * select the first codec that match a specific MIME type
	 * @param mimeType
	 * @return null if no codec matched
	 */
	protected static final MediaCodecInfo selectVideoCodec(final String mimeType) {
		if (DEBUG) Log.v(TAG, "selectVideoCodec:");

		// get the list of available codecs
		final int numCodecs = MediaCodecList.getCodecCount();
		for (int i = 0; i < numCodecs; i++) {
			final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

			if (!codecInfo.isEncoder()) {	// skip decoders
				continue;
			}
			// select first codec that match a specific MIME type and color format
			final String[] types = codecInfo.getSupportedTypes();
			for (int j = 0; j < types.length; j++) {
				if (types[j].equalsIgnoreCase(mimeType)) {
					if (DEBUG) Log.i(TAG, "codec:" + codecInfo.getName() + ",MIME=" + types[j]);
					final int format = selectColorFormat(codecInfo, mimeType);
					if (format > 0) {
						return codecInfo;
					}
				}
			}
		}
		return null;
	}

	/**
	 * select color format available on specific codec and we can use.
	 * Temporarily raises the thread priority because getCapabilitiesForType
	 * can be slow on some devices.
	 * @return 0 if no colorFormat is matched
	 */
	protected static final int selectColorFormat(final MediaCodecInfo codecInfo, final String mimeType) {
		if (DEBUG) Log.i(TAG, "selectColorFormat: ");
		int result = 0;
		final MediaCodecInfo.CodecCapabilities caps;
		try {
			Thread.currentThread().setPriority(Thread.MAX_PRIORITY);
			caps = codecInfo.getCapabilitiesForType(mimeType);
		} finally {
			Thread.currentThread().setPriority(Thread.NORM_PRIORITY);
		}
		int colorFormat;
		for (int i = 0; i < caps.colorFormats.length; i++) {
			colorFormat = caps.colorFormats[i];
			if (isRecognizedViewoFormat(colorFormat)) {
				if (result == 0)
					result = colorFormat;
				break;
			}
		}
		if (result == 0)
			Log.e(TAG, "couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
		return result;
	}

	/**
	 * color formats that we can use in this class
	 * (Surface input only; the YUV formats are intentionally disabled)
	 */
	protected static int[] recognizedFormats;
	static {
		recognizedFormats = new int[] {
			// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar,
			// MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar,
			// MediaCodecInfo.CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar,
			MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface,
		};
	}

	// NOTE(review): method name has a typo ("Viewo" -> "Video"); kept because
	// it is protected-visible and renaming could break subclasses.
	private static final boolean isRecognizedViewoFormat(final int colorFormat) {
		if (DEBUG) Log.i(TAG, "isRecognizedViewoFormat:colorFormat=" + colorFormat);
		final int n = recognizedFormats != null ? recognizedFormats.length : 0;
		for (int i = 0; i < n; i++) {
			if (recognizedFormats[i] == colorFormat) {
				return true;
			}
		}
		return false;
	}

	/** Signals end-of-stream through the input Surface (required in Surface mode). */
	@Override
	protected void signalEndOfInputStream() {
		if (DEBUG) Log.d(TAG, "sending EOS to encoder");
		mMediaCodec.signalEndOfInputStream();	// API >= 18
		mIsEOS = true;
	}

}
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.devicefarm.model;

import java.io.Serializable;
import javax.annotation.Generated;

/**
 * <p>
 * Represents the response from the server after AWS Device Farm makes a request to return information about the remote
 * access session.
 * </p>
 * <p>
 * NOTE: this class is produced by the AWS SDK code generator; avoid hand-editing.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devicefarm-2015-06-23/ListRemoteAccessSessions"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListRemoteAccessSessionsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * <p>
     * A container representing the metadata from the service about each remote access session you are requesting.
     * </p>
     */
    private java.util.List<RemoteAccessSession> remoteAccessSessions;
    /**
     * <p>
     * An identifier that was returned from the previous call to this operation, which can be used to return the next
     * set of items in the list.
     * </p>
     */
    private String nextToken;

    /**
     * @return the metadata from the service about each remote access session requested (may be null if never set)
     */
    public java.util.List<RemoteAccessSession> getRemoteAccessSessions() {
        return remoteAccessSessions;
    }

    /**
     * Replaces the session list. A null argument clears the field; otherwise the
     * input collection is defensively copied into a new ArrayList.
     *
     * @param remoteAccessSessions
     *        metadata from the service about each remote access session requested
     */
    public void setRemoteAccessSessions(java.util.Collection<RemoteAccessSession> remoteAccessSessions) {
        if (remoteAccessSessions == null) {
            this.remoteAccessSessions = null;
            return;
        }

        this.remoteAccessSessions = new java.util.ArrayList<RemoteAccessSession>(remoteAccessSessions);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setRemoteAccessSessions(java.util.Collection)} or {@link #withRemoteAccessSessions(java.util.Collection)}
     * if you want to override the existing values.
     * </p>
     *
     * @param remoteAccessSessions
     *        metadata from the service about each remote access session requested
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListRemoteAccessSessionsResult withRemoteAccessSessions(RemoteAccessSession... remoteAccessSessions) {
        if (this.remoteAccessSessions == null) {
            setRemoteAccessSessions(new java.util.ArrayList<RemoteAccessSession>(remoteAccessSessions.length));
        }
        for (RemoteAccessSession ele : remoteAccessSessions) {
            this.remoteAccessSessions.add(ele);
        }
        return this;
    }

    /**
     * Replaces the session list (delegates to the setter, which defensively copies).
     *
     * @param remoteAccessSessions
     *        metadata from the service about each remote access session requested
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListRemoteAccessSessionsResult withRemoteAccessSessions(java.util.Collection<RemoteAccessSession> remoteAccessSessions) {
        setRemoteAccessSessions(remoteAccessSessions);
        return this;
    }

    /**
     * @param nextToken
     *        An identifier that was returned from the previous call to this operation, which can be used to return the
     *        next set of items in the list.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * @return An identifier that was returned from the previous call to this operation, which can be used to return the
     *         next set of items in the list.
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * @param nextToken
     *        An identifier that was returned from the previous call to this operation, which can be used to return the
     *        next set of items in the list.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public ListRemoteAccessSessionsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getRemoteAccessSessions() != null)
            sb.append("RemoteAccessSessions: ").append(getRemoteAccessSessions()).append(",");
        if (getNextToken() != null)
            sb.append("NextToken: ").append(getNextToken());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof ListRemoteAccessSessionsResult == false)
            return false;
        ListRemoteAccessSessionsResult other = (ListRemoteAccessSessionsResult) obj;
        // For each field, the XOR catches the "one null, one non-null" case
        // before the value comparison runs.
        if (other.getRemoteAccessSessions() == null ^ this.getRemoteAccessSessions() == null)
            return false;
        if (other.getRemoteAccessSessions() != null && other.getRemoteAccessSessions().equals(this.getRemoteAccessSessions()) == false)
            return false;
        if (other.getNextToken() == null ^ this.getNextToken() == null)
            return false;
        if (other.getNextToken() != null && other.getNextToken().equals(this.getNextToken()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getRemoteAccessSessions() == null) ? 0 : getRemoteAccessSessions().hashCode());
        hashCode = prime * hashCode + ((getNextToken() == null) ? 0 : getNextToken().hashCode());
        return hashCode;
    }

    @Override
    public ListRemoteAccessSessionsResult clone() {
        try {
            // Shallow copy: the session list and token references are shared with the original.
            return (ListRemoteAccessSessionsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
package com.suscipio_solutions.consecro_mud.Abilities.Properties;
import java.util.List;
import java.util.Vector;

import com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability;
import com.suscipio_solutions.consecro_mud.Areas.interfaces.Area;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CMMsg;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CharState;
import com.suscipio_solutions.consecro_mud.Common.interfaces.CharStats;
import com.suscipio_solutions.consecro_mud.Common.interfaces.PhyStats;
import com.suscipio_solutions.consecro_mud.Exits.interfaces.Exit;
import com.suscipio_solutions.consecro_mud.Items.interfaces.Item;
import com.suscipio_solutions.consecro_mud.Libraries.interfaces.ExpertiseLibrary;
import com.suscipio_solutions.consecro_mud.Locales.interfaces.Room;
import com.suscipio_solutions.consecro_mud.MOBS.interfaces.MOB;
import com.suscipio_solutions.consecro_mud.core.CMClass;
import com.suscipio_solutions.consecro_mud.core.CMLib;
import com.suscipio_solutions.consecro_mud.core.CMParms;
import com.suscipio_solutions.consecro_mud.core.CMath;
import com.suscipio_solutions.consecro_mud.core.Log;
import com.suscipio_solutions.consecro_mud.core.interfaces.CMObject;
import com.suscipio_solutions.consecro_mud.core.interfaces.Environmental;
import com.suscipio_solutions.consecro_mud.core.interfaces.Physical;
import com.suscipio_solutions.consecro_mud.core.interfaces.Tickable;

/**
 * Base class for property-style Abilities: always-on, non-invocable effects
 * attached to game objects. Nearly every Ability method is a neutral no-op
 * stub here; concrete properties override only the behavior they need.
 */
@SuppressWarnings("rawtypes")
public class Property implements Ability
{
	// --- identity (subclasses override ID/name/description) ---
	@Override public String ID() { return "Property"; }
	@Override public String name(){ return "a Property";}
	@Override public String Name(){return name();}
	@Override public String description(){return "";}
	@Override public String displayText(){return "";}

	protected boolean savable=true;     // whether this effect is persisted with its host
	protected String miscText="";       // free-form parameter text (the "TEXT" stat)
	protected Physical affected=null;   // the object this property is currently attached to

	/**
	 * Designates whether, when used as a property/effect, what sort of objects this
	 * ability can affect. Uses the Ability.CAN_* constants.
	 * @see com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability
	 * @return a mask showing the type of objects this ability can affect
	 */
	protected int canAffectCode(){return 0;}

	/**
	 * Designates whether, when invoked as a skill, what sort of objects this
	 * ability can effectively target. Uses the Ability.CAN_* constants.
	 * @see com.suscipio_solutions.consecro_mud.Abilities.interfaces.Ability
	 * @return a mask showing the type of objects this ability can target
	 */
	protected int canTargetCode(){return 0;}

	// bit-mask checks against the CAN_* codes above (CMath.bset tests the bits;
	// presumably a "bit set" predicate — confirm against CMath if in doubt)
	@Override public boolean canTarget(int can_code){return CMath.bset(canTargetCode(),can_code);}
	@Override public boolean canAffect(int can_code){return CMath.bset(canAffectCode(),can_code);}

	// --- invocation cost/timing: properties are never actively cast ---
	@Override public double castingTime(final MOB mob, final List<String> cmds){return 0.0;}
	@Override public double combatCastingTime(final MOB mob, final List<String> cmds){return 0.0;}
	@Override public double checkedCastingCost(final MOB mob, final List<String> cmds){return 0.0;}
	@Override public int abilityCode(){return 0;}
	@Override public void setAbilityCode(int newCode){}
	@Override public int adjustedLevel(MOB mob, int asLevel){return -1;}
	@Override public boolean bubbleAffect(){return false;}
	@Override public long flags(){return 0;}
	@Override public int getTickStatus(){return Tickable.STATUS_NOT;}
	@Override public int usageType(){return 0;}
	@Override public void initializeClass(){}
	@Override public ExpertiseLibrary.SkillCost getTrainingCost(MOB mob) { return new ExpertiseLibrary.SkillCost(ExpertiseLibrary.CostType.TRAIN, Double.valueOf(1.0));}

	// localization helper; delegates to the session-wide translation library
	@Override public String L(final String str, final String ... xs) { return CMLib.lang().fullSessionTranslation(str, xs); }

	// --- cosmetic setters are intentionally ignored for properties ---
	@Override public void setName(String newName){}
	@Override public void setDescription(String newDescription){}
	@Override public void setDisplayText(String newDisplayText){}
	@Override public String image(){return "";}
	@Override public String rawImage(){return "";}
	@Override public void setImage(String newImage){}
	@Override public MOB invoker(){return null;}
	@Override public void setInvoker(MOB mob){}

	public static final String[] empty={};
	@Override public String[] triggerStrings(){return empty;}

	// --- properties cannot be invoked, taught, or practiced ---
	@Override public boolean invoke(MOB mob, Vector commands, Physical target, boolean auto, int asLevel){return false;}
	@Override public boolean invoke(MOB mob, Physical target, boolean auto, int asLevel){return false;}
	@Override public boolean preInvoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel, int secondsElapsed, double actionsRemaining){return true;}
	@Override public boolean autoInvocation(MOB mob){return false;}
	@Override public void unInvoke(){}
	@Override public boolean canBeUninvoked(){return false;}
	@Override public boolean isAutoInvoked(){return true;}
	@Override public boolean isNowAnAutoEffect(){return true;}
	@Override public List<String> externalFiles(){return null;}
	@Override public boolean canBeTaughtBy(MOB teacher, MOB student){return false;}
	@Override public boolean canBePracticedBy(MOB teacher, MOB student){return false;}
	@Override public boolean canBeLearnedBy(MOB teacher, MOB student){return false;}
	@Override public void teach(MOB teacher, MOB student){}
	@Override public void practice(MOB teacher, MOB student){}
	@Override public int maxRange(){return Integer.MAX_VALUE;}
	@Override public int minRange(){return Integer.MIN_VALUE;}
	@Override public long expirationDate(){return 0;}
	@Override public void setExpirationDate(long time){}

	// Attaches this property to the target as an effect (no actual tick-down:
	// properties are permanent). Guarded so the same effect is not added twice.
	@Override
	public void startTickDown(MOB invokerMOB, Physical affected, int tickTime)
	{
		if(affected.fetchEffect(ID())==null)
			affected.addEffect(this);
	}

	@Override public int proficiency(){return 0;}
	@Override public void setProficiency(int newProficiency){}
	@Override public boolean proficiencyCheck(MOB mob, int adjustment, boolean auto){return false;}
	@Override public void helpProficiency(MOB mob, int adjustment){}
	@Override public Physical affecting(){return affected;}
	@Override public void setAffectedOne(Physical P){affected=P;}
	@Override public boolean putInCommandlist(){return false;}
	@Override public int abstractQuality(){return Ability.QUALITY_INDIFFERENT;}
	@Override public int enchantQuality(){return Ability.QUALITY_INDIFFERENT;}
	@Override public int castingQuality(MOB invoker, Physical target){return Ability.QUALITY_INDIFFERENT;}
	@Override public int classificationCode(){ return Ability.ACODE_PROPERTY;}
	@Override public boolean isSavable(){ return savable; }
	@Override public void setSavable(boolean truefalse) { savable=truefalse; }

	protected boolean amDestroyed=false;
	@Override public void destroy(){amDestroyed=true; affected=null; miscText=null; }
	@Override public boolean amDestroyed(){return amDestroyed;}
	//protected void finalize(){ CMClass.unbumpCounter(this,CMClass.CMObjectType.ABILITY); }//removed for mem & perf

	// Creates a fresh instance of the concrete subclass via reflection,
	// falling back to a plain Property if construction fails.
	@Override
	public CMObject newInstance()
	{
		try
		{
			return this.getClass().newInstance();
		}
		catch(final Exception e)
		{
			Log.errOut(ID(),e);
		}
		return new Property();
	}

	public Property()
	{
		super();
		//CMClass.bumpCounter(this,CMClass.CMObjectType.ABILITY);//removed for mem & perf
	}

	// --- generic stat access: exposes "CLASS" (read-only ID) and "TEXT" (miscText) ---
	@Override public int getSaveStatIndex(){return getStatCodes().length;}
	private static final String[] CODES={"CLASS","TEXT"};
	@Override public String[] getStatCodes(){return CODES;}
	@Override public boolean isStat(String code){ return CMParms.indexOf(getStatCodes(),code.toUpperCase().trim())>=0;}

	// maps a stat code name to its index in CODES, or -1 if unknown
	protected int getCodeNum(String code)
	{
		for(int i=0;i<CODES.length;i++)
			if(code.equalsIgnoreCase(CODES[i]))
				return i;
		return -1;
	}

	@Override
	public String getStat(String code)
	{
		switch(getCodeNum(code))
		{
		case 0: return ID();
		case 1: return text();
		}
		return "";
	}

	@Override
	public void setStat(String code, String val)
	{
		switch(getCodeNum(code))
		{
		case 0: return;              // CLASS is read-only
		case 1: setMiscText(val); break;
		}
	}

	// Two properties are "the same" when every exposed stat value matches.
	@Override
	public boolean sameAs(Environmental E)
	{
		if(!(E instanceof Property))
			return false;
		for(int i=0;i<CODES.length;i++)
			if(!E.getStat(CODES[i]).equals(getStat(CODES[i])))
				return false;
		return true;
	}

	// hook for subclasses to fix up internal references after clone(); no-op here
	private void cloneFix(Ability E){}

	@Override
	public CMObject copyOf()
	{
		try
		{
			final Property E=(Property)this.clone();
			//CMClass.bumpCounter(E,CMClass.CMObjectType.ABILITY);//removed for mem & perf
			E.cloneFix(this);
			return E;
		}
		catch(final CloneNotSupportedException e)
		{
			return this.newInstance();
		}
	}

	@Override public int compareTo(CMObject o){ return CMClass.classID(this).compareToIgnoreCase(CMClass.classID(o));}
	@Override public void setMiscText(String newMiscText) { miscText=newMiscText;}
	@Override public String text(){ return miscText;}
	@Override public String miscTextFormat(){return CMParms.FORMAT_UNDEFINED;}
	@Override public boolean appropriateToMyFactions(MOB mob){return true;}
	@Override public String accountForYourself(){return "";}
	@Override public String requirements(MOB mob){return "";}

	// Checks the concrete object's kind against the canAffectCode() bit mask.
	// A null target is acceptable only when the mask is empty.
	@Override
	public boolean canAffect(Physical P)
	{
		if((P==null)&&(canAffectCode()==0))
			return true;
		if(P==null)
			return false;
		if((P instanceof MOB)&&((canAffectCode()&Ability.CAN_MOBS)>0))
			return true;
		if((P instanceof Item)&&((canAffectCode()&Ability.CAN_ITEMS)>0))
			return true;
		if((P instanceof Exit)&&((canAffectCode()&Ability.CAN_EXITS)>0))
			return true;
		if((P instanceof Room)&&((canAffectCode()&Ability.CAN_ROOMS)>0))
			return true;
		if((P instanceof Area)&&((canAffectCode()&Ability.CAN_AREAS)>0))
			return true;
		return false;
	}

	@Override public boolean canTarget(Physical P) { return false;}

	// --- stat/state/message hooks: all neutral by default, overridden by subclasses ---
	@Override public void affectPhyStats(Physical affected, PhyStats affectableStats) {}
	@Override public void affectCharStats(MOB affectedMob, CharStats affectableStats) {}
	@Override public void affectCharState(MOB affectedMob, CharState affectableMaxState) {}
	@Override public void executeMsg(final Environmental myHost, final CMMsg msg) { return; }
	@Override public boolean okMessage(final Environmental myHost, final CMMsg msg) { return true; }
	@Override public boolean tick(Tickable ticking, int tickID) { return true; }
	@Override public void makeLongLasting(){}
	@Override public void makeNonUninvokable(){}

	// zero usage cost, shared across all instances (never mutated)
	private static final int[] cost=new int[3];
	@Override public int[] usageCost(MOB mob,boolean ignoreCostOverride){return cost;}
	@Override public boolean isGeneric(){return false;}
}
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.trino.sql.rewrite; import com.google.common.collect.ImmutableList; import io.trino.Session; import io.trino.cost.CachingStatsProvider; import io.trino.cost.PlanNodeStatsEstimate; import io.trino.cost.StatsCalculator; import io.trino.cost.SymbolStatsEstimate; import io.trino.execution.warnings.WarningCollector; import io.trino.metadata.Metadata; import io.trino.security.AccessControl; import io.trino.spi.security.GroupProvider; import io.trino.spi.type.BigintType; import io.trino.spi.type.DecimalType; import io.trino.spi.type.IntegerType; import io.trino.spi.type.RealType; import io.trino.spi.type.SmallintType; import io.trino.spi.type.TinyintType; import io.trino.spi.type.Type; import io.trino.sql.QueryUtil; import io.trino.sql.analyzer.QueryExplainer; import io.trino.sql.parser.SqlParser; import io.trino.sql.planner.Plan; import io.trino.sql.planner.Symbol; import io.trino.sql.planner.plan.OutputNode; import io.trino.sql.tree.AllColumns; import io.trino.sql.tree.AstVisitor; import io.trino.sql.tree.Cast; import io.trino.sql.tree.DoubleLiteral; import io.trino.sql.tree.Expression; import io.trino.sql.tree.Node; import io.trino.sql.tree.NodeRef; import io.trino.sql.tree.NullLiteral; import io.trino.sql.tree.Parameter; import io.trino.sql.tree.Query; import io.trino.sql.tree.QuerySpecification; import io.trino.sql.tree.Row; import io.trino.sql.tree.SelectItem; import io.trino.sql.tree.ShowStats; import 
io.trino.sql.tree.Statement;
import io.trino.sql.tree.StringLiteral;
import io.trino.sql.tree.Table;
import io.trino.sql.tree.TableSubquery;
import io.trino.sql.tree.Values;

import java.time.LocalDate;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Verify.verify;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.trino.spi.type.DateType.DATE;
import static io.trino.spi.type.DoubleType.DOUBLE;
import static io.trino.spi.type.VarcharType.VARCHAR;
import static io.trino.sql.QueryUtil.aliased;
import static io.trino.sql.QueryUtil.query;
import static io.trino.sql.QueryUtil.selectAll;
import static io.trino.sql.QueryUtil.selectList;
import static io.trino.sql.QueryUtil.simpleQuery;
import static io.trino.sql.analyzer.TypeSignatureTranslator.toSqlType;
import static java.lang.Double.isFinite;
import static java.lang.Math.round;
import static java.util.Objects.requireNonNull;

/**
 * Rewrites a SHOW STATS statement into an equivalent SELECT over an inline
 * VALUES table: one row per output column of the underlying query (with
 * data size, NDV, nulls fraction, low/high values) plus one summary row
 * carrying the estimated row count for the whole table.
 */
public class ShowStatsRewrite
        implements StatementRewrite.Rewrite
{
    // Typed NULL constants so the VALUES rows have consistent column types.
    private static final Expression NULL_DOUBLE = new Cast(new NullLiteral(), toSqlType(DOUBLE));
    private static final Expression NULL_VARCHAR = new Cast(new NullLiteral(), toSqlType(VARCHAR));

    @Override
    public Statement rewrite(
            Session session,
            Metadata metadata,
            SqlParser parser,
            Optional<QueryExplainer> queryExplainer,
            Statement node,
            List<Expression> parameters,
            Map<NodeRef<Parameter>, Expression> parameterLookup,
            GroupProvider groupProvider,
            AccessControl accessControl,
            WarningCollector warningCollector,
            StatsCalculator statsCalculator)
    {
        // Non-SHOW STATS statements fall through visitNode() and are returned unchanged.
        return (Statement) new Visitor(session, parameters, queryExplainer, warningCollector, statsCalculator).process(node, null);
    }

    private static class Visitor
            extends AstVisitor<Node, Void>
    {
        private final Session session;
        private final List<Expression> parameters;
        private final Optional<QueryExplainer> queryExplainer;
        private final WarningCollector warningCollector;
        private final StatsCalculator statsCalculator;

        private Visitor(Session session, List<Expression> parameters, Optional<QueryExplainer> queryExplainer, WarningCollector warningCollector, StatsCalculator statsCalculator)
        {
            this.session = requireNonNull(session, "session is null");
            this.parameters = requireNonNull(parameters, "parameters is null");
            this.queryExplainer = requireNonNull(queryExplainer, "queryExplainer is null");
            this.warningCollector = requireNonNull(warningCollector, "warningCollector is null");
            this.statsCalculator = requireNonNull(statsCalculator, "statsCalculator is null");
        }

        /**
         * Plans the underlying query, computes its root-node stats estimate,
         * and materializes the estimate as a VALUES-backed query.
         */
        @Override
        protected Node visitShowStats(ShowStats node, Void context)
        {
            checkState(queryExplainer.isPresent(), "Query explainer must be provided for SHOW STATS SELECT");
            Query query = getRelation(node);
            QuerySpecification specification = (QuerySpecification) query.getQueryBody();
            Plan plan = queryExplainer.get().getLogicalPlan(session, query(specification), parameters, warningCollector);
            CachingStatsProvider cachingStatsProvider = new CachingStatsProvider(statsCalculator, session, plan.getTypes());
            PlanNodeStatsEstimate stats = cachingStatsProvider.getStats(plan.getRoot());
            return rewriteShowStats(plan, stats);
        }

        // SHOW STATS FOR table -> SELECT * FROM table; SHOW STATS FOR (query) -> the query itself
        private Query getRelation(ShowStats node)
        {
            if (node.getRelation() instanceof Table) {
                return simpleQuery(selectList(new AllColumns()), node.getRelation());
            }
            if (node.getRelation() instanceof TableSubquery) {
                return ((TableSubquery) node.getRelation()).getQuery();
            }
            throw new IllegalArgumentException("Expected either TableSubquery or Table as relation");
        }

        /**
         * Builds the replacement query: one VALUES row per output column,
         * followed by a summary row holding only the estimated row count.
         * Row field order must match buildColumnsNames().
         */
        private Node rewriteShowStats(Plan plan, PlanNodeStatsEstimate planNodeStatsEstimate)
        {
            List<String> statsColumnNames = buildColumnsNames();
            List<SelectItem> selectItems = buildSelectItems(statsColumnNames);
            ImmutableList.Builder<Expression> rowsBuilder = ImmutableList.builder();
            verify(plan.getRoot() instanceof OutputNode, "Expected plan root be OutputNode, but was: %s", plan.getRoot().getClass().getName());
            OutputNode root = (OutputNode) plan.getRoot();
            for (int columnIndex = 0; columnIndex < root.getOutputSymbols().size(); columnIndex++) {
                Symbol outputSymbol = root.getOutputSymbols().get(columnIndex);
                String columnName = root.getColumnNames().get(columnIndex);
                Type columnType = plan.getTypes().get(outputSymbol);
                SymbolStatsEstimate symbolStatistics = planNodeStatsEstimate.getSymbolStatistics(outputSymbol);
                ImmutableList.Builder<Expression> rowValues = ImmutableList.builder();
                rowValues.add(new StringLiteral(columnName));
                // data_size = avg row size * row count * fraction of non-null rows
                rowValues.add(toDoubleLiteral(symbolStatistics.getAverageRowSize() * planNodeStatsEstimate.getOutputRowCount() * (1 - symbolStatistics.getNullsFraction())));
                rowValues.add(toDoubleLiteral(symbolStatistics.getDistinctValuesCount()));
                rowValues.add(toDoubleLiteral(symbolStatistics.getNullsFraction()));
                rowValues.add(NULL_DOUBLE);    // row_count is only filled on the summary row
                rowValues.add(toStringLiteral(columnType, symbolStatistics.getLowValue()));
                rowValues.add(toStringLiteral(columnType, symbolStatistics.getHighValue()));
                rowsBuilder.add(new Row(rowValues.build()));
            }
            // Stats for whole table
            ImmutableList.Builder<Expression> rowValues = ImmutableList.builder();
            rowValues.add(NULL_VARCHAR);
            rowValues.add(NULL_DOUBLE);
            rowValues.add(NULL_DOUBLE);
            rowValues.add(NULL_DOUBLE);
            rowValues.add(toDoubleLiteral(planNodeStatsEstimate.getOutputRowCount()));
            rowValues.add(NULL_VARCHAR);
            rowValues.add(NULL_VARCHAR);
            rowsBuilder.add(new Row(rowValues.build()));
            List<Expression> resultRows = rowsBuilder.build();
            return simpleQuery(selectAll(selectItems), aliased(new Values(resultRows), "table_stats", statsColumnNames));
        }

        // Default: leave any non-SHOW STATS node untouched.
        @Override
        protected Node visitNode(Node node, Void context)
        {
            return node;
        }

        // Output column order; must stay in sync with the row construction above.
        private static List<String> buildColumnsNames()
        {
            return ImmutableList.<String>builder()
                    .add("column_name")
                    .add("data_size")
                    .add("distinct_values_count")
                    .add("nulls_fraction")
                    .add("row_count")
                    .add("low_value")
                    .add("high_value")
                    .build();
        }

        private static List<SelectItem> buildSelectItems(List<String> columnNames)
        {
            return columnNames.stream()
                    .map(QueryUtil::unaliasedName)
                    .collect(toImmutableList());
        }

        /**
         * Renders a low/high statistic (stored internally as double) back into a
         * string literal appropriate for the column's SQL type; NaN/infinite
         * values become a typed NULL.
         */
        private static Expression toStringLiteral(Type type, double value)
        {
            if (!isFinite(value)) {
                return NULL_VARCHAR;
            }
            if (type.equals(BigintType.BIGINT) || type.equals(IntegerType.INTEGER) || type.equals(SmallintType.SMALLINT) || type.equals(TinyintType.TINYINT)) {
                return new StringLiteral(Long.toString(round(value)));
            }
            if (type.equals(DOUBLE) || type instanceof DecimalType) {
                return new StringLiteral(Double.toString(value));
            }
            if (type.equals(RealType.REAL)) {
                return new StringLiteral(Float.toString((float) value));
            }
            if (type.equals(DATE)) {
                // DATE stats are stored as epoch-day numbers
                return new StringLiteral(LocalDate.ofEpochDay(round(value)).toString());
            }
            throw new IllegalArgumentException("Unexpected type: " + type);
        }
    }

    // Unknown (NaN/infinite) estimates render as a typed NULL double.
    private static Expression toDoubleLiteral(double value)
    {
        if (!isFinite(value)) {
            return NULL_DOUBLE;
        }
        return new DoubleLiteral(Double.toString(value));
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.storm.daemon.logviewer.utils; import com.codahale.metrics.Meter; import java.io.IOException; import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.attribute.FileTime; import java.util.ArrayList; import java.util.Comparator; import java.util.HashSet; import java.util.List; import java.util.PriorityQueue; import java.util.Set; import java.util.Stack; import java.util.regex.Pattern; import org.apache.commons.lang3.tuple.Pair; import org.apache.storm.metric.StormMetricsRegistry; import org.apache.storm.utils.Utils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Provide methods to help Logviewer to clean up * files in directories and to get a list of files without * worrying about excessive memory usage. 
 */
public class DirectoryCleaner {
    private static final Logger LOG = LoggerFactory.getLogger(DirectoryCleaner.class);

    // used to recognize the pattern of active log files, we may remove the "current" from this list
    private static final Pattern ACTIVE_LOG_PATTERN = Pattern.compile(".*\\.(log|err|out|current|yaml|pid|metrics)$");
    // used to recognize the pattern of some meta files in a worker log directory
    private static final Pattern META_LOG_PATTERN = Pattern.compile(".*\\.(yaml|pid)$");

    private static final int PQ_SIZE = 1024; // max number of files to delete for every round
    private static final int MAX_ROUNDS = 512; // max rounds of scanning the dirs
    public static final int MAX_NUMBER_OF_FILES_FOR_DIR = 1024;

    // counts IOExceptions raised while opening directory streams
    private final Meter numFileOpenExceptions;

    public DirectoryCleaner(StormMetricsRegistry metricsRegistry) {
        this.numFileOpenExceptions = metricsRegistry.registerMeter(ExceptionMeterNames.NUM_FILE_OPEN_EXCEPTIONS);
    }

    /**
     * Creates DirectoryStream for give directory.
     *
     * @param dir File instance representing specific directory
     * @return DirectoryStream
     * @throws IOException rethrown after being counted in the numFileOpenExceptions meter
     */
    public DirectoryStream<Path> getStreamForDirectory(Path dir) throws IOException {
        try {
            return Files.newDirectoryStream(dir);
        } catch (IOException e) {
            numFileOpenExceptions.mark();
            throw e;
        }
    }

    /**
     * If totalSize of files exceeds the either the per-worker quota or global quota,
     * Logviewer deletes oldest inactive log files in a worker directory or in all worker dirs.
     * We use the parameter forPerDir to switch between the two deletion modes.
     *
     * @param dirs the list of directories to be scanned for deletion
     * @param quota the per-dir quota or the total quota for the all directories
     * @param forPerDir if true, deletion happens for a single dir; otherwise, for all directories globally
     * @param activeDirs only for global deletion, we want to skip the active logs in activeDirs
     * @return a DeletionMeta carrying the total bytes and number of files deleted
     */
    public DeletionMeta deleteOldestWhileTooLarge(List<Path> dirs, long quota, boolean forPerDir, Set<Path> activeDirs) throws IOException {
        // First pass: sum up current disk usage across all target directories.
        long totalSize = 0;
        for (Path dir : dirs) {
            try (DirectoryStream<Path> stream = getStreamForDirectory(dir)) {
                for (Path path : stream) {
                    totalSize += Files.size(path);
                }
            }
        }
        LOG.debug("totalSize: {} quota: {}", totalSize, quota);
        long toDeleteSize = totalSize - quota;
        if (toDeleteSize <= 0) {
            return DeletionMeta.EMPTY;
        }

        int deletedFiles = 0;
        long deletedSize = 0;
        // the oldest pq_size files in this directory will be placed in PQ, with the newest at the root
        PriorityQueue<Pair<Path, FileTime>> pq = new PriorityQueue<>(PQ_SIZE,
            Comparator.comparing((Pair<Path, FileTime> p) -> p.getRight()).reversed());
        int round = 0;
        final Set<Path> excluded = new HashSet<>();
        while (toDeleteSize > 0) {
            LOG.debug("To delete size is {}, start a new round of deletion, round: {}", toDeleteSize, round);
            // Rebuild the PQ each round: keep the PQ_SIZE oldest eligible files.
            // The reversed comparator puts the NEWEST of those at the root so it
            // can be evicted when an older candidate shows up.
            for (Path dir : dirs) {
                try (DirectoryStream<Path> stream = getStreamForDirectory(dir)) {
                    for (Path path : stream) {
                        if (!excluded.contains(path)) {
                            if (isFileEligibleToSkipDelete(forPerDir, activeDirs, dir, path)) {
                                // remember skip decisions so later rounds don't re-check
                                excluded.add(path);
                            } else {
                                Pair<Path, FileTime> p = Pair.of(path, Files.getLastModifiedTime(path));
                                if (pq.size() < PQ_SIZE) {
                                    pq.offer(p);
                                } else if (p.getRight().toMillis() < pq.peek().getRight().toMillis()) {
                                    pq.poll();
                                    pq.offer(p);
                                }
                            }
                        }
                    }
                }
            }
            if (!pq.isEmpty()) {
                // need to reverse the order of elements in PQ to delete files from oldest to newest
                Stack<Pair<Path, FileTime>> stack = new Stack<>();
                while (!pq.isEmpty()) {
                    stack.push(pq.poll());
                }
                while (!stack.isEmpty() && toDeleteSize > 0) {
                    Pair<Path, FileTime> pair = stack.pop();
                    Path file = pair.getLeft();
                    final String canonicalPath = file.toAbsolutePath().normalize().toString();
                    final long fileSize = Files.size(file);
                    final long lastModified = pair.getRight().toMillis();
                    //Original implementation doesn't actually check if delete succeeded or not.
                    try {
                        Utils.forceDelete(file.toString());
                        LOG.info("Delete file: {}, size: {}, lastModified: {}", canonicalPath, fileSize, lastModified);
                        toDeleteSize -= fileSize;
                        deletedSize += fileSize;
                        deletedFiles++;
                    } catch (IOException e) {
                        // deletion failed; exclude so we don't retry it every round
                        excluded.add(file);
                    }
                }
                pq.clear();
                round++;
                if (round >= MAX_ROUNDS) {
                    if (forPerDir) {
                        LOG.warn("Reach the MAX_ROUNDS: {} during per-dir deletion, you may have too many files in "
                            + "a single directory : {}, will delete the rest files in next interval.",
                            MAX_ROUNDS, dirs.get(0).toAbsolutePath().normalize());
                    } else {
                        LOG.warn("Reach the MAX_ROUNDS: {} during global deletion, you may have too many files, "
                            + "will delete the rest files in next interval.", MAX_ROUNDS);
                    }
                    break;
                }
            } else {
                // Nothing left that we are allowed to delete: exit the loop still over quota.
                // NOTE(review): the two warns below look like a duplicated message (likely a
                // merge artifact) — confirm against upstream and keep only one.
                LOG.warn("No more files able to delete this round, but {} is over quota by {} MB",
                    forPerDir ? "this directory" : "root directory", toDeleteSize * 1e-6);
                LOG.warn("No more files eligible to be deleted this round, but {} is over {} quota by {} MB",
                    forPerDir ? "worker directory: " + dirs.get(0).toAbsolutePath().normalize() : "log root directory",
                    forPerDir ? "per-worker" : "global", toDeleteSize * 1e-6);
            }
        }
        return new DeletionMeta(deletedSize, deletedFiles);
    }

    /**
     * Decides whether a file must be skipped during deletion: per-dir mode skips
     * anything matching the active-log pattern; global mode skips active logs in
     * active worker dirs but only meta files (yaml/pid) elsewhere.
     */
    private boolean isFileEligibleToSkipDelete(boolean forPerDir, Set<Path> activeDirs, Path dir, Path file) throws IOException {
        if (forPerDir) {
            return ACTIVE_LOG_PATTERN.matcher(file.getFileName().toString()).matches();
        } else { // for global cleanup
            // for an active worker's dir, make sure for the last "/"
            return activeDirs.contains(dir) ? ACTIVE_LOG_PATTERN.matcher(file.getFileName().toString()).matches()
                : META_LOG_PATTERN.matcher(file.getFileName().toString()).matches();
        }
    }

    /**
     * Lists files in directory.
     * Note that to avoid memory problem, we only return the first 1024 files in a directory.
     *
     * @param dir directory to get file list
     * @return files in directory (at most MAX_NUMBER_OF_FILES_FOR_DIR entries)
     */
    public List<Path> getFilesForDir(Path dir) throws IOException {
        List<Path> files = new ArrayList<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
            for (Path path : stream) {
                files.add(path);
                if (files.size() >= MAX_NUMBER_OF_FILES_FOR_DIR) {
                    break;
                }
            }
        } catch (IOException e) {
            numFileOpenExceptions.mark();
            throw e;
        }
        return files;
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.EditLogValidation;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.log4j.Level;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.google.common.collect.Maps;
import com.google.common.io.Files;

/**
 * Tests for {@link FSEditLogLoader}: opcode display on corrupt logs, replication
 * adjustment on replay, stream position limiting, and edit-log validation against
 * corrupted/truncated/empty log files. Parameterized over sync vs. async edit logging.
 */
@RunWith(Parameterized.class)
public class TestFSEditLogLoader {

  @Parameters
  public static Collection<Object[]> data() {
    Collection<Object[]> params = new ArrayList<Object[]>();
    params.add(new Object[]{ Boolean.FALSE });
    params.add(new Object[]{ Boolean.TRUE });
    return params;
  }

  // Static because getConf() is called from static-ish contexts; set by the
  // parameterized constructor before each test instance runs.
  private static boolean useAsyncEditLog;

  public TestFSEditLogLoader(Boolean async) {
    useAsyncEditLog = async;
  }

  private static Configuration getConf() {
    Configuration conf = new HdfsConfiguration();
    conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_EDITS_ASYNC_LOGGING,
        useAsyncEditLog);
    return conf;
  }

  static {
    GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
    GenericTestUtils.setLogLevel(FSEditLogLoader.LOG, Level.ALL);
  }

  private static final File TEST_DIR =
      PathUtils.getTestDir(TestFSEditLogLoader.class);

  private static final int NUM_DATA_NODES = 0;

  /**
   * Verify that when edit-log replay fails, the error message includes the
   * offsets of recently seen opcodes.
   */
  @Test
  public void testDisplayRecentEditLogOpCodes() throws IOException {
    // start a cluster
    Configuration conf = getConf();
    MiniDFSCluster cluster = null;
    FileSystem fileSys = null;
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES)
        .enableManagedDfsDirsRedundancy(false).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();

    FSImage fsimage = namesystem.getFSImage();
    for (int i = 0; i < 20; i++) {
      fileSys.mkdirs(new Path("/tmp/tmp" + i));
    }
    StorageDirectory sd = fsimage.getStorage().dirIterator(NameNodeDirType.EDITS).next();
    cluster.shutdown();

    File editFile = FSImageTestUtil.findLatestEditsLog(sd).getFile();
    assertTrue("Should exist: " + editFile, editFile.exists());

    // Corrupt the edits file. Close the file in a finally block so it is not
    // leaked if a write fails (consistent with testValidateEditLogWithCorruptHeader).
    long fileLen = editFile.length();
    RandomAccessFile rwf = new RandomAccessFile(editFile, "rw");
    try {
      rwf.seek(fileLen - 40);
      for (int i = 0; i < 20; i++) {
        rwf.write(FSEditLogOpCodes.OP_DELETE.getOpCode());
      }
    } finally {
      rwf.close();
    }

    StringBuilder bld = new StringBuilder();
    bld.append("^Error replaying edit log at offset \\d+.  ");
    bld.append("Expected transaction ID was \\d+\n");
    bld.append("Recent opcode offsets: (\\d+\\s*){4}$");
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES)
          .enableManagedDfsDirsRedundancy(false).format(false).build();
      fail("should not be able to start");
    } catch (IOException e) {
      assertTrue("error message contains opcodes message",
          e.getMessage().matches(bld.toString()));
    }
  }

  /**
   * Test that, if the NN restarts with a new minimum replication,
   * any files created with the old replication count will get
   * automatically bumped up to the new minimum upon restart.
   */
  @Test
  public void testReplicationAdjusted() throws Exception {
    // start a cluster
    Configuration conf = getConf();
    // Replicate and heartbeat fast to shave a few seconds off test
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);

    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
          .build();
      cluster.waitActive();
      FileSystem fs = cluster.getFileSystem();

      // Create a file with replication count 1
      Path p = new Path("/testfile");
      DFSTestUtil.createFile(fs, p, 10, /*repl*/ (short)1, 1);
      DFSTestUtil.waitReplication(fs, p, (short)1);

      // Shut down and restart cluster with new minimum replication of 2
      cluster.shutdown();
      cluster = null;

      conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY, 2);

      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
          .format(false).build();
      cluster.waitActive();
      fs = cluster.getFileSystem();

      // The file should get adjusted to replication 2 when
      // the edit log is replayed.
      DFSTestUtil.waitReplication(fs, p, (short)2);
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }

  /**
   * Corrupt the byte at the given offset in the given file,
   * by subtracting 1 from it.
   */
  private void corruptByteInFile(File file, long offset)
      throws IOException {
    RandomAccessFile raf = new RandomAccessFile(file, "rw");
    try {
      raf.seek(offset);
      int origByte = raf.read();
      raf.seek(offset);
      raf.writeByte(origByte - 1);
    } finally {
      IOUtils.closeStream(raf);
    }
  }

  /**
   * Truncate the given file to the given length.
   * Closes the file in a finally block so it is not leaked if setLength fails.
   */
  private void truncateFile(File logFile, long newLength)
      throws IOException {
    RandomAccessFile raf = new RandomAccessFile(logFile, "rw");
    try {
      raf.setLength(newLength);
    } finally {
      raf.close();
    }
  }

  /**
   * Return the length of bytes in the given file after subtracting
   * the trailer of 0xFF (OP_INVALID)s.
   * This seeks to the end of the file and reads chunks backwards until
   * it finds a non-0xFF byte.
   * @throws IOException if the file cannot be read
   */
  private static long getNonTrailerLength(File f) throws IOException {
    final int chunkSizeToRead = 256*1024;
    FileInputStream fis = new FileInputStream(f);
    try {
      byte buf[] = new byte[chunkSizeToRead];

      FileChannel fc = fis.getChannel();
      long size = fc.size();
      long pos = size - (size % chunkSizeToRead);

      while (pos >= 0) {
        fc.position(pos);

        int readLen = (int) Math.min(size - pos, chunkSizeToRead);
        IOUtils.readFully(fis, buf, 0, readLen);
        for (int i = readLen - 1; i >= 0; i--) {
          if (buf[i] != FSEditLogOpCodes.OP_INVALID.getOpCode()) {
            return pos + i + 1; // + 1 since we count this byte!
          }
        }

        pos -= chunkSizeToRead;
      }
      return 0;
    } finally {
      fis.close();
    }
  }

  /**
   * Verify that PositionTrackingInputStream enforces its read limit and that
   * mark/reset interact correctly with the limit.
   */
  @Test
  public void testStreamLimiter() throws IOException {
    final File LIMITER_TEST_FILE = new File(TEST_DIR, "limiter.test");

    FileOutputStream fos = new FileOutputStream(LIMITER_TEST_FILE);
    try {
      fos.write(0x12);
      fos.write(0x12);
      fos.write(0x12);
    } finally {
      fos.close();
    }

    FileInputStream fin = new FileInputStream(LIMITER_TEST_FILE);
    BufferedInputStream bin = new BufferedInputStream(fin);
    FSEditLogLoader.PositionTrackingInputStream tracker =
        new FSEditLogLoader.PositionTrackingInputStream(bin);
    try {
      tracker.setLimit(2);
      tracker.mark(100);
      tracker.read();
      tracker.read();
      try {
        tracker.read();
        fail("expected to get IOException after reading past the limit");
      } catch (IOException e) {
        // expected
      }
      tracker.reset();
      tracker.mark(100);
      byte arr[] = new byte[3];
      try {
        tracker.read(arr);
        fail("expected to get IOException after reading past the limit");
      } catch (IOException e) {
        // expected
      }
      tracker.reset();
      arr = new byte[2];
      tracker.read(arr);
    } finally {
      tracker.close();
    }
  }

  /**
   * Create an unfinalized edit log for testing purposes
   *
   * @param testDir           Directory to create the edit log in
   * @param numTx             Number of transactions to add to the new edit log
   * @param offsetToTxId      A map from transaction IDs to offsets in the
   *                          edit log file.
   * @return                  The new edit log file name.
   * @throws IOException
   */
  static private File prepareUnfinalizedTestEditLog(File testDir, int numTx,
      SortedMap<Long, Long> offsetToTxId) throws IOException {
    File inProgressFile = new File(testDir, NNStorage.getInProgressEditsFileName(1));
    FSEditLog fsel = null, spyLog = null;
    try {
      fsel = FSImageTestUtil.createStandaloneEditLog(testDir);
      spyLog = spy(fsel);
      // Normally, the in-progress edit log would be finalized by
      // FSEditLog#endCurrentLogSegment.  For testing purposes, we
      // disable that here.
      doNothing().when(spyLog).endCurrentLogSegment(true);
      spyLog.openForWrite(NameNodeLayoutVersion.CURRENT_LAYOUT_VERSION);
      assertTrue("should exist: " + inProgressFile, inProgressFile.exists());

      for (int i = 0; i < numTx; i++) {
        long trueOffset = getNonTrailerLength(inProgressFile);
        long thisTxId = spyLog.getLastWrittenTxId() + 1;
        offsetToTxId.put(trueOffset, thisTxId);
        System.err.println("txid " + thisTxId + " at offset " + trueOffset);
        spyLog.logDelete("path" + i, i, false);
        spyLog.logSync();
      }
    } finally {
      if (spyLog != null) {
        spyLog.close();
      } else if (fsel != null) {
        fsel.close();
      }
    }
    return inProgressFile;
  }

  /** Validation must report a corrupt header when the log header is garbage. */
  @Test
  public void testValidateEditLogWithCorruptHeader() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptHeader");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    File logFile = prepareUnfinalizedTestEditLog(testDir, 2, offsetToTxId);
    RandomAccessFile rwf = new RandomAccessFile(logFile, "rw");
    try {
      rwf.seek(0);
      rwf.writeLong(42); // corrupt header
    } finally {
      rwf.close();
    }
    EditLogValidation validation =
        EditLogFileInputStream.scanEditLog(logFile, Long.MAX_VALUE, true);
    assertTrue(validation.hasCorruptHeader());
  }

  /**
   * Corrupt or truncate the log at each known transaction offset and check
   * that validation still reports the expected last valid txid.
   */
  @Test
  public void testValidateEditLogWithCorruptBody() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptBody");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    final int NUM_TXNS = 20;
    File logFile = prepareUnfinalizedTestEditLog(testDir, NUM_TXNS,
        offsetToTxId);
    // Back up the uncorrupted log
    File logFileBak = new File(testDir, logFile.getName() + ".bak");
    Files.copy(logFile, logFileBak);
    EditLogValidation validation =
        EditLogFileInputStream.scanEditLog(logFile, Long.MAX_VALUE, true);
    assertTrue(!validation.hasCorruptHeader());
    // We expect that there will be an OP_START_LOG_SEGMENT, followed by
    // NUM_TXNS opcodes, followed by an OP_END_LOG_SEGMENT.
    assertEquals(NUM_TXNS + 1, validation.getEndTxId());
    // Corrupt each edit and verify that validation continues to work
    for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) {
      long txOffset = entry.getKey();
      long txId = entry.getValue();

      // Restore backup, corrupt the txn opcode
      Files.copy(logFileBak, logFile);
      corruptByteInFile(logFile, txOffset);
      validation = EditLogFileInputStream.scanEditLog(logFile, Long.MAX_VALUE,
          true);
      long expectedEndTxId = (txId == (NUM_TXNS + 1)) ?
          NUM_TXNS : (NUM_TXNS + 1);
      assertEquals("Failed when corrupting txn opcode at " + txOffset,
          expectedEndTxId, validation.getEndTxId());
      assertTrue(!validation.hasCorruptHeader());
    }

    // Truncate right before each edit and verify that validation continues
    // to work
    for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) {
      long txOffset = entry.getKey();
      long txId = entry.getValue();

      // Restore backup, truncate the file right before the txn opcode
      Files.copy(logFileBak, logFile);
      truncateFile(logFile, txOffset);
      validation = EditLogFileInputStream.scanEditLog(logFile, Long.MAX_VALUE,
          true);
      long expectedEndTxId = (txId == 0) ?
          HdfsServerConstants.INVALID_TXID : (txId - 1);
      assertEquals("Failed when corrupting txid " + txId + " txn opcode " +
          "at " + txOffset, expectedEndTxId, validation.getEndTxId());
      assertTrue(!validation.hasCorruptHeader());
    }
  }

  /** A log containing only the header must validate with INVALID_TXID. */
  @Test
  public void testValidateEmptyEditLog() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEmptyEditLog");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    File logFile = prepareUnfinalizedTestEditLog(testDir, 0, offsetToTxId);
    // Truncate the file so that there is nothing except the header and
    // layout flags section.
    truncateFile(logFile, 8);
    EditLogValidation validation =
        EditLogFileInputStream.scanEditLog(logFile, Long.MAX_VALUE, true);
    assertTrue(!validation.hasCorruptHeader());
    assertEquals(HdfsServerConstants.INVALID_TXID, validation.getEndTxId());
  }

  // Reverse mapping from opcode byte to enum, used as the reference for
  // testFSEditLogOpCodes below.
  private static final Map<Byte, FSEditLogOpCodes> byteToEnum =
      new HashMap<Byte, FSEditLogOpCodes>();
  static {
    for(FSEditLogOpCodes opCode : FSEditLogOpCodes.values()) {
      byteToEnum.put(opCode.getOpCode(), opCode);
    }
  }

  private static FSEditLogOpCodes fromByte(byte opCode) {
    return byteToEnum.get(opCode);
  }

  /** FSEditLogOpCodes.fromByte must agree with the enum's own opcode bytes. */
  @Test
  public void testFSEditLogOpCodes() throws IOException {
    //try all codes
    for(FSEditLogOpCodes c : FSEditLogOpCodes.values()) {
      final byte code = c.getOpCode();
      assertEquals("c=" + c + ", code=" + code,
          c, FSEditLogOpCodes.fromByte(code));
    }

    //try all byte values
    for(int b = 0; b < (1 << Byte.SIZE); b++) {
      final byte code = (byte)b;
      assertEquals("b=" + b + ", code=" + code,
          fromByte(code), FSEditLogOpCodes.fromByte(code));
    }
  }
}
/*
 * Copyright 1999,2004 The Apache Software Foundation.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.naming.resources;

import java.net.URL;
import java.net.URLConnection;
import java.io.IOException;
import java.io.InputStream;
import java.io.FileNotFoundException;
import java.security.Permission;
import java.util.Date;
import java.util.Enumeration;
import java.util.Vector;
import javax.naming.NamingException;
import javax.naming.NamingEnumeration;
import javax.naming.NameClassPair;
import javax.naming.directory.DirContext;
import javax.naming.directory.Attribute;
import javax.naming.directory.Attributes;
import org.apache.naming.JndiPermission;
import org.apache.naming.resources.Resource;
import org.apache.naming.resources.ResourceAttributes;

/**
 * Connection to a JNDI directory context.
 * <p/>
 * Note: All the object attribute names are the WebDAV names, not the HTTP
 * names, so this class overrides some methods from URLConnection to do the
 * queries using the right names. Content handler is also not used; the
 * content is directly returned.
 *
 * @author <a href="mailto:remm@apache.org">Remy Maucherat</a>
 * @version $Revision: 303022 $
 */
public class DirContextURLConnection extends URLConnection {

    // ----------------------------------------------------------- Constructors

    /**
     * Builds a connection backed by the given directory context.
     * A JndiPermission for the URL is created only when a SecurityManager
     * is installed.
     *
     * @throws IllegalArgumentException if context is null
     */
    public DirContextURLConnection(DirContext context, URL url) {
        super(url);
        if (context == null)
            throw new IllegalArgumentException
                ("Directory context can't be null");
        if (System.getSecurityManager() != null) {
            this.permission = new JndiPermission(url.toString());
        }
        this.context = context;
    }

    // ----------------------------------------------------- Instance Variables

    /**
     * Directory context.
     */
    protected DirContext context;

    /**
     * Associated resource. Non-null after connect() only when the looked-up
     * object is a Resource (i.e. a file-like entry).
     */
    protected Resource resource;

    /**
     * Associated DirContext. Non-null after connect() only when the looked-up
     * object is itself a DirContext (i.e. a collection/directory).
     */
    protected DirContext collection;

    /**
     * Other unknown object bound at the URL's path.
     */
    protected Object object;

    /**
     * Attributes of the bound object, fetched during connect().
     */
    protected Attributes attributes;

    /**
     * Time (ms since epoch) at which connect() ran; see getDate().
     */
    protected long date;

    /**
     * Permission guarding access to this URL (null when no SecurityManager).
     */
    protected Permission permission;

    // ------------------------------------------------------------- Properties

    /**
     * Connect to the DirContext, and retrieve the bound object, as well as
     * its attributes. If no object is bound with the name specified in the
     * URL, then an IOException is thrown.
     *
     * NOTE(review): despite the javadoc above, a NamingException during lookup
     * is swallowed below and 'connected' is still set to true, leaving
     * object/resource/collection null; callers detect the failure later
     * (e.g. getContent() throwing FileNotFoundException). The early returns
     * on host/context prefix mismatch leave the connection unconnected.
     * This ordering is long-standing behavior — preserved as-is.
     *
     * @throws IOException Object not found
     */
    public void connect()
        throws IOException {

        if (!connected) {

            try {
                // Record the connect time; this is what getDate() reports.
                date = System.currentTimeMillis();
                String path = getURL().getFile();
                if (context instanceof ProxyDirContext) {
                    // Strip the "/<host>" and "<contextName>" prefixes so the
                    // remaining path is relative to the proxied context.
                    ProxyDirContext proxyDirContext =
                        (ProxyDirContext) context;
                    String hostName = proxyDirContext.getHostName();
                    String contextName = proxyDirContext.getContextName();
                    if (hostName != null) {
                        if (!path.startsWith("/" + hostName + "/"))
                            return;
                        path = path.substring(hostName.length() + 1);
                    }
                    if (contextName != null) {
                        if (!path.startsWith(contextName + "/")) {
                            return;
                        } else {
                            path = path.substring(contextName.length());
                        }
                    }
                }
                object = context.lookup(path);
                attributes = context.getAttributes(path);
                // Classify the bound object: file-like resource vs. collection.
                if (object instanceof Resource)
                    resource = (Resource) object;
                if (object instanceof DirContext)
                    collection = (DirContext) object;
            } catch (NamingException e) {
                // Object not found
            }

            connected = true;

        }

    }

    /**
     * Return the content length value (WebDAV attribute), or -1 if absent.
     */
    public int getContentLength() {
        return getHeaderFieldInt(ResourceAttributes.CONTENT_LENGTH, -1);
    }

    /**
     * Return the content type value (WebDAV attribute), or null if absent.
     */
    public String getContentType() {
        return getHeaderField(ResourceAttributes.CONTENT_TYPE);
    }

    /**
     * Return the date at which connect() was performed (NOT the resource's
     * own date attribute; 0 if never connected).
     */
    public long getDate() {
        return date;
    }

    /**
     * Return the last modified date from the WebDAV attributes, or 0 if
     * unavailable. Connects silently on first use.
     */
    public long getLastModified() {

        if (!connected) {
            // Try to connect (silently)
            try {
                connect();
            } catch (IOException e) {
            }
        }

        if (attributes == null)
            return 0;

        // Attribute value is expected to be a java.util.Date; any failure
        // (missing attribute value, wrong type) yields 0.
        Attribute lastModified =
            attributes.get(ResourceAttributes.LAST_MODIFIED);
        if (lastModified != null) {
            try {
                Date lmDate = (Date) lastModified.get();
                return lmDate.getTime();
            } catch (Exception e) {
            }
        }

        return 0;
    }

    /**
     * Returns the value of the specified header field (WebDAV attribute
     * name), or null if it is absent or has no value. Connects silently
     * on first use.
     */
    public String getHeaderField(String name) {

        if (!connected) {
            // Try to connect (silently)
            try {
                connect();
            } catch (IOException e) {
            }
        }

        if (attributes == null)
            return (null);

        // A null attribute triggers an NPE inside the try and falls through
        // to return null, same as an attribute with no value.
        Attribute attribute = attributes.get(name);
        try {
            return attribute.get().toString();
        } catch (Exception e) {
            // Shouldn't happen, unless the attribute has no value
        }

        return (null);

    }

    /**
     * Get object content: an InputStream for a resource, the DirContext for
     * a collection, or the raw bound object; FileNotFoundException if the
     * lookup found nothing.
     */
    public Object getContent()
        throws IOException {

        if (!connected)
            connect();

        if (resource != null)
            return getInputStream();
        if (collection != null)
            return collection;
        if (object != null)
            return object;

        throw new FileNotFoundException();

    }

    /**
     * Get object content, constrained to the first matching class in the
     * given list; null if the content matches none of them.
     */
    public Object getContent(Class[] classes)
        throws IOException {

        Object object = getContent();

        for (int i = 0; i < classes.length; i++) {
            if (classes[i].isInstance(object))
                return object;
        }

        return null;

    }

    /**
     * Get input stream for the resource's content.
     *
     * @throws FileNotFoundException if the bound object is not a resource
     */
    public InputStream getInputStream()
        throws IOException {

        if (!connected)
            connect();

        if (resource == null) {
            throw new FileNotFoundException();
        } else {
            // Reopen resource: re-lookup so the stream reflects current
            // content. A failed re-lookup is ignored and the previously
            // bound resource is streamed instead.
            try {
                resource = (Resource) context.lookup(getURL().getFile());
            } catch (NamingException e) {
            }
        }

        return (resource.streamContent());

    }

    /**
     * Get the Permission for this URL (null when no SecurityManager was
     * installed at construction time).
     */
    public Permission getPermission() {

        return permission;

    }

    // --------------------------------------------------------- Public Methods

    /**
     * List children of this collection. The names given are relative to this
     * URI's path. The full uri of the children is then : path + "/" + name.
     *
     * @throws FileNotFoundException if nothing is bound at this URL, or if
     *         listing the collection fails
     */
    public Enumeration list()
        throws IOException {

        if (!connected) {
            connect();
        }

        if ((resource == null) && (collection == null)) {
            throw new FileNotFoundException();
        }

        Vector result = new Vector();

        if (collection != null) {
            try {
                NamingEnumeration enumeration = context.list(getURL().getFile());
                while (enumeration.hasMoreElements()) {
                    NameClassPair ncp = (NameClassPair) enumeration.nextElement();
                    result.addElement(ncp.getName());
                }
            } catch (NamingException e) {
                // Unexpected exception
                throw new FileNotFoundException();
            }
        }

        return result.elements();

    }

}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.config; import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; import java.nio.ByteBuffer; import java.util.*; import com.google.common.collect.MapDifference; import com.google.common.collect.Maps; import org.apache.commons.lang.ArrayUtils; import org.apache.commons.lang.StringUtils; import org.apache.commons.lang.builder.EqualsBuilder; import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.commons.lang.builder.ToStringBuilder; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.cql3.CFDefinition; import org.apache.cassandra.cql3.QueryProcessor; import org.apache.cassandra.cql3.UntypedResultSet; import org.apache.cassandra.cql3.statements.CreateColumnFamilyStatement; import org.apache.cassandra.db.*; import org.apache.cassandra.db.compaction.AbstractCompactionStrategy; import org.apache.cassandra.db.compaction.LeveledCompactionStrategy; import org.apache.cassandra.db.compaction.SizeTieredCompactionStrategy; import org.apache.cassandra.db.index.SecondaryIndex; import org.apache.cassandra.db.marshal.*; import org.apache.cassandra.exceptions.ConfigurationException; import 
org.apache.cassandra.exceptions.InvalidRequestException; import org.apache.cassandra.exceptions.RequestValidationException; import org.apache.cassandra.exceptions.SyntaxException; import org.apache.cassandra.io.IColumnSerializer; import org.apache.cassandra.io.compress.CompressionParameters; import org.apache.cassandra.io.compress.SnappyCompressor; import org.apache.cassandra.thrift.IndexType; import org.apache.cassandra.tracing.Tracing; import org.apache.cassandra.utils.ByteBufferUtil; import org.apache.cassandra.utils.FBUtilities; import static org.apache.cassandra.utils.FBUtilities.*; public final class CFMetaData { // // !! Important !! // This class can be tricky to modify. Please read http://wiki.apache.org/cassandra/ConfigurationNotes // for how to do so safely. // private static final Logger logger = LoggerFactory.getLogger(CFMetaData.class); public final static double DEFAULT_READ_REPAIR_CHANCE = 0.1; public final static double DEFAULT_DCLOCAL_READ_REPAIR_CHANCE = 0.0; public final static boolean DEFAULT_REPLICATE_ON_WRITE = true; public final static int DEFAULT_GC_GRACE_SECONDS = 864000; public final static int DEFAULT_MIN_COMPACTION_THRESHOLD = 4; public final static int DEFAULT_MAX_COMPACTION_THRESHOLD = 32; public final static Class<? extends AbstractCompactionStrategy> DEFAULT_COMPACTION_STRATEGY_CLASS = SizeTieredCompactionStrategy.class; public final static ByteBuffer DEFAULT_KEY_NAME = ByteBufferUtil.bytes("KEY"); public final static Caching DEFAULT_CACHING_STRATEGY = Caching.KEYS_ONLY; // Note that this is the default only for user created tables public final static String DEFAULT_COMPRESSOR = SnappyCompressor.isAvailable() ? 
SnappyCompressor.class.getCanonicalName() : null; @Deprecated public static final CFMetaData OldStatusCf = newSystemMetadata(Table.SYSTEM_KS, SystemTable.OLD_STATUS_CF, 0, "unused", BytesType.instance, null); @Deprecated public static final CFMetaData OldHintsCf = newSystemMetadata(Table.SYSTEM_KS, SystemTable.OLD_HINTS_CF, 1, "unused", BytesType.instance, BytesType.instance); @Deprecated public static final CFMetaData OldMigrationsCf = newSystemMetadata(Table.SYSTEM_KS, DefsTable.OLD_MIGRATIONS_CF, 2, "unused", TimeUUIDType.instance, null); @Deprecated public static final CFMetaData OldSchemaCf = newSystemMetadata(Table.SYSTEM_KS, DefsTable.OLD_SCHEMA_CF, 3, "unused", UTF8Type.instance, null); public static final CFMetaData IndexCf = compile(5, "CREATE TABLE \"" + SystemTable.INDEX_CF + "\" (" + "table_name text," + "index_name text," + "PRIMARY KEY (table_name, index_name)" + ") WITH COMPACT STORAGE AND COMMENT='indexes that have been completed'"); public static final CFMetaData CounterIdCf = compile(6, "CREATE TABLE \"" + SystemTable.COUNTER_ID_CF + "\" (" + "key text," + "id timeuuid," + "PRIMARY KEY (key, id)" + ") WITH COMPACT STORAGE AND COMMENT='counter node IDs'"); // new-style schema public static final CFMetaData SchemaKeyspacesCf = compile(8, "CREATE TABLE " + SystemTable.SCHEMA_KEYSPACES_CF + "(" + "keyspace_name text PRIMARY KEY," + "durable_writes boolean," + "strategy_class text," + "strategy_options text" + ") WITH COMPACT STORAGE AND COMMENT='keyspace definitions' AND gc_grace_seconds=8640"); public static final CFMetaData SchemaColumnFamiliesCf = compile(9, "CREATE TABLE " + SystemTable.SCHEMA_COLUMNFAMILIES_CF + "(" + "keyspace_name text," + "columnfamily_name text," + "id int," + "type text," + "comparator text," + "subcomparator text," + "comment text," + "read_repair_chance double," + "local_read_repair_chance double," + "replicate_on_write boolean," + "gc_grace_seconds int," + "default_validator text," + "key_validator text," + 
"min_compaction_threshold int," + "max_compaction_threshold int," + "key_alias text," // that one is kept for compatibility sake + "key_aliases text," + "bloom_filter_fp_chance double," + "caching text," + "compaction_strategy_class text," + "compression_parameters text," + "value_alias text," + "column_aliases text," + "compaction_strategy_options text," + "default_read_consistency text," + "default_write_consistency text," + "PRIMARY KEY (keyspace_name, columnfamily_name)" + ") WITH COMMENT='ColumnFamily definitions' AND gc_grace_seconds=8640"); public static final CFMetaData SchemaColumnsCf = compile(10, "CREATE TABLE " + SystemTable.SCHEMA_COLUMNS_CF + "(" + "keyspace_name text," + "columnfamily_name text," + "column_name text," + "validator text," + "index_type text," + "index_options text," + "index_name text," + "component_index int," + "PRIMARY KEY(keyspace_name, columnfamily_name, column_name)" + ") WITH COMMENT='ColumnFamily column attributes' AND gc_grace_seconds=8640"); public static final CFMetaData HintsCf = compile(11, "CREATE TABLE " + SystemTable.HINTS_CF + " (" + "target_id uuid," + "hint_id timeuuid," + "message_version int," + "mutation blob," + "PRIMARY KEY (target_id, hint_id, message_version)" + ") WITH COMPACT STORAGE " + "AND COMPACTION={'class' : 'SizeTieredCompactionStrategy', 'min_threshold' : 0, 'max_threshold' : 0} " + "AND COMMENT='hints awaiting delivery'" + "AND gc_grace_seconds=0"); public static final CFMetaData PeersCf = compile(12, "CREATE TABLE " + SystemTable.PEERS_CF + " (" + "peer inet PRIMARY KEY," + "ring_id uuid," + "tokens set<varchar>," + "schema_version uuid," + "release_version text," + "rpc_address inet," + "data_center text," + "rack text" + ") WITH COMMENT='known peers in the cluster'"); public static final CFMetaData LocalCf = compile(13, "CREATE TABLE " + SystemTable.LOCAL_CF + " (" + "key text PRIMARY KEY," + "tokens set<varchar>," + "cluster_name text," + "gossip_generation int," + "bootstrapped text," + 
"ring_id uuid," + "release_version text," + "thrift_version text," + "cql_version text," + "data_center text," + "rack text" + ") WITH COMMENT='information about the local node'"); public static final CFMetaData TraceSessionsCf = compile(14, "CREATE TABLE " + Tracing.SESSIONS_CF + " (" + " session_id uuid PRIMARY KEY," + " coordinator inet," + " request text," + " started_at timestamp," + " parameters map<text, text>," + " duration int" + ") WITH COMMENT='traced sessions'", Tracing.TRACE_KS); public static final CFMetaData TraceEventsCf = compile(15, "CREATE TABLE " + Tracing.EVENTS_CF + " (" + " session_id uuid," + " event_id timeuuid," + " source inet," + " thread text," + " activity text," + " source_elapsed int," + " PRIMARY KEY (session_id, event_id)" + ");", Tracing.TRACE_KS); public static final CFMetaData BatchlogCF = compile(16, "CREATE TABLE " + SystemTable.BATCHLOG_CF + " (" + "id uuid PRIMARY KEY," + "written_at timestamp," + "data blob" + ") WITH COMMENT='uncommited batches' AND gc_grace_seconds=0 " + "AND COMPACTION={'class' : 'SizeTieredCompactionStrategy', 'min_threshold' : 2}"); public static final CFMetaData RangeXfersCf = compile(17, "CREATE TABLE " + SystemTable.RANGE_XFERS_CF + " (" + "token_bytes blob PRIMARY KEY," + "requested_at timestamp" + ") WITH COMMENT='ranges requested for transfer here'"); public enum Caching { ALL, KEYS_ONLY, ROWS_ONLY, NONE; public static Caching fromString(String cache) throws ConfigurationException { try { return valueOf(cache.toUpperCase()); } catch (IllegalArgumentException e) { throw new ConfigurationException(String.format("%s not found, available types: %s.", cache, StringUtils.join(values(), ", "))); } } } //REQUIRED public final UUID cfId; // internal id, never exposed to user public final String ksName; // name of keyspace public final String cfName; // name of this column family public final ColumnFamilyType cfType; // standard, super public volatile AbstractType<?> comparator; // bytes, long, timeuuid, 
utf8, etc. public volatile AbstractType<?> subcolumnComparator; // like comparator, for supercolumns //OPTIONAL private volatile String comment = ""; private volatile double readRepairChance = DEFAULT_READ_REPAIR_CHANCE; private volatile double dcLocalReadRepairChance = DEFAULT_DCLOCAL_READ_REPAIR_CHANCE; private volatile boolean replicateOnWrite = DEFAULT_REPLICATE_ON_WRITE; private volatile int gcGraceSeconds = DEFAULT_GC_GRACE_SECONDS; private volatile AbstractType<?> defaultValidator = BytesType.instance; private volatile AbstractType<?> keyValidator = BytesType.instance; private volatile int minCompactionThreshold = DEFAULT_MIN_COMPACTION_THRESHOLD; private volatile int maxCompactionThreshold = DEFAULT_MAX_COMPACTION_THRESHOLD; // Both those aliases list can be null padded if only some of the position have been given an alias through ALTER TABLE .. RENAME private volatile List<ByteBuffer> keyAliases = new ArrayList<ByteBuffer>(); private volatile List<ByteBuffer> columnAliases = new ArrayList<ByteBuffer>(); private volatile ByteBuffer valueAlias = null; private volatile Double bloomFilterFpChance = null; private volatile Caching caching = DEFAULT_CACHING_STRATEGY; volatile Map<ByteBuffer, ColumnDefinition> column_metadata = new HashMap<ByteBuffer,ColumnDefinition>(); public volatile Class<? extends AbstractCompactionStrategy> compactionStrategyClass = DEFAULT_COMPACTION_STRATEGY_CLASS; public volatile Map<String, String> compactionStrategyOptions = new HashMap<String, String>(); public volatile CompressionParameters compressionParameters = new CompressionParameters(null); // Processed infos used by CQL. This can be fully reconstructed from the CFMedata, // so it's not saved on disk. 
It is however costlyish to recreate for each query // so we cache it here (and update on each relevant CFMetadata change) private volatile CFDefinition cqlCfDef; public CFMetaData comment(String prop) { comment = enforceCommentNotNull(prop); return this;} public CFMetaData readRepairChance(double prop) {readRepairChance = prop; return this;} public CFMetaData dcLocalReadRepairChance(double prop) {dcLocalReadRepairChance = prop; return this;} public CFMetaData replicateOnWrite(boolean prop) {replicateOnWrite = prop; return this;} public CFMetaData gcGraceSeconds(int prop) {gcGraceSeconds = prop; return this;} public CFMetaData defaultValidator(AbstractType<?> prop) {defaultValidator = prop; updateCfDef(); return this;} public CFMetaData keyValidator(AbstractType<?> prop) {keyValidator = prop; updateCfDef(); return this;} public CFMetaData minCompactionThreshold(int prop) {minCompactionThreshold = prop; return this;} public CFMetaData maxCompactionThreshold(int prop) {maxCompactionThreshold = prop; return this;} public CFMetaData keyAliases(List<ByteBuffer> prop) {keyAliases = prop; updateCfDef(); return this;} public CFMetaData columnAliases(List<ByteBuffer> prop) {columnAliases = prop; updateCfDef(); return this;} public CFMetaData valueAlias(ByteBuffer prop) {valueAlias = prop; updateCfDef(); return this;} public CFMetaData columnMetadata(Map<ByteBuffer,ColumnDefinition> prop) {column_metadata = prop; updateCfDef(); return this;} public CFMetaData compactionStrategyClass(Class<? 
extends AbstractCompactionStrategy> prop) {compactionStrategyClass = prop; return this;}
// Fluent setters: each updates one mutable attribute and returns 'this' for chaining.
public CFMetaData compactionStrategyOptions(Map<String, String> prop) {compactionStrategyOptions = prop; return this;}
public CFMetaData compressionParameters(CompressionParameters prop) {compressionParameters = prop; return this;}
public CFMetaData bloomFilterFpChance(Double prop) {bloomFilterFpChance = prop; return this;}
public CFMetaData caching(Caching prop) {caching = prop; return this;}

/**
 * Creates metadata for a column family, deriving the cfId deterministically
 * from the keyspace and column family names (see getId).
 */
public CFMetaData(String keyspace, String name, ColumnFamilyType type, AbstractType<?> comp, AbstractType<?> subcc)
{
    this(keyspace, name, type, comp, subcc, getId(keyspace, name));
}

// Package-private constructor taking an explicit cfId; used by clone()/rename()
// so the copy keeps the original id.
CFMetaData(String keyspace, String name, ColumnFamilyType type, AbstractType<?> comp, AbstractType<?> subcc, UUID id)
{
    // Final fields must be set in constructor
    ksName = keyspace;
    cfName = name;
    cfType = type;
    comparator = comp;
    subcolumnComparator = enforceSubccDefault(type, subcc);
    cfId = id;
    updateCfDef(); // init cqlCfDef
}

// Builds a system CF's metadata by parsing a CQL CREATE TABLE statement and
// applying its properties on top of the newSystemMetadata() defaults.
private static CFMetaData compile(int id, String cql, String keyspace)
{
    try
    {
        CreateColumnFamilyStatement statement = (CreateColumnFamilyStatement) QueryProcessor.parseStatement(cql).prepare().statement;
        CFMetaData cfmd = newSystemMetadata(keyspace, statement.columnFamily(), id, "", statement.comparator, null);
        statement.applyPropertiesTo(cfmd);
        return cfmd;
    }
    catch (RequestValidationException e)
    {
        // the CQL for system tables is hardcoded above, so failing to parse it is a programming error
        throw new RuntimeException(e);
    }
}

// Convenience overload: compiles the definition into the system keyspace.
private static CFMetaData compile(int id, String cql)
{
    return compile(id, cql, Table.SYSTEM_KS);
}

// Super CFs default their subcolumn comparator to BytesType; standard CFs keep whatever was given (normally null).
private AbstractType<?> enforceSubccDefault(ColumnFamilyType cftype, AbstractType<?> subcc)
{
    return (subcc == null) && (cftype == ColumnFamilyType.Super) ? BytesType.instance : subcc;
}

// Normalizes a possibly-null comment to a non-null String.
private static String enforceCommentNotNull (CharSequence comment)
{
    return (comment == null) ?
"" : comment.toString();
}

// Deterministic CF id: a name-based UUID over the concatenated keyspace and CF names.
static UUID getId(String ksName, String cfName)
{
    return UUID.nameUUIDFromBytes(ArrayUtils.addAll(ksName.getBytes(), cfName.getBytes()));
}

private void init()
{
    updateCfDef(); // init cqlCfDef
}

// Common defaults for system column families: no read repair, no gc grace,
// plus registration of the legacy integer cf id mapping.
private static CFMetaData newSystemMetadata(String keyspace, String cfName, int oldCfId, String comment, AbstractType<?> comparator, AbstractType<?> subcc)
{
    ColumnFamilyType type = subcc == null ? ColumnFamilyType.Standard : ColumnFamilyType.Super;
    CFMetaData newCFMD = new CFMetaData(keyspace, cfName, type, comparator, subcc);

    // adding old -> new style ID mapping to support backward compatibility
    Schema.instance.addOldCfIdMapping(oldCfId, newCFMD.cfId);

    return newCFMD.comment(comment)
                  .readRepairChance(0)
                  .dcLocalReadRepairChance(0)
                  .gcGraceSeconds(0);
}

// Builds the metadata for the hidden column family that backs a secondary index on 'info'.
public static CFMetaData newIndexMetadata(CFMetaData parent, ColumnDefinition info, AbstractType<?> columnComparator)
{
    // Depends on parent's cache setting, turn on its index CF's cache.
    // Here, only key cache is enabled, but later (in KeysIndex) row cache will be turned on depending on cardinality.
    Caching indexCaching = parent.getCaching() == Caching.ALL || parent.getCaching() == Caching.KEYS_ONLY
                         ? Caching.KEYS_ONLY
                         : Caching.NONE;

    return new CFMetaData(parent.ksName, parent.indexColumnFamilyName(info), ColumnFamilyType.Standard, columnComparator, null)
           .keyValidator(info.getValidator())
           .readRepairChance(0.0)
           .dcLocalReadRepairChance(0.0)
           .gcGraceSeconds(0)
           .caching(indexCaching)
           .compactionStrategyClass(parent.compactionStrategyClass)
           .compactionStrategyOptions(parent.compactionStrategyOptions)
           .reloadSecondaryIndexMetadata(parent);
}

// Re-copies from the parent CF the settings an index CF must keep in sync with it.
public CFMetaData reloadSecondaryIndexMetadata(CFMetaData parent)
{
    minCompactionThreshold(parent.minCompactionThreshold);
    maxCompactionThreshold(parent.maxCompactionThreshold);
    compactionStrategyClass(parent.compactionStrategyClass);
    compactionStrategyOptions(parent.compactionStrategyOptions);
    compressionParameters(parent.compressionParameters);
    return this;
}

// Deep copy (column definitions included) preserving the cfId.
public CFMetaData clone()
{
    return copyOpts(new CFMetaData(ksName, cfName, cfType, comparator, subcolumnComparator, cfId), this);
}

// Create a new CFMD by changing just the cfName
public static CFMetaData rename(CFMetaData cfm, String newName)
{
    return copyOpts(new CFMetaData(cfm.ksName, newName, cfm.cfType, cfm.comparator, cfm.subcolumnComparator, cfm.cfId), cfm);
}

// Copies every optional attribute from oldCFMD onto newCFMD, cloning mutable state
// (column definitions, alias lists) so the two instances do not share it.
static CFMetaData copyOpts(CFMetaData newCFMD, CFMetaData oldCFMD)
{
    Map<ByteBuffer, ColumnDefinition> clonedColumns = new HashMap<ByteBuffer, ColumnDefinition>();
    for (ColumnDefinition cd : oldCFMD.column_metadata.values())
    {
        ColumnDefinition cloned = cd.clone();
        clonedColumns.put(cloned.name, cloned);
    }

    return newCFMD.comment(oldCFMD.comment)
                  .readRepairChance(oldCFMD.readRepairChance)
                  .dcLocalReadRepairChance(oldCFMD.dcLocalReadRepairChance)
                  .replicateOnWrite(oldCFMD.replicateOnWrite)
                  .gcGraceSeconds(oldCFMD.gcGraceSeconds)
                  .defaultValidator(oldCFMD.defaultValidator)
                  .keyValidator(oldCFMD.keyValidator)
                  .minCompactionThreshold(oldCFMD.minCompactionThreshold)
                  .maxCompactionThreshold(oldCFMD.maxCompactionThreshold)
                  .keyAliases(new ArrayList<ByteBuffer>(oldCFMD.keyAliases))
                  .columnAliases(new
ArrayList<ByteBuffer>(oldCFMD.columnAliases))
                  .valueAlias(oldCFMD.valueAlias)
                  .columnMetadata(clonedColumns)
                  .compactionStrategyClass(oldCFMD.compactionStrategyClass)
                  .compactionStrategyOptions(oldCFMD.compactionStrategyOptions)
                  .compressionParameters(oldCFMD.compressionParameters)
                  .bloomFilterFpChance(oldCFMD.bloomFilterFpChance)
                  .caching(oldCFMD.caching);
}

/**
 * generate a column family name for an index corresponding to the given column.
 * This is NOT the same as the index's name! This is only used in sstable filenames and is not exposed to users.
 *
 * @param info A definition of the column with index
 *
 * @return name of the index ColumnFamily
 */
public String indexColumnFamilyName(ColumnDefinition info)
{
    // TODO simplify this when info.index_name is guaranteed to be set
    return cfName + Directories.SECONDARY_INDEX_NAME_SEPARATOR + (info.getIndexName() == null ? ByteBufferUtil.bytesToHex(info.name) : info.getIndexName());
}

// Plain accessors for the volatile optional attributes.
public String getComment()
{
    return comment;
}

public double getReadRepairChance()
{
    return readRepairChance;
}

public double getDcLocalReadRepair()
{
    return dcLocalReadRepairChance;
}

public boolean getReplicateOnWrite()
{
    return replicateOnWrite;
}

public int getGcGraceSeconds()
{
    return gcGraceSeconds;
}

public AbstractType<?> getDefaultValidator()
{
    return defaultValidator;
}

public AbstractType<?> getKeyValidator()
{
    return keyValidator;
}

public Integer getMinCompactionThreshold()
{
    return minCompactionThreshold;
}

public Integer getMaxCompactionThreshold()
{
    return maxCompactionThreshold;
}

// Used by CQL2 only.
// Fixed typo in the exception message ("acces" -> "access").
public ByteBuffer getKeyName()
{
    if (keyAliases.size() > 1)
        throw new IllegalStateException("Cannot access column family with composite key from CQL < 3.0.0");

    return keyAliases.isEmpty() ?
DEFAULT_KEY_NAME : keyAliases.get(0); } public List<ByteBuffer> getKeyAliases() { return keyAliases; } public List<ByteBuffer> getColumnAliases() { return columnAliases; } public ByteBuffer getValueAlias() { return valueAlias; } public CompressionParameters compressionParameters() { return compressionParameters; } public Map<ByteBuffer, ColumnDefinition> getColumn_metadata() { return Collections.unmodifiableMap(column_metadata); } public AbstractType<?> getComparatorFor(ByteBuffer superColumnName) { return superColumnName == null ? comparator : subcolumnComparator; } public double getBloomFilterFpChance() { return bloomFilterFpChance == null ? compactionStrategyClass == LeveledCompactionStrategy.class ? 1.0 : 0.01 : bloomFilterFpChance; } public Caching getCaching() { return caching; } public boolean equals(Object obj) { if (obj == this) { return true; } else if (obj == null || obj.getClass() != getClass()) { return false; } CFMetaData rhs = (CFMetaData) obj; return new EqualsBuilder() .append(ksName, rhs.ksName) .append(cfName, rhs.cfName) .append(cfType, rhs.cfType) .append(comparator, rhs.comparator) .append(subcolumnComparator, rhs.subcolumnComparator) .append(comment, rhs.comment) .append(readRepairChance, rhs.readRepairChance) .append(dcLocalReadRepairChance, rhs.dcLocalReadRepairChance) .append(replicateOnWrite, rhs.replicateOnWrite) .append(gcGraceSeconds, rhs.gcGraceSeconds) .append(defaultValidator, rhs.defaultValidator) .append(keyValidator, rhs.keyValidator) .append(minCompactionThreshold, rhs.minCompactionThreshold) .append(maxCompactionThreshold, rhs.maxCompactionThreshold) .append(cfId, rhs.cfId) .append(column_metadata, rhs.column_metadata) .append(keyAliases, rhs.keyAliases) .append(columnAliases, rhs.columnAliases) .append(valueAlias, rhs.valueAlias) .append(compactionStrategyClass, rhs.compactionStrategyClass) .append(compactionStrategyOptions, rhs.compactionStrategyOptions) .append(compressionParameters, rhs.compressionParameters) 
.append(bloomFilterFpChance, rhs.bloomFilterFpChance) .append(caching, rhs.caching) .isEquals(); } public int hashCode() { return new HashCodeBuilder(29, 1597) .append(ksName) .append(cfName) .append(cfType) .append(comparator) .append(subcolumnComparator) .append(comment) .append(readRepairChance) .append(dcLocalReadRepairChance) .append(replicateOnWrite) .append(gcGraceSeconds) .append(defaultValidator) .append(keyValidator) .append(minCompactionThreshold) .append(maxCompactionThreshold) .append(cfId) .append(column_metadata) .append(keyAliases) .append(columnAliases) .append(valueAlias) .append(compactionStrategyClass) .append(compactionStrategyOptions) .append(compressionParameters) .append(bloomFilterFpChance) .append(caching) .toHashCode(); } public AbstractType<?> getValueValidator(ByteBuffer column) { return getValueValidator(getColumnDefinition(column)); } public AbstractType<?> getValueValidator(ColumnDefinition columnDefinition) { return columnDefinition == null ? defaultValidator : columnDefinition.getValidator(); } /** applies implicit defaults to cf definition. 
useful in updates */ public static void applyImplicitDefaults(org.apache.cassandra.thrift.CfDef cf_def) { if (!cf_def.isSetComment()) cf_def.setComment(""); if (!cf_def.isSetReplicate_on_write()) cf_def.setReplicate_on_write(CFMetaData.DEFAULT_REPLICATE_ON_WRITE); if (!cf_def.isSetMin_compaction_threshold()) cf_def.setMin_compaction_threshold(CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD); if (!cf_def.isSetMax_compaction_threshold()) cf_def.setMax_compaction_threshold(CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD); if (cf_def.compaction_strategy == null) cf_def.compaction_strategy = DEFAULT_COMPACTION_STRATEGY_CLASS.getSimpleName(); if (cf_def.compaction_strategy_options == null) cf_def.compaction_strategy_options = Collections.emptyMap(); if (!cf_def.isSetCompression_options()) { cf_def.setCompression_options(new HashMap<String, String>() {{ if (DEFAULT_COMPRESSOR != null) put(CompressionParameters.SSTABLE_COMPRESSION, DEFAULT_COMPRESSOR); }}); } if (!cf_def.isSetDclocal_read_repair_chance()) cf_def.setDclocal_read_repair_chance(CFMetaData.DEFAULT_DCLOCAL_READ_REPAIR_CHANCE); } public static CFMetaData fromThrift(org.apache.cassandra.thrift.CfDef cf_def) throws InvalidRequestException, ConfigurationException { ColumnFamilyType cfType = ColumnFamilyType.create(cf_def.column_type); if (cfType == null) { throw new InvalidRequestException("Invalid column type " + cf_def.column_type); } applyImplicitDefaults(cf_def); try { CFMetaData newCFMD = new CFMetaData(cf_def.keyspace, cf_def.name, cfType, TypeParser.parse(cf_def.comparator_type), cf_def.subcomparator_type == null ? 
null : TypeParser.parse(cf_def.subcomparator_type)); if (cf_def.isSetGc_grace_seconds()) { newCFMD.gcGraceSeconds(cf_def.gc_grace_seconds); } if (cf_def.isSetMin_compaction_threshold()) { newCFMD.minCompactionThreshold(cf_def.min_compaction_threshold); } if (cf_def.isSetMax_compaction_threshold()) { newCFMD.maxCompactionThreshold(cf_def.max_compaction_threshold); } if (cf_def.isSetKey_alias()) { newCFMD.keyAliases(Collections.<ByteBuffer>singletonList(cf_def.key_alias)); } if (cf_def.isSetKey_validation_class()) { newCFMD.keyValidator(TypeParser.parse(cf_def.key_validation_class)); } if (cf_def.isSetCompaction_strategy()) newCFMD.compactionStrategyClass = createCompactionStrategy(cf_def.compaction_strategy); if (cf_def.isSetCompaction_strategy_options()) newCFMD.compactionStrategyOptions(new HashMap<String, String>(cf_def.compaction_strategy_options)); if (cf_def.isSetBloom_filter_fp_chance()) newCFMD.bloomFilterFpChance(cf_def.bloom_filter_fp_chance); if (cf_def.isSetCaching()) newCFMD.caching(Caching.fromString(cf_def.caching)); if (cf_def.isSetRead_repair_chance()) newCFMD.readRepairChance(cf_def.read_repair_chance); if (cf_def.isSetDclocal_read_repair_chance()) newCFMD.dcLocalReadRepairChance(cf_def.dclocal_read_repair_chance); CompressionParameters cp = CompressionParameters.create(cf_def.compression_options); return newCFMD.comment(cf_def.comment) .replicateOnWrite(cf_def.replicate_on_write) .defaultValidator(TypeParser.parse(cf_def.default_validation_class)) .keyValidator(TypeParser.parse(cf_def.key_validation_class)) .columnMetadata(ColumnDefinition.fromThrift(cf_def.column_metadata)) .compressionParameters(cp); } catch (SyntaxException e) { throw new ConfigurationException(e.getMessage()); } catch (MarshalException e) { throw new ConfigurationException(e.getMessage()); } } public void reload() { Row cfDefRow = SystemTable.readSchemaRow(ksName, cfName); if (cfDefRow.cf == null || cfDefRow.cf.isEmpty()) throw new RuntimeException(String.format("%s not found 
in the schema definitions table.", ksName + ":" + cfName)); try { apply(fromSchema(cfDefRow)); } catch (ConfigurationException e) { throw new RuntimeException(e); } } /** * Updates CFMetaData in-place to match cf_def * * *Note*: This method left public only for DefsTest, don't use directly! * * @throws ConfigurationException if ks/cf names or cf ids didn't match */ public void apply(CFMetaData cfm) throws ConfigurationException { logger.debug("applying {} to {}", cfm, this); // validate if (!cfm.ksName.equals(ksName)) throw new ConfigurationException(String.format("Keyspace mismatch (found %s; expected %s)", cfm.ksName, ksName)); if (!cfm.cfName.equals(cfName)) throw new ConfigurationException(String.format("Column family mismatch (found %s; expected %s)", cfm.cfName, cfName)); if (!cfm.cfId.equals(cfId)) throw new ConfigurationException(String.format("Column family ID mismatch (found %s; expected %s)", cfm.cfId, cfId)); if (!cfm.cfType.equals(cfType)) throw new ConfigurationException("types do not match."); if (!cfm.comparator.isCompatibleWith(comparator)) throw new ConfigurationException("comparators do not match or are not compatible."); if (cfm.subcolumnComparator == null) { if (subcolumnComparator != null) throw new ConfigurationException("subcolumncomparators do not match."); // else, it's null and we're good. } else if (!cfm.subcolumnComparator.isCompatibleWith(subcolumnComparator)) throw new ConfigurationException("subcolumncomparators do not match or are note compatible."); // TODO: this method should probably return a new CFMetaData so that // 1) we can keep comparator and subcolumnComparator final // 2) updates are applied atomically comparator = cfm.comparator; subcolumnComparator = cfm.subcolumnComparator; // compaction thresholds are checked by ThriftValidation. We shouldn't be doing // validation on the apply path; it's too late for that. 
comment = enforceCommentNotNull(cfm.comment); readRepairChance = cfm.readRepairChance; dcLocalReadRepairChance = cfm.dcLocalReadRepairChance; replicateOnWrite = cfm.replicateOnWrite; gcGraceSeconds = cfm.gcGraceSeconds; defaultValidator = cfm.defaultValidator; keyValidator = cfm.keyValidator; minCompactionThreshold = cfm.minCompactionThreshold; maxCompactionThreshold = cfm.maxCompactionThreshold; /* * Because thrift updates don't know about aliases, we should ignore * the case where the new aliases are empty. */ if (!cfm.keyAliases.isEmpty()) keyAliases = cfm.keyAliases; if (!cfm.columnAliases.isEmpty()) columnAliases = cfm.columnAliases; if (cfm.valueAlias != null) valueAlias = cfm.valueAlias; bloomFilterFpChance = cfm.bloomFilterFpChance; caching = cfm.caching; MapDifference<ByteBuffer, ColumnDefinition> columnDiff = Maps.difference(column_metadata, cfm.column_metadata); // columns that are no longer needed for (ColumnDefinition cd : columnDiff.entriesOnlyOnLeft().values()) column_metadata.remove(cd.name); // newly added columns for (ColumnDefinition cd : columnDiff.entriesOnlyOnRight().values()) column_metadata.put(cd.name, cd); // old columns with updated attributes for (ByteBuffer name : columnDiff.entriesDiffering().keySet()) { ColumnDefinition oldDef = column_metadata.get(name); ColumnDefinition def = cfm.column_metadata.get(name); oldDef.apply(def, getColumnDefinitionComparator(oldDef)); } compactionStrategyClass = cfm.compactionStrategyClass; compactionStrategyOptions = cfm.compactionStrategyOptions; compressionParameters = cfm.compressionParameters(); updateCfDef(); logger.debug("application result is {}", this); } public static Class<? extends AbstractCompactionStrategy> createCompactionStrategy(String className) throws ConfigurationException { className = className.contains(".") ? className : "org.apache.cassandra.db.compaction." 
+ className; return FBUtilities.classForName(className, "compaction strategy"); } public AbstractCompactionStrategy createCompactionStrategyInstance(ColumnFamilyStore cfs) { try { Constructor<? extends AbstractCompactionStrategy> constructor = compactionStrategyClass.getConstructor(new Class[] { ColumnFamilyStore.class, Map.class // options }); return constructor.newInstance(cfs, compactionStrategyOptions); } catch (NoSuchMethodException e) { throw new RuntimeException(e); } catch (InstantiationException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } catch (InvocationTargetException e) { throw new RuntimeException(e); } } // converts CFM to thrift CfDef public org.apache.cassandra.thrift.CfDef toThrift() { org.apache.cassandra.thrift.CfDef def = new org.apache.cassandra.thrift.CfDef(ksName, cfName); def.setColumn_type(cfType.name()); def.setComparator_type(comparator.toString()); if (subcolumnComparator != null) { assert cfType == ColumnFamilyType.Super : String.format("%s CF %s should not have subcomparator %s defined", cfType, cfName, subcolumnComparator); def.setSubcomparator_type(subcolumnComparator.toString()); } def.setComment(enforceCommentNotNull(comment)); def.setRead_repair_chance(readRepairChance); def.setDclocal_read_repair_chance(dcLocalReadRepairChance); def.setReplicate_on_write(replicateOnWrite); def.setGc_grace_seconds(gcGraceSeconds); def.setDefault_validation_class(defaultValidator == null ? 
null : defaultValidator.toString()); def.setKey_validation_class(keyValidator.toString()); def.setMin_compaction_threshold(minCompactionThreshold); def.setMax_compaction_threshold(maxCompactionThreshold); // We only return the alias if only one is set since thrift don't know about multiple key aliases if (keyAliases.size() == 1) def.setKey_alias(keyAliases.get(0)); List<org.apache.cassandra.thrift.ColumnDef> column_meta = new ArrayList<org.apache.cassandra.thrift.ColumnDef>(column_metadata.size()); for (ColumnDefinition cd : column_metadata.values()) column_meta.add(cd.toThrift()); def.setColumn_metadata(column_meta); def.setCompaction_strategy(compactionStrategyClass.getName()); def.setCompaction_strategy_options(new HashMap<String, String>(compactionStrategyOptions)); def.setCompression_options(compressionParameters.asThriftOptions()); if (bloomFilterFpChance != null) def.setBloom_filter_fp_chance(bloomFilterFpChance); def.setCaching(caching.toString()); return def; } /** * Returns the ColumnDefinition for {@code name}. * * Note that {@code name} correspond to the returned ColumnDefinition name, * and in particular for composite cfs, it should usually be only a * component of the full column name. If you have a full column name, use * getColumnDefinitionFromColumnName instead. */ public ColumnDefinition getColumnDefinition(ByteBuffer name) { return column_metadata.get(name); } /** * Returns a ColumnDefinition given a full (internal) column name. */ public ColumnDefinition getColumnDefinitionFromColumnName(ByteBuffer columnName) { if (comparator instanceof CompositeType) { CompositeType composite = (CompositeType)comparator; ByteBuffer[] components = composite.split(columnName); for (ColumnDefinition def : column_metadata.values()) { ByteBuffer toCompare = def.componentIndex == null ? 
columnName : components[def.componentIndex]; if (def.name.equals(toCompare)) return def; } return null; } else { return column_metadata.get(columnName); } } public ColumnDefinition getColumnDefinitionForIndex(String indexName) { for (ColumnDefinition def : column_metadata.values()) { if (indexName.equals(def.getIndexName())) return def; } return null; } /** * Convert a null index_name to appropriate default name according to column status */ public void addDefaultIndexNames() throws ConfigurationException { // if this is ColumnFamily update we need to add previously defined index names to the existing columns first UUID cfId = Schema.instance.getId(ksName, cfName); if (cfId != null) { CFMetaData cfm = Schema.instance.getCFMetaData(cfId); for (Map.Entry<ByteBuffer, ColumnDefinition> entry : column_metadata.entrySet()) { ColumnDefinition newDef = entry.getValue(); if (!cfm.column_metadata.containsKey(entry.getKey()) || newDef.getIndexType() == null) continue; String oldIndexName = cfm.column_metadata.get(entry.getKey()).getIndexName(); if (oldIndexName == null) continue; if (newDef.getIndexName() != null && !oldIndexName.equals(newDef.getIndexName())) throw new ConfigurationException("Can't modify index name: was '" + oldIndexName + "' changed to '" + newDef.getIndexName() + "'."); newDef.setIndexName(oldIndexName); } } Set<String> existingNames = existingIndexNames(null); for (ColumnDefinition column : column_metadata.values()) { if (column.getIndexType() != null && column.getIndexName() == null) { String baseName = getDefaultIndexName(cfName, getColumnDefinitionComparator(column), column.name); String indexName = baseName; int i = 0; while (existingNames.contains(indexName)) indexName = baseName + '_' + (++i); column.setIndexName(indexName); } } } public static String getDefaultIndexName(String cfName, AbstractType<?> comparator, ByteBuffer columnName) { return (cfName + "_" + comparator.getString(columnName) + "_idx").replaceAll("\\W", ""); } public 
IColumnSerializer getColumnSerializer() { if (cfType == ColumnFamilyType.Standard) return Column.serializer(); return SuperColumn.serializer(subcolumnComparator); } public OnDiskAtom.Serializer getOnDiskSerializer() { if (cfType == ColumnFamilyType.Standard) return Column.onDiskSerializer(); return SuperColumn.onDiskSerializer(subcolumnComparator); } public static boolean isNameValid(String name) { return name != null && !name.isEmpty() && name.length() <= Schema.NAME_LENGTH && name.matches("\\w+"); } public static boolean isIndexNameValid(String name) { return name != null && !name.isEmpty() && name.matches("\\w+"); } public CFMetaData validate() throws ConfigurationException { if (!isNameValid(ksName)) throw new ConfigurationException(String.format("Keyspace name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", Schema.NAME_LENGTH, ksName)); if (!isNameValid(cfName)) throw new ConfigurationException(String.format("ColumnFamily name must not be empty, more than %s characters long, or contain non-alphanumeric-underscore characters (got \"%s\")", Schema.NAME_LENGTH, cfName)); if (cfType == null) throw new ConfigurationException(String.format("Invalid column family type for %s", cfName)); if (cfType == ColumnFamilyType.Super) { if (subcolumnComparator == null) throw new ConfigurationException(String.format("Missing subcolumn comparator for super column family %s", cfName)); } else { if (subcolumnComparator != null) throw new ConfigurationException(String.format("Subcolumn comparator (%s) is invalid for standard column family %s", subcolumnComparator, cfName)); } if (comparator instanceof CounterColumnType) throw new ConfigurationException("CounterColumnType is not a valid comparator"); if (subcolumnComparator instanceof CounterColumnType) throw new ConfigurationException("CounterColumnType is not a valid sub-column comparator"); if (keyValidator instanceof CounterColumnType) throw new 
ConfigurationException("CounterColumnType is not a valid key validator"); // Mixing counter with non counter columns is not supported (#2614) if (defaultValidator instanceof CounterColumnType) { for (ColumnDefinition def : column_metadata.values()) if (!(def.getValidator() instanceof CounterColumnType)) throw new ConfigurationException("Cannot add a non counter column (" + getColumnDefinitionComparator(def).getString(def.name) + ") in a counter column family"); } else { for (ColumnDefinition def : column_metadata.values()) if (def.getValidator() instanceof CounterColumnType) throw new ConfigurationException("Cannot add a counter column (" + getColumnDefinitionComparator(def).getString(def.name) + ") in a non counter column family"); } // check if any of the columns has name equal to the cf.key_alias for (ColumnDefinition columndef : column_metadata.values()) { for (ByteBuffer alias : keyAliases) if (alias.equals(columndef.name)) throw new ConfigurationException("Cannot have key alias equals to a column name: " + UTF8Type.instance.compose(alias)); for (ByteBuffer alias : columnAliases) if (alias.equals(columndef.name)) throw new ConfigurationException("Cannot have column alias equals to a column name: " + UTF8Type.instance.compose(alias)); if (valueAlias != null && valueAlias.equals(columndef.name)) throw new ConfigurationException("Cannot have value alias equals to a column name: " + UTF8Type.instance.compose(valueAlias)); } for (ByteBuffer alias : keyAliases) validateAlias(alias, "Key"); for (ByteBuffer alias : columnAliases) validateAlias(alias, "Column"); validateAlias(valueAlias, "Value"); // initialize a set of names NOT in the CF under consideration Set<String> indexNames = existingIndexNames(cfName); for (ColumnDefinition c : column_metadata.values()) { AbstractType<?> comparator = getColumnDefinitionComparator(c); try { comparator.validate(c.name); } catch (MarshalException e) { throw new ConfigurationException(String.format("Column name %s is not valid for 
comparator %s", ByteBufferUtil.bytesToHex(c.name), comparator)); } if (c.getIndexType() == null) { if (c.getIndexName() != null) throw new ConfigurationException("Index name cannot be set without index type"); } else { if (cfType == ColumnFamilyType.Super) throw new ConfigurationException("Secondary indexes are not supported on super column families"); if (!isIndexNameValid(c.getIndexName())) throw new ConfigurationException("Illegal index name " + c.getIndexName()); // check index names against this CF _and_ globally if (indexNames.contains(c.getIndexName())) throw new ConfigurationException("Duplicate index name " + c.getIndexName()); indexNames.add(c.getIndexName()); if (c.getIndexType() == IndexType.CUSTOM) { if (c.getIndexOptions() == null || !c.getIndexOptions().containsKey(SecondaryIndex.CUSTOM_INDEX_OPTION_NAME)) throw new ConfigurationException("Required index option missing: " + SecondaryIndex.CUSTOM_INDEX_OPTION_NAME); } // This method validates the column metadata but does not intialize the index SecondaryIndex.createInstance(null, c); } } validateCompactionThresholds(); return this; } private static Set<String> existingIndexNames(String cfToExclude) { Set<String> indexNames = new HashSet<String>(); for (ColumnFamilyStore cfs : ColumnFamilyStore.all()) { if (cfToExclude == null || !cfs.getColumnFamilyName().equals(cfToExclude)) for (ColumnDefinition cd : cfs.metadata.getColumn_metadata().values()) indexNames.add(cd.getIndexName()); } return indexNames; } private static void validateAlias(ByteBuffer alias, String msg) throws ConfigurationException { if (alias != null) { try { UTF8Type.instance.validate(alias); } catch (MarshalException e) { throw new ConfigurationException(msg + " alias must be UTF8"); } } } private void validateCompactionThresholds() throws ConfigurationException { if (maxCompactionThreshold == 0) return; if (minCompactionThreshold <= 1) throw new ConfigurationException(String.format("Min compaction threshold cannot be less than 2 (got 
%d).", minCompactionThreshold)); if (minCompactionThreshold > maxCompactionThreshold) throw new ConfigurationException(String.format("Min compaction threshold (got %d) cannot be greater than max compaction threshold (got %d)", minCompactionThreshold, maxCompactionThreshold)); } /** * Create schema mutations to update this metadata to provided new state. * * @param newState The new metadata (for the same CF) * @param modificationTimestamp Timestamp to use for mutation * * @return Difference between attributes in form of schema mutation */ public RowMutation toSchemaUpdate(CFMetaData newState, long modificationTimestamp) { RowMutation rm = new RowMutation(Table.SYSTEM_KS, SystemTable.getSchemaKSKey(ksName)); newState.toSchemaNoColumns(rm, modificationTimestamp); MapDifference<ByteBuffer, ColumnDefinition> columnDiff = Maps.difference(column_metadata, newState.column_metadata); // columns that are no longer needed for (ColumnDefinition cd : columnDiff.entriesOnlyOnLeft().values()) cd.deleteFromSchema(rm, cfName, getColumnDefinitionComparator(cd), modificationTimestamp); // newly added columns for (ColumnDefinition cd : columnDiff.entriesOnlyOnRight().values()) cd.toSchema(rm, cfName, getColumnDefinitionComparator(cd), modificationTimestamp); // old columns with updated attributes for (ByteBuffer name : columnDiff.entriesDiffering().keySet()) { ColumnDefinition cd = newState.getColumnDefinition(name); cd.toSchema(rm, cfName, getColumnDefinitionComparator(cd), modificationTimestamp); } return rm; } /** * Remove all CF attributes from schema * * @param timestamp Timestamp to use * * @return RowMutation to use to completely remove cf from schema */ public RowMutation dropFromSchema(long timestamp) { RowMutation rm = new RowMutation(Table.SYSTEM_KS, SystemTable.getSchemaKSKey(ksName)); ColumnFamily cf = rm.addOrGet(SystemTable.SCHEMA_COLUMNFAMILIES_CF); int ldt = (int) (System.currentTimeMillis() / 1000); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "id")); 
cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "type")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "comparator")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "subcomparator")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "comment")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "read_repair_chance")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "local_read_repair_chance")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "replicate_on_write")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "gc_grace_seconds")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "default_validator")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "key_validator")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "min_compaction_threshold")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "max_compaction_threshold")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "key_alias")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "key_aliases")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "bloom_filter_fp_chance")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "caching")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "compaction_strategy_class")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "compression_parameters")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "value_alias")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "column_aliases")); cf.addColumn(DeletedColumn.create(ldt, timestamp, cfName, "compaction_strategy_options")); for (ColumnDefinition cd : column_metadata.values()) cd.deleteFromSchema(rm, cfName, getColumnDefinitionComparator(cd), timestamp); return rm; } public void toSchema(RowMutation rm, long timestamp) { toSchemaNoColumns(rm, timestamp); for (ColumnDefinition cd : column_metadata.values()) cd.toSchema(rm, cfName, 
getColumnDefinitionComparator(cd), timestamp); } private void toSchemaNoColumns(RowMutation rm, long timestamp) { // For property that can be null (and can be changed), we insert tombstones, to make sure // we don't keep a property the user has removed ColumnFamily cf = rm.addOrGet(SystemTable.SCHEMA_COLUMNFAMILIES_CF); int ldt = (int) (System.currentTimeMillis() / 1000); Integer oldId = Schema.instance.convertNewCfId(cfId); if (oldId != null) // keep old ids (see CASSANDRA-3794 for details) cf.addColumn(Column.create(oldId, timestamp, cfName, "id")); cf.addColumn(Column.create(cfType.toString(), timestamp, cfName, "type")); cf.addColumn(Column.create(comparator.toString(), timestamp, cfName, "comparator")); if (subcolumnComparator != null) cf.addColumn(Column.create(subcolumnComparator.toString(), timestamp, cfName, "subcomparator")); cf.addColumn(comment == null ? DeletedColumn.create(ldt, timestamp, cfName, "comment") : Column.create(comment, timestamp, cfName, "comment")); cf.addColumn(Column.create(readRepairChance, timestamp, cfName, "read_repair_chance")); cf.addColumn(Column.create(dcLocalReadRepairChance, timestamp, cfName, "local_read_repair_chance")); cf.addColumn(Column.create(replicateOnWrite, timestamp, cfName, "replicate_on_write")); cf.addColumn(Column.create(gcGraceSeconds, timestamp, cfName, "gc_grace_seconds")); cf.addColumn(Column.create(defaultValidator.toString(), timestamp, cfName, "default_validator")); cf.addColumn(Column.create(keyValidator.toString(), timestamp, cfName, "key_validator")); cf.addColumn(Column.create(minCompactionThreshold, timestamp, cfName, "min_compaction_threshold")); cf.addColumn(Column.create(maxCompactionThreshold, timestamp, cfName, "max_compaction_threshold")); cf.addColumn(Column.create(json(aliasesAsStrings(keyAliases)), timestamp, cfName, "key_aliases")); cf.addColumn(bloomFilterFpChance == null ? 
DeletedColumn.create(ldt, timestamp, cfName, "bloomFilterFpChance") : Column.create(bloomFilterFpChance, timestamp, cfName, "bloom_filter_fp_chance")); cf.addColumn(Column.create(caching.toString(), timestamp, cfName, "caching")); cf.addColumn(Column.create(compactionStrategyClass.getName(), timestamp, cfName, "compaction_strategy_class")); cf.addColumn(Column.create(json(compressionParameters.asThriftOptions()), timestamp, cfName, "compression_parameters")); cf.addColumn(valueAlias == null ? DeletedColumn.create(ldt, timestamp, cfName, "value_alias") : Column.create(valueAlias, timestamp, cfName, "value_alias")); cf.addColumn(Column.create(json(aliasesAsStrings(columnAliases)), timestamp, cfName, "column_aliases")); cf.addColumn(Column.create(json(compactionStrategyOptions), timestamp, cfName, "compaction_strategy_options")); } // Package protected for use by tests static CFMetaData fromSchemaNoColumns(UntypedResultSet.Row result) { try { CFMetaData cfm = new CFMetaData(result.getString("keyspace_name"), result.getString("columnfamily_name"), ColumnFamilyType.valueOf(result.getString("type")), TypeParser.parse(result.getString("comparator")), result.has("subcomparator") ? 
TypeParser.parse(result.getString("subcomparator")) : null); if (result.has("id"))// try to identify if ColumnFamily Id is old style (before C* 1.2) and add old -> new mapping if so Schema.instance.addOldCfIdMapping(result.getInt("id"), cfm.cfId); cfm.readRepairChance(result.getDouble("read_repair_chance")); cfm.dcLocalReadRepairChance(result.getDouble("local_read_repair_chance")); cfm.replicateOnWrite(result.getBoolean("replicate_on_write")); cfm.gcGraceSeconds(result.getInt("gc_grace_seconds")); cfm.defaultValidator(TypeParser.parse(result.getString("default_validator"))); cfm.keyValidator(TypeParser.parse(result.getString("key_validator"))); cfm.minCompactionThreshold(result.getInt("min_compaction_threshold")); cfm.maxCompactionThreshold(result.getInt("max_compaction_threshold")); if (result.has("comment")) cfm.comment(result.getString("comment")); // We need support the old key_alias for compatibility sake if (result.has("key_aliases")) { cfm.keyAliases(aliasesFromStrings(fromJsonList(result.getString("key_aliases")))); } else if (result.has("key_alias")) { cfm.keyAliases(Collections.<ByteBuffer>singletonList(result.getBytes("key_alias"))); } if (result.has("bloom_filter_fp_chance")) cfm.bloomFilterFpChance(result.getDouble("bloom_filter_fp_chance")); cfm.caching(Caching.valueOf(result.getString("caching"))); cfm.compactionStrategyClass(createCompactionStrategy(result.getString("compaction_strategy_class"))); cfm.compressionParameters(CompressionParameters.create(fromJsonMap(result.getString("compression_parameters")))); cfm.columnAliases(aliasesFromStrings(fromJsonList(result.getString("column_aliases")))); if (result.has("value_alias")) cfm.valueAlias(result.getBytes("value_alias")); cfm.compactionStrategyOptions(fromJsonMap(result.getString("compaction_strategy_options"))); return cfm; } catch (SyntaxException e) { throw new RuntimeException(e); } catch (ConfigurationException e) { throw new RuntimeException(e); } } /** * Deserialize CF metadata from 
low-level representation * * @return Thrift-based metadata deserialized from schema */ public static CFMetaData fromSchema(UntypedResultSet.Row result) { CFMetaData cfDef = fromSchemaNoColumns(result); Row serializedColumnDefinitions = ColumnDefinition.readSchema(cfDef.ksName, cfDef.cfName); return addColumnDefinitionSchema(cfDef, serializedColumnDefinitions).updateCfDef(); } private static CFMetaData fromSchema(Row row) { UntypedResultSet.Row result = QueryProcessor.resultify("SELECT * FROM system.schema_columnfamilies", row).one(); return fromSchema(result); } private List<String> aliasesAsStrings(List<ByteBuffer> rawAliases) { List<String> aliases = new ArrayList<String>(rawAliases.size()); for (ByteBuffer rawAlias : rawAliases) aliases.add(UTF8Type.instance.compose(rawAlias)); return aliases; } private static List<ByteBuffer> aliasesFromStrings(List<String> aliases) { List<ByteBuffer> rawAliases = new ArrayList<ByteBuffer>(aliases.size()); for (String alias : aliases) rawAliases.add(UTF8Type.instance.decompose(alias)); return rawAliases; } /** * Convert current metadata into schema mutation * * @param timestamp Timestamp to use * * @return Low-level representation of the CF * * @throws ConfigurationException if any of the attributes didn't pass validation */ public RowMutation toSchema(long timestamp) throws ConfigurationException { RowMutation rm = new RowMutation(Table.SYSTEM_KS, SystemTable.getSchemaKSKey(ksName)); toSchema(rm, timestamp); return rm; } public AbstractType<?> getColumnDefinitionComparator(ColumnDefinition def) { return getColumnDefinitionComparator(def.componentIndex); } public AbstractType<?> getColumnDefinitionComparator(Integer componentIndex) { AbstractType<?> cfComparator = cfType == ColumnFamilyType.Super ? 
subcolumnComparator : comparator; if (cfComparator instanceof CompositeType) { if (componentIndex == null) return cfComparator; List<AbstractType<?>> types = ((CompositeType)cfComparator).types; AbstractType<?> t = types.get(componentIndex); assert t != null : "Non-sensical component index"; return t; } else { return cfComparator; } } // Package protected for use by tests static CFMetaData addColumnDefinitionSchema(CFMetaData cfDef, Row serializedColumnDefinitions) { for (ColumnDefinition cd : ColumnDefinition.fromSchema(serializedColumnDefinitions, cfDef)) cfDef.column_metadata.put(cd.name, cd); return cfDef; } public void addColumnDefinition(ColumnDefinition def) { column_metadata.put(def.name, def); } public boolean removeColumnDefinition(ColumnDefinition def) { return column_metadata.remove(def.name) != null; } private CFMetaData updateCfDef() { cqlCfDef = new CFDefinition(this); return this; } public CFDefinition getCfDef() { assert cqlCfDef != null; return cqlCfDef; } /** * Returns whether this CFMetaData has information non exposed on thrift so * that it cannot be correctly handled automatically by thrift clients. 
*/ public boolean isThriftIncompatible() { if (!cqlCfDef.isComposite) return false; for (ColumnDefinition columnDef : column_metadata.values()) { if (columnDef.componentIndex != null) return true; } return false; } @Override public String toString() { return new ToStringBuilder(this) .append("cfId", cfId) .append("ksName", ksName) .append("cfName", cfName) .append("cfType", cfType) .append("comparator", comparator) .append("subcolumncomparator", subcolumnComparator) .append("comment", comment) .append("readRepairChance", readRepairChance) .append("dclocalReadRepairChance", dcLocalReadRepairChance) .append("replicateOnWrite", replicateOnWrite) .append("gcGraceSeconds", gcGraceSeconds) .append("defaultValidator", defaultValidator) .append("keyValidator", keyValidator) .append("minCompactionThreshold", minCompactionThreshold) .append("maxCompactionThreshold", maxCompactionThreshold) .append("keyAliases", keyAliases) .append("columnAliases", columnAliases) .append("valueAlias", valueAlias) .append("column_metadata", column_metadata) .append("compactionStrategyClass", compactionStrategyClass) .append("compactionStrategyOptions", compactionStrategyOptions) .append("compressionOptions", compressionParameters.asThriftOptions()) .append("bloomFilterFpChance", bloomFilterFpChance) .append("caching", caching) .toString(); } }
/*
 * Copyright (c) 2015 Karl Tauber <karl at jformdesigner dot com>
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 *  o Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 *  o Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.markdownwriterfx.preview;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.function.BiConsumer;
import javafx.concurrent.Worker.State;
import javafx.scene.control.IndexRange;
import javafx.scene.web.WebView;
import org.markdownwriterfx.options.Options;
import org.markdownwriterfx.preview.MarkdownPreviewPane.PreviewContext;
import org.markdownwriterfx.preview.MarkdownPreviewPane.Renderer;
import org.markdownwriterfx.util.Utils;
import com.vladsch.flexmark.ast.FencedCodeBlock;
import com.vladsch.flexmark.util.ast.Node;
import com.vladsch.flexmark.util.ast.NodeVisitor;
import com.vladsch.flexmark.util.ast.Visitor;

/**
 * WebView preview.
 *
 * @author Karl Tauber
 */
class WebViewPreview
	implements MarkdownPreviewPane.Preview
{
	// maps a Prism language id to the language it depends on (loaded lazily from resources)
	private static final HashMap<String, String> prismLangDependenciesMap = new HashMap<>();

	private WebView webView;
	private final ArrayList<Runnable> runWhenLoadedList = new ArrayList<>();
	// last known scroll position, restored after each loadContent() replaces the document
	private int lastScrollX;
	private int lastScrollY;
	private IndexRange lastEditorSelection;

	WebViewPreview() {
	}

	/** Lazily creates the WebView and wires up load-completion handling. */
	private void createNodes() {
		webView = new WebView();
		webView.setFocusTraversable(false);

		// disable WebView default drag and drop handler to allow dropping markdown files
		webView.setOnDragEntered(null);
		webView.setOnDragExited(null);
		webView.setOnDragOver(null);
		webView.setOnDragDropped(null);
		webView.setOnDragDetected(null);
		webView.setOnDragDone(null);

		webView.getEngine().getLoadWorker().stateProperty().addListener((ob,o,n) -> {
			if (n == State.SUCCEEDED && !runWhenLoadedList.isEmpty()) {
				// copy first: a runnable may itself schedule further work
				ArrayList<Runnable> runnables = new ArrayList<>(runWhenLoadedList);
				runWhenLoadedList.clear();

				for (Runnable runnable : runnables)
					runnable.run();
			}
		});
	}

	/** Runs {@code runnable} now, or defers it until the current page load succeeds. */
	private void runWhenLoaded(Runnable runnable) {
		if (webView.getEngine().getLoadWorker().isRunning())
			runWhenLoadedList.add(runnable);
		else
			runnable.run();
	}

	@Override
	public javafx.scene.Node getNode() {
		if (webView == null)
			createNodes();
		return webView;
	}

	@Override
	public void update(PreviewContext context, Renderer renderer) {
		if (!webView.getEngine().getLoadWorker().isRunning()) {
			// get window.scrollX and window.scrollY from web engine,
			// but only if no worker is running (in this case the result would be zero)
			Object scrollXobj = webView.getEngine().executeScript("window.scrollX");
			Object scrollYobj = webView.getEngine().executeScript("window.scrollY");
			lastScrollX = (scrollXobj instanceof Number) ? ((Number)scrollXobj).intValue() : 0;
			lastScrollY = (scrollYobj instanceof Number) ? ((Number)scrollYobj).intValue() : 0;
		}
		lastEditorSelection = context.getEditorSelection();

		// NOTE(review): path.getParent() may be null for a root path — presumably editor
		// documents always have a parent directory; confirm against callers.
		Path path = context.getPath();
		String base = (path != null)
			? ("<base href=\"" + path.getParent().toUri().toString() + "\">\n")
			: "";
		String scrollScript = (lastScrollX > 0 || lastScrollY > 0)
			? (" onload='window.scrollTo("+lastScrollX+", "+lastScrollY+");'")
			: "";
		webView.getEngine().loadContent(
			"<!DOCTYPE html>\n"
			+ "<html>\n"
			+ "<head>\n"
			+ "<link rel=\"stylesheet\" href=\"" + getClass().getResource("markdownpad-github.css") + "\">\n"
			+ "<style>\n"
			+ Utils.defaultIfEmpty(Options.getAdditionalCSS(), "") + "\n"
			+ ".mwfx-editor-selection {\n"
			+ " border-right: 5px solid #f47806;\n"
			+ " margin-right: -5px;\n"
			+ " background-color: rgb(253, 247, 241);\n"
			+ "}\n"
			+ "</style>\n"
			+ "<script src=\"" + getClass().getResource("preview.js") + "\"></script>\n"
			+ prismSyntaxHighlighting(context.getMarkdownAST())
			+ base
			+ "</head>\n"
			+ "<body" + scrollScript + ">\n"
			+ renderer.getHtml(false)
			+ "<script>" + highlightNodesAt(lastEditorSelection) + "</script>\n"
			+ "</body>\n"
			+ "</html>");
	}

	@Override
	public void scrollY(PreviewContext context, double value) {
		runWhenLoaded(() -> {
			webView.getEngine().executeScript("preview.scrollTo(" + value + ");");
		});
	}

	@Override
	public void editorSelectionChanged(PreviewContext context, IndexRange range) {
		if (range.equals(lastEditorSelection))
			return;
		lastEditorSelection = range;

		runWhenLoaded(() -> {
			webView.getEngine().executeScript(highlightNodesAt(range));
		});
	}

	/** Builds the JS call that highlights preview nodes at the selection end offset. */
	private String highlightNodesAt(IndexRange range) {
		return "preview.highlightNodesAt(" + range.getEnd() + ")";
	}

	/**
	 * Builds the &lt;link&gt;/&lt;script&gt; tags needed to Prism-highlight the
	 * fenced code block languages actually used in the given markdown AST.
	 */
	private String prismSyntaxHighlighting(Node astRoot) {
		initPrismLangDependencies();

		// check whether markdown contains fenced code blocks and remember languages
		ArrayList<String> languages = new ArrayList<>();
		NodeVisitor visitor = new NodeVisitor(Collections.emptyList()) {
			@Override
			protected void processNode(Node node, boolean withChildren, BiConsumer<Node, Visitor<Node>> processor) {
				if (node instanceof FencedCodeBlock) {
					String language = ((FencedCodeBlock)node).getInfo().toString();
					// BUGFIX: was "language.contains(language)", which is always true and
					// added a duplicate entry per fenced block; intent is to deduplicate.
					if (!languages.contains(language))
						languages.add(language);

					// dependencies
					while ((language = prismLangDependenciesMap.get(language)) != null) {
						// BUGFIX: same always-true tautology as above
						if (!languages.contains(language))
							languages.add(0, language); // dependencies must be loaded first
					}
				} else
					visitChildren(node);
			}
		};
		visitor.visit(astRoot);

		if (languages.isEmpty())
			return "";

		// build HTML (only load used languages)
		// Note: not using Prism Autoloader plugin because it lazy loads/highlights, which causes flicker
		//       during fast typing; it also does not work with "alias" languages (e.g. js, html, xml, svg, ...)
		StringBuilder buf = new StringBuilder();
		buf.append("<link rel=\"stylesheet\" href=\"").append(getClass().getResource("prism/prism.css")).append("\">\n");
		buf.append("<script src=\"").append(getClass().getResource("prism/prism-core.min.js")).append("\"></script>\n");
		for (String language : languages) {
			URL url = getClass().getResource("prism/components/prism-"+language+".min.js");
			if (url != null)
				buf.append("<script src=\"").append(url).append("\"></script>\n");
		}
		return buf.toString();
	}

	/**
	 * load and parse prism/lang_dependencies.txt
	 */
	private static void initPrismLangDependencies() {
		if (!prismLangDependenciesMap.isEmpty())
			return;

		try (BufferedReader reader = new BufferedReader(new InputStreamReader(
				WebViewPreview.class.getResourceAsStream("prism/lang_dependencies.txt"))))
		{
			String line;
			while ((line = reader.readLine()) != null) {
				if (!line.startsWith("{"))
					continue;

				// multi-dependency entries ("lang": [..]) are not supported; neutralize them
				line = line.replaceAll("\\[([^\\]]+)\\]", "[not supported]");
				line = trimDelim(line, "{", "}");
				for (String str : line.split(",")) {
					String[] parts = str.split(":");
					if (parts[1].startsWith("["))
						continue; // not supported

					String key = trimDelim(parts[0], "\"", "\"");
					String value = trimDelim(parts[1], "\"", "\"");
					prismLangDependenciesMap.put(key, value);
				}
			}
		} catch (IOException e) {
			// ignore: missing dependency data only disables dependency pre-loading
		}
	}

	/** Strips the given leading/trailing delimiters; throws if they are absent. */
	private static String trimDelim(String str, String leadingDelim, String trailingDelim) {
		str = str.trim();
		if (!str.startsWith(leadingDelim) || !str.endsWith(trailingDelim))
			throw new IllegalArgumentException(str);
		return str.substring(leadingDelim.length(), str.length() - trailingDelim.length());
	}
}
/*
 * Copyright (C) 2009 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect.testing;

import static com.google.common.collect.testing.testers.ListListIteratorTester.getListIteratorFullyModifiableMethod;
import static com.google.common.collect.testing.testers.ListSubListTester.getSubListOriginalListSetAffectsSubListLargeListMethod;
import static com.google.common.collect.testing.testers.ListSubListTester.getSubListOriginalListSetAffectsSubListMethod;
import static com.google.common.collect.testing.testers.ListSubListTester.getSubListSubListRemoveAffectsOriginalLargeListMethod;

import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.testing.features.CollectionFeature;
import com.google.common.collect.testing.features.CollectionSize;
import com.google.common.collect.testing.features.ListFeature;
import java.lang.reflect.Method;
import java.util.AbstractList;
import java.util.AbstractSequentialList;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
import java.util.Vector;
import java.util.concurrent.CopyOnWriteArrayList;
import junit.framework.Test;
import junit.framework.TestSuite;

/**
 * Generates a test suite covering the {@link List} implementations in the
 * {@link java.util} package. Can be subclassed to specify tests that should
 * be suppressed.
 *
 * @author Kevin Bourrillion
 */
@GwtIncompatible
public class TestsForListsInJavaUtil {
  public static Test suite() {
    return new TestsForListsInJavaUtil().allTests();
  }

  /** Builds the combined suite; one sub-suite per java.util List implementation. */
  public Test allTests() {
    TestSuite suite = new TestSuite("java.util Lists");
    suite.addTest(testsForEmptyList());
    suite.addTest(testsForSingletonList());
    suite.addTest(testsForArraysAsList());
    suite.addTest(testsForArrayList());
    suite.addTest(testsForLinkedList());
    suite.addTest(testsForCopyOnWriteArrayList());
    suite.addTest(testsForUnmodifiableList());
    suite.addTest(testsForCheckedList());
    suite.addTest(testsForAbstractList());
    suite.addTest(testsForAbstractSequentialList());
    suite.addTest(testsForVector());
    return suite;
  }

  // Each suppressFor* hook below may be overridden by subclasses to exclude
  // individual tester methods from the corresponding sub-suite.

  protected Collection<Method> suppressForEmptyList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForSingletonList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForArraysAsList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForArrayList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForLinkedList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForCopyOnWriteArrayList() {
    // CopyOnWriteArrayList's iterators/sublist views are snapshots and do not
    // support the full mutation contract these testers exercise.
    return Arrays.asList(
        getSubListOriginalListSetAffectsSubListMethod(),
        getSubListOriginalListSetAffectsSubListLargeListMethod(),
        getSubListSubListRemoveAffectsOriginalLargeListMethod(),
        getListIteratorFullyModifiableMethod());
  }

  protected Collection<Method> suppressForUnmodifiableList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForCheckedList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForAbstractList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForAbstractSequentialList() {
    return Collections.emptySet();
  }

  protected Collection<Method> suppressForVector() {
    return Collections.emptySet();
  }

  public Test testsForEmptyList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                return Collections.emptyList();
              }
            })
        .named("emptyList")
        .withFeatures(CollectionFeature.SERIALIZABLE, CollectionSize.ZERO)
        .suppressing(suppressForEmptyList())
        .createTestSuite();
  }

  public Test testsForSingletonList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                return Collections.singletonList(elements[0]);
              }
            })
        .named("singletonList")
        .withFeatures(
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ONE)
        .suppressing(suppressForSingletonList())
        .createTestSuite();
  }

  public Test testsForArraysAsList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                return Arrays.asList(elements.clone());
              }
            })
        .named("Arrays.asList")
        .withFeatures(
            ListFeature.SUPPORTS_SET,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ANY)
        .suppressing(suppressForArraysAsList())
        .createTestSuite();
  }

  public Test testsForArrayList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                return new ArrayList<String>(MinimalCollection.of(elements));
              }
            })
        .named("ArrayList")
        .withFeatures(
            ListFeature.GENERAL_PURPOSE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
            CollectionSize.ANY)
        .suppressing(suppressForArrayList())
        .createTestSuite();
  }

  public Test testsForLinkedList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                return new LinkedList<String>(MinimalCollection.of(elements));
              }
            })
        .named("LinkedList")
        .withFeatures(
            ListFeature.GENERAL_PURPOSE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
            CollectionSize.ANY)
        .suppressing(suppressForLinkedList())
        .createTestSuite();
  }

  public Test testsForCopyOnWriteArrayList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                return new CopyOnWriteArrayList<String>(MinimalCollection.of(elements));
              }
            })
        .named("CopyOnWriteArrayList")
        .withFeatures(
            ListFeature.SUPPORTS_ADD_WITH_INDEX,
            ListFeature.SUPPORTS_REMOVE_WITH_INDEX,
            ListFeature.SUPPORTS_SET,
            CollectionFeature.SUPPORTS_ADD,
            CollectionFeature.SUPPORTS_REMOVE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ANY)
        .suppressing(suppressForCopyOnWriteArrayList())
        .createTestSuite();
  }

  public Test testsForUnmodifiableList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                List<String> innerList = new ArrayList<String>();
                Collections.addAll(innerList, elements);
                return Collections.unmodifiableList(innerList);
              }
            })
        .named("unmodifiableList/ArrayList")
        .withFeatures(
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ANY)
        .suppressing(suppressForUnmodifiableList())
        .createTestSuite();
  }

  public Test testsForCheckedList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              public List<String> create(String[] elements) {
                List<String> innerList = new ArrayList<String>();
                Collections.addAll(innerList, elements);
                return Collections.checkedList(innerList, String.class);
              }
            })
        .named("checkedList/ArrayList")
        .withFeatures(
            ListFeature.GENERAL_PURPOSE,
            CollectionFeature.SERIALIZABLE,
            CollectionFeature.RESTRICTS_ELEMENTS,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ANY)
        .suppressing(suppressForCheckedList())
        .createTestSuite();
  }

  public Test testsForAbstractList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              protected List<String> create(final String[] elements) {
                return new AbstractList<String>() {
                  @Override
                  public int size() {
                    return elements.length;
                  }

                  @Override
                  public String get(int index) {
                    return elements[index];
                  }
                };
              }
            })
        .named("AbstractList")
        .withFeatures(
            CollectionFeature.NONE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ANY)
        .suppressing(suppressForAbstractList())
        .createTestSuite();
  }

  public Test testsForAbstractSequentialList() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              protected List<String> create(final String[] elements) {
                // For this test we trust ArrayList works
                final List<String> list = new ArrayList<String>();
                Collections.addAll(list, elements);
                return new AbstractSequentialList<String>() {
                  @Override
                  public int size() {
                    return list.size();
                  }

                  @Override
                  public ListIterator<String> listIterator(int index) {
                    return list.listIterator(index);
                  }
                };
              }
            })
        .named("AbstractSequentialList")
        .withFeatures(
            ListFeature.GENERAL_PURPOSE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionSize.ANY)
        .suppressing(suppressForAbstractSequentialList())
        .createTestSuite();
  }

  private Test testsForVector() {
    return ListTestSuiteBuilder.using(
            new TestStringListGenerator() {
              @Override
              protected List<String> create(String[] elements) {
                return new Vector<String>(MinimalCollection.of(elements));
              }
            })
        .named("Vector")
        .withFeatures(
            ListFeature.GENERAL_PURPOSE,
            CollectionFeature.ALLOWS_NULL_VALUES,
            CollectionFeature.FAILS_FAST_ON_CONCURRENT_MODIFICATION,
            CollectionFeature.SERIALIZABLE,
            CollectionSize.ANY)
        // CONSISTENCY FIX: suppressForVector() existed but was never consulted,
        // making the subclass hook dead; every sibling suite honors its hook.
        .suppressing(suppressForVector())
        .createTestSuite();
  }
}
/* * Copyright (c) 2004-2022, University of Oslo * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * Neither the name of the HISP project nor the names of its contributors may * be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON * ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package org.hisp.dhis.datavalue;

import java.io.Serializable;
import java.util.Date;
import java.util.regex.Pattern;

import lombok.Builder;

import org.apache.commons.lang3.StringUtils;
import org.hisp.dhis.audit.AuditAttribute;
import org.hisp.dhis.audit.AuditScope;
import org.hisp.dhis.audit.Auditable;
import org.hisp.dhis.category.CategoryOptionCombo;
import org.hisp.dhis.common.BaseIdentifiableObject;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;

import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;

/**
 * A single aggregate data value. A value is identified by the combination of
 * its five dimensions: data element, period, organisation unit (source),
 * category option combo and attribute option combo; {@link #equals(Object)}
 * and {@link #hashCode()} are based on exactly these five properties.
 *
 * @author Kristian Nordal
 * @version $Id: DataValue.java 4638 2008-02-25 10:06:47Z larshelg $
 */
@Auditable( scope = AuditScope.AGGREGATE )
public class DataValue
    implements Serializable
{
    /**
     * Determines if a de-serialized file is compatible with this class.
     */
    private static final long serialVersionUID = 6269303850789110610L;

    // Matches "0", "0.", "0.0", "0.000", ... — used by isZero().
    private static final Pattern ZERO_PATTERN = Pattern.compile( "^0(\\.0*)?$" );

    public static final String TRUE = "true";

    public static final String FALSE = "false";

    // -------------------------------------------------------------------------
    // Persistent properties
    // -------------------------------------------------------------------------

    @AuditAttribute
    private DataElement dataElement;

    @AuditAttribute
    private Period period;

    // The organisation unit this value was captured for.
    @AuditAttribute
    private OrganisationUnit source;

    @AuditAttribute
    private CategoryOptionCombo categoryOptionCombo;

    @AuditAttribute
    private CategoryOptionCombo attributeOptionCombo;

    // The value payload, stored as a string regardless of the data
    // element's value type.
    @AuditAttribute
    private String value;

    private String storedBy;

    private Date created;

    private Date lastUpdated;

    private String comment;

    // Nullable on purpose: null is treated as "not followed up", see isFollowup().
    private Boolean followup;

    // Soft-delete marker.
    private boolean deleted;

    // -------------------------------------------------------------------------
    // Transient properties
    // -------------------------------------------------------------------------

    // True once auditValue has been captured; the capture happens at most once
    // per in-memory instance (see setValue()).
    private transient boolean auditValueIsSet = false;

    // True once setValue() has been called at least once on this instance.
    private transient boolean valueIsSet = false;

    // Snapshot of the value as it was before the first modification through
    // setValue(); not persisted.
    private transient String auditValue;

    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------

    public DataValue()
    {
        this.created = new Date();
        this.lastUpdated = new Date();
    }

    /**
     * @param dataElement the data element.
     * @param period the period.
     * @param source the organisation unit.
     * @param categoryOptionCombo the category option combo.
     * @param attributeOptionCombo the attribute option combo.
     */
    public DataValue( DataElement dataElement, Period period, OrganisationUnit source,
        CategoryOptionCombo categoryOptionCombo, CategoryOptionCombo attributeOptionCombo )
    {
        this.dataElement = dataElement;
        this.period = period;
        this.source = source;
        this.categoryOptionCombo = categoryOptionCombo;
        this.attributeOptionCombo = attributeOptionCombo;
        this.created = new Date();
        this.lastUpdated = new Date();
    }

    /**
     * @param dataElement the data element.
     * @param period the period.
     * @param source the organisation unit.
     * @param categoryOptionCombo the category option combo.
     * @param attributeOptionCombo the attribute option combo.
     * @param value the value.
     */
    public DataValue( DataElement dataElement, Period period, OrganisationUnit source,
        CategoryOptionCombo categoryOptionCombo, CategoryOptionCombo attributeOptionCombo, String value )
    {
        this.dataElement = dataElement;
        this.period = period;
        this.source = source;
        this.categoryOptionCombo = categoryOptionCombo;
        this.attributeOptionCombo = attributeOptionCombo;
        this.value = value;
        this.created = new Date();
        this.lastUpdated = new Date();
    }

    /**
     * Note: {@code created} is always set to "now"; only {@code lastUpdated}
     * is taken from the caller.
     *
     * @param dataElement the data element.
     * @param period the period.
     * @param source the organisation unit.
     * @param categoryOptionCombo the category option combo.
     * @param attributeOptionCombo the attribute option combo.
     * @param value the value.
     * @param storedBy the user that stored this data value.
     * @param lastUpdated the time of the last update to this data value.
     * @param comment the comment.
     */
    public DataValue( DataElement dataElement, Period period, OrganisationUnit source,
        CategoryOptionCombo categoryOptionCombo, CategoryOptionCombo attributeOptionCombo,
        String value, String storedBy, Date lastUpdated, String comment )
    {
        this.dataElement = dataElement;
        this.period = period;
        this.source = source;
        this.categoryOptionCombo = categoryOptionCombo;
        this.attributeOptionCombo = attributeOptionCombo;
        this.value = value;
        this.storedBy = storedBy;
        this.created = new Date();
        this.lastUpdated = lastUpdated;
        this.comment = comment;
    }

    /**
     * Full constructor; also the Lombok builder entry point.
     *
     * @param dataElement the data element.
     * @param period the period.
     * @param source the organisation unit.
     * @param categoryOptionCombo the category option combo.
     * @param attributeOptionCombo the attribute option combo.
     * @param value the value.
     * @param storedBy the user that stored this data value.
     * @param lastUpdated the time of the last update to this data value.
     * @param comment the comment.
     * @param followup whether followup is set.
     * @param deleted whether the value is deleted.
     */
    @Builder( toBuilder = true )
    public DataValue( DataElement dataElement, Period period, OrganisationUnit source,
        CategoryOptionCombo categoryOptionCombo, CategoryOptionCombo attributeOptionCombo,
        String value, String storedBy, Date lastUpdated, String comment, Boolean followup, boolean deleted )
    {
        this.dataElement = dataElement;
        this.period = period;
        this.source = source;
        this.categoryOptionCombo = categoryOptionCombo;
        this.attributeOptionCombo = attributeOptionCombo;
        this.value = value;
        this.storedBy = storedBy;
        this.created = new Date();
        this.lastUpdated = lastUpdated;
        this.comment = comment;
        this.followup = followup;
        this.deleted = deleted;
    }

    // -------------------------------------------------------------------------
    // Logic
    // -------------------------------------------------------------------------

    /**
     * Alias for getCategoryOptionCombo(). TODO remove.
     */
    public CategoryOptionCombo getOptionCombo()
    {
        return getCategoryOptionCombo();
    }

    /**
     * Indicates whether the value is a zero. Only true when the data element
     * is numeric and the value matches {@link #ZERO_PATTERN} (e.g. "0", "0.0").
     */
    public boolean isZero()
    {
        return dataElement != null && dataElement.getValueType().isNumeric()
            && value != null && ZERO_PATTERN.matcher( value ).find();
    }

    /**
     * Indicates whether the value is null. Note: both value and comment must
     * be blank (null, empty or whitespace-only) for this to return true.
     */
    public boolean isNullValue()
    {
        return StringUtils.trimToNull( value ) == null && StringUtils.trimToNull( comment ) == null;
    }

    // Null-safe followup check: a null flag counts as "not followed up".
    public boolean isFollowup()
    {
        return followup != null && followup;
    }

    public boolean hasComment()
    {
        return comment != null && !comment.isEmpty();
    }

    /**
     * Flips the followup flag; a previously unset (null) flag becomes true.
     */
    public void toggleFollowUp()
    {
        if ( this.followup == null )
        {
            this.followup = true;
        }
        else
        {
            this.followup = !this.followup;
        }
    }

    /**
     * Copies all non-identifying properties from the given value into this
     * one. The five dimension properties are left untouched, so this is a
     * "same cell, new content" merge. Note that created is overwritten with
     * the other value's created timestamp, and a null followup on the other
     * value becomes false here (via isFollowup()).
     */
    public void mergeWith( DataValue other )
    {
        this.value = other.getValue();
        this.storedBy = other.getStoredBy();
        this.created = other.getCreated();
        this.lastUpdated = other.getLastUpdated();
        this.comment = other.getComment();
        this.followup = other.isFollowup();
        this.deleted = other.isDeleted();
    }

    // -------------------------------------------------------------------------
    // hashCode and equals
    // -------------------------------------------------------------------------

    // Equality is defined by the five dimension properties only; value,
    // comment etc. are ignored.
    //
    // NOTE(review): getClass().isAssignableFrom(o.getClass()) accepts
    // subclasses of this object's class, which makes equals() asymmetric in
    // the presence of subclassing — presumably deliberate so that runtime
    // proxies/subclasses still compare equal to the base entity; TODO confirm
    // before "fixing" this to a strict getClass() comparison.
    @Override
    public boolean equals( Object o )
    {
        if ( this == o )
        {
            return true;
        }

        if ( o == null )
        {
            return false;
        }

        if ( !getClass().isAssignableFrom( o.getClass() ) )
        {
            return false;
        }

        final DataValue other = (DataValue) o;

        return dataElement.equals( other.getDataElement() )
            && period.equals( other.getPeriod() )
            && source.equals( other.getSource() )
            && categoryOptionCombo.equals( other.getCategoryOptionCombo() )
            && attributeOptionCombo.equals( other.getAttributeOptionCombo() );
    }

    // Must stay consistent with equals(): hashes exactly the five dimension
    // properties. NOTE(review): NPEs if any dimension is null — assumes a
    // fully-populated value; TODO confirm callers never hash partial values.
    @Override
    public int hashCode()
    {
        final int prime = 31;
        int result = 1;

        result = result * prime + dataElement.hashCode();
        result = result * prime + period.hashCode();
        result = result * prime + source.hashCode();
        result = result * prime + categoryOptionCombo.hashCode();
        result = result * prime + attributeOptionCombo.hashCode();

        return result;
    }

    @Override
    public String toString()
    {
        return "[Data element: " + dataElement.getUid() +
            ", period: " + period.getUid() +
            ", source: " + source.getUid() +
            ", category option combo: " + categoryOptionCombo.getUid() +
            ", attribute option combo: " + attributeOptionCombo.getUid() +
            ", value: " + value +
            ", deleted: " + deleted + "]";
    }

    // -------------------------------------------------------------------------
    // Getters and setters
    // -------------------------------------------------------------------------

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    public DataElement getDataElement()
    {
        return dataElement;
    }

    public void setDataElement( DataElement dataElement )
    {
        this.dataElement = dataElement;
    }

    @JsonProperty
    public Period getPeriod()
    {
        return period;
    }

    public void setPeriod( Period period )
    {
        this.period = period;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    public OrganisationUnit getSource()
    {
        return source;
    }

    public void setSource( OrganisationUnit source )
    {
        this.source = source;
    }

    @JsonProperty
    @JsonSerialize( contentAs = BaseIdentifiableObject.class )
    public CategoryOptionCombo getCategoryOptionCombo()
    {
        return categoryOptionCombo;
    }

    public void setCategoryOptionCombo( CategoryOptionCombo categoryOptionCombo )
    {
        this.categoryOptionCombo = categoryOptionCombo;
    }

    @JsonProperty
    public String getValue()
    {
        return value;
    }

    public CategoryOptionCombo getAttributeOptionCombo()
    {
        return attributeOptionCombo;
    }

    public void setAttributeOptionCombo( CategoryOptionCombo attributeOptionCombo )
    {
        this.attributeOptionCombo = attributeOptionCombo;
    }

    /**
     * Sets the value and, on the first invocation only, captures the audit
     * snapshot: if the value had previously been set through this setter, the
     * old value is recorded as auditValue; otherwise the incoming value
     * itself is recorded. Subsequent calls update value but never touch the
     * snapshot again.
     */
    public void setValue( String value )
    {
        if ( !auditValueIsSet )
        {
            this.auditValue = valueIsSet ? this.value : value;
            auditValueIsSet = true;
        }

        valueIsSet = true;

        this.value = value;
    }

    public String getStoredBy()
    {
        return storedBy;
    }

    public void setStoredBy( String storedBy )
    {
        this.storedBy = storedBy;
    }

    public Date getCreated()
    {
        return created;
    }

    public void setCreated( Date created )
    {
        this.created = created;
    }

    public Date getLastUpdated()
    {
        return lastUpdated;
    }

    public void setLastUpdated( Date lastUpdated )
    {
        this.lastUpdated = lastUpdated;
    }

    public String getComment()
    {
        return comment;
    }

    public void setComment( String comment )
    {
        this.comment = comment;
    }

    public void setFollowup( Boolean followup )
    {
        this.followup = followup;
    }

    public boolean isDeleted()
    {
        return deleted;
    }

    public void setDeleted( boolean deleted )
    {
        this.deleted = deleted;
    }

    public String getAuditValue()
    {
        return auditValue;
    }
}
// SortedArraySet.java, created Wed Mar  5  0:26:26 2003 by joewhaley
// Copyright (C) 2001-3 John Whaley <jwhaley@alum.mit.edu>
// Licensed under the terms of the GNU LGPL; see COPYING for details.
package jwutil.collections;

import java.io.Serializable;
import java.util.AbstractList;
import java.util.Collection;
import java.util.Comparator;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.NoSuchElementException;
import java.util.RandomAccess;
import java.util.Set;
import java.util.SortedSet;
import java.util.TreeSet;

/**
 * Set that is stored as a sorted list.  This allows linear-time merge operations,
 * among other things.
 *
 * Does not handle "null" elements.
 *
 * @author John Whaley <jwhaley@alum.mit.edu>
 * @version $Id: SortedArraySet.java,v 1.3 2005/05/05 19:39:34 joewhaley Exp $
 */
public class SortedArraySet extends AbstractList implements SortedSet, List, Cloneable, Serializable, RandomAccess {

    /**
     * Version ID for serialization.
     */
    private static final long serialVersionUID = 3258416123022947382L;

    /**
     * The array buffer into which the elements of the SortedArraySet are stored.
     * The capacity of the SortedArraySet is the length of this array buffer.
     */
    private transient Object elementData[];

    /**
     * The size of the SortedArraySet (the number of elements it contains).
     */
    private int size;

    /**
     * The comparator used for this SortedArraySet, or "null" if we are using
     * the default element ordering.
     */
    private final Comparator comparator;

    /**
     * Constructs an empty set with the specified initial capacity.
     *
     * @param initialCapacity the initial capacity of the set.
     * @exception IllegalArgumentException if the specified initial capacity
     *            is negative
     */
    private SortedArraySet(int initialCapacity) {
        super();
        if (initialCapacity < 0)
            throw new IllegalArgumentException("Illegal Capacity: "+initialCapacity);
        this.elementData = new Object[initialCapacity];
        this.size = 0;
        this.comparator = null;
    }

    /**
     * Constructs an empty set with an initial capacity of ten.
     */
    private SortedArraySet() {
        this(10);
    }

    // Capacity heuristic matches java.util.ArrayList: 110% of the source size.
    private SortedArraySet(Collection c) {
        this((int) Math.min((c.size()*110L)/100, Integer.MAX_VALUE));
        this.addAll(c);
    }

    private SortedArraySet(Comparator comparator) {
        this(10, comparator);
    }

    private SortedArraySet(int initialCapacity, Comparator comparator) {
        super();
        this.elementData = new Object[initialCapacity];
        this.size = 0;
        this.comparator = comparator;
    }

    /**
     * @see java.util.List#get(int)
     */
    public Object get(int arg0) {
        checkAgainstSize(arg0);
        return this.elementData[arg0];
    }

    // Upper-bound check only; a negative index surfaces as
    // ArrayIndexOutOfBoundsException (a subclass of IndexOutOfBoundsException).
    private void checkAgainstSize(int arg0) {
        if (arg0 >= this.size)
            throw new IndexOutOfBoundsException(arg0+" >= "+this.size);
    }

    /**
     * @see java.util.Collection#size()
     */
    public int size() {
        return this.size;
    }

    /**
     * @see java.util.SortedSet#comparator()
     */
    public Comparator comparator() {
        return comparator;
    }

    /**
     * @see java.util.SortedSet#subSet(java.lang.Object, java.lang.Object)
     */
    public SortedSet subSet(Object arg0, Object arg1) {
        return new SubSet(arg0, arg1);
    }

    /**
     * @see java.util.SortedSet#headSet(java.lang.Object)
     */
    public SortedSet headSet(Object arg0) {
        return new SubSet(arg0, true);
    }

    /**
     * @see java.util.SortedSet#tailSet(java.lang.Object)
     */
    public SortedSet tailSet(Object arg0) {
        return new SubSet(arg0, false);
    }

    /**
     * Returns the lowest element, i.e. the one at index 0.
     *
     * BUGFIX: previously returned elementData[size], which is past the end of
     * the occupied region (at best null, at worst an ArrayIndexOutOfBounds),
     * instead of the first element as required by SortedSet.first().
     *
     * @see java.util.SortedSet#first()
     */
    public Object first() {
        if (this.size == 0) throw new NoSuchElementException();
        return this.elementData[0];
    }

    /**
     * @see java.util.SortedSet#last()
     */
    public Object last() {
        try {
            return this.elementData[this.size-1];
        } catch (ArrayIndexOutOfBoundsException x) {
            throw new NoSuchElementException();
        }
    }

    // Compares with the explicit comparator if there is one, otherwise via
    // the elements' natural ordering.
    private int compare(Object o1, Object o2) {
        return (comparator==null ? ((Comparable)o1).compareTo(o2) : comparator.compare(o1, o2));
    }

    /**
     * Binary search: returns the index of o if present, otherwise the
     * insertion point that keeps the array sorted.
     */
    private int whereDoesItGo(Object o) {
        int lo = 0;
        int hi = this.size-1;
        if (hi < 0)
            return 0;
        int mid = hi >> 1;
        for (;;) {
            Object o2 = this.elementData[mid];
            int r = compare(o, o2);
            if (r < 0) {
                hi = mid - 1;
                if (lo > hi) return mid;
            } else if (r > 0) {
                lo = mid + 1;
                if (lo > hi) return lo;
            } else {
                return mid;
            }
            mid = ((hi - lo) >> 1) + lo;
        }
    }

    /**
     * Range view over the enclosing set, backed by [startIndex, endIndex).
     * The cached indices are lazily refreshed whenever the parent's modCount
     * changes (see checkModCount()).
     */
    private class SubSet extends AbstractList implements SortedSet, List, Serializable, RandomAccess {

        /**
         * Version ID for serialization.
         */
        private static final long serialVersionUID = 3690476935247770425L;

        private final Object from, to;
        private int startIndex, endIndex, parentModCount;
        private final boolean fromStart, toEnd;

        SubSet(Object from, Object to, int startIndex, int endIndex, boolean fromStart, boolean toEnd) {
            this.from = from;
            this.to = to;
            this.startIndex = startIndex;
            this.endIndex = endIndex;
            this.fromStart = fromStart;
            this.toEnd = toEnd;
            this.parentModCount = SortedArraySet.this.modCount;
        }

        SubSet(Object from, Object to) {
            if (compare(from, to) > 0) throw new IllegalArgumentException(from+" > "+to);
            this.from = from;
            this.to = to;
            this.fromStart = false;
            this.toEnd = false;
            this.parentModCount = SortedArraySet.this.modCount;
            updateIndices();
        }

        // headSet == true builds [begin, key); otherwise builds [key, end).
        SubSet(Object key, boolean headSet) {
            if (headSet) {
                fromStart = true; toEnd = false;
                from = null; to = key;
            } else {
                fromStart = false; toEnd = true;
                from = key; to = null;
            }
            this.parentModCount = SortedArraySet.this.modCount;
            updateIndices();
        }

        // Refresh cached indices if the parent set has been modified.
        private void checkModCount() {
            int mc = SortedArraySet.this.modCount;
            if (this.parentModCount != mc) {
                this.parentModCount = mc;
                updateIndices();
            }
        }

        private void updateIndices() {
            if (!this.fromStart) {
                this.startIndex = SortedArraySet.this.whereDoesItGo(from);
            }
            if (!this.toEnd) {
                this.endIndex = SortedArraySet.this.whereDoesItGo(to);
            } else {
                this.endIndex = SortedArraySet.this.size;
            }
        }

        // Throws if o falls outside this view's [from, to) range.
        private void checkBounds(Object o) {
            if (!this.fromStart && compare(this.from, o) > 0)
                throw new IllegalArgumentException(o+" < "+from);
            if (!this.toEnd && compare(this.to, o) <= 0)
                throw new IllegalArgumentException(o+" >= "+to);
        }

        // Like checkBounds, but also returns o's insertion index; the bound
        // on "to" is inclusive here (used by headSet/tailSet endpoints).
        private int checkWithinRange2(Object o) {
            int i = SortedArraySet.this.whereDoesItGo(o);
            if (!this.fromStart &&
                (i < this.startIndex ||
                 (i == this.startIndex && compare(this.from, o) > 0))) {
                throw new IllegalArgumentException(o+" < "+from);
            }
            if (!this.toEnd &&
                (i > this.endIndex ||
                 (i == this.endIndex && compare(o, this.to) > 0))) {
                throw new IllegalArgumentException(o+" > "+to);
            }
            return i;
        }

        public boolean add(Object o) {
            checkBounds(o);
            return SortedArraySet.this.add(o);
        }

        public boolean remove(Object o) {
            checkBounds(o);
            return SortedArraySet.this.remove(o);
        }

        /**
         * @see java.util.AbstractCollection#size()
         */
        public int size() {
            checkModCount();
            return this.endIndex - this.startIndex;
        }

        /**
         * @see java.util.SortedSet#comparator()
         */
        public Comparator comparator() {
            return SortedArraySet.this.comparator;
        }

        /**
         * BUGFIX: the final sanity check used to evaluate
         * compare(toElement, toElement), comparing an element with itself
         * (always 0), so an inverted range with equal insertion points was
         * never rejected.  It now compares fromElement against toElement as
         * intended.
         *
         * @see java.util.SortedSet#subSet(java.lang.Object, java.lang.Object)
         */
        public SortedSet subSet(Object fromElement, Object toElement) {
            checkModCount();
            int start_index = SortedArraySet.this.whereDoesItGo(fromElement);
            if (!this.fromStart &&
                (start_index < this.startIndex ||
                 (start_index == this.startIndex && compare(this.from, fromElement) > 0))) {
                throw new IllegalArgumentException(fromElement+" < "+from);
            }
            int end_index = SortedArraySet.this.whereDoesItGo(toElement);
            if (!this.toEnd &&
                (end_index > this.endIndex ||
                 (end_index == this.endIndex && compare(toElement, this.to) > 0))) {
                throw new IllegalArgumentException(toElement+" > "+to);
            }
            if (start_index > end_index ||
                (start_index == end_index && compare(fromElement, toElement) > 0)) {
                throw new IllegalArgumentException(fromElement+" > "+toElement);
            }
            return new SubSet(fromElement, toElement, start_index, end_index, false, false);
        }

        /**
         * @see java.util.SortedSet#headSet(java.lang.Object)
         */
        public SortedSet headSet(Object arg0) {
            checkModCount();
            int end_index = checkWithinRange2(arg0);
            return new SubSet(this.from, arg0, this.startIndex, end_index, this.fromStart, false);
        }

        /**
         * @see java.util.SortedSet#tailSet(java.lang.Object)
         */
        public SortedSet tailSet(Object arg0) {
            checkModCount();
            int start_index = checkWithinRange2(arg0);
            return new SubSet(arg0, this.to, start_index, this.endIndex, false, this.toEnd);
        }

        /**
         * @see java.util.SortedSet#first()
         */
        public Object first() {
            checkModCount();
            int start = this.startIndex;
            if (this.endIndex <= start) throw new NoSuchElementException();
            return SortedArraySet.this.elementData[start];
        }

        /**
         * @see java.util.SortedSet#last()
         */
        public Object last() {
            checkModCount();
            int end = this.endIndex;
            if (end <= this.startIndex) throw new NoSuchElementException();
            return SortedArraySet.this.elementData[end-1];
        }

        protected void removeRange(int arg0, int arg1) {
            checkModCount();
            int s = this.startIndex;
            SortedArraySet.this.removeRange(s + arg0, s + arg1);
        }

        // Translates a view-relative index to a parent index, validating both
        // ends of the range.
        private int checkIndex(int arg0) {
            if (arg0 < 0) throw new IndexOutOfBoundsException(arg0+" < 0");
            checkModCount();
            int start = this.startIndex;
            int i = start + arg0;
            int end = this.endIndex;
            if (i >= end)
                throw new IndexOutOfBoundsException(arg0+" >= "+(end-start));
            return i;
        }

        /**
         * @see java.util.AbstractList#get(int)
         */
        public Object get(int arg0) {
            int i = checkIndex(arg0);
            return SortedArraySet.this.elementData[i];
        }

        public void add(int arg0, Object arg1) {
            if (DISALLOW_DIRECT_MODIFICATIONS)
                throw new UnsupportedOperationException();
            if (arg0 < 0) throw new IndexOutOfBoundsException(arg0+" < 0");
            checkModCount();
            int start = this.startIndex;
            int i = start + arg0;
            int end = this.endIndex;
            if (i > end)
                throw new IndexOutOfBoundsException(arg0+" > "+(end-start));
            SortedArraySet.this.add(i, arg1);
        }

        /**
         * @see java.util.List#remove(int)
         */
        public Object remove(int arg0) {
            if (DISALLOW_DIRECT_MODIFICATIONS)
                throw new UnsupportedOperationException();
            int i = checkIndex(arg0);
            return SortedArraySet.this.remove(i);
        }

        /**
         * @see java.util.List#set(int, java.lang.Object)
         */
        public Object set(int arg0, Object arg1) {
            if (DISALLOW_DIRECT_MODIFICATIONS)
                throw new UnsupportedOperationException();
            int i = checkIndex(arg0);
            return SortedArraySet.this.set(i, arg1);
        }

        /**
         * @see java.util.List#indexOf(java.lang.Object)
         */
        public int indexOf(Object arg0) {
            int i = SortedArraySet.this.indexOf(arg0);
            checkModCount();
            int s = this.startIndex;
            if (i < s) return -1;
            if (i >= this.endIndex) return -1;
            return i - s;
        }

        /**
         * Same as indexOf: a set view holds no duplicates, so the last
         * occurrence is the only occurrence.
         *
         * BUGFIX: previously called this.lastIndexOf(arg0) — unconditional
         * self-recursion ending in a StackOverflowError.  Delegates to
         * indexOf, mirroring the enclosing class.
         *
         * @see java.util.List#lastIndexOf(java.lang.Object)
         */
        public int lastIndexOf(Object arg0) {
            return this.indexOf(arg0);
        }

        /**
         * @see java.util.List#subList(int, int)
         */
        public List subList(int arg0, int arg1) {
            Object lo = this.get(arg0);
            if (arg1 == this.size())
                return (List) this.tailSet(lo);
            Object hi = this.get(arg1);
            return (List) this.subSet(lo, hi);
        }

        /**
         * @see java.util.Collection#contains(java.lang.Object)
         */
        public boolean contains(Object arg0) {
            return this.indexOf(arg0) != -1;
        }
    }

    /**
     * Inserts arg0 at its sorted position, refusing duplicates.
     *
     * @see java.util.Collection#add(java.lang.Object)
     */
    public boolean add(Object arg0) {
        int i = whereDoesItGo(arg0);
        int s = this.size;
        if (i != s && elementData[i].equals(arg0))
            return false;
        ensureCapacity(s+1); // increments modCount
        System.arraycopy(this.elementData, i, this.elementData, i + 1, s - i);
        elementData[i] = arg0;
        this.size++;
        return true;
    }

    /**
     * Increases the capacity of this SortedArraySet instance, if
     * necessary, to ensure that it can hold at least the number of elements
     * specified by the minimum capacity argument.
     *
     * @param minCapacity the desired minimum capacity.
     */
    public void ensureCapacity(int minCapacity) {
        this.modCount++;
        int oldCapacity = elementData.length;
        if (minCapacity > oldCapacity) {
            Object oldData[] = elementData;
            // Grow by ~50%, like ArrayList.
            int newCapacity = ((oldCapacity * 3) >> 1) + 1;
            if (newCapacity < minCapacity)
                newCapacity = minCapacity;
            this.elementData = new Object[newCapacity];
            System.arraycopy(oldData, 0, this.elementData, 0, this.size);
        }
    }

    // When true, positional mutators (add(int,o), remove(int), set) throw,
    // so the sorted invariant cannot be broken from the List interface.
    public static final boolean DISALLOW_DIRECT_MODIFICATIONS = false;

    /**
     * Positional insert; caller is responsible for preserving sort order.
     *
     * @see java.util.List#add(int, java.lang.Object)
     */
    public void add(int arg0, Object arg1) {
        if (DISALLOW_DIRECT_MODIFICATIONS)
            throw new UnsupportedOperationException();
        int s = this.size;
        if (arg0 > s) {
            throw new IndexOutOfBoundsException("Index: "+arg0+", Size: "+s);
        }
        ensureCapacity(s+1); // increments modCount
        System.arraycopy(this.elementData, arg0, this.elementData, arg0 + 1, s - arg0);
        elementData[arg0] = arg1;
        this.size++;
    }

    /**
     * @see java.util.List#remove(int)
     */
    public Object remove(int arg0) {
        checkAgainstSize(arg0);
        Object oldValue = elementData[arg0];
        this.modCount++;
        int numMoved = this.size - arg0 - 1;
        if (numMoved > 0)
            System.arraycopy(elementData, arg0+1, elementData, arg0, numMoved);
        elementData[--this.size] = null; // for gc
        return oldValue;
    }

    /**
     * Positional overwrite; caller is responsible for preserving sort order.
     *
     * @see java.util.List#set(int, java.lang.Object)
     */
    public Object set(int arg0, Object arg1) {
        if (DISALLOW_DIRECT_MODIFICATIONS)
            throw new UnsupportedOperationException();
        checkAgainstSize(arg0);
        Object oldValue = elementData[arg0];
        elementData[arg0] = arg1;
        return oldValue;
    }

    // Set this to true if allocations are more expensive than arraycopy.
    public static final boolean REDUCE_ALLOCATIONS = false;

    public boolean addAll(Collection that) {
        if (that instanceof SortedSet) return addAll((SortedSet)that);
        return super.addAll(that);
    }

    /**
     * Linear-time merge with another sorted set, provided both use the same
     * ordering; otherwise falls back to element-by-element insertion.
     */
    public boolean addAll(SortedSet that) {
        if (this == that) return false;
        Comparator c1 = this.comparator, c2 = that.comparator();
        if (c1 != c2 && (c1 == null || !c1.equals(c2)))
            return super.addAll(that);
        int s1 = this.size, s2 = that.size();
        Object[] e1 = this.elementData;
        int newSize = Math.max(e1.length, s1 + s2);
        int i1, new_i1 = 0;
        Object[] new_e1;
        if (REDUCE_ALLOCATIONS && newSize <= e1.length) {
            // Merge in place: shift our elements up and merge downward.
            System.arraycopy(e1, 0, e1, s2, s1);
            new_e1 = e1;
            i1 = s2;
            s1 += s2;
        } else {
            new_e1 = new Object[newSize];
            this.elementData = new_e1;
            i1 = 0;
        }
        Iterator i2 = that.iterator();
        boolean change = false;
        for (;;) {
            if (!i2.hasNext()) {
                // "that" exhausted: copy our remaining tail.
                System.arraycopy(e1, i1, new_e1, new_i1, s1-i1);
                this.size = new_i1 + s1 - i1;
                return change;
            }
            Object o2 = i2.next();
            for (;;) {
                if (i1 == s1) {
                    // We are exhausted: append the rest of "that".
                    new_e1[new_i1++] = o2;
                    while (i2.hasNext())
                        new_e1[new_i1++] = i2.next();
                    this.size = new_i1;
                    return true;
                }
                Object o1 = e1[i1];
                int r = compare(o1, o2);
                if (r <= 0) {
                    new_e1[new_i1++] = o1;
                    if (REDUCE_ALLOCATIONS && new_e1 == (Object)e1)
                        e1[i1] = null;
                    i1++;
                    if (r == 0) break; // duplicate: o2 dropped
                } else {
                    new_e1[new_i1++] = o2;
                    change = true;
                    break;
                }
            }
        }
    }

    /**
     * @see java.util.List#indexOf(java.lang.Object)
     */
    public int indexOf(Object arg0) {
        int i = whereDoesItGo(arg0);
        if (i == size || !arg0.equals(this.elementData[i])) return -1;
        return i;
    }

    /**
     * Same as indexOf: a set holds no duplicates.
     *
     * @see java.util.List#lastIndexOf(java.lang.Object)
     */
    public int lastIndexOf(Object arg0) {
        return this.indexOf(arg0);
    }

    public boolean equals(Object arg0) {
        if (arg0 instanceof SortedSet)
            return equals((SortedSet)arg0);
        if (arg0 instanceof Collection)
            return equals((Collection)arg0);
        return false;
    }

    // Order-sensitive comparison against another sorted set.
    public boolean equals(SortedSet that) {
        if (this.size != that.size()) return false;
        Object[] e = this.elementData;
        int k = 0;
        for (Iterator i=that.iterator(); i.hasNext(); ) {
            if (!e[k++].equals(i.next())) return false;
        }
        return true;
    }

    // Order-insensitive comparison against an arbitrary collection.
    public boolean equals(Collection that) {
        if (this.size != that.size()) return false;
        for (Iterator i=that.iterator(); i.hasNext(); ) {
            if (!this.contains(i.next())) return false;
        }
        return true;
    }

    // Sum of element hash codes, per the java.util.Set hashCode contract.
    public int hashCode() {
        int hash = 0;
        for (int i=0; i<this.size; ++i) {
            hash += this.elementData[i].hashCode();
        }
        return hash;
    }

    /**
     * @see java.util.AbstractList#removeRange(int, int)
     */
    protected void removeRange(int arg0, int arg1) {
        this.modCount++;
        Object[] e = this.elementData;
        int s = this.size;
        System.arraycopy(e, arg1, e, arg0, s - arg1);
        int i = this.size = s - arg1 + arg0;
        for ( ; i < s; ++i)
            e[i] = null; // for gc
    }

    /**
     * @see java.util.List#subList(int, int)
     */
    public List subList(int arg0, int arg1) {
        if (arg0 >= size || arg1 > size)
            throw new IndexOutOfBoundsException();
        Object[] e = this.elementData;
        if (arg1 == this.size)
            return (List) this.tailSet(e[arg0]);
        return (List) this.subSet(e[arg0], e[arg1]);
    }

    /**
     * @see java.util.Collection#contains(java.lang.Object)
     */
    public boolean contains(Object arg0) {
        return this.indexOf(arg0) != -1;
    }

    /**
     * @see java.util.Collection#remove(java.lang.Object)
     */
    public boolean remove(Object arg0) {
        int i = this.indexOf(arg0);
        if (i == -1) return false;
        this.remove(i);
        return true;
    }

    public Object clone() {
        try {
            SortedArraySet s = (SortedArraySet) super.clone();
            int initialCapacity = this.elementData.length;
            s.elementData = new Object[initialCapacity];
            s.size = this.size;
            //s.comparator = comparator;
            System.arraycopy(this.elementData, 0, s.elementData, 0, this.size);
            return s;
        } catch (CloneNotSupportedException ignored) {
            // Cannot happen: this class implements Cloneable.
            // (Renamed from "_", which is a reserved identifier since Java 9.)
            return null;
        }
    }

    public static final SortedArraySetFactory FACTORY = new SortedArraySetFactory();

    public static class SortedArraySetFactory extends SetFactory {

        /**
         * Version ID for serialization.
         */
        private static final long serialVersionUID = 3258407318323475251L;

        private SortedArraySetFactory() {}

        public static final boolean TEST = false;
        public static final boolean PROFILE = false;

        public Set makeSet(Comparator c) {
            if (TEST)
                return new CollectionTestWrapper(new TreeSet(c), new SortedArraySet(c));
            if (PROFILE)
                return new InstrumentedSetWrapper(new SortedArraySet(c));
            return new SortedArraySet(c);
        }

        public Set makeSet(int capacity) {
            if (TEST)
                return new CollectionTestWrapper(new LinkedHashSet(capacity), new SortedArraySet(capacity));
            if (PROFILE)
                return new InstrumentedSetWrapper(new SortedArraySet(capacity));
            return new SortedArraySet(capacity);
        }

        public Set makeSet(Collection c) {
            if (TEST)
                return new CollectionTestWrapper(new LinkedHashSet(c), new SortedArraySet(c));
            if (PROFILE)
                return new InstrumentedSetWrapper(new SortedArraySet(c));
            return new SortedArraySet(c);
        }
    }
}
package com.linkedin.cubert.operator;

import com.linkedin.cubert.block.Block;
import com.linkedin.cubert.block.BlockProperties;
import com.linkedin.cubert.block.BlockSchema;
import org.apache.pig.data.Tuple;
import org.apache.pig.data.TupleFactory;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.node.ObjectNode;
import org.testng.Assert;
import org.testng.annotations.Test;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import static com.linkedin.cubert.utils.JsonUtils.createArrayNode;
import static com.linkedin.cubert.utils.JsonUtils.createObjectNode;

/**
 * Unit tests for the reduce-side join operator. Each test feeds a pre-sorted,
 * tagged stream of rows through RSJoinOperator and checks the joined output
 * row by row.
 *
 * @author Maneesh Varshney
 */
public class TestReduceJoin
{
    // @BeforeClass
    public void setUp() throws IOException
    {
    }

    /**
     * Runs one join scenario: the last input column is the "___tag" marker
     * (0 = right-side row, 2 = left-side row), the first numJoinKeys columns
     * are the join keys, and the operator's output is compared against the
     * expected rows in order.
     */
    private void test(Object[][] input,
                      int numJoinKeys,
                      boolean isOuter,
                      Object[][] expected) throws PreconditionException,
            IOException,
            InterruptedException
    {
        final int totalColumns = input[0].length;
        final int dataColumns = totalColumns - 1;

        // Name the data columns col_0..col_{n-2}; the final column is the tag.
        final String[] columnNames = new String[totalColumns];
        for (int c = 0; c < dataColumns; c++)
        {
            columnNames[c] = "col_" + c;
        }
        columnNames[dataColumns] = "___tag";

        // The join keys are the leading columns.
        final String[] joinKeys = new String[numJoinKeys];
        for (int k = 0; k < numJoinKeys; k++)
        {
            joinKeys[k] = "col_" + k;
        }

        final Block block = new ArrayBlock(Arrays.asList(input), columnNames);
        final TupleOperator operator = new RSJoinOperator();

        final BlockSchema inSchema = block.getProperties().getSchema();
        final PostCondition condition = new PostCondition(inSchema, null, null);

        final JsonNode json = createObjectNode("joinKeys", createArrayNode(joinKeys));
        if (isOuter)
        {
            ((ObjectNode) json).put("joinType", "LEFT OUTER");
        }

        final BlockSchema outSchema =
                operator.getPostCondition(makeMap("", condition), json).getSchema();
        final BlockProperties props =
                new BlockProperties("", outSchema, (BlockProperties) null);
        operator.setInput(makeMap("", block), json, props);

        // Reused scratch tuple for the expected row under comparison.
        final Tuple expectedTuple = TupleFactory.getInstance().newTuple(dataColumns);
        for (Object[] expectedRow : expected)
        {
            final Tuple actual = operator.next();
            Assert.assertNotNull(actual);
            for (int c = 0; c < dataColumns; c++)
            {
                expectedTuple.set(c, expectedRow[c]);
            }
            Assert.assertEquals(actual, expectedTuple);
        }

        // The operator must be exhausted once all expected rows are consumed.
        Assert.assertNull(operator.next());
    }

    /** Convenience overload for the inner-join case. */
    private void test(Object[][] input, int numJoinKeys, Object[][] expected) throws PreconditionException,
            IOException,
            InterruptedException
    {
        test(input, numJoinKeys, false, expected);
    }

    @Test
    public void testReduceInnerJoin() throws PreconditionException,
            IOException,
            InterruptedException
    {
        // simple case: one pivot only
        Object[][] rows =
                { { 1000, null, 100, 0 }, { 1000, 20, null, 2 }, { 1000, 21, null, 2 }};
        Object[][] expected = {{1000, 20, 100}, {1000, 21, 100}};
        test(rows, 1, expected);

        // one pivot; no right rows
        rows = new Object[][]{ { 1000, 20, null, 2 }, { 1000, 20, null, 2 }, { 1000, 21, null, 2 }};
        expected = new Object[][] {};
        test(rows, 1,expected);

        // one pivot; no left rows
        rows = new Object[][]{ { 1000, 20, null, 0 }};
        expected = new Object[][] {};
        test(rows, 1, expected);

        // two pivots; simple case
        rows = new Object[][]{ { 1000, null, 100, 0 }, { 1000, 20, null, 2 },
                { 1001, null, 101, 0 }, { 1001, 21, null, 2 }};
        expected = new Object[][]{{1000, 20, 100}, {1001, 21, 101}};
        test(rows, 1, expected);

        // multi pivots; no right rows
        rows = new Object[][]{ { 1000, 20, null, 2 }, { 1001, 20, null, 2 }, { 1002, 21, null, 2 }};
        expected = new Object[][] {};
        test(rows, 1, expected);

        // multi pivots; no left rows
        rows = new Object[][]{ { 1000, 20, null, 0 }, { 1001, 20, null, 0 }, { 1002, 20, null, 0 }};
        expected = new Object[][] {};
        test(rows, 1, expected);

        // multipivots: hasleft - noleft - hasleft
        rows = new Object[][]{ { 1000, null, 20, 0 }, { 1000, 100, null, 2 },
                { 1001, null, 20, 0 },
                { 1002, null, 21, 0 }, { 1002, 101, null, 2 }};
        expected = new Object[][] {{1000, 100, 20}, {1002, 101, 21}};
        test(rows, 1, expected);

        // multipivots: hasleft - noright - hasleft
        rows = new Object[][]{ { 1000, null, 20, 0 }, { 1000, 100, null, 2 },
                { 1001, null, 20, 2 },
                { 1002, null, 21, 0 }, { 1002, 101, null, 2 }};
        expected = new Object[][] {{1000, 100, 20}, {1002, 101, 21}};
        test(rows, 1, expected);
    }

    @Test
    public void testReduceLeftOuterJoin() throws PreconditionException,
            IOException,
            InterruptedException
    {
        // simple case: one pivot only
        Object[][] rows =
                { { 1000, null, 100, 0 }, { 1000, 20, null, 2 }, { 1000, 21, null, 2 }};
        Object[][] expected = {{1000, 20, 100}, {1000, 21, 100}};
        test(rows, 1, true, expected);

        // one pivot; no right rows
        rows = new Object[][]{ { 1000, 20, null, 2 }, { 1000, 20, null, 2 }, { 1000, 21, null, 2 }};
        expected = new Object[][] {{ 1000, 20, null }, { 1000, 20, null }, { 1000, 21, null }};
        test(rows, 1, true, expected);

        // one pivot; no left rows
        rows = new Object[][]{ { 1000, 20, null, 0 }};
        expected = new Object[][] {};
        test(rows, 1, true, expected);

        // two pivots; simple case
        rows = new Object[][]{ { 1000, null, 100, 0 }, { 1000, 20, null, 2 },
                { 1001, null, 101, 0 }, { 1001, 21, null, 2 }};
        expected = new Object[][]{{1000, 20, 100}, {1001, 21, 101}};
        test(rows, 1, true, expected);

        // multi pivots; no right rows
        rows = new Object[][]{ { 1000, 20, null, 2 }, { 1001, 20, null, 2 }, { 1002, 21, null, 2 }};
        expected = new Object[][] {{ 1000, 20, null }, { 1001, 20, null }, { 1002, 21, null }};
        test(rows, 1, true, expected);

        // multi pivots; no left rows
        rows = new Object[][]{ { 1000, 20, null, 0 }, { 1001, 20, null, 0 }, { 1002, 20, null, 0 }};
        expected = new Object[][] {};
        test(rows, 1, true, expected);

        // multipivots: hasleft - noleft - hasleft
        rows = new Object[][]{ { 1000, null, 20, 0 }, { 1000, 100, null, 2 },
                { 1001, null, 20, 0 },
                { 1002, null, 21, 0 }, { 1002, 101, null, 2 }};
        expected = new Object[][] {{1000, 100, 20}, {1002, 101, 21}};
        test(rows, 1, true, expected);

        // multipivots: hasleft - noright - hasleft
        rows = new Object[][]{ { 1000, null, 20, 0 }, { 1000, 100, null, 2 },
                { 1001, null, null, 2 },
                { 1002, null, 21, 0 }, { 1002, 101, null, 2 }};
        expected = new Object[][] {{1000, 100, 20}, { 1001, null, null }, {1002, 101, 21}};
        test(rows, 1, true, expected);
    }

    /** Builds a single-entry map; the operators here take named-input maps. */
    <T> Map<String, T> makeMap(String key, T value)
    {
        final Map<String, T> result = new HashMap<String, T>();
        result.put(key, value);
        return result;
    }

    // public static void main(String[] args) throws InterruptedException, IOException, PreconditionException
    // {
    // new TestReduceJoin().testReduceInnerJoin();
    //// char[] chars = new char[] {'\\', 'u', '0', '0', '1', 'a'};
    //// String s = new String(chars);
    //// System.out.println(s);
    //// System.out.println(s.length());
    //
    // }
}
package turtle;

import gui.MainWindow;

import java.io.PrintStream;
import java.util.HashMap;
import java.util.Scanner;

import turtle.util.Direction;
import turtle.util.Pen;
import turtle.util.Position;
import turtle.util.Rotation;

/**
 * Reads turtle-graphics commands from a {@link Scanner} and executes them against a
 * {@link Paper}, maintaining a registry of named turtles. Results and prompts are
 * reported through the {@link MainWindow} GUI; errors are printed to System.out.
 */
public class TurtleInterpreter {

    private static final int DEFAULT_PAPER_SIZE = 100;

    /** Source of command tokens. */
    private Scanner scanner;
    /** Registry of turtles, keyed by name. Reset whenever a new paper is created. */
    private HashMap<String, Turtle> turtles;
    /** The drawing surface; replaced by the "paper" command. */
    private Paper paper;
    /** Stream used by the "status" command output. */
    private PrintStream out;
    /** GUI facade used for prompts, rendering and shutdown. */
    private MainWindow gui;

    /**
     * Creates an interpreter with an empty turtle registry and a default-sized paper.
     *
     * @param scanner input source for command tokens
     * @param out     stream for status output
     * @param gui     GUI used for prompts, drawing and exit
     */
    public TurtleInterpreter(Scanner scanner, PrintStream out, MainWindow gui) {
        this.scanner = scanner;
        this.out = out;
        turtles = new HashMap<String, Turtle>();
        paper = new Paper(DEFAULT_PAPER_SIZE, DEFAULT_PAPER_SIZE);
        this.gui = gui;
    }

    /**
     * Processes all the commands from the input source until it is exhausted
     * or the "exit" command is read. Unknown tokens are reported and skipped.
     */
    public void process() {
        // print ready symbol
        gui.setTxtCommandsText(">>> ");

        while (scanner.hasNext()) {
            // read token and dispatch to the matching handler
            String tk = scanner.next();

            switch (tk) {
            case ">>>":
                // echoed prompt token from a pasted transcript — ignore
                break;
            case "help":
                System.out.println(Help.getHelp());
                break;
            case "paper":
                processPaper();
                break;
            case "new":
                processNew();
                break;
            case "pen":
                processPen();
                break;
            case "move":
                processMove();
                break;
            case "right":
                processRight();
                break;
            case "left":
                processLeft();
                break;
            case "show":
                processShow();
                break;
            case "status":
                processStatus();
                break;
            case "exit":
                // exits interpreter
                System.out.println("Exiting interpreter");
                gui.closeAndExit();
                return;
            default:
                // invalid command
                System.out.println("SKIPPING invalid command: " + tk);
                break;
            }

            // print ready symbol for the next command
            gui.setTxtCommandsText(">>> ");
        }
        System.out.println();
    }

    /**
     * Processes the paper command: reads WIDTH and HEIGHT, replaces the paper
     * and clears all existing turtles. Rejects negative dimensions.
     */
    private void processPaper() {
        int width = 0;
        // scan width
        try {
            width = scanInt("paper", "width");
        } catch (NumberFormatException x) {
            System.out.println("Invalid number format. Skipping command");
            return;
        }
        // error if width is negative
        if (width < 0) {
            System.out.println("Error in PAPER, parameter WIDTH "
                    + "cannot be negative.\nSkipping..");
            return;
        }

        int height = 0;
        // scan height
        try {
            height = scanInt("paper", "height");
        } catch (NumberFormatException x) {
            System.out.println("Invalid number format. Skipping command");
            return;
        }
        // error if height is negative
        if (height < 0) {
            System.out.println("Error in PAPER, parameter HEIGHT "
                    + "cannot be negative.\nSkipping..");
            // BUG FIX: previously fell through and created the paper anyway;
            // now aborts like the WIDTH branch above.
            return;
        }

        // create the paper
        paper = new Paper(width, height);
        // remove all turtles (they belonged to the old paper)
        turtles = new HashMap<String, Turtle>();
        gui.printDone();
    }

    /**
     * Processes the new command: reads TYPE and NAME and registers a freshly
     * created turtle, overwriting (with a warning) any turtle of the same name.
     */
    private void processNew() {
        // scan type
        String type = scanString("new", "type");
        // scan name
        String name = scanString("new", "name");

        // print warning when turtle name already exists
        if (turtles.containsKey(name)) {
            System.out.println("Turtle " + name.toUpperCase()
                    + " already exists. Overwriting " + "existing one.");
        }

        // create the turtle at position x y with pen UP
        turtles.put(name, createNewTurtle(type, name, ""));
    }

    /**
     * Creates a new turtle of the requested type, reading its coordinates
     * (or cluster size) from the input stream.
     *
     * @param type      the type of the turtle (cluster, normal, continuous,
     *                  bouncy, reflecting or wrapping)
     * @param name      the name of the turtle
     * @param superName the name of the parent turtle, or "" when top-level
     * @return the new created turtle, or null when TYPE is invalid
     */
    private Turtle createNewTurtle(String type, String name, String superName) {
        int x = 0;
        // scan x or cluster size; on bad input, report and retry recursively
        try {
            x = scanInt("new", "x");
        } catch (NumberFormatException m) {
            System.out.println("Invalid number format. Skipping command");
            return createNewTurtle(type, name, superName);
        }

        int y = 0;
        // clusters have no y coordinate — only a size (already read into x)
        if (!type.equals("cluster")) {
            try {
                y = scanInt("new", "y");
            } catch (NumberFormatException m) {
                System.out.println("Invalid number format. Skipping command");
                return createNewTurtle(type, name, superName);
            }
        }

        // if there is a parent cluster, "parentname." is prefixed to the name
        String fullName = name;
        if (superName.length() > 0) {
            fullName = superName + "." + name;
        }

        switch (type) {
        case "cluster":
            gui.printDone();
            return createTurtleCluster(x, name, superName);
        case "normal":
            gui.printDone();
            return new TurtleNormal(new Position(x, y), Direction.NORTH,
                    Pen.UP, paper, fullName);
        case "continuous":
            gui.printDone();
            return new TurtleContinuous(new Position(x, y), Direction.NORTH,
                    Pen.UP, paper, fullName);
        case "bouncy":
            gui.printDone();
            return new TurtleBouncy(new Position(x, y), Direction.NORTH,
                    Pen.UP, paper, fullName);
        case "reflecting":
            gui.printDone();
            return new TurtleReflecting(new Position(x, y), Direction.NORTH,
                    Pen.UP, paper, fullName);
        case "wrapping":
            gui.printDone();
            return new TurtleWrapping(new Position(x, y), Direction.NORTH,
                    Pen.UP, paper, fullName);
        default:
            System.out.println("Error in NEW, invalid TYPE " + type);
            return null;
        }
    }

    /**
     * Creates a cluster turtle by prompting for n member "new" sub-commands.
     *
     * @param n         the size of the cluster
     * @param name      the name of the cluster
     * @param superName the name of the parent cluster (if any)
     * @return the new created cluster, or null if "exit" was read mid-definition
     */
    private TurtleCluster createTurtleCluster(int n, String name,
            String superName) {
        // if there is a parent cluster, "parentname." is prefixed to the name
        String fullName = name;
        if (superName.length() > 0) {
            fullName = superName + "." + name;
        }

        TurtleCluster out = new TurtleCluster(turtles, fullName);
        for (int i = 0; i < n; i++) {
            gui.setTxtCommandsText(">>> (" + (i + 1) + ") >> ");

            // skip tokens until the next "new" (or bail out on "exit")
            String command;
            do {
                command = scanString("newCluster", "command");
                if (command.equals("exit")) {
                    return null;
                }
            } while (!command.equals("new"));

            // scan type
            String type = scanString("newCluster", "type");
            // scan name
            String tname = scanString("newCluster", "name");

            out.put(tname, createNewTurtle(type, tname, fullName));
        }
        return out;
    }

    /**
     * Processes the pen command: STATE is either a single brush character,
     * "up" or "down".
     */
    private void processPen() {
        // scan name
        String name = scanString("pen", "name");
        // scan state
        String stateString = scanString("pen", "state");

        // print error if turtle not found
        if (!turtles.containsKey(name)) {
            printErrorTurtleNotFound(name);
            return;
        }

        if (stateString.length() == 1) {
            // a one-character state changes the brush character
            turtles.get(name).changeBrush(stateString.charAt(0));
            gui.printDone();
        } else if (stateString.equals("up")) {
            turtles.get(name).liftPen();
            gui.printDone();
        } else if (stateString.equals("down")) {
            turtles.get(name).putPen();
            gui.printDone();
        } else {
            System.out.println("PEN command, invalid STATE: " + stateString);
            return;
        }
    }

    /**
     * Processes the move command: moves the named turtle DISTANCE steps
     * (non-negative) in its current direction.
     */
    private void processMove() {
        // scan name
        String name = scanString("move", "name");

        int distance = 0;
        // scan distance
        try {
            distance = scanInt("move", "distance");
        } catch (NumberFormatException x) {
            System.out.println("Invalid number format. Skipping command");
            return;
        }

        // error if distance is negative
        if (distance < 0) {
            System.out.println("Error in MOVE, parameter DISTANCE "
                    + "cannot be negative.\nSkipping command.");
            return;
        }

        // print error if turtle not found
        if (!turtles.containsKey(name)) {
            printErrorTurtleNotFound(name);
            return;
        }

        // perform move
        turtles.get(name).move(distance);
        gui.printDone();
    }

    /**
     * Processes the rotate right command; ANGLE must be a multiple of 45.
     */
    private void processRight() {
        // scan name
        String name = scanString("right", "name");

        int angle = 0;
        // scan angle
        try {
            angle = scanInt("right", "angle");
        } catch (NumberFormatException x) {
            System.out.println("Invalid number format. Skipping command");
            return;
        }

        // is angle correct?
        if ((angle % 45) != 0) {
            System.out.println("Error in RIGHT, parameter ANGLE must be "
                    + "a multiple of 45.");
            return;
        }

        // print error if turtle not found
        if (!turtles.containsKey(name)) {
            printErrorTurtleNotFound(name);
            return;
        }

        // perform rotation
        turtles.get(name).rotate(Rotation.RIGHT, angle);
        gui.printDone();
    }

    /**
     * Processes the rotate left command; ANGLE must be a multiple of 45.
     */
    private void processLeft() {
        // scan name
        String name = scanString("left", "name");

        int angle = 0;
        // scan angle
        try {
            angle = scanInt("left", "angle");
        } catch (NumberFormatException x) {
            System.out.println("Invalid number format. Skipping command");
            return;
        }

        // is angle correct?
        if ((angle % 45) != 0) {
            System.out.println("Error in LEFT, parameter ANGLE must be "
                    + "a multiple of 45.");
            return;
        }

        // print error if turtle not found
        if (!turtles.containsKey(name)) {
            printErrorTurtleNotFound(name);
            return;
        }

        // perform rotation
        turtles.get(name).rotate(Rotation.LEFT, angle);
        gui.printDone();
    }

    /**
     * Renders the paper in the GUI.
     */
    private void processShow() {
        gui.drawPaper(paper.toImage());
        // out.println(paper);
    }

    /**
     * Scans a string from input stream scanner. Prints an error message if
     * necessary.
     *
     * @param command Error message, which command called it
     * @param par     Error message, which parameter is being read
     * @return the token, or null when the stream is exhausted
     */
    private String scanString(String command, String par) {
        if (scanner.hasNext()) {
            return scanner.next();
        } else {
            System.out.println("Error in " + command.toUpperCase()
                    + ", needs parameter " + par.toUpperCase());
            return null;
        }
    }

    /**
     * Scans the next int from input stream.
     *
     * @param command Error message, which command is being called
     * @param par     Error message, which parameter is being read
     * @return the parsed value, or -1 when the stream is exhausted
     * @throws NumberFormatException when the next token is not an integer
     */
    private int scanInt(String command, String par)
            throws NumberFormatException {
        if (scanner.hasNext()) {
            return Integer.parseInt(scanner.next());
        } else {
            System.out.println("Error in " + command.toUpperCase()
                    + ", needs parameter " + par.toUpperCase());
            return -1;
        }
    }

    /**
     * Prints an error message for turtle not found problem.
     *
     * @param name the name that was looked up
     */
    private void printErrorTurtleNotFound(String name) {
        System.out.println("Turtle " + name.toUpperCase() + " not found.");
    }

    /**
     * Prints the paper status followed by each turtle's status and a count.
     */
    private void processStatus() {
        out.println(paper.status());
        int n = 0;
        for (Turtle t : turtles.values()) {
            out.println(t.status());
            n += 1;
        }
        out.println("There are " + n + " active turtles.");
    }
}
//
//  ========================================================================
//  Copyright (c) 1995-2013 Mort Bay Consulting Pty. Ltd.
//  ------------------------------------------------------------------------
//  All rights reserved. This program and the accompanying materials
//  are made available under the terms of the Eclipse Public License v1.0
//  and Apache License v2.0 which accompanies this distribution.
//
//      The Eclipse Public License is available at
//      http://www.eclipse.org/legal/epl-v10.html
//
//      The Apache License v2.0 is available at
//      http://www.opensource.org/licenses/apache2.0.php
//
//  You may elect to redistribute this code under either of these licenses.
//  ========================================================================
//

package org.eclipse.jetty.util.resource;

import java.io.File;
import java.io.IOException;
import java.net.JarURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;

import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;

/* ------------------------------------------------------------ */
/**
 * A Resource for an entry inside a jar file, addressed by a
 * "jar:file:...!/path" URL. Connection state (_jarFile, _entry, _list, ...)
 * is populated lazily by checkConnection()/newConnection() and cleared by
 * release(); methods here depend on that ordering.
 */
class JarFileResource extends JarResource
{
    private static final Logger LOG = Log.getLogger(JarFileResource.class);

    // Lazily-initialized connection state; all cleared by release().
    private JarFile _jarFile;        // open jar file backing this resource
    private File _file;              // the jar file on disk
    private String[] _list;          // cached directory listing
    private JarEntry _entry;         // matched entry for _path, if any
    private boolean _directory;      // true once the path is known to be a directory
    private String _jarUrl;          // "jar:...!/" prefix of _urlString
    private String _path;            // entry path after "!/", or null for the jar root
    private boolean _exists;         // cached positive result of exists()

    /* -------------------------------------------------------- */
    JarFileResource(URL url)
    {
        super(url);
    }

    /* ------------------------------------------------------------ */
    JarFileResource(URL url, boolean useCaches)
    {
        super(url, useCaches);
    }

    /* ------------------------------------------------------------ */
    /**
     * Drops all cached state. The JarFile itself is only closed when URL
     * caching is disabled — otherwise the JVM may still be sharing it.
     */
    @Override
    public synchronized void release()
    {
        _list=null;
        _entry=null;
        _file=null;
        //if the jvm is not doing url caching, then the JarFiles will not be cached either,
        //and so they are safe to close
        if (!getUseCaches())
        {
            if ( _jarFile != null )
            {
                try
                {
                    LOG.debug("Closing JarFile "+_jarFile.getName());
                    _jarFile.close();
                }
                catch ( IOException ioe )
                {
                    LOG.ignore(ioe);
                }
            }
        }
        _jarFile=null;
        super.release();
    }

    /* ------------------------------------------------------------ */
    /**
     * Ensures the jar connection is open; on failure clears all derived state
     * so later calls do not see stale values.
     *
     * @return true if the jar file is available
     */
    @Override
    protected boolean checkConnection()
    {
        try
        {
            super.checkConnection();
        }
        finally
        {
            if (_jarConnection==null)
            {
                _entry=null;
                _file=null;
                _jarFile=null;
                _list=null;
            }
        }
        return _jarFile!=null;
    }

    /* ------------------------------------------------------------ */
    /**
     * Opens a fresh connection and splits the URL at "!/" into the jar URL
     * prefix (_jarUrl) and the in-jar path (_path, null for the jar root).
     */
    @Override
    protected synchronized void newConnection()
        throws IOException
    {
        super.newConnection();

        _entry=null;
        _file=null;
        _jarFile=null;
        _list=null;

        int sep = _urlString.indexOf("!/");
        _jarUrl=_urlString.substring(0,sep+2);
        _path=_urlString.substring(sep+2);
        if (_path.length()==0)
            _path=null;
        _jarFile=_jarConnection.getJarFile();
        _file=new File(_jarFile.getName());
    }

    /* ------------------------------------------------------------ */
    /**
     * Returns true if the represented resource exists.
     * Side effects: may set _entry/_directory, and for a directory entry
     * appends a trailing "/" to _urlString/_url.
     */
    @Override
    public boolean exists()
    {
        if (_exists)
            return true;

        if (_urlString.endsWith("!/"))
        {
            // Root-of-jar URL: strip "jar:" and "!/" and test the plain file URL.
            String file_url=_urlString.substring(4,_urlString.length()-2);
            try{return newResource(file_url).exists();}
            catch(Exception e) {LOG.ignore(e); return false;}
        }

        boolean check=checkConnection();

        // Is this a root URL?
        if (_jarUrl!=null && _path==null)
        {
            // Then if it exists it is a directory
            _directory=check;
            return true;
        }
        else
        {
            // Can we find a file for it?
            JarFile jarFile=null;
            if (check)
                // Yes
                jarFile=_jarFile;
            else
            {
                // No - so lets look if the root entry exists.
                try
                {
                    JarURLConnection c=(JarURLConnection)((new URL(_jarUrl)).openConnection());
                    c.setUseCaches(getUseCaches());
                    jarFile=c.getJarFile();
                }
                catch(Exception e)
                {
                    LOG.ignore(e);
                }
            }

            // Do we need to look more closely?
            if (jarFile!=null && _entry==null && !_directory)
            {
                // OK - we have a JarFile, lets look at the entries for our path
                Enumeration<JarEntry> e=jarFile.entries();
                while(e.hasMoreElements())
                {
                    JarEntry entry = (JarEntry) e.nextElement();
                    String name=entry.getName().replace('\\','/');

                    // Do we have a match
                    if (name.equals(_path))
                    {
                        _entry=entry;
                        // Is the match a directory
                        _directory=_path.endsWith("/");
                        break;
                    }
                    else if (_path.endsWith("/"))
                    {
                        if (name.startsWith(_path))
                        {
                            _directory=true;
                            break;
                        }
                    }
                    else if (name.startsWith(_path) && name.length()>_path.length() && name.charAt(_path.length())=='/')
                    {
                        // _path matches a directory entry that has no explicit
                        // jar entry of its own (only children are listed)
                        _directory=true;
                        break;
                    }
                }

                // Canonicalize directory URLs to end with "/"
                if (_directory && !_urlString.endsWith("/"))
                {
                    _urlString+="/";
                    try
                    {
                        _url=new URL(_urlString);
                    }
                    catch(MalformedURLException ex)
                    {
                        LOG.warn(ex);
                    }
                }
            }
        }

        _exists= ( _directory || _entry!=null);
        return _exists;
    }

    /* ------------------------------------------------------------ */
    /**
     * Returns true if the represented resource is a container/directory.
     * If the resource is not a file, resources ending with "/" are
     * considered directories.
     */
    @Override
    public boolean isDirectory()
    {
        return _urlString.endsWith("/") || exists() && _directory;
    }

    /* ------------------------------------------------------------ */
    /**
     * Returns the last modified time: the entry's time when the path resolves
     * to an entry, otherwise the jar file's own mtime; -1 when unavailable.
     */
    @Override
    public long lastModified()
    {
        if (checkConnection() && _file!=null)
        {
            if (exists() && _entry!=null)
                return _entry.getTime();
            return _file.lastModified();
        }
        return -1;
    }

    /* ------------------------------------------------------------ */
    /**
     * Lists the immediate children of this directory resource, retrying once
     * after release() if the underlying JarFile was closed externally.
     */
    @Override
    public synchronized String[] list()
    {
        if (isDirectory() && _list==null)
        {
            List<String> list = null;
            try
            {
                list = listEntries();
            }
            catch (Exception e)
            {
                //Sun's JarURLConnection impl for jar: protocol will close a JarFile in its connect() method if
                //useCaches == false (eg someone called URLConnection with defaultUseCaches==true).
                //As their sun.net.www.protocol.jar package caches JarFiles and/or connections, we can wind up in
                //the situation where the JarFile we have remembered in our _jarFile member has actually been closed
                //by other code.
                //So, do one retry to drop a connection and get a fresh JarFile
                LOG.warn("Retrying list:"+e);
                LOG.debug(e);
                release();
                list = listEntries();
            }

            if (list != null)
            {
                _list=new String[list.size()];
                list.toArray(_list);
            }
        }
        return _list;
    }

    /* ------------------------------------------------------------ */
    /**
     * Scans all jar entries and collects the names of direct children of this
     * resource's directory (subdirectory names keep a trailing "/").
     */
    private List<String> listEntries ()
    {
        checkConnection();

        ArrayList<String> list = new ArrayList<String>(32);
        JarFile jarFile=_jarFile;
        if(jarFile==null)
        {
            try
            {
                JarURLConnection jc=(JarURLConnection)((new URL(_jarUrl)).openConnection());
                jc.setUseCaches(getUseCaches());
                jarFile=jc.getJarFile();
            }
            catch(Exception e)
            {
                // NOTE(review): printStackTrace alongside LOG.ignore looks like
                // leftover debugging — consider removing the printStackTrace.
                e.printStackTrace();
                 LOG.ignore(e);
            }
        }

        Enumeration<JarEntry> e=jarFile.entries();
        String dir=_urlString.substring(_urlString.indexOf("!/")+2);
        while(e.hasMoreElements())
        {
            JarEntry entry = e.nextElement();
            String name=entry.getName().replace('\\','/');
            if(!name.startsWith(dir) || name.length()==dir.length())
            {
                continue;
            }
            String listName=name.substring(dir.length());
            int dash=listName.indexOf('/');
            if (dash>=0)
            {
                //when listing jar:file urls, you get back one
                //entry for the dir itself, which we ignore
                if (dash==0 && listName.length()==1)
                    continue;
                //when listing jar:file urls, all files and
                //subdirs have a leading /, which we remove
                if (dash==0)
                    listName=listName.substring(dash+1, listName.length());
                else
                    listName=listName.substring(0,dash+1);

                if (list.contains(listName))
                    continue;
            }

            list.add(listName);
        }

        return list;
    }

    /* ------------------------------------------------------------ */
    /**
     * Return the length of the resource; -1 for directories and when the
     * entry is unknown.
     */
    @Override
    public long length()
    {
        if (isDirectory())
            return -1;

        if (_entry!=null)
            return _entry.getSize();

        return -1;
    }

    /* ------------------------------------------------------------ */
    /** Encode according to this resource type.
     * File URIs are not encoded.
     * @param uri URI to encode.
     * @return The uri unchanged.
     */
    @Override
    public String encode(String uri)
    {
        return uri;
    }

    /**
     * Take a Resource that possibly might use URLConnection caching
     * and turn it into one that doesn't.
     * @param resource
     * @return the non-caching resource
     */
    public static Resource getNonCachingResource (Resource resource)
    {
        if (!(resource instanceof JarFileResource))
            return resource;

        JarFileResource oldResource = (JarFileResource)resource;

        JarFileResource newResource = new JarFileResource(oldResource.getURL(), false);
        return newResource;
    }

    /**
     * Check if this jar:file: resource is contained in the
     * named resource. Eg <code>jar:file:///a/b/c/foo.jar!/x.html</code> isContainedIn <code>file:///a/b/c/foo.jar</code>
     * @param resource
     * @return true if resource is contained in the named resource
     * @throws MalformedURLException
     */
    @Override
    public boolean isContainedIn (Resource resource)
    throws MalformedURLException
    {
        // Compare the bare jar-file part of this URL against the given resource.
        String string = _urlString;
        int index = string.indexOf("!/");
        if (index > 0)
            string = string.substring(0,index);
        if (string.startsWith("jar:"))
            string = string.substring(4);
        URL url = new URL(string);
        return url.sameFile(resource.getURL());
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.stress.settings;

import java.io.Serializable;
import java.util.*;

import org.apache.commons.cli.*;
import org.apache.commons.cli.Option;

/**
 * Translates the legacy (pre-rewrite) cassandra-stress command line into the
 * new multi-option command format, prints the translated command, and builds
 * the corresponding {@link StressSettings}.
 */
public class Legacy implements Serializable
{

    // command line options
    public static final Options availableOptions = new Options();

    private static final String SSL_TRUSTSTORE = "truststore";
    private static final String SSL_TRUSTSTORE_PW = "truststore-password";
    private static final String SSL_PROTOCOL = "ssl-protocol";
    private static final String SSL_ALGORITHM = "ssl-alg";
    private static final String SSL_STORE_TYPE = "store-type";
    private static final String SSL_CIPHER_SUITES = "ssl-ciphers";

    static
    {
        availableOptions.addOption("h",  "help",                 false,  "Show this help message and exit");
        availableOptions.addOption("n",  "num-keys",             true,   "Number of keys, default:1000000");
        availableOptions.addOption("F",  "num-different-keys",   true,   "Number of different keys (if < NUM-KEYS, the same key will re-used multiple times), default:NUM-KEYS");
        availableOptions.addOption("t",  "threadCount",          true,   "Number of threadCount to use, default:50");
        availableOptions.addOption("c",  "columns",              true,   "Number of columns per key, default:5");
        availableOptions.addOption("S",  "column-size",          true,   "Size of column values in bytes, default:34");
        availableOptions.addOption("C",  "unique columns",       true,   "Max number of unique columns per key, default:50");
        availableOptions.addOption("RC", "unique rows",          true,   "Max number of unique rows, default:50");
        availableOptions.addOption("d",  "nodes",                true,   "Host nodes (comma separated), default:locahost");
        availableOptions.addOption("D",  "nodesfile",            true,   "File containing host nodes (one per line)");
        availableOptions.addOption("s",  "stdev",                true,   "Standard Deviation for gaussian read key generation, default:0.1");
        availableOptions.addOption("r",  "random",               false,  "Use random key generator for read key generation (STDEV will have no effect), default:false");
        availableOptions.addOption("f",  "file",                 true,   "Write output to given file");
        availableOptions.addOption("p",  "port",                 true,   "Thrift port, default:9160");
        availableOptions.addOption("o",  "operation",            true,   "Operation to perform (WRITE, READ, READWRITE, RANGE_SLICE, INDEXED_RANGE_SLICE, MULTI_GET, COUNTERWRITE, COUNTER_GET), default:WRITE");
        availableOptions.addOption("u",  "supercolumns",         true,   "Number of super columns per key, default:1");
        availableOptions.addOption("y",  "family-type",          true,   "Column Family Type (Super, Standard), default:Standard");
        availableOptions.addOption("K",  "keep-trying",          true,   "Retry on-going operation N times (in case of failure). positive integer, default:10");
        availableOptions.addOption("k",  "keep-going",           false,  "Ignore errors inserting or reading (when set, --keep-trying has no effect), default:false");
        availableOptions.addOption("i",  "progress-interval",    true,   "Progress Report Interval (seconds), default:10");
        availableOptions.addOption("g",  "keys-per-call",        true,   "Number of keys to get_range_slices or multiget per call, default:1000");
        availableOptions.addOption("l",  "replication-factor",   true,   "Replication Factor to use when creating needed column families, default:1");
        availableOptions.addOption("L",  "enable-cql",           false,  "Perform queries using CQL2 (Cassandra Query Language v 2.0.0)");
        availableOptions.addOption("L3", "enable-cql3",          false,  "Perform queries using CQL3 (Cassandra Query Language v 3.0.0)");
        availableOptions.addOption("b",  "enable-native-protocol",  false,  "Use the binary native protocol (only work along with -L3)");
        availableOptions.addOption("P",  "use-prepared-statements", false, "Perform queries using prepared statements (only applicable to CQL).");
        availableOptions.addOption("e",  "consistency-level",    true,   "Consistency Level to use (ONE, QUORUM, LOCAL_QUORUM, EACH_QUORUM, ALL, ANY), default:ONE");
        availableOptions.addOption("x",  "create-index",         true,   "Type of index to create on needed column families (KEYS)");
        availableOptions.addOption("R",  "replication-strategy", true,   "Replication strategy to use (only on insert if keyspace does not exist), default:org.apache.cassandra.locator.SimpleStrategy");
        availableOptions.addOption("O",  "strategy-properties",  true,   "Replication strategy properties in the following format <dc_name>:<num>,<dc_name>:<num>,...");
        availableOptions.addOption("V",  "average-size-values",  false,  "Generate column values of average rather than specific size");
        availableOptions.addOption("T",  "send-to",              true,   "Send this as a request to the stress daemon at specified address.");
        availableOptions.addOption("I",  "compression",          true,   "Specify the compression to use for sstable, default:no compression");
        availableOptions.addOption("Q",  "query-names",          true,   "Comma-separated list of column names to retrieve from each row.");
        availableOptions.addOption("Z",  "compaction-strategy",  true,   "CompactionStrategy to use.");
        availableOptions.addOption("U",  "comparator",           true,   "Column Comparator to use. Currently supported types are: TimeUUIDType, AsciiType, UTF8Type.");
        availableOptions.addOption("tf", "transport-factory",    true,   "Fully-qualified TTransportFactory class name for creating a connection. Note: For Thrift over SSL, use org.apache.cassandra.stress.SSLTransportFactory.");
        availableOptions.addOption("ns", "no-statistics",        false,  "Turn off the aggegate statistics that is normally output after completion.");
        availableOptions.addOption("ts", SSL_TRUSTSTORE,         true,   "SSL: full path to truststore");
        // BUG FIX: description previously duplicated the truststore-path text.
        availableOptions.addOption("tspw", SSL_TRUSTSTORE_PW,    true,   "SSL: truststore password");
        availableOptions.addOption("prtcl", SSL_PROTOCOL,        true,   "SSL: connections protocol to use (default: TLS)");
        availableOptions.addOption("alg", SSL_ALGORITHM,         true,   "SSL: algorithm (default: SunX509)");
        availableOptions.addOption("st", SSL_STORE_TYPE,         true,   "SSL: type of store");
        availableOptions.addOption("ciphers", SSL_CIPHER_SUITES, true,   "SSL: comma-separated list of encryption suites to use");
        availableOptions.addOption("th", "throttle",             true,   "Throttle the total number of operations per second to a maximum amount.");
    }

    /**
     * Parses a legacy command line and converts it into the new settings.
     * Exits the process on parse failure or arbitrary trailing arguments.
     *
     * @param arguments the raw legacy command line
     * @return the equivalent StressSettings
     */
    public static StressSettings build(String[] arguments)
    {
        CommandLineParser parser = new PosixParser();

        final Converter r = new Converter();
        try
        {
            CommandLine cmd = parser.parse(availableOptions, arguments);

            if (cmd.getArgs().length > 0)
            {
                // BUG FIX: previously wrapped the List in Arrays.asList(),
                // printing a nested "[[...]]" list.
                System.err.println("Application does not allow arbitrary arguments: " + cmd.getArgList());
                System.exit(1);
            }

            if (cmd.hasOption("h"))
                printHelpMessage();

            if (cmd.hasOption("C"))
                System.out.println("Ignoring deprecated option -C");

            // command selection; the command's own sub-options follow
            if (cmd.hasOption("o"))
                r.setCommand(cmd.getOptionValue("o").toLowerCase());
            else
                r.setCommand("insert");

            if (cmd.hasOption("K"))
                r.add("command", "tries=" + cmd.getOptionValue("K"));

            if (cmd.hasOption("k"))
            {
                if (!cmd.hasOption("K"))
                    r.add("command", "retry=1");
                r.add("command", "ignore_errors");
            }

            if (cmd.hasOption("g"))
                r.add("command", "at-once=" + cmd.getOptionValue("g"));

            if (cmd.hasOption("e"))
                r.add("command", "cl=" + cmd.getOptionValue("e"));

            String numKeys;
            if (cmd.hasOption("n"))
                numKeys = cmd.getOptionValue("n");
            else
                numKeys = "1000000";
            r.add("command", "n=" + numKeys);

            String uniqueKeys;
            if (cmd.hasOption("F"))
                uniqueKeys = cmd.getOptionValue("F");
            else
                uniqueKeys = numKeys;

            // key distribution: population for writes, uniform or gaussian for reads
            if (r.opts.containsKey("write") || r.opts.containsKey("counterwrite"))
            {
                if (!uniqueKeys.equals(numKeys))
                    r.add("-key", "populate=1.." + uniqueKeys);
            }
            else if (cmd.hasOption("r"))
            {
                r.add("-key", "dist=uniform(1.." + uniqueKeys + ")");
            }
            else
            {
                if (!cmd.hasOption("s"))
                    r.add("-key", "dist=gauss(1.." + uniqueKeys + ",5)");
                else
                    r.add("-key", String.format("dist=gauss(1..%s,%.2f)", uniqueKeys, 0.5 / Float.parseFloat(cmd.getOptionValue("s"))));
            }

            String colCount;
            if (cmd.hasOption("c"))
                colCount = cmd.getOptionValue("c");
            else
                colCount = "5";

            String colSize;
            if (cmd.hasOption("S"))
                colSize = cmd.getOptionValue("S");
            else
                colSize = "34";

            r.add("-col", "n=fixed(" + colCount + ")");
            if (cmd.hasOption("V"))
            {
                // average-size values: uniform sizes centred on colSize, random data
                r.add("-col", "size=uniform(1.." + Integer.parseInt(colSize) * 2 + ")");
                r.add("-col", "data=rand()");
            }
            else
            {
                r.add("-col", "size=fixed(" + colSize + ")");
                r.add("-col", "data=repeat(1)");
            }

            if (cmd.hasOption("Q"))
                r.add("-col", "names=" + cmd.getOptionValue("Q"));

            if (cmd.hasOption("U"))
                r.add("-col", "comparator=" + cmd.getOptionValue("U"));

            if (cmd.hasOption("y") && cmd.getOptionValue("y").equals("Super"))
                r.add("-col", "super=" + (cmd.hasOption("u") ? cmd.getOptionValue("u") : "1"));

            if (cmd.hasOption("t"))
                r.add("-rate", "threads=" + cmd.getOptionValue("t"));
            else
                r.add("-rate", "threads=50");
            if (cmd.hasOption("th"))
                r.add("-rate", "limit=" + cmd.getOptionValue("th") + "/s");

            if (cmd.hasOption("f"))
                r.add("-log", "file=" + cmd.getOptionValue("f"));

            if (cmd.hasOption("p"))
                r.add("-port", cmd.getOptionValue("p"));

            if (cmd.hasOption("i"))
                r.add("-log", "interval=" + cmd.getOptionValue("i"));
            else
                r.add("-log", "interval=10");

            if (cmd.hasOption("x"))
                r.add("-schema", "index=" + cmd.getOptionValue("x"));

            if (cmd.hasOption("R") || cmd.hasOption("l") || cmd.hasOption("O"))
            {
                StringBuilder rep = new StringBuilder();
                if (cmd.hasOption("R"))
                    rep.append("strategy=" + cmd.getOptionValue("R"));
                if (cmd.hasOption("l"))
                {
                    if (rep.length() > 0)
                        rep.append(",");
                    rep.append("factor=" + cmd.getOptionValue("l"));
                }
                if (cmd.hasOption("O"))
                {
                    if (rep.length() > 0)
                        rep.append(",");
                    rep.append(cmd.getOptionValue("O").replace(':','='));
                }
                r.add("-schema", "replication(" + rep + ")");
            }

            if (cmd.hasOption("L"))
                r.add("-mode", cmd.hasOption("P") ? "prepared cql2" : "cql2");
            else if (cmd.hasOption("L3"))
                r.add("-mode", (cmd.hasOption("P") ? "prepared" : "") + (cmd.hasOption("b") ? "native" : "") + "cql3");
            else
                r.add("-mode", "thrift");

            if (cmd.hasOption("I"))
                r.add("-schema", "compression=" + cmd.getOptionValue("I"));

            if (cmd.hasOption("d"))
                r.add("-node", cmd.getOptionValue("d"));

            if (cmd.hasOption("D"))
                r.add("-node", "file=" + cmd.getOptionValue("D"));

            if (cmd.hasOption("send-to"))
                r.add("-send-to", cmd.getOptionValue("send-to"));

            if (cmd.hasOption("Z"))
                r.add("-schema", "compaction=" + cmd.getOptionValue("Z"));

            if (cmd.hasOption("ns"))
                r.add("-log", "no-summary");

            if (cmd.hasOption("tf"))
                r.add("-transport", "factory=" + cmd.getOptionValue("tf"));

            if(cmd.hasOption(SSL_TRUSTSTORE))
                r.add("-transport", "truststore=" + cmd.getOptionValue(SSL_TRUSTSTORE));

            if(cmd.hasOption(SSL_TRUSTSTORE_PW))
                r.add("-transport", "truststore-password=" + cmd.getOptionValue(SSL_TRUSTSTORE_PW));

            if(cmd.hasOption(SSL_PROTOCOL))
                r.add("-transport", "ssl-protocol=" + cmd.getOptionValue(SSL_PROTOCOL));

            if(cmd.hasOption(SSL_ALGORITHM))
                r.add("-transport", "ssl-alg=" + cmd.getOptionValue(SSL_ALGORITHM));

            if(cmd.hasOption(SSL_STORE_TYPE))
                r.add("-transport", "store-type=" + cmd.getOptionValue(SSL_STORE_TYPE));

            if(cmd.hasOption(SSL_CIPHER_SUITES))
                r.add("-transport", "ssl-ciphers=" + cmd.getOptionValue(SSL_CIPHER_SUITES));
        }
        catch (ParseException e)
        {
            printHelpMessage();
            System.exit(1);
        }

        r.printNewCommand();
        return r.get();
    }

    /**
     * Accumulates the translated command: a map of new-style option groups to
     * their sub-options, with "command" entries routed to the selected command.
     */
    private static final class Converter
    {
        /** Translated option groups in insertion order. */
        private Map<String, List<String>> opts = new LinkedHashMap<>();
        /** Sub-option list of the currently selected command. */
        List<String> command;

        public void add(String option, String suboption)
        {
            if (option.equals("command"))
            {
                command.add(suboption);
                return;
            }
            List<String> params = opts.get(option);
            if (params == null)
                opts.put(option, params = new ArrayList<>()); // was a raw ArrayList
            params.add(suboption);
        }

        /** Builds the StressSettings from the accumulated option groups. */
        StressSettings get()
        {
            Map<String, String[]> clArgs = new HashMap<>();
            for (Map.Entry<String, List<String>> e : opts.entrySet())
                clArgs.put(e.getKey(), e.getValue().toArray(new String[0]));
            return StressSettings.get(clArgs);
        }

        /** Normalizes and records the command name; must be called before add("command", ...). */
        void setCommand(String command)
        {
            command = Command.get(command).toString().toLowerCase();
            opts.put(command, this.command = new ArrayList<>());
        }

        /** Prints the equivalent new-style command line for the user's reference. */
        void printNewCommand()
        {
            StringBuilder sb = new StringBuilder("stress");
            for (Map.Entry<String, List<String>> e : opts.entrySet())
            {
                sb.append(" ");
                sb.append(e.getKey());
                for (String opt : e.getValue())
                {
                    sb.append(" ");
                    sb.append(opt);
                }
            }
            System.out.println("Running in legacy support mode. Translating command to: ");
            System.out.println(sb.toString());
        }
    }

    /**
     * Prints usage for every registered legacy option.
     */
    public static void printHelpMessage()
    {
        System.out.println("Usage: ./bin/cassandra-stress legacy [options]\n\nOptions:");
        System.out.println("THIS IS A LEGACY SUPPORT MODE");

        for(Object o : availableOptions.getOptions())
        {
            Option option = (Option) o;
            String upperCaseName = option.getLongOpt().toUpperCase();
            System.out.println(String.format("-%s%s, --%s%s%n\t\t%s%n", option.getOpt(), (option.hasArg()) ? " "+upperCaseName : "",
                    option.getLongOpt(), (option.hasArg()) ? "="+upperCaseName : "", option.getDescription()));
        }
    }

    /** @return a Runnable that prints the help message (for deferred help output). */
    public static Runnable helpPrinter()
    {
        return new Runnable()
        {
            @Override
            public void run()
            {
                printHelpMessage();
            }
        };
    }
}
/* * Licensed to The Apereo Foundation under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional * information regarding copyright ownership. * * The Apereo Foundation licenses this file to you under the Apache License, * Version 2.0, (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tle.mypages.service; import com.dytech.edge.common.Constants; import com.dytech.edge.common.FileInfo; import com.dytech.edge.common.ScriptContext; import com.dytech.edge.wizard.WizardException; import com.google.common.base.Function; import com.google.common.base.Throwables; import com.google.common.io.CharStreams; import com.google.inject.Provider; import com.tle.annotation.Nullable; import com.tle.beans.entity.Schema; import com.tle.beans.entity.itemdef.ItemDefinition; import com.tle.beans.item.Item; import com.tle.beans.item.ItemId; import com.tle.beans.item.ItemKey; import com.tle.beans.item.ItemPack; import com.tle.beans.item.ItemStatus; import com.tle.beans.item.attachments.Attachment; import com.tle.beans.item.attachments.AttachmentType; import com.tle.beans.item.attachments.Attachments; import com.tle.beans.item.attachments.FileAttachment; import com.tle.beans.item.attachments.HtmlAttachment; import com.tle.beans.item.attachments.ItemNavigationNode; import com.tle.beans.item.attachments.ItemNavigationTab; import com.tle.beans.item.attachments.UnmodifiableAttachments; import com.tle.common.Check; import com.tle.common.PathUtils; import 
com.tle.common.filesystem.handle.FileHandle; import com.tle.common.filesystem.handle.StagingFile; import com.tle.common.quota.exception.QuotaExceededException; import com.tle.common.workflow.Workflow; import com.tle.core.filesystem.ItemFile; import com.tle.core.guice.Bind; import com.tle.core.guice.BindFactory; import com.tle.core.hibernate.equella.service.InitialiserService; import com.tle.core.item.navigation.NavigationNodeHelper; import com.tle.core.item.operations.WorkflowOperation; import com.tle.core.item.service.ItemFileService; import com.tle.core.item.service.ItemLockingService; import com.tle.core.item.service.ItemService; import com.tle.core.item.standard.ItemOperationFactory; import com.tle.core.item.standard.operations.EditMetadataOperation; import com.tle.core.item.standard.operations.workflow.StatusOperation; import com.tle.core.quota.service.QuotaService; import com.tle.core.services.FileSystemService; import com.tle.mycontent.MyContentConstants; import com.tle.mycontent.service.MyContentFields; import com.tle.mycontent.workflow.operations.OperationFactory; import com.tle.mypages.MyPagesConstants; import com.tle.mypages.parse.ConvertHtmlService; import com.tle.mypages.parse.conversion.HrefConversion; import com.tle.mypages.parse.conversion.StagingConversion; import com.tle.mypages.web.MyPagesRepository; import com.tle.mypages.web.MyPagesState; import com.tle.mypages.web.event.SaveItemEvent; import com.tle.mypages.web.event.SavePageEvent; import com.tle.mypages.workflow.operation.UnusedContentCleanupOperation.UnusedContentCleanupOperationFactory; import com.tle.web.htmleditor.service.HtmlEditorService; import com.tle.web.sections.SectionInfo; import com.tle.web.sections.SectionTree; import com.tle.web.viewable.ViewableItem; import com.tle.web.viewable.impl.ViewableItemFactory; import com.tle.web.viewurl.ViewableResource; import com.tle.web.viewurl.attachments.AttachmentResourceService; import com.tle.web.viewurl.attachments.ItemNavigationService; 
import com.tle.web.wizard.WizardService;
import com.tle.web.wizard.WizardStateInterface;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.Reader;
import java.io.StringReader;
import java.io.Writer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import javax.inject.Inject;
import javax.inject.Singleton;

/**
 * Service implementation backing "My Pages" (wizard-session-scoped HTML page attachments on
 * scrapbook/personal items). It manages page attachments through a draft-folder lifecycle: pages
 * are edited in a per-attachment draft folder on the staging area and are either committed (moved
 * over the normal folder) or discarded, with embedded hrefs rewritten between preview/staging URLs
 * and item URLs via {@link ConvertHtmlService} / {@link StagingConversion}.
 *
 * @author aholland
 */
// TODO: make subclass of WizardService and override methods
@Bind(MyPagesService.class)
@Singleton
public class MyPagesServiceImpl implements MyPagesService {
  @Inject private ItemService itemService;
  @Inject private ItemLockingService lockService;
  @Inject private ItemFileService itemFileService;
  @Inject private ItemNavigationService itemNavService;
  @Inject private NavigationNodeHelper navHelper;
  @Inject private QuotaService quotaService;
  @Inject private InitialiserService initialiserService;
  @Inject private FileSystemService fileSystemService;
  @Inject private ViewableItemFactory viewableItemFactory;
  @Inject private ConvertHtmlService convertHtmlService;
  @Inject private AttachmentResourceService attachmentResourceService;
  @Inject private WizardService wizardService;
  @Inject private Provider<MyPagesRepository> repoProvider;
  @Inject private UnusedContentCleanupOperationFactory unusedFactory;
  @Inject private ItemOperationFactory workflowFactory;
  @Inject private OperationFactory myContentFactory;
  @Inject private com.tle.mypages.workflow.operation.OperationFactory myPagesFactory;
  @Inject private MyPagesStateFactory myPagesStateFactory;

  /** Builds the workflow operation used to edit a My Content item; pure factory delegation. */
  @Override
  public WorkflowOperation getEditOperation(
      MyContentFields fields,
      String filename,
      InputStream inputStream,
      boolean removeExistingAttachments,
      boolean useExistingAttachments) {
    return myPagesFactory.create(
        fields, filename, inputStream, removeExistingAttachments, useExistingAttachments);
  }

  /**
   * Retrieves the wizard state for {@code sessionId}, lazily populating the workflow status on a
   * {@link MyPagesState} the first time it is seen (via a status-only item operation).
   */
  @Override
  public WizardStateInterface getState(SectionInfo info, String sessionId) {
    final WizardStateInterface state = wizardService.getFromSession(info, sessionId);
    if (state instanceof MyPagesState) {
      MyPagesState myPagesState = (MyPagesState) state;
      if (myPagesState.getWorkflowStatus() == null) {
        StatusOperation statOp = myContentFactory.status();
        try {
          itemService.operation(state.getItemId(), new WorkflowOperation[] {statOp});
        } catch (Exception e) {
          throw new RuntimeException(e);
        }
        myPagesState.setWorkflowStatus(statOp.getStatus());
      }
    }
    return state;
  }

  /** Writes the (possibly modified) wizard state back into the session. */
  @Override
  public void updateSession(SectionInfo info, WizardStateInterface state) {
    wizardService.updateSession(info, state);
  }

  /** Drops the wizard state for {@code id} from the session without cancelling it. */
  @Override
  public void removeFromSession(SectionInfo info, String id) {
    wizardService.removeFromSession(info, id, false);
  }

  /**
   * Rewrites preview (staging) URLs inside every HTML page attachment of {@code state} so they
   * point at item URLs instead; each page file is read, converted and saved back in place.
   */
  @Override
  public void convertPreviewUrlsToItemUrls(final WizardStateInterface state) {
    // TODO: Make this only execute if we're actually editing the html
    final List<HtmlAttachment> htmls = state.getAttachments().getList(AttachmentType.HTML);
    for (HtmlAttachment htmlAttach : htmls) {
      final FileHandle staging = state.getFileHandle();
      final String pageFilename = htmlAttach.getFilename();
      String newHtml =
          forFile(
              staging,
              pageFilename,
              new Function<Reader, String>() {
                @Override
                @Nullable
                public String apply(@Nullable Reader input) {
                  final String wizid = state.getWizid();
                  // 'false' flags here select the staging->item direction of the conversion —
                  // NOTE(review): confirm against StagingConversion's constructor semantics.
                  return convertHtmlService.convert(
                      input,
                      false,
                      new StagingConversion(false, state.getItemId(), wizid, state.getStagingId())
                          .getConversions());
                }
              });
      saveHtml(staging, pageFilename, newHtml);
    }
  }

  /**
   * Commits all draft page edits in {@code state}: deleted pages have both draft and normal
   * folders removed; drafted pages have their HTML href-converted, saved, and their draft folder
   * moved over the normal folder. Finally the root draft folder is removed.
   *
   * @param leaveAsPreview when true, converted hrefs keep pointing at the preview (no destination
   *     item key is supplied to the conversion).
   */
  @Override
  public void commitDraft(
      final WizardStateInterface state, final boolean leaveAsPreview, SectionInfo info) {
    // copy stuff over from the draft folder:
    final StagingFile staging = (StagingFile) state.getFileHandle();
    final List<HtmlAttachment> htmls = state.getAttachments().getList(AttachmentType.HTML);
    for (HtmlAttachment htmlAttach : htmls) {
      final String draftFolder = htmlAttach.getDraftFolder();
      final String normalFolder = htmlAttach.getNormalFolder();
      if (htmlAttach.isDelete()) {
        state.getAttachments().removeAttachment(htmlAttach);
        fileSystemService.removeFile(staging, draftFolder);
        fileSystemService.removeFile(staging, normalFolder);
      } else if (htmlAttach.isDraft()) {
        final String draftFilename = htmlAttach.getFilename();
        String newHtml =
            forFile(
                staging,
                draftFilename,
                new Function<Reader, String>() {
                  @Override
                  @Nullable
                  public String apply(@Nullable Reader input) {
                    ItemKey destItem = (leaveAsPreview ? null : state.getItemId());
                    StagingConversion conversion =
                        new StagingConversion(
                            false,
                            destItem,
                            state.getWizid(),
                            state.getStagingId(),
                            draftFolder,
                            normalFolder);
                    return convertHtmlService.convert(input, false, conversion.getConversions());
                  }
                });
        saveHtml(staging, draftFilename, newHtml);
        // FIXME: this is dangerous, if move fails below we are in a
        // world of pain
        // (the normal folder has already been deleted, so a failed move loses the page)
        if (fileSystemService.fileExists(staging, normalFolder)) {
          fileSystemService.removeFile(staging, normalFolder);
        }
        fileSystemService.move(staging, draftFolder, normalFolder);
        htmlAttach.setDraft(false);
      }
      htmlAttach.setNew(false);
    }
    // delete the root draft folder
    fileSystemService.removeFile(staging, HtmlAttachment.DRAFT_FOLDER);
  }

  /**
   * Discards all draft edits in {@code state}: un-marks pending deletes, removes never-saved
   * (new) attachments entirely, and deletes any draft folders on the staging area.
   */
  @Override
  public void clearDraft(WizardStateInterface state) {
    final List<Attachment> newAttachments = new ArrayList<Attachment>();
    // if it is a new attachment (i.e. hasn't been saved before) get rid of
    // it
    // first.
    final List<Attachment> allAttachments = state.getAttachments().getList(AttachmentType.HTML);
    for (Attachment attachment : allAttachments) {
      final HtmlAttachment htmlAttach = (HtmlAttachment) attachment;
      if (htmlAttach.isDelete()) {
        htmlAttach.setDelete(false);
      } else if (htmlAttach.isNew()) {
        newAttachments.add(attachment);
      }
      if (htmlAttach.isDraft()) {
        final StagingFile staging = (StagingFile) state.getFileHandle();
        final String draftFolder = htmlAttach.getDraftFolder();
        if (fileSystemService.fileExists(staging, draftFolder)) {
          fileSystemService.removeFile(staging, draftFolder);
        }
        htmlAttach.setDraft(false);
      }
    }
    state.getAttachments().removeAll(newAttachments);
  }

  /** Session-id convenience overload; commits drafts leaving hrefs in preview form. */
  @Override
  public void commitDraft(SectionInfo info, String sessionId) {
    WizardStateInterface state = this.getState(info, sessionId);
    this.commitDraft(state, true, info);
  }

  /** Session-id convenience overload for {@link #clearDraft(WizardStateInterface)}. */
  @Override
  public void clearDraft(SectionInfo info, String sessionId) {
    WizardStateInterface state = this.getState(info, sessionId);
    this.clearDraft(state);
  }

  /**
   * Persists the item: commits drafts (pointing hrefs at the real item), enforces quota against
   * the staging area, then runs status + metadata-edit + save (with unused-content cleanup)
   * operations. Updates {@code state} with the resulting item pack.
   *
   * @throws RuntimeException wrapping any underlying failure (including quota/wizard errors).
   */
  protected MyPagesState doSave(MyPagesState state, SectionInfo info) {
    try {
      commitDraft(state, false, info);
      final String stagingId = state.getStagingId();
      if (!Check.isEmpty(stagingId)) {
        try {
          quotaService.checkQuotaAndReturnNewItemSize(
              state.getItemPack().getItem(), new StagingFile(stagingId));
        } catch (QuotaExceededException e) {
          throw new WizardException(e.getMessage());
        }
      }
      final List<WorkflowOperation> oplist = new ArrayList<WorkflowOperation>();
      oplist.add(myContentFactory.status());
      EditMetadataOperation editMeta = workflowFactory.editMetadata(state.getItemPack());
      editMeta.setInitialStatus(ItemStatus.PERSONAL);
      oplist.add(editMeta);
      oplist.add(
          workflowFactory.saveWithOperations(
              true,
              new ArrayList<WorkflowOperation>(),
              Collections.singletonList((WorkflowOperation) unusedFactory.create())));
      final ItemPack pack =
          itemService.operation(
              state.getItemId(), oplist.toArray(new WorkflowOperation[oplist.size()]));
      setItemPack(pack, state);
      state.setNewItem(false);
      return state;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Loads an existing item into a fresh {@link MyPagesState}, starts an edit (creating staging)
   * and registers the state in the wizard session.
   */
  @Override
  public MyPagesState loadItem(SectionInfo info, ItemId id) {
    try {
      MyPagesState state = myPagesStateFactory.createState();
      state.setNewItem(false);
      state.setItemId(id);
      // always unlock a myPages item
      lockService.unlock(itemService.get(id), true);
      StatusOperation statop = myContentFactory.status();
      List<WorkflowOperation> ops = new ArrayList<WorkflowOperation>();
      ops.add(workflowFactory.startEdit(true));
      ops.add(statop);
      itemService.operation(state.getItemId(), ops.toArray(new WorkflowOperation[ops.size()]));
      ItemPack itemPack = statop.getItemPack();
      setItemPack(itemPack, state);
      wizardService.addToSession(info, state, false);
      return state;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Creates a brand-new PERSONAL-status item of {@code itemDef}, tags its XML as My Pages
   * content, and registers a new wizard state in the session.
   */
  @Override
  public MyPagesState newItem(SectionInfo info, ItemDefinition itemDef) {
    try {
      ItemPack<Item> pack =
          itemService.operation(null, workflowFactory.create(itemDef, ItemStatus.PERSONAL));
      pack.getXml()
          .setNode(MyContentConstants.CONTENT_TYPE_NODE, MyPagesConstants.MYPAGES_CONTENT_TYPE);
      MyPagesState state = myPagesStateFactory.createState();
      Item item = pack.getItem();
      item.setItemDefinition(itemDef);
      setItemPack(pack, state);
      state.setItemId(new ItemId(item.getUuid(), item.getVersion()));
      state.setNewItem(true);
      wizardService.addToSession(info, state, false);
      return state;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Stores {@code pack} on {@code state} after running the item, its collection, schema and
   * workflow through the initialiser service (presumably to detach/initialise Hibernate proxies
   * — NOTE(review): confirm InitialiserService semantics) and re-wiring the initialised objects
   * back together.
   */
  private void setItemPack(ItemPack<Item> pack, MyPagesState state) {
    Item item = pack.getItem();
    ItemDefinition collection = item.getItemDefinition();
    Schema schema = initialiserService.initialise(collection.getSchema());
    Workflow workflow = initialiserService.initialise(collection.getWorkflow());
    collection = initialiserService.initialise(collection);
    item = initialiserService.initialise(item);
    item.setItemDefinition(collection);
    collection.setSchema(schema);
    collection.setWorkflow(workflow);
    state.setItemPack(pack);
  }

  /** Adds a new (draft) HTML page attachment named {@code pageName} to the session's item. */
  @Override
  public HtmlAttachment newPage(SectionInfo info, String sessionId, String pageName) {
    try {
      final HtmlAttachment page = new HtmlAttachment();
      page.setDescription(pageName);
      page.setNew(true);
      page.setDraft(true);
      final MyPagesRepository repos = repoProvider.get();
      repos.setState(getState(info, sessionId));
      repos.getAttachments().addAttachment(page);
      return page;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Clones {@code sourcePage} from {@code sourceItem} into the wizard state's staging area:
   * copies the page folder, rewrites hrefs from the source item to the staging preview, and
   * saves the converted HTML under a freshly generated attachment UUID.
   *
   * @param draft whether the clone starts life as a new draft page.
   */
  @Override
  public HtmlAttachment clonePage(
      final WizardStateInterface state,
      final Item sourceItem,
      final HtmlAttachment sourcePage,
      boolean draft) {
    final HtmlAttachment newPage = (HtmlAttachment) sourcePage.clone();
    newPage.setUuid(UUID.randomUUID().toString());
    newPage.setId(0);
    newPage.setNew(draft);
    newPage.setDraft(draft);
    ItemFile itemFile = itemFileService.getItemFile(sourceItem);
    final String newHtml =
        forFile(
            itemFile,
            sourcePage.getFilename(),
            new Function<Reader, String>() {
              @Override
              @Nullable
              public String apply(@Nullable Reader input) {
                List<HrefConversion> conversions =
                    new StagingConversion(
                            true,
                            sourceItem.getItemId(),
                            state.getWizid(),
                            state.getStagingId(),
                            sourcePage.getFolder(),
                            newPage.getFolder())
                        .getConversions();
                return convertHtmlService.convert(input, false, conversions);
              }
            });
    final FileHandle destHandle = state.getFileHandle();
    // clone attachments first
    fileSystemService.copy(itemFile, sourcePage.getFolder(), destHandle, newPage.getFolder());
    saveHtml(destHandle, newPage.getFilename(), newHtml);
    return newPage;
  }

  /**
   * Returns the draft HTML for {@code attachment}, materialising the draft folder from the
   * normal folder on first access. Returns {@link Constants#BLANK} if the page file does not
   * exist. Hrefs are converted (both normal->draft and item->staging) for preview display.
   */
  @Override
  public String getDraftHtml(
      final WizardStateInterface state,
      SectionInfo info,
      final HtmlAttachment attachment,
      final ItemKey itemId) {
    final String sessionId = state.getWizid();
    final FileHandle staging = state.getFileHandle();
    if (!attachment.isDraft()) {
      // copy files to the draft folder and then make it draft.
      fileSystemService.copy(staging, attachment.getNormalFolder(), attachment.getDraftFolder());
      attachment.setDraft(true);
    }
    if (!fileSystemService.fileExists(staging, attachment.getFilename())) {
      return Constants.BLANK;
    }
    return forFile(
        staging,
        attachment.getFilename(),
        new Function<Reader, String>() {
          @Override
          @Nullable
          public String apply(@Nullable Reader input) {
            StagingConversion draftConversion =
                new StagingConversion(
                    true,
                    itemId,
                    sessionId,
                    state.getStagingId(),
                    attachment.getNormalFolder(),
                    attachment.getDraftFolder());
            StagingConversion stagingConversion =
                new StagingConversion(true, itemId, sessionId, state.getStagingId());
            final List<HrefConversion> conversions = new ArrayList<HrefConversion>();
            conversions.addAll(draftConversion.getConversions());
            conversions.addAll(stagingConversion.getConversions());
            return convertHtmlService.convert(input, true, conversions);
          }
        });
  }

  /** Saves {@code html} (null treated as empty) as the page's file and records its size. */
  @SuppressWarnings("nls")
  @Override
  public void setHtml(SectionInfo info, String sessionId, HtmlAttachment attachment, String html) {
    try {
      final FileInfo finfo =
          saveHtml(
              getState(info, sessionId).getFileHandle(),
              attachment.getFilename(),
              (html == null ? "" : html));
      attachment.setSize(finfo.getLength());
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Uploads {@code input} into the given page's folder (or the shared content directory when
   * {@code pageUuid} is null/the content-directory marker) and returns a matching
   * {@link FileAttachment} describing the stored file.
   *
   * @throws IllegalArgumentException (wrapped in RuntimeException) when the page doesn't exist.
   */
  @SuppressWarnings("nls")
  @Override
  public FileAttachment uploadStream(
      SectionInfo info,
      String sessionId,
      String pageUuid,
      String fileName,
      String description,
      InputStream input) {
    try {
      MyPagesRepository repos = repoProvider.get();
      repos.setState(getState(info, sessionId));
      String fullpath;
      if (pageUuid != null && !pageUuid.equals(HtmlEditorService.CONTENT_DIRECTORY)) {
        HtmlAttachment page = getPageAttachment(info, sessionId, null, pageUuid);
        if (page != null) {
          fullpath = PathUtils.filePath(page.getFolder(), fileName);
        } else {
          throw new IllegalArgumentException("Page '" + pageUuid + "' does not exist");
        }
      } else {
        fullpath = PathUtils.filePath(HtmlEditorService.CONTENT_DIRECTORY, fileName);
      }
      FileInfo finfo = repos.uploadStream(fullpath, input, false);
      FileAttachment newAttach = new FileAttachment();
      newAttach.setFilename(finfo.getFilename());
      newAttach.setDescription(description);
      newAttach.setSize(finfo.getLength());
      newAttach.setUrl(newAttach.getFilename());
      return newAttach;
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /**
   * Rebuilds navigation from attachments, fires a {@link SaveItemEvent} through the section
   * tree, and persists the item if no listener vetoed the commit.
   *
   * @return true when the item was actually saved.
   */
  @Override
  public boolean saveItem(SectionInfo info, SectionTree tree, String sessionId) {
    MyPagesState state = (MyPagesState) getState(info, sessionId);
    treeMeUp(sessionId, info, state.getItem());
    final Set<String> handled = new HashSet<String>();
    navHelper.save(state.getItemxml(), state.getItemPack(), handled);
    for (String h : handled) {
      state.getItemxml().deleteNode(h);
    }
    // throw out the event
    SaveItemEvent save = new SaveItemEvent(state.getItemPack(), sessionId);
    info.processEvent(save, tree);
    if (save.isCommit()) {
      doSave(state, info);
      return true;
    }
    return false;
  }

  /**
   * Rebuilds the item's navigation tree from its page attachments, forcing each tab's viewer to
   * the My Pages viewer.
   */
  protected void treeMeUp(String sessionId, SectionInfo info, Item item) {
    item.getTreeNodes().clear();
    itemNavService.populateTreeNavigationFromAttachments(
        item,
        item.getTreeNodes(),
        getPageAttachments(info, sessionId, null),
        new ItemNavigationService.NodeAddedCallback() {
          @Override
          public void execute(int index, ItemNavigationNode node) {
            for (ItemNavigationTab tab : node.getTabs()) {
              tab.setViewer("myPagesViewer"); // $NON-NLS-1$ TODO: constantize
            }
          }
        });
  }

  /** Fires a {@link SavePageEvent} for the given page (no-op for an empty {@code pageUuid}). */
  @Override
  public void savePage(SectionInfo info, SectionTree tree, String sessionId, String pageUuid) {
    if (!Check.isEmpty(pageUuid)) {
      HtmlAttachment page = getPageAttachment(info, sessionId, null, pageUuid);
      SavePageEvent event = new SavePageEvent(page, sessionId);
      info.processEvent(event, tree);
    }
  }

  /** Returns the first page not marked for deletion, or null when none remain. */
  @Override
  public HtmlAttachment getFirstAvailablePage(SectionInfo info, String sessionId) {
    // move to the first available page
    List<HtmlAttachment> allPages = getPageAttachments(info, sessionId, null);
    if (allPages != null) {
      for (HtmlAttachment page : allPages) {
        if (!page.isDelete()) {
          return page;
        }
      }
    }
    return null;
  }

  /**
   * Finds the non-deleted page after {@code pageUuid}; if there is none, returns the last
   * non-deleted page before it; null when no page qualifies.
   */
  @Override
  public HtmlAttachment findNextAvailablePage(SectionInfo info, String sessionId, String pageUuid) {
    final List<HtmlAttachment> allPages = getPageAttachments(info, sessionId, null);
    if (allPages != null) {
      HtmlAttachment prev = null;
      boolean returnNext = false;
      for (HtmlAttachment page : allPages) {
        if (!page.isDelete()) {
          if (returnNext) {
            return page;
          }
          prev = page;
        }
        if (page.getUuid().equals(pageUuid)) {
          returnNext = true;
        }
      }
      return prev;
    }
    return null;
  }

  /** Flags a page for deletion (actual removal happens on commit). */
  @Override
  public void deletePage(SectionInfo info, String sessionId, String pageUuid) {
    HtmlAttachment page = getPageAttachment(info, sessionId, null, pageUuid);
    page.setDelete(true);
  }

  /** Immediately removes both draft and normal folders for a page from the staging area. */
  @Override
  public void deletePageFiles(SectionInfo info, String sessionId, HtmlAttachment htmlAttach) {
    WizardStateInterface state = getState(info, sessionId);
    final StagingFile staging = (StagingFile) state.getFileHandle();
    final String draftFolder = htmlAttach.getDraftFolder();
    final String normalFolder = htmlAttach.getNormalFolder();
    fileSystemService.removeFile(staging, draftFolder);
    fileSystemService.removeFile(staging, normalFolder);
  }

  /** Looks up a page attachment by UUID; null when not found. */
  @Override
  public HtmlAttachment getPageAttachment(
      SectionInfo info, String sessionId, String itemId, String pageUuid) {
    for (HtmlAttachment attachment : getPageAttachments(info, sessionId, itemId)) {
      if (attachment.getUuid().equals(pageUuid)) {
        return attachment;
      }
    }
    return null;
  }

  /** All HTML page attachments for the session's item (or the persisted item when no session). */
  @Override
  public List<HtmlAttachment> getPageAttachments(
      SectionInfo info, String sessionId, String itemId) {
    return getAttachments(info, sessionId, itemId).getList(AttachmentType.HTML);
  }

  /** Same as {@link #getPageAttachments} but with pages flagged for deletion filtered out. */
  @Override
  public List<HtmlAttachment> getNonDeletedPageAttachments(
      SectionInfo info, String sessionId, String itemId) {
    final List<HtmlAttachment> pageAttachments = getPageAttachments(info, sessionId, itemId);
    List<HtmlAttachment> nonDeletedAttachments = new ArrayList<HtmlAttachment>();
    for (HtmlAttachment attach : pageAttachments) {
      if (!attach.isDelete()) {
        nonDeletedAttachments.add(attach);
      }
    }
    return nonDeletedAttachments;
  }

  /** Read-only view of the item's attachments. */
  @Override
  public Attachments getAttachments(SectionInfo info, String sessionId, String itemId) {
    return new UnmodifiableAttachments(getItem(info, sessionId, itemId));
  }

  /**
   * Resolves the working item: from the wizard session when {@code sessionId} is given,
   * otherwise loaded from the item service by id.
   */
  protected Item getItem(SectionInfo info, String sessionId, String itemId) {
    if (!Check.isEmpty(sessionId)) {
      return getState(info, sessionId).getItem();
    } else {
      return itemService.get(new ItemId(itemId));
    }
  }

  /**
   * Copies a resource from another item into this session's staging (under the page folder or
   * the shared content directory) and returns a viewable resource pointing at the copy.
   */
  @Override
  public ViewableResource cloneMyContent(
      SectionInfo info, ViewableResource vres, String sessionId, String pageUuid) {
    FileHandle fileHandle = vres.getViewableItem().getFileHandle();
    ItemKey itemId = vres.getViewableItem().getItemId();
    String originalFilename = vres.getFilepath();
    // Page may be null when using the html editor control
    HtmlAttachment page = getPageAttachment(info, sessionId, itemId.toString(), pageUuid);
    String folder = ""; // $NON-NLS-1$
    if (page != null) {
      folder = page.getFolder();
    } else {
      folder = HtmlEditorService.CONTENT_DIRECTORY;
    }
    // Use the scrapbook item's uuid and version to avoid filename clashes
    String newFilename =
        PathUtils.filePath(
            folder, itemId.getUuid(), Integer.toString(itemId.getVersion()), originalFilename);
    WizardStateInterface state = getState(info, sessionId);
    FileHandle staging = state.getFileHandle();
    assert staging instanceof StagingFile;
    fileSystemService.copy(fileHandle, originalFilename, staging, newFilename);
    ViewableItem vitem = viewableItemFactory.createPreviewItem(sessionId, state.getStagingId());
    FileAttachment fakeFile = new FileAttachment();
    fakeFile.setFilename(newFilename);
    return attachmentResourceService.getViewableResource(info, vitem, fakeFile);
  }

  /** Convenience overload writing a String body. */
  @Override
  public FileInfo saveHtml(FileHandle handle, String filename, String html) {
    return saveHtml(handle, filename, new StringReader(html));
  }

  /** Streams {@code html} into the file and returns its resulting file info. */
  @Override
  public FileInfo saveHtml(FileHandle handle, String filename, Reader html) {
    try (Writer wrt = getWriter(handle, filename)) {
      CharStreams.copy(html, wrt);
    } catch (IOException ex) {
      throw Throwables.propagate(ex);
    }
    return fileSystemService.getFileInfo(handle, filename);
  }

  /**
   * Opens {@code filename} as a UTF-8 reader, applies {@code withReader}, and closes the reader
   * afterwards. Any failure is rethrown unchecked.
   */
  @Override
  public <T> T forFile(FileHandle handle, String filename, Function<Reader, T> withReader) {
    try (Reader reader =
        new InputStreamReader(fileSystemService.read(handle, filename), Constants.UTF8)) {
      return withReader.apply(reader);
    } catch (Exception e) {
      throw Throwables.propagate(e);
    }
  }

  /** Opens a non-appending UTF-8 writer over the given file. */
  protected Writer getWriter(FileHandle handle, String filename) {
    try {
      return new OutputStreamWriter(
          fileSystemService.getOutputStream(handle, filename, false), Constants.UTF8);
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** Builds a wizard script context for the state's item pack (no extra bindings). */
  @Override
  public ScriptContext createScriptContext(WizardStateInterface state) {
    return wizardService.createScriptContext(state.getItemPack(), null, null, null);
  }

  /** Guice assisted-inject factory for {@link MyPagesState} instances. */
  @BindFactory
  public interface MyPagesStateFactory {
    MyPagesState createState();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.test.integration.functions.unary.matrix;

import java.util.HashMap;

import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.sysml.api.DMLScript;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.hops.OptimizerUtils;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;
import org.apache.sysml.utils.Statistics;

/**
 * Integration test for the cummax (cumulative column maximum) builtin, covering
 * column-vector, row-vector and matrix inputs, dense and sparse data, with and
 * without algebraic simplification rewrites, on CP, MR and Spark backends.
 * Results are validated against an equivalent R script.
 */
public class FullCummaxTest extends AutomatedTestBase
{
	private final static String TEST_NAME = "Cummax";
	private final static String TEST_DIR = "functions/unary/matrix/";
	private static final String TEST_CLASS_DIR = TEST_DIR + FullCummaxTest.class.getSimpleName() + "/";

	private final static double eps = 1e-10;

	private final static int rowsMatrix = 1201;
	private final static int colsMatrix = 1103;
	private final static double spSparse = 0.1;
	private final static double spDense = 0.9;

	/** Shape of the generated input: single column, single row, or full matrix. */
	private enum InputType {
		COL_VECTOR,
		ROW_VECTOR,
		MATRIX
	}

	@Override
	public void setUp()
	{
		addTestConfiguration(TEST_NAME,new TestConfiguration(TEST_CLASS_DIR, TEST_NAME,new String[]{"B"}));
		if (TEST_CACHE_ENABLED) {
			setOutAndExpectedDeletionDisabled(true);
		}
	}

	@BeforeClass
	public static void init() {
		TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
	}

	@AfterClass
	public static void cleanUp() {
		if (TEST_CACHE_ENABLED) {
			TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
		}
	}

	@Test
	public void testCummaxColVectorDenseCP() {
		runColAggregateOperationTest(InputType.COL_VECTOR, false, ExecType.CP);
	}

	@Test
	public void testCummaxRowVectorDenseCP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, false, ExecType.CP);
	}

	@Test
	public void testCummaxRowVectorDenseNoRewritesCP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, false, ExecType.CP, false);
	}

	@Test
	public void testCummaxMatrixDenseCP() {
		runColAggregateOperationTest(InputType.MATRIX, false, ExecType.CP);
	}

	@Test
	public void testCummaxColVectorSparseCP() {
		runColAggregateOperationTest(InputType.COL_VECTOR, true, ExecType.CP);
	}

	@Test
	public void testCummaxRowVectorSparseCP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, true, ExecType.CP);
	}

	@Test
	public void testCummaxRowVectorSparseNoRewritesCP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, true, ExecType.CP, false);
	}

	@Test
	public void testCummaxMatrixSparseCP() {
		runColAggregateOperationTest(InputType.MATRIX, true, ExecType.CP);
	}

	@Test
	public void testCummaxColVectorDenseMR() {
		runColAggregateOperationTest(InputType.COL_VECTOR, false, ExecType.MR);
	}

	@Test
	public void testCummaxRowVectorDenseMR() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, false, ExecType.MR);
	}

	@Test
	public void testCummaxRowVectorDenseNoRewritesMR() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, false, ExecType.MR, false);
	}

	@Test
	public void testCummaxMatrixDenseMR() {
		runColAggregateOperationTest(InputType.MATRIX, false, ExecType.MR);
	}

	@Test
	public void testCummaxColVectorSparseMR() {
		runColAggregateOperationTest(InputType.COL_VECTOR, true, ExecType.MR);
	}

	// Restores the sparse row-vector MR case, previously missing from the otherwise
	// symmetric CP/MR/SPARK x dense/sparse test grid.
	@Test
	public void testCummaxRowVectorSparseMR() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, true, ExecType.MR);
	}

	@Test
	public void testCummaxRowVectorSparseNoRewritesMR() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, true, ExecType.MR, false);
	}

	@Test
	public void testCummaxMatrixSparseMR() {
		runColAggregateOperationTest(InputType.MATRIX, true, ExecType.MR);
	}

	@Test
	public void testCummaxColVectorDenseSP() {
		runColAggregateOperationTest(InputType.COL_VECTOR, false, ExecType.SPARK);
	}

	@Test
	public void testCummaxRowVectorDenseSP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, false, ExecType.SPARK);
	}

	@Test
	public void testCummaxRowVectorDenseNoRewritesSP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, false, ExecType.SPARK, false);
	}

	@Test
	public void testCummaxMatrixDenseSP() {
		runColAggregateOperationTest(InputType.MATRIX, false, ExecType.SPARK);
	}

	@Test
	public void testCummaxColVectorSparseSP() {
		runColAggregateOperationTest(InputType.COL_VECTOR, true, ExecType.SPARK);
	}

	@Test
	public void testCummaxRowVectorSparseSP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, true, ExecType.SPARK);
	}

	@Test
	public void testCummaxRowVectorSparseNoRewritesSP() {
		runColAggregateOperationTest(InputType.ROW_VECTOR, true, ExecType.SPARK, false);
	}

	@Test
	public void testCummaxMatrixSparseSP() {
		runColAggregateOperationTest(InputType.MATRIX, true, ExecType.SPARK);
	}

	/**
	 * Runs the cummax test with algebraic simplification rewrites enabled (the default).
	 *
	 * @param type input shape (col vector, row vector, or matrix)
	 * @param sparse whether to generate a sparse input
	 * @param instType execution backend (CP, MR, or SPARK)
	 */
	private void runColAggregateOperationTest( InputType type, boolean sparse, ExecType instType)
	{
		//by default we apply algebraic simplification rewrites
		runColAggregateOperationTest(type, sparse, instType, true);
	}

	/**
	 * Runs the cummax DML script against the R reference implementation and compares results.
	 *
	 * @param type input shape (col vector, row vector, or matrix)
	 * @param sparse whether to generate a sparse input
	 * @param instType execution backend (CP, MR, or SPARK)
	 * @param rewrites whether algebraic simplification rewrites are enabled
	 */
	private void runColAggregateOperationTest( InputType type, boolean sparse, ExecType instType, boolean rewrites)
	{
		RUNTIME_PLATFORM platformOld = rtplatform;
		switch( instType ){
			case MR: rtplatform = RUNTIME_PLATFORM.HADOOP; break;
			case SPARK: rtplatform = RUNTIME_PLATFORM.SPARK; break;
			default: rtplatform = RUNTIME_PLATFORM.HYBRID; break;
		}

		boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
		if( rtplatform == RUNTIME_PLATFORM.SPARK )
			DMLScript.USE_LOCAL_SPARK_CONFIG = true;

		//rewrites
		boolean oldFlagRewrites = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
		OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = rewrites;

		try
		{
			int cols = (type==InputType.COL_VECTOR) ? 1 : colsMatrix;
			int rows = (type==InputType.ROW_VECTOR) ? 1 : rowsMatrix;
			double sparsity = (sparse) ? spSparse : spDense;

			String TEST_CACHE_DIR = "";
			if (TEST_CACHE_ENABLED) {
				TEST_CACHE_DIR = type.ordinal() + "_" + sparsity + "/";
			}

			TestConfiguration config = getTestConfiguration(TEST_NAME);
			loadTestConfiguration(config, TEST_CACHE_DIR);

			// This is for running the junit test the new way, i.e., construct the arguments directly
			String HOME = SCRIPT_DIR + TEST_DIR;
			fullDMLScriptName = HOME + TEST_NAME + ".dml";
			programArgs = new String[]{"-explain", "-args", input("A"), output("B") };

			fullRScriptName = HOME + TEST_NAME + ".R";
			rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();

			//generate actual dataset
			double[][] A = getRandomMatrix(rows, cols, -0.05, 1, sparsity, 7);
			writeInputMatrixWithMTD("A", A, true);

			runTest(true, false, null, -1);
			//on CP and SPARK backends no MR jobs should have been executed
			if( instType==ExecType.CP || instType==ExecType.SPARK )
				Assert.assertEquals("Unexpected number of executed MR jobs.",
					0, Statistics.getNoOfExecutedMRJobs());

			runRScript(true);

			//compare matrices
			HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
			HashMap<CellIndex, Double> rfile  = readRMatrixFromFS("B");
			TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
		}
		finally
		{
			rtplatform = platformOld;
			DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
			OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = oldFlagRewrites;
		}
	}
}
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */

package org.elasticsearch.xpack.ilm.history;

import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.message.ParameterizedMessage;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ResourceAlreadyExistsException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.admin.indices.alias.Alias;
import org.elasticsearch.action.admin.indices.create.CreateIndexResponse;
import org.elasticsearch.action.bulk.BackoffPolicy;
import org.elasticsearch.action.bulk.BulkItemResponse;
import org.elasticsearch.action.bulk.BulkProcessor;
import org.elasticsearch.action.bulk.BulkRequest;
import org.elasticsearch.action.bulk.BulkResponse;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.client.OriginSettingClient;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.metadata.IndexAbstraction;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.threadpool.ThreadPool;

import java.io.Closeable;
import java.io.IOException;
import java.util.Arrays;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;

import static org.elasticsearch.xpack.core.ClientHelper.INDEX_LIFECYCLE_ORIGIN;
import static org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING;
import static org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry.INDEX_TEMPLATE_VERSION;
import static org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry.TEMPLATE_ILM_HISTORY;

/**
 * The {@link ILMHistoryStore} handles indexing {@link ILMHistoryItem} documents into the
 * appropriate index. It sets up a {@link BulkProcessor} for indexing in bulk, and handles creation
 * of the index/alias as needed for ILM policies.
 */
public class ILMHistoryStore implements Closeable {
    private static final Logger logger = LogManager.getLogger(ILMHistoryStore.class);

    public static final String ILM_HISTORY_INDEX_PREFIX = "ilm-history-" + INDEX_TEMPLATE_VERSION + "-";
    public static final String ILM_HISTORY_ALIAS = "ilm-history-" + INDEX_TEMPLATE_VERSION;

    // When false (per LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING), putAsync becomes a no-op.
    private final boolean ilmHistoryEnabled;
    private final BulkProcessor processor;
    private final ThreadPool threadPool;

    public ILMHistoryStore(Settings nodeSettings, Client client, ClusterService clusterService, ThreadPool threadPool) {
        this.ilmHistoryEnabled = LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.get(nodeSettings);
        this.threadPool = threadPool;

        this.processor = BulkProcessor.builder(
            new OriginSettingClient(client, INDEX_LIFECYCLE_ORIGIN)::bulk,
            new BulkProcessor.Listener() {
                @Override
                public void beforeBulk(long executionId, BulkRequest request) {
                    // Prior to actually performing the bulk, we should ensure the index exists, and
                    // if we were unable to create it or it was in a bad state, we should not
                    // attempt to index documents.
                    try {
                        final CompletableFuture<Boolean> indexCreated = new CompletableFuture<>();
                        ensureHistoryIndex(client, clusterService.state(), ActionListener.wrap(indexCreated::complete,
                            ex -> {
                                logger.warn("failed to create ILM history store index prior to issuing bulk request", ex);
                                indexCreated.completeExceptionally(ex);
                            }));
                        // Block the (bulk flush) thread until the index is confirmed usable; a
                        // timeout here surfaces as the ElasticsearchException below.
                        indexCreated.get(2, TimeUnit.MINUTES);
                    } catch (Exception e) {
                        logger.warn(new ParameterizedMessage("unable to index the following ILM history items:\n{}",
                            request.requests().stream()
                                .filter(dwr -> (dwr instanceof IndexRequest))
                                .map(dwr -> ((IndexRequest) dwr))
                                .map(IndexRequest::sourceAsMap)
                                .map(Object::toString)
                                .collect(Collectors.joining("\n"))), e);
                        // Abort the bulk: throwing from beforeBulk prevents the request from executing.
                        throw new ElasticsearchException(e);
                    }
                    if (logger.isTraceEnabled()) {
                        // Fixed: was logger.info(...) inside an isTraceEnabled() guard, which both
                        // mismatched the guard and spammed INFO logs with document dumps.
                        logger.trace("about to index: {}",
                            request.requests().stream()
                                .map(dwr -> ((IndexRequest) dwr).sourceAsMap())
                                .map(Objects::toString)
                                .collect(Collectors.joining(",")));
                    }
                }

                @Override
                public void afterBulk(long executionId, BulkRequest request, BulkResponse response) {
                    long items = request.numberOfActions();
                    if (logger.isTraceEnabled()) {
                        logger.trace("indexed [{}] items into ILM history index [{}], items: {}", items,
                            Arrays.stream(response.getItems())
                                .map(BulkItemResponse::getIndex)
                                .distinct()
                                .collect(Collectors.joining(",")),
                            request.requests().stream()
                                .map(dwr -> ((IndexRequest) dwr).sourceAsMap())
                                .map(Objects::toString)
                                .collect(Collectors.joining(",")));
                    }
                    if (response.hasFailures()) {
                        // Log per-document failures keyed by document id so partial failures are diagnosable.
                        Map<String, String> failures = Arrays.stream(response.getItems())
                            .filter(BulkItemResponse::isFailed)
                            .collect(Collectors.toMap(BulkItemResponse::getId, BulkItemResponse::getFailureMessage));
                        logger.error("failures: [{}]", failures);
                    }
                }

                @Override
                public void afterBulk(long executionId, BulkRequest request, Throwable failure) {
                    long items = request.numberOfActions();
                    logger.error(new ParameterizedMessage("failed to index {} items into ILM history index", items), failure);
                }
            })
            // Flush when any of these thresholds is hit: 100 actions, 5 MB, or 5 seconds.
            .setBulkActions(100)
            .setBulkSize(new ByteSizeValue(5, ByteSizeUnit.MB))
            .setFlushInterval(TimeValue.timeValueSeconds(5))
            .setConcurrentRequests(1)
            .setBackoffPolicy(BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(1000), 3))
            .build();
    }

    /**
     * Attempts to asynchronously index an ILM history entry.
     *
     * @param item the history item to serialize and queue for bulk indexing; dropped
     *             (with a trace log) when the history index is disabled by setting
     */
    public void putAsync(ILMHistoryItem item) {
        if (ilmHistoryEnabled == false) {
            logger.trace("not recording ILM history item because [{}] is [false]: [{}]",
                LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING.getKey(), item);
            return;
        }
        logger.trace("queueing ILM history item for indexing [{}]: [{}]", ILM_HISTORY_ALIAS, item);
        try (XContentBuilder builder = XContentFactory.jsonBuilder()) {
            item.toXContent(builder, ToXContent.EMPTY_PARAMS);
            IndexRequest request = new IndexRequest(ILM_HISTORY_ALIAS).source(builder);
            // TODO: remove the threadpool wrapping when the .add call is non-blocking
            //  (it can currently execute the bulk request occasionally)
            //  see: https://github.com/elastic/elasticsearch/issues/50440
            threadPool.executor(ThreadPool.Names.GENERIC).execute(() -> {
                try {
                    processor.add(request);
                } catch (Exception e) {
                    logger.error(new ParameterizedMessage("failed to add ILM history item to queue for index [{}]: [{}]",
                        ILM_HISTORY_ALIAS, item), e);
                }
            });
        } catch (IOException exception) {
            logger.error(new ParameterizedMessage("failed to queue ILM history item in index [{}]: [{}]",
                ILM_HISTORY_ALIAS, item), exception);
        }
    }

    /**
     * Checks if the ILM history index exists, and if not, creates it.
     *
     * @param client  The client to use to create the index if needed
     * @param state   The current cluster state, to determine if the alias exists
     * @param listener Called after the index has been created. `onResponse` called with `true` if the index was created,
     *                `false` if it already existed.
     */
    @SuppressWarnings("unchecked")
    static void ensureHistoryIndex(Client client, ClusterState state, ActionListener<Boolean> listener) {
        final String initialHistoryIndexName = ILM_HISTORY_INDEX_PREFIX + "000001";
        final IndexAbstraction ilmHistory = state.metadata().getIndicesLookup().get(ILM_HISTORY_ALIAS);
        final IndexAbstraction initialHistoryIndex = state.metadata().getIndicesLookup().get(initialHistoryIndexName);

        if (ilmHistory == null && initialHistoryIndex == null) {
            // No alias or index exists with the expected names, so create the index with appropriate alias
            logger.debug("creating ILM history index [{}]", initialHistoryIndexName);

            // Template below should be already defined as real index template but it can be deleted. To avoid race condition with its
            // recreation we apply settings and mappings ourselves
            byte[] templateBytes = TEMPLATE_ILM_HISTORY.loadBytes();
            Map<String, Object> templateAsMap = XContentHelper.convertToMap(new BytesArray(templateBytes, 0, templateBytes.length),
                false, XContentType.JSON).v2();

            client.admin().indices().prepareCreate(initialHistoryIndexName)
                .setSettings((Map<String, ?>) templateAsMap.get("settings"))
                .setMapping((Map<String, Object>) templateAsMap.get("mappings"))
                .setWaitForActiveShards(1)
                .addAlias(new Alias(ILM_HISTORY_ALIAS).writeIndex(true).isHidden(true))
                .execute(new ActionListener<>() {
                    @Override
                    public void onResponse(CreateIndexResponse response) {
                        listener.onResponse(true);
                    }

                    @Override
                    public void onFailure(Exception e) {
                        if (e instanceof ResourceAlreadyExistsException) {
                            // The index didn't exist before we made the call, there was probably a race - just ignore this
                            logger.debug("index [{}] was created after checking for its existence, likely due to a concurrent call",
                                initialHistoryIndexName);
                            listener.onResponse(false);
                        } else {
                            listener.onFailure(e);
                        }
                    }
                });
        } else if (ilmHistory == null) {
            // alias does not exist but initial index does, something is broken
            listener.onFailure(new IllegalStateException("ILM history index [" + initialHistoryIndexName +
                "] already exists but does not have alias [" + ILM_HISTORY_ALIAS + "]"));
        } else if (ilmHistory.getType() == IndexAbstraction.Type.ALIAS) {
            if (ilmHistory.getWriteIndex() != null) {
                // The alias exists and has a write index, so we're good
                listener.onResponse(false);
            } else {
                // The alias does not have a write index, so we can't index into it.
                // Fixed: message previously read "...ALIAS + "does not..." — missing the "] " separator.
                listener.onFailure(new IllegalStateException("ILM history alias [" + ILM_HISTORY_ALIAS +
                    "] does not have a write index"));
            }
        } else {
            // Not an alias at all (concrete index or data stream occupying the alias name), error out.
            // Fixed: the original had an `else if (type != ALIAS)` followed by an unreachable `else`
            // (the two conditions were exhaustive); the dead branch has been removed.
            listener.onFailure(new IllegalStateException("ILM history alias [" + ILM_HISTORY_ALIAS +
                "] already exists as " + ilmHistory.getType().getDisplayName()));
        }
    }

    /**
     * Flushes any queued items and shuts the bulk processor down, waiting up to
     * 10 seconds for in-flight bulks to complete.
     */
    @Override
    public void close() {
        try {
            processor.awaitClose(10, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            logger.warn("failed to shut down ILM history bulk processor after 10 seconds", e);
            // NOTE(review): the interrupt flag is not restored here; consider
            // Thread.currentThread().interrupt() — left as-is to preserve behavior.
        }
    }
}
package com.example.hantenks.vms;

import android.content.Context;
import android.content.Intent;
import android.support.v7.app.ActionBarActivity;
import android.app.Activity;
import android.support.v7.app.ActionBar;
import android.support.v4.app.Fragment;
import android.support.v4.app.ActionBarDrawerToggle;
import android.support.v4.view.GravityCompat;
import android.support.v4.widget.DrawerLayout;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.view.LayoutInflater;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.Toast;

/**
 * Fragment used for managing interactions for and presentation of a navigation drawer.
 * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction">
 * design guidelines</a> for a complete explanation of the behaviors implemented here.
 *
 * <p>Each drawer row launches a different Activity (CampusMap, t_report, SuspiciousVehicle,
 * parking, bus) and also notifies the host via {@link NavigationDrawerCallbacks}.
 * The host Activity must implement {@link NavigationDrawerCallbacks} and call
 * {@link #setUp(int, DrawerLayout)} after its layout is inflated.
 */
public class NavigationDrawerFragment extends Fragment {

    /**
     * Remember the position of the selected item.
     */
    private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position";

    /**
     * Per the design guidelines, you should show the drawer on launch until the user manually
     * expands it. This shared preference tracks this.
     */
    private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned";

    /**
     * A pointer to the current callbacks instance (the Activity).
     */
    private NavigationDrawerCallbacks mCallbacks;

    /**
     * Helper component that ties the action bar to the navigation drawer.
     */
    private ActionBarDrawerToggle mDrawerToggle;

    private DrawerLayout mDrawerLayout;
    private ListView mDrawerListView;
    // The fragment's own container view inside the DrawerLayout; used to open/close the drawer.
    private View mFragmentContainerView;

    private int mCurrentSelectedPosition = 0;
    private boolean mFromSavedInstanceState;
    // True once the user has manually opened the drawer at least once (persisted preference).
    private boolean mUserLearnedDrawer;

    public NavigationDrawerFragment() {
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Read in the flag indicating whether or not the user has demonstrated awareness of the
        // drawer. See PREF_USER_LEARNED_DRAWER for details.
        SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity());
        mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false);

        if (savedInstanceState != null) {
            mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION);
            mFromSavedInstanceState = true;
        }

        // Select either the default item (0) or the last selected item.
        // NOTE(review): at this point the list view and drawer layout are still null, so
        // selectItem only records the position and fires the callback — verify the host
        // Activity tolerates a callback this early in the lifecycle.
        selectItem(mCurrentSelectedPosition);
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        // Indicate that this fragment would like to influence the set of actions in the action bar.
        setHasOptionsMenu(true);
    }

    /**
     * Inflates the drawer's ListView, wires row clicks to their target Activities, and
     * installs the section-title adapter. Returns the list view as the fragment's root view.
     */
    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        mDrawerListView = (ListView) inflater.inflate(
                R.layout.fragment_navigation_drawer, container, false);
        mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                Toast.makeText(getActivity(),"Please Wait a Moment",Toast.LENGTH_SHORT).show();
                // Each row position maps 1:1 to a destination Activity.
                switch(position){
                    case 0:
                        Intent intent0 = new Intent(getActivity(),CampusMap.class);
                        startActivity(intent0);
                        break;
                    case 1:
                        Intent intent1 = new Intent(getActivity(),t_report.class);
                        startActivity(intent1);
                        break;
                    case 2:
                        Intent intent2 = new Intent(getActivity(),SuspiciousVehicle.class);
                        startActivity(intent2);
                        break;
                    case 3:
                        Intent intent3 = new Intent(getActivity(),parking.class);
                        startActivity(intent3);
                        break;
                    case 4:
                        Intent intent4 = new Intent(getActivity(),bus.class);
                        startActivity(intent4);
                        break;
                }
                // Also record the selection, close the drawer, and notify the host.
                selectItem(position);
            }
        });
        mDrawerListView.setAdapter(new ArrayAdapter<String>(
                getActionBar().getThemedContext(),
                android.R.layout.simple_list_item_activated_1,
                android.R.id.text1,
                new String[]{
                        getString(R.string.title_section1),
                        getString(R.string.title_section2),
                        getString(R.string.title_section3),
                        getString(R.string.title_section4),
                        getString(R.string.title_section5)
                }));
        mDrawerListView.setItemChecked(mCurrentSelectedPosition, true);
        return mDrawerListView;
    }

    /** @return true when the drawer layout exists and this fragment's drawer is open. */
    public boolean isDrawerOpen() {
        return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView);
    }

    /**
     * Users of this fragment must call this method to set up the navigation drawer interactions.
     *
     * @param fragmentId   The android:id of this fragment in its activity's layout.
     * @param drawerLayout The DrawerLayout containing this fragment's UI.
     */
    public void setUp(int fragmentId, DrawerLayout drawerLayout) {
        mFragmentContainerView = getActivity().findViewById(fragmentId);
        mDrawerLayout = drawerLayout;

        // set a custom shadow that overlays the main content when the drawer opens
        mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START);
        // enable the home/up affordance in the action bar so it can toggle the drawer
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayHomeAsUpEnabled(true);
        actionBar.setHomeButtonEnabled(true);

        // ActionBarDrawerToggle ties together the proper interactions
        // between the navigation drawer and the action bar app icon.
        mDrawerToggle = new ActionBarDrawerToggle(
                getActivity(),                    /* host Activity */
                mDrawerLayout,                    /* DrawerLayout object */
                R.drawable.ic_drawer,             /* nav drawer image to replace 'Up' caret */
                R.string.navigation_drawer_open,  /* "open drawer" description for accessibility */
                R.string.navigation_drawer_close  /* "close drawer" description for accessibility */
        ) {
            @Override
            public void onDrawerClosed(View drawerView) {
                super.onDrawerClosed(drawerView);
                if (!isAdded()) {
                    // Fragment detached from its Activity; menu updates would crash.
                    return;
                }

                getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }

            @Override
            public void onDrawerOpened(View drawerView) {
                super.onDrawerOpened(drawerView);
                if (!isAdded()) {
                    return;
                }

                if (!mUserLearnedDrawer) {
                    // The user manually opened the drawer; store this flag to prevent auto-showing
                    // the navigation drawer automatically in the future.
                    mUserLearnedDrawer = true;
                    SharedPreferences sp = PreferenceManager
                            .getDefaultSharedPreferences(getActivity());
                    sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply();
                }

                getActivity().supportInvalidateOptionsMenu(); // calls onPrepareOptionsMenu()
            }
        };

        // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer,
        // per the navigation drawer design guidelines.
        if (!mUserLearnedDrawer && !mFromSavedInstanceState) {
            mDrawerLayout.openDrawer(mFragmentContainerView);
        }

        // Defer code dependent on restoration of previous instance state.
        mDrawerLayout.post(new Runnable() {
            @Override
            public void run() {
                mDrawerToggle.syncState();
            }
        });

        mDrawerLayout.setDrawerListener(mDrawerToggle);
    }

    /**
     * Records the selected row, checks it in the list, closes the drawer,
     * and forwards the selection to the host Activity (all steps null-safe,
     * since this is also called from onCreate before views exist).
     */
    private void selectItem(int position) {
        mCurrentSelectedPosition = position;
        if (mDrawerListView != null) {
            mDrawerListView.setItemChecked(position, true);
        }
        if (mDrawerLayout != null) {
            mDrawerLayout.closeDrawer(mFragmentContainerView);
        }
        if (mCallbacks != null) {
            mCallbacks.onNavigationDrawerItemSelected(position);
        }
    }

    @Override
    public void onAttach(Activity activity) {
        super.onAttach(activity);
        try {
            mCallbacks = (NavigationDrawerCallbacks) activity;
        } catch (ClassCastException e) {
            throw new ClassCastException("Activity must implement NavigationDrawerCallbacks.");
        }
    }

    @Override
    public void onDetach() {
        super.onDetach();
        mCallbacks = null;
    }

    @Override
    public void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition);
    }

    @Override
    public void onConfigurationChanged(Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        // Forward the new configuration the drawer toggle component.
        mDrawerToggle.onConfigurationChanged(newConfig);
    }

    @Override
    public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) {
        // If the drawer is open, show the global app actions in the action bar. See also
        // showGlobalContextActionBar, which controls the top-left area of the action bar.
        if (mDrawerLayout != null && isDrawerOpen()) {
            inflater.inflate(R.menu.global, menu);
            showGlobalContextActionBar();
        }
        super.onCreateOptionsMenu(menu, inflater);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        // Let the drawer toggle consume home/up taps first.
        if (mDrawerToggle.onOptionsItemSelected(item)) {
            return true;
        }
        if (item.getItemId() == R.id.action_example) {
            Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show();
            return true;
        }

        return super.onOptionsItemSelected(item);
    }

    /**
     * Per the navigation drawer design guidelines, updates the action bar to show the global app
     * 'context', rather than just what's in the current screen.
     */
    private void showGlobalContextActionBar() {
        ActionBar actionBar = getActionBar();
        actionBar.setDisplayShowTitleEnabled(true);
        actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD);
        actionBar.setTitle(R.string.app_name);
    }

    // Convenience accessor for the support ActionBar of the hosting ActionBarActivity.
    private ActionBar getActionBar() {
        return ((ActionBarActivity) getActivity()).getSupportActionBar();
    }

    /**
     * Callbacks interface that all activities using this fragment must implement.
     */
    public static interface NavigationDrawerCallbacks {
        /**
         * Called when an item in the navigation drawer is selected.
         */
        void onNavigationDrawerItemSelected(int position);
    }
}
/*
 * Copyright 2016-2026 TinyZ
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: GpbN.proto
// NOTE(review): this is protoc-generated code (protobuf 2.x style with
// extension ranges). Do not hand-modify; regenerate from GpbN.proto instead.

package org.ogcs.okra.example.game.generated;

public final class GpbN {
  private GpbN() {}
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  public interface PushOrBuilder extends
      // @@protoc_insertion_point(interface_extends:Push)
      com.google.protobuf.GeneratedMessage.
          ExtendableMessageOrBuilder<Push> {

    /**
     * <code>optional int32 id = 1;</code>
     */
    boolean hasId();
    /**
     * <code>optional int32 id = 1;</code>
     */
    int getId();
  }
  /**
   * Protobuf type {@code Push}
   */
  public static final class Push extends
      com.google.protobuf.GeneratedMessage.ExtendableMessage<
        Push> implements
      // @@protoc_insertion_point(message_implements:Push)
      PushOrBuilder {
    // Use Push.newBuilder() to construct.
    private Push(com.google.protobuf.GeneratedMessage.ExtendableBuilder<org.ogcs.okra.example.game.generated.GpbN.Push, ?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    private Push(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final Push defaultInstance;
    public static Push getDefaultInstance() {
      return defaultInstance;
    }

    public Push getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until EOF (tag 0);
    // tag 8 == field 1 (id), wire type varint. Unknown fields are preserved.
    private Push(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              id_ = input.readInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.ogcs.okra.example.game.generated.GpbN.internal_static_Push_descriptor;
    }
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.ogcs.okra.example.game.generated.GpbN.internal_static_Push_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.ogcs.okra.example.game.generated.GpbN.Push.class, org.ogcs.okra.example.game.generated.GpbN.Push.Builder.class);
    }
    public static com.google.protobuf.Parser<Push> PARSER =
        new com.google.protobuf.AbstractParser<Push>() {
      public Push parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Push(input, extensionRegistry);
      }
    };
    @java.lang.Override
    public com.google.protobuf.Parser<Push> getParserForType() {
      return PARSER;
    }
    private int bitField0_;
    public static final int ID_FIELD_NUMBER = 1;
    private int id_;
    /**
     * <code>optional int32 id = 1;</code>
     */
    public boolean hasId() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional int32 id = 1;</code>
     */
    public int getId() {
      return id_;
    }
    private void initFields() {
      id_ = 0;
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized == 1) return true;
      if (isInitialized == 0) return false;
      if (!extensionsAreInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      com.google.protobuf.GeneratedMessage
        .ExtendableMessage<org.ogcs.okra.example.game.generated.GpbN.Push>.ExtensionWriter extensionWriter =
          newExtensionWriter();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, id_);
      }
      extensionWriter.writeUntil(536870912, output);
      getUnknownFields().writeTo(output);
    }
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, id_);
      }
      size += extensionsSerializedSize();
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.ogcs.okra.example.game.generated.GpbN.Push parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.ogcs.okra.example.game.generated.GpbN.Push prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code Push}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.ExtendableBuilder<
          org.ogcs.okra.example.game.generated.GpbN.Push, Builder> implements
        // @@protoc_insertion_point(builder_implements:Push)
        org.ogcs.okra.example.game.generated.GpbN.PushOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.ogcs.okra.example.game.generated.GpbN.internal_static_Push_descriptor;
      }
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.ogcs.okra.example.game.generated.GpbN.internal_static_Push_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.ogcs.okra.example.game.generated.GpbN.Push.class, org.ogcs.okra.example.game.generated.GpbN.Push.Builder.class);
      }
      // Construct using org.ogcs.okra.example.game.generated.GpbN.Push.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        id_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.ogcs.okra.example.game.generated.GpbN.internal_static_Push_descriptor;
      }

      public org.ogcs.okra.example.game.generated.GpbN.Push getDefaultInstanceForType() {
        return org.ogcs.okra.example.game.generated.GpbN.Push.getDefaultInstance();
      }

      public org.ogcs.okra.example.game.generated.GpbN.Push build() {
        org.ogcs.okra.example.game.generated.GpbN.Push result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.ogcs.okra.example.game.generated.GpbN.Push buildPartial() {
        org.ogcs.okra.example.game.generated.GpbN.Push result = new org.ogcs.okra.example.game.generated.GpbN.Push(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.id_ = id_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.ogcs.okra.example.game.generated.GpbN.Push) {
          return mergeFrom((org.ogcs.okra.example.game.generated.GpbN.Push)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.ogcs.okra.example.game.generated.GpbN.Push other) {
        if (other == org.ogcs.okra.example.game.generated.GpbN.Push.getDefaultInstance()) return this;
        if (other.hasId()) {
          setId(other.getId());
        }
        this.mergeExtensionFields(other);
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!extensionsAreInitialized()) {

          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.ogcs.okra.example.game.generated.GpbN.Push parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.ogcs.okra.example.game.generated.GpbN.Push) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      private int id_ ;
      /**
       * <code>optional int32 id = 1;</code>
       */
      public boolean hasId() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>optional int32 id = 1;</code>
       */
      public int getId() {
        return id_;
      }
      /**
       * <code>optional int32 id = 1;</code>
       */
      public Builder setId(int value) {
        bitField0_ |= 0x00000001;
        id_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 id = 1;</code>
       */
      public Builder clearId() {
        bitField0_ = (bitField0_ & ~0x00000001);
        id_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:Push)
    }

    static {
      defaultInstance = new Push(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:Push)
  }

  private static final com.google.protobuf.Descriptors.Descriptor
    internal_static_Push_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Push_fieldAccessorTable;

  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  static {
    // Serialized FileDescriptorProto for GpbN.proto (escaped-octal form).
    java.lang.String[] descriptorData = {
      "\n\nGpbN.proto\"\034\n\004Push\022\n\n\002id\030\001 \001(\005*\010\010\002\020\200\200\200" +
      "\200\002B&\n\034org.ogcs.okra.game.generatedB\004GpbN" +
      "H\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
        new com.google.protobuf.Descriptors.FileDescriptor.
            InternalDescriptorAssigner() {
          public com.google.protobuf.ExtensionRegistry assignDescriptors(
              com.google.protobuf.Descriptors.FileDescriptor root) {
            descriptor = root;
            return null;
          }
        };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
    internal_static_Push_descriptor =
      getDescriptor().getMessageTypes().get(0);
    internal_static_Push_fieldAccessorTable = new
      com.google.protobuf.GeneratedMessage.FieldAccessorTable(
        internal_static_Push_descriptor,
        new java.lang.String[] { "Id", });
  }

  // @@protoc_insertion_point(outer_class_scope)
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.felix.sigil.common.core.internal.model.osgi;

import java.net.URI;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

import org.apache.felix.sigil.common.model.AbstractCompoundModelElement;
import org.apache.felix.sigil.common.model.InvalidModelException;
import org.apache.felix.sigil.common.model.eclipse.ILibraryImport;
import org.apache.felix.sigil.common.model.osgi.IBundleModelElement;
import org.apache.felix.sigil.common.model.osgi.IPackageExport;
import org.apache.felix.sigil.common.model.osgi.IPackageImport;
import org.apache.felix.sigil.common.model.osgi.IRequiredBundle;
import org.osgi.framework.Version;

/**
 * Default model of an OSGi bundle manifest: symbolic name, version,
 * package imports/exports, required bundles, classpath entries and the
 * human-readable OBR metadata (name, description, vendor, ...).
 *
 * Imports, exports and requires are stored as arrays that are replaced
 * wholesale on every add/remove (copy-on-write via a temporary HashSet),
 * so the getters can safely hand out {@code Arrays.asList} views.
 */
public class BundleModelElement extends AbstractCompoundModelElement implements IBundleModelElement
{
    private static final long serialVersionUID = 1L;

    // required obr values
    private URI updateLocation;
    private String symbolicName;
    private Version version = Version.emptyVersion;
    private IPackageImport[] imports;
    private IPackageExport[] exports;
    private IRequiredBundle[] requires;
    private URI sourceLocation;
    private String[] classpathElements;
    private IRequiredBundle fragmentHost;

    // human readable values
    private String name;
    private String description;
    private String category;
    private URI licenseURI;
    private URI docURI;
    private String vendor;
    private String contactAddress;
    private String copyright;

    // internal values
    private String activator;
    private Set<ILibraryImport> libraries;

    public BundleModelElement()
    {
        super("OSGi Bundle");
        this.imports = new IPackageImport[0];
        this.exports = new IPackageExport[0];
        this.requires = new IRequiredBundle[0];
        this.classpathElements = new String[0];
        this.libraries = new HashSet<ILibraryImport>();
    }

    /** @return the Bundle-Activator class name, or null if not set. */
    public String getActivator()
    {
        return activator;
    }

    public void setActivator(String activator)
    {
        this.activator = activator;
    }

    public void addLibraryImport(ILibraryImport library)
    {
        libraries.add(library);
    }

    /**
     * @return the live set of library imports; callers must not assume a copy.
     */
    public Set<ILibraryImport> getLibraryImports()
    {
        return libraries;
    }

    public void removeLibraryImport(ILibraryImport library)
    {
        libraries.remove(library);
    }

    public String getCategory()
    {
        return category;
    }

    public void setCategory(String category)
    {
        this.category = category;
    }

    public String getContactAddress()
    {
        return contactAddress;
    }

    public void setContactAddress(String contactAddress)
    {
        this.contactAddress = contactAddress;
    }

    public String getCopyright()
    {
        return copyright;
    }

    public void setCopyright(String copyright)
    {
        this.copyright = copyright;
    }

    public URI getDocURI()
    {
        return docURI;
    }

    public void setDocURI(URI docURI)
    {
        this.docURI = docURI;
    }

    /** @return an unmodifiable view of the package exports. */
    public Collection<IPackageExport> getExports()
    {
        return Arrays.asList(exports);
    }

    /**
     * Adds the export if not already present and takes ownership of it
     * (sets this element as its parent).
     */
    public void addExport(IPackageExport packageExport)
    {
        HashSet<IPackageExport> tmp = new HashSet<IPackageExport>(getExports());
        if (tmp.add(packageExport))
        {
            exports = tmp.toArray(new IPackageExport[tmp.size()]);
            packageExport.setParent(this);
        }
    }

    public void removeExport(IPackageExport packageExport)
    {
        HashSet<IPackageExport> tmp = new HashSet<IPackageExport>(getExports());
        if (tmp.remove(packageExport))
        {
            exports = tmp.toArray(new IPackageExport[tmp.size()]);
            packageExport.setParent(null);
        }
    }

    /** @return an unmodifiable view of the package imports. */
    public Collection<IPackageImport> getImports()
    {
        return Arrays.asList(imports);
    }

    public void addImport(IPackageImport packageImport)
    {
        HashSet<IPackageImport> tmp = new HashSet<IPackageImport>(getImports());
        if (tmp.add(packageImport))
        {
            imports = tmp.toArray(new IPackageImport[tmp.size()]);
            packageImport.setParent(this);
        }
    }

    public void removeImport(IPackageImport packageImport)
    {
        HashSet<IPackageImport> tmp = new HashSet<IPackageImport>(getImports());
        if (tmp.remove(packageImport))
        {
            imports = tmp.toArray(new IPackageImport[tmp.size()]);
            packageImport.setParent(null);
        }
    }

    /** @return an unmodifiable view of the required bundles. */
    public Collection<IRequiredBundle> getRequiredBundles()
    {
        return Arrays.asList(requires);
    }

    public void addRequiredBundle(IRequiredBundle bundle)
    {
        HashSet<IRequiredBundle> tmp = new HashSet<IRequiredBundle>(getRequiredBundles());
        if (tmp.add(bundle))
        {
            requires = tmp.toArray(new IRequiredBundle[tmp.size()]);
            bundle.setParent(this);
        }
    }

    public void removeRequiredBundle(IRequiredBundle bundle)
    {
        HashSet<IRequiredBundle> tmp = new HashSet<IRequiredBundle>(getRequiredBundles());
        if (tmp.remove(bundle))
        {
            requires = tmp.toArray(new IRequiredBundle[tmp.size()]);
            bundle.setParent(null);
        }
    }

    public URI getLicenseURI()
    {
        return licenseURI;
    }

    public void setLicenseURI(URI licenseURI)
    {
        this.licenseURI = licenseURI;
    }

    public URI getSourceLocation()
    {
        return sourceLocation;
    }

    public void setSourceLocation(URI sourceLocation)
    {
        this.sourceLocation = sourceLocation;
    }

    public String getSymbolicName()
    {
        return symbolicName;
    }

    /**
     * Symbolic names are interned: they recur across many bundles, so
     * sharing one String instance per distinct name saves memory.
     */
    public void setSymbolicName(String symbolicName)
    {
        this.symbolicName = symbolicName == null ? null : symbolicName.intern();
    }

    public URI getUpdateLocation()
    {
        return updateLocation;
    }

    public void setUpdateLocation(URI updateLocation)
    {
        this.updateLocation = updateLocation;
    }

    public String getVendor()
    {
        return vendor;
    }

    public void setVendor(String vendor)
    {
        this.vendor = vendor;
    }

    /** @return the bundle version; never null (defaults to Version.emptyVersion). */
    public Version getVersion()
    {
        return version;
    }

    public void setVersion(Version version)
    {
        this.version = version == null ? Version.emptyVersion : version;
    }

    /**
     * @throws InvalidModelException if the mandatory Bundle-SymbolicName
     *         header has not been set.
     */
    public void checkValid() throws InvalidModelException
    {
        if (symbolicName == null)
        {
            throw new InvalidModelException(this, "Bundle symbolic name not set");
        }
    }

    /**
     * Deep-ish clone: imports, exports and requires are element-wise cloned;
     * the libraries set and classpath array are copied so that mutating the
     * clone can never affect this instance.
     */
    public BundleModelElement clone()
    {
        BundleModelElement bd = (BundleModelElement) super.clone();
        bd.imports = new IPackageImport[imports.length];
        bd.exports = new IPackageExport[exports.length];
        bd.requires = new IRequiredBundle[requires.length];

        for (int i = 0; i < imports.length; i++)
        {
            bd.imports[i] = (IPackageImport) imports[i].clone();
        }

        for (int i = 0; i < exports.length; i++)
        {
            bd.exports[i] = (IPackageExport) exports[i].clone();
        }

        for (int i = 0; i < requires.length; i++)
        {
            bd.requires[i] = (IRequiredBundle) requires[i].clone();
        }

        // FIX: previously the clone shared this instance's mutable libraries
        // set (and classpath array) with the original, so addLibraryImport/
        // removeLibraryImport on the clone corrupted the source element.
        bd.libraries = new HashSet<ILibraryImport>(libraries);
        bd.classpathElements = classpathElements.clone();

        return bd;
    }

    public String toString()
    {
        StringBuilder buf = new StringBuilder();
        buf.append("BundleModelElement[");
        buf.append(symbolicName);
        buf.append(", ");
        buf.append(version);
        buf.append("]");
        return buf.toString();
    }

    public String getName()
    {
        return name;
    }

    public void setName(String name)
    {
        this.name = name;
    }

    public String getDescription()
    {
        return description;
    }

    public void setDescription(String description)
    {
        this.description = description;
    }

    public void addClasspath(String path)
    {
        HashSet<String> tmp = new HashSet<String>(Arrays.asList(classpathElements));
        if (tmp.add(path))
        {
            classpathElements = tmp.toArray(new String[tmp.size()]);
        }
    }

    /**
     * @return the configured classpath entries, or the OSGi default
     *         classpath {@code "."} when none were set.
     */
    public Collection<String> getClasspaths()
    {
        return classpathElements.length == 0 ? Collections.singleton(".")
            : Arrays.asList(classpathElements);
    }

    public void removeClasspath(String path)
    {
        HashSet<String> tmp = new HashSet<String>(Arrays.asList(classpathElements));
        if (tmp.remove(path))
        {
            classpathElements = tmp.toArray(new String[tmp.size()]);
        }
    }

    public IRequiredBundle getFragmentHost()
    {
        return fragmentHost;
    }

    public void setFragmentHost(IRequiredBundle fragmentHost)
    {
        this.fragmentHost = fragmentHost;
    }
}
package org.bridgedb.rdb.construct;

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.sql.Statement;

import org.bridgedb.IDMapperException;
import org.bridgedb.Xref;

/**
 * Builds a version-3 gene database (GDB) over a JDBC connection supplied by a
 * {@link DBConnector}.  Typical usage: {@code createGdbTables()},
 * {@code preInsert()}, many {@code addGene}/{@code addLink}/{@code addAttribute}
 * calls, {@code commit()}, then {@code finalize()} to index and seal the db.
 *
 * The add* methods return 0 on success and 1 on failure; the causing
 * exception is available through {@link #recentException()}.
 */
public class GdbConstructImpl3 implements GdbConstruct
{
    /** Schema version written into the info table. */
    private static int GDB_COMPAT_VERSION = 3;

    private final Connection con;
    private final DBConnector dbConnector;
    private String dbName;
    private Exception recentException = null;

    public GdbConstructImpl3(String dbName, DBConnector dbConnector, int props)
            throws IDMapperException
    {
        con = dbConnector.createConnection(dbName, props);
        this.dbConnector = dbConnector;
        this.dbName = dbName;
    }

    /** compact the database. @throws IDMapperException on failure */
    final public void compact() throws IDMapperException
    {
        dbConnector.compact(con);
    }

    /**
     * finalize the database: compact, index, close and let the connector
     * produce the final database file.
     * Note: intentionally (per the GdbConstruct contract) shadows
     * {@link Object#finalize()}; it is meant to be called explicitly.
     * @throws IDMapperException on failure
     */
    final public void finalize() throws IDMapperException
    {
        dbConnector.compact(con);
        createGdbIndices();
        dbConnector.closeConnection(con, DBConnector.PROP_FINALIZE);
        String newDb = dbConnector.finalizeNewDatabase(dbName);
        dbName = newDb;
        recentException = null;
    }

    // Prepared by preInsert(); null until then.
    private PreparedStatement pstGene = null;
    private PreparedStatement pstLink = null;
    private PreparedStatement pstAttr = null;

    /** {@inheritDoc} */
    public int addGene(Xref ref)
    {
        recentException = null;
        if (pstGene == null) throw new NullPointerException();
        try
        {
            pstGene.setString(1, ref.getId());
            pstGene.setString(2, ref.getDataSource().getSystemCode());
            pstGene.executeUpdate();
        }
        catch (SQLException e)
        {
            recentException = e;
            return 1;
        }
        return 0;
    }

    /** {@inheritDoc} */
    public int addAttribute(Xref ref, String attr, String val)
    {
        recentException = null;
        // Guard added for consistency with addGene/addLink: fail fast with an
        // explicit NPE if preInsert() was never called.
        if (pstAttr == null) throw new NullPointerException();
        try
        {
            pstAttr.setString(1, attr);
            pstAttr.setString(2, val);
            pstAttr.setString(3, ref.getId());
            pstAttr.setString(4, ref.getDataSource().getSystemCode());
            pstAttr.executeUpdate();
        }
        catch (SQLException e)
        {
            recentException = e;
            return 1;
        }
        return 0;
    }

    /** {@inheritDoc} */
    public int addLink(Xref left, Xref right)
    {
        if (pstLink == null) throw new NullPointerException();
        recentException = null;
        try
        {
            pstLink.setString(1, left.getId());
            pstLink.setString(2, left.getDataSource().getSystemCode());
            pstLink.setString(3, right.getId());
            pstLink.setString(4, right.getDataSource().getSystemCode());
            pstLink.executeUpdate();
        }
        catch (SQLException e)
        {
            recentException = e;
            return 1;
        }
        return 0;
    }

    /**
     * prepare for inserting genes and/or links: switches off auto-commit and
     * prepares the three insert statements used by the add* methods.
     * @throws IDMapperException on failure
     */
    public void preInsert() throws IDMapperException
    {
        try
        {
            con.setAutoCommit(false);
            pstGene = con.prepareStatement(
                "INSERT INTO datanode " +
                "       (id, code)" +
                "VALUES (?, ?)"
            );
            pstLink = con.prepareStatement(
                "INSERT INTO link " +
                "       (idLeft, codeLeft," +
                "        idRight, codeRight)" +
                "VALUES (?, ?, ?, ?)"
            );
            pstAttr = con.prepareStatement(
                "INSERT INTO attribute " +
                "       (attrname, attrvalue, id, code)" +
                "VALUES (?, ?, ?, ?)"
            );
        }
        catch (SQLException e)
        {
            throw new IDMapperException (e);
        }
    }

    /**
     * Stores a key/value pair in the info table by adding a column named
     * after the key.
     * @throws IDMapperException on SQL failure
     * @throws IllegalArgumentException if key is not a plain identifier
     *         (it is spliced into DDL, so it must be strictly validated).
     */
    public void setInfo(String key, String value) throws IDMapperException
    {
        try
        {
            /**
             * This is a bit awkward because we store keys as columns.
             * TODO: in a future schema version this should be a regular 2-column table.
             */
            if (!key.matches("^\\w+$"))
                throw new IllegalArgumentException("key: '" + key + "' contains invalid characters");

            // Statements are closed in finally blocks so repeated setInfo
            // calls do not leak statement handles.
            PreparedStatement pstInfo1 = con.prepareStatement (
                "ALTER TABLE info " +
                "ADD COLUMN " + key + " VARCHAR (50)"
            );
            try
            {
                pstInfo1.execute();
            }
            finally
            {
                pstInfo1.close();
            }

            PreparedStatement pstInfo2 = con.prepareStatement (
                "UPDATE info SET " + key + " = ? " +
                "WHERE schemaversion = " + GDB_COMPAT_VERSION
            );
            try
            {
                pstInfo2.setString(1, value);
                pstInfo2.execute();
            }
            finally
            {
                pstInfo2.close();
            }
        }
        catch (SQLException ex)
        {
            throw new IDMapperException(ex);
        }
    }

    /**
       Create indices on the database
       You can call this at any time after creating the tables,
       but it is good to do it only after inserting all data.
       @throws IDMapperException on failure
     */
    public void createGdbIndices() throws IDMapperException
    {
        try
        {
            Statement sh = con.createStatement();
            try
            {
                sh.execute(
                    "CREATE INDEX i_codeLeft" +
                    " ON link(codeLeft)"
                );
                sh.execute(
                    "CREATE INDEX i_idRight" +
                    " ON link(idRight)"
                );
                sh.execute(
                    "CREATE INDEX i_codeRight" +
                    " ON link(codeRight)"
                );
                sh.execute(
                    "CREATE INDEX i_code" +
                    " ON " + "datanode" + "(code)"
                );
            }
            finally
            {
                // close in finally so a failing index does not leak the handle
                sh.close();
            }
        }
        catch (SQLException e)
        {
            throw new IDMapperException (e);
        }
    }

    /**
     * Executes several SQL statements to create the tables and indexes in the
     * database the given connection is connected to.
     * Note: Official GDB's are created by AP, not with this code.
     * This is just here for testing purposes.
     * @throws IDMapperException on failure
     */
    public void createGdbTables() throws IDMapperException
    {
        try
        {
            Statement sh = con.createStatement();
            try
            {
                sh.execute(
                    "CREATE TABLE " +
                    "       info " +
                    "(  schemaversion INTEGER PRIMARY KEY " +
                    ")");
                // Add compatibility version of GDB
                sh.execute(
                    "INSERT INTO info VALUES ( " + GDB_COMPAT_VERSION + ")");
                sh.execute(
                    "CREATE TABLE " +
                    "       link " +
                    " (   idLeft VARCHAR(50) NOT NULL, " +
                    "     codeLeft VARCHAR(50) NOT NULL, " +
                    "     idRight VARCHAR(50) NOT NULL, " +
                    "     codeRight VARCHAR(50) NOT NULL, " +
                    "     bridge VARCHAR(50), " +
                    "     PRIMARY KEY (idLeft, codeLeft, " +
                    "     idRight, codeRight) " +
                    " )  ");
                sh.execute(
                    "CREATE TABLE " +
                    "       datanode " +
                    " (   id VARCHAR(50), " +
                    "     code VARCHAR(50), " +
                    "     PRIMARY KEY (id, code) " +
                    " )  ");
                sh.execute(
                    "CREATE TABLE " +
                    "       attribute " +
                    " (   id VARCHAR(50), " +
                    "     code VARCHAR(50), " +
                    "     attrname VARCHAR(50), " +
                    "     attrvalue VARCHAR(255) " +
                    " )  ");
            }
            finally
            {
                // close in finally so partial failures do not leak the handle
                sh.close();
            }
        }
        catch (SQLException e)
        {
            throw new IDMapperException (e);
        }
    }

    /** commit inserted data. @throws IDMapperException on failure */
    final public void commit() throws IDMapperException
    {
        try
        {
            con.commit();
        }
        catch (SQLException e)
        {
            throw new IDMapperException (e);
        }
    }

    /**
     * Older method to open a connection to a Gene database
     * using a DBConnector to handle differences
     * between different RDBMS-es. The other createInstance() is preferred.
     * <p>
     * Use this instead of constructor to create an instance of SimpleGdb that matches the schema version.
     * @param dbName The file containing the Gene Database.
     * @param newDbConnector handles the differences between types of RDBMS.
     *        A new instance of DbConnector class is instantiated automatically.
     * @param props PROP_RECREATE if you want to create a new database, overwriting any existing ones. Otherwise, PROP_NONE.
     * @return a new Gdb
     * @throws IDMapperException on failure
     */
    public static GdbConstruct createInstance(String dbName, DBConnector newDbConnector, int props)
            throws IDMapperException
    {
        try
        {
            // create a fresh db connector of the correct type.
            DBConnector dbConnector = newDbConnector.getClass().newInstance();
            return new GdbConstructImpl3(dbName, dbConnector, props);
        }
        catch (InstantiationException e)
        {
            throw new IDMapperException (e);
        }
        catch (IllegalAccessException e)
        {
            throw new IDMapperException (e);
        }
    }

    @Override
    public Exception recentException()
    {
        return recentException;
    }
}
/*_############################################################################
  _## 
  _##  SNMP4J - OctetString.java  
  _## 
  _##  Copyright (C) 2003-2009  Frank Fock and Jochen Katz (SNMP4J.org)
  _##  
  _##  Licensed under the Apache License, Version 2.0 (the "License");
  _##  you may not use this file except in compliance with the License.
  _##  You may obtain a copy of the License at
  _##  
  _##      http://www.apache.org/licenses/LICENSE-2.0
  _##  
  _##  Unless required by applicable law or agreed to in writing, software
  _##  distributed under the License is distributed on an "AS IS" BASIS,
  _##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  _##  See the License for the specific language governing permissions and
  _##  limitations under the License.
  _##  
  _##########################################################################*/
package org.snmp4j.smi;

import java.io.*;
import java.util.*;
import org.snmp4j.asn1.BER;
import org.snmp4j.asn1.BERInputStream;

/**
 * The <code>OctetString</code> class represents the SMI type OCTET STRING.
 *
 * @author Frank Fock
 * @version 1.8
 * @since 1.0
 */
public class OctetString extends AbstractVariable
    implements AssignableFromByteArray, AssignableFromString {

  private static final long serialVersionUID = 4125661211046256289L;

  private static final char DEFAULT_HEX_DELIMITER = ':';

  // Backing byte array; never null. Replaced wholesale by the append/set
  // methods, but individual bytes may also be mutated via set(int, byte).
  private byte[] value = new byte[0];

  /**
   * Creates a zero length octet string.
   */
  public OctetString() {
  }

  /**
   * Creates an octet string from an byte array.
   * @param rawValue
   *    an array of bytes.
   */
  public OctetString(byte[] rawValue) {
    this(rawValue, 0, rawValue.length);
  }

  /**
   * Creates an octet string from an byte array.
   * @param rawValue
   *    an array of bytes.
   * @param offset
   *    the position (zero based) of the first byte to be copied from
   *    <code>rawValue</code>into the new <code>OctetSring</code>.
   * @param length
   *    the number of bytes to be copied.
   */
  public OctetString(byte[] rawValue, int offset, int length) {
    // Defensive copy: the caller's array is never aliased.
    value = new byte[length];
    System.arraycopy(rawValue, offset, value, 0, length);
  }

  /**
   * Creates an octet string from a java string.
   *
   * @param stringValue
   *    a Java string.
   */
  public OctetString(String stringValue) {
    // NOTE(review): uses the platform default charset — verify callers do not
    // depend on a specific encoding.
    this.value = stringValue.getBytes();
  }

  /**
   * Creates an octet string from another OctetString by cloning its value.
   *
   * @param other
   *    an <code>OctetString</code> instance.
   */
  public OctetString(OctetString other) {
    this.value = new byte[0];
    append(other);
  }

  /**
   * Appends a single byte to this octet string.
   * @param b
   *    a byte value.
   */
  public void append(byte b) {
    byte[] newValue = new byte[value.length+1];
    System.arraycopy(value, 0, newValue, 0, value.length);
    newValue[value.length] = b;
    value = newValue;
  }

  /**
   * Appends an array of bytes to this octet string.
   * @param bytes
   *    an array of bytes.
   */
  public void append(byte[] bytes) {
    byte[] newValue = new byte[value.length + bytes.length];
    System.arraycopy(value, 0, newValue, 0, value.length);
    System.arraycopy(bytes, 0, newValue, value.length, bytes.length);
    value = newValue;
  }

  /**
   * Appends an octet string.
   * @param octetString
   *   an <code>OctetString</code> to append to this octet string.
   */
  public void append(OctetString octetString) {
    append(octetString.getValue());
  }

  /**
   * Appends the supplied string to this <code>OctetString</code>. Calling this
   * method is identical to <I>append(string.getBytes())</I>.
   * @param string
   *   a String instance.
   */
  public void append(String string) {
    append(string.getBytes());
  }

  /**
   * Sets the value of the octet string to a zero length string.
   */
  public void clear() {
    value = new byte[0];
  }

  // Encodes this value as a BER OCTET STRING onto the supplied stream.
  public void encodeBER(OutputStream outputStream) throws java.io.IOException {
    BER.encodeString(outputStream, BER.OCTETSTRING, getValue());
  }

  // Decodes a BER OCTET STRING from the stream into this instance; rejects
  // any other BER type tag.
  public void decodeBER(BERInputStream inputStream) throws java.io.IOException {
    BER.MutableByte type = new BER.MutableByte();
    byte[] v = BER.decodeString(inputStream, type);
    if (type.getValue() != BER.OCTETSTRING) {
      throw new IOException("Wrong type encountered when decoding OctetString: "+
                            type.getValue());
    }
    setValue(v);
  }

  // Total BER length: payload + length-of-length octets + one tag octet.
  public int getBERLength() {
    return value.length + BER.getBERLengthOfLength(value.length) + 1;
  }

  public int getSyntax() {
    return SMIConstants.SYNTAX_OCTET_STRING;
  }

  /**
   * Gets the byte at the specified index.
   * @param index
   *    a zero-based index into the octet string.
   * @return
   *    the byte value at the specified index.
   * @throws ArrayIndexOutOfBoundsException
   *    if <code>index</code> &lt; 0 or &gt; {@link #length()}.
   */
  public final byte get(int index) {
    return value[index];
  }

  /**
   * Sets the byte value at the specified index.
   * @param index
   *    an index value greater or equal 0 and less than {@link #length()}.
   * @param b
   *    the byte value to set.
   * @since v1.2
   */
  public final void set(int index, byte b) {
    value[index] = b;
  }

  // Position-sensitive hash over the bytes.
  // NOTE(review): '*' binds tighter than '^', so each term is
  // (value[i]*31) XOR (position-from-end) — not 31 raised to a power and not
  // the usual 31*hash accumulation. Likely intentional legacy behavior, but
  // do not "fix" without considering hash-based collections already in use.
  public int hashCode() {
    int hash = 0;
    for (int i=0; i<value.length; i++) {
      hash += value[i]*31^((value.length-1)-i);
    }
    return hash;
  }

  // Equal to another OctetString with the same bytes, or (asymmetrically)
  // to a raw byte[] with the same contents.
  public boolean equals(Object o) {
    if (o instanceof OctetString) {
      OctetString other = (OctetString)o;
      return Arrays.equals(value, other.value);
    }
    else if (o instanceof byte[]) {
      return Arrays.equals(value, (byte[])o);
    }
    return false;
  }

  // Lexicographic comparison treating bytes as unsigned (0..255); ties are
  // broken by length. Throws ClassCastException for non-OctetString input.
  public int compareTo(Object o) {
    if (o instanceof OctetString) {
      OctetString other = (OctetString)o;
      int maxlen = Math.min(value.length, other.value.length);
      for (int i=0; i<maxlen; i++) {
        if (value[i] != other.value[i]) {
          if ((value[i] & 0xFF) < (other.value[i] & 0xFF)) {
            return -1;
          }
          else {
            return 1;
          }
        }
      }
      return (value.length - other.value.length);
    }
    throw new ClassCastException(o.getClass().getName());
  }

  /**
   * Returns a new string that is a substring of this string. The substring
   * begins at the specified <code>beginIndex</code> and extends to the
   * character at index <code>endIndex - 1</code>.
   * Thus the length of the substring is <code>endIndex-beginIndex</code>.
   * @param beginIndex
   *    the beginning index, inclusive.
   * @param endIndex
   *    the ending index, exclusive.
   * @return
   *    the specified substring.
   * @since 1.3
   */
  public OctetString substring(int beginIndex, int endIndex) {
    if ((beginIndex < 0) || (endIndex > length())) {
      throw new IndexOutOfBoundsException();
    }
    byte[] substring = new byte[endIndex - beginIndex];
    System.arraycopy(value, beginIndex, substring, 0, substring.length);
    return new OctetString(substring);
  }

  /**
   * Tests if this octet string starts with the specified prefix.
   * @param prefix
   *    the prefix.
   * @return
   *    <code>true</code> if the bytes of this octet string up to the length
   *    of <code>prefix</code> equal those of <code>prefix</code>.
   * @since 1.2
   */
  public boolean startsWith(OctetString prefix) {
    if ((prefix == null) || prefix.length() > length()) {
      return false;
    }
    for (int i=0; i<prefix.length(); i++) {
      if (prefix.get(i) != value[i]) {
        return false;
      }
    }
    return true;
  }

  /**
   * Determines whether this octet string contains non ISO control characters
   * only.
   * @return
   *    <code>false</code> if this octet string contains any ISO control
   *    characters as defined by <code>Character.isISOControl(char)</code>
   *    except if these ISO control characters are all whitespace characters
   *    as defined by <code>Character.isWhitespace(char)</code> and not
   *    <code>'&#92;u001C'</code>-<code>'&#92;u001F'</code>.
   */
  public boolean isPrintable() {
    // NOTE(review): by precedence the last clause groups as
    // (!isWhitespace(c)) || ((c&0xFF) >= 0x1C && (c&0xFF) <= 0x1F),
    // which matches the javadoc (file separators 0x1C-0x1F are never
    // printable even though they are whitespace) — confirm before editing.
    for (int i=0; i<value.length; i++) {
      char c = (char)value[i];
      if ((Character.isISOControl(c) ||
           ((c & 0xFF) >= 0x80)) &&
          ((!Character.isWhitespace(c)) ||
           (((c & 0xFF) >= 0x1C)) && ((c & 0xFF) <= 0x1F))) {
        return false;
      }
    }
    return true;
  }

  // Human-friendly rendering: raw characters when printable, hex otherwise.
  public String toString() {
    if (isPrintable()) {
      return new String(value);
    }
    return toHexString();
  }

  public String toHexString() {
    return toHexString(DEFAULT_HEX_DELIMITER);
  }

  public String toHexString(char separator) {
    return toString(separator, 16);
  }

  public static OctetString fromHexString(String hexString) {
    return fromHexString(hexString, DEFAULT_HEX_DELIMITER);
  }

  public static OctetString fromHexString(String hexString, char delimiter) {
    return OctetString.fromString(hexString, delimiter, 16);
  }

  // Parses a delimiter-separated list of numbers in the given radix, one
  // byte per token.
  public static OctetString fromString(String string, char delimiter, int radix) {
    String delim = "";
    delim += delimiter;
    StringTokenizer st = new StringTokenizer(string, delim);
    byte[] value = new byte[st.countTokens()];
    for (int n=0; st.hasMoreTokens(); n++) {
      String s = st.nextToken();
      value[n] = (byte)Integer.parseInt(s, radix);
    }
    return new OctetString(value);
  }

  /**
   * Creates an OctetString from a string represantation in the specified
   * radix.
   * @param string
   *    the string representation of an octet string.
   * @param radix
   *    the radix of the string represantion.
   * @return
   *    the OctetString instance.
   * @since 1.6
   */
  public static OctetString fromString(String string, int radix) {
    // digits = number of characters that encode one byte in this radix
    // (e.g. 2 for hex, 3 for decimal/octal).
    int digits = (int)(Math.round((float)Math.log(256)/Math.log(radix)));
    byte[] value = new byte[string.length()/digits];
    for (int n=0; n<string.length(); n+=digits) {
      String s = string.substring(n, n+digits);
      value[n/digits] = (byte)Integer.parseInt(s, radix);
    }
    return new OctetString(value);
  }

  // Renders each byte as zero-padded digits in the given radix, separated by
  // the supplied character.
  public String toString(char separator, int radix) {
    int digits = (int)(Math.round((float)Math.log(256)/Math.log(radix)));
    StringBuffer buf = new StringBuffer(value.length*(digits+1));
    for (int i=0; i<value.length; i++) {
      if (i > 0) {
        buf.append(separator);
      }
      int v = (value[i] & 0xFF);
      String val = Integer.toString(v, radix);
      for (int j=0; j < digits - val.length(); j++) {
        buf.append('0');
      }
      buf.append(val);
    }
    return buf.toString();
  }

  /**
   * Returns a string representation of this octet string in the radix
   * specified. There will be no separation characters, but each byte will
   * be represented by <code>round(log(256)/log(radix))</code> digits.
   *
   * @param radix
   *    the radix to use in the string representation.
   * @return
   *    a string representation of this ocetet string in the specified radix.
   * @since 1.6
   */
  public String toString(int radix) {
    int digits = (int)(Math.round((float)Math.log(256)/Math.log(radix)));
    StringBuffer buf = new StringBuffer(value.length*(digits+1));
    for (int i=0; i<value.length; i++) {
      int v = (value[i] & 0xFF);
      String val = Integer.toString(v, radix);
      for (int j=0; j < digits - val.length(); j++) {
        buf.append('0');
      }
      buf.append(val);
    }
    return buf.toString();
  }

  /**
   * Formats the content into a ASCII string. Non-printable characters are
   * replaced by the supplied placeholder character.
   * @param placeholder
   *    a placeholder character, for example '.'.
   * @return
   *    the contents of this octet string as ASCII formatted string.
   * @since 1.6
   */
  public String toASCII(char placeholder) {
    StringBuffer buf = new StringBuffer(value.length);
    for (int i=0; i<value.length; i++) {
      if ((Character.isISOControl((char)value[i])) ||
          ((value[i] & 0xFF) >= 0x80)) {
        buf.append(placeholder);
      }
      else {
        buf.append((char) value[i]);
      }
    }
    return buf.toString();
  }

  public void setValue(String value) {
    setValue(value.getBytes());
  }

  // Adopts (does not copy) the supplied array; a null value is rejected so
  // the internal array invariant (never null) holds.
  public void setValue(byte[] value) {
    if (value == null) {
      throw new IllegalArgumentException(
          "OctetString must not be assigned a null value");
    }
    this.value = value;
  }

  public byte[] getValue() {
    return value;
  }

  /**
   * Gets the length of the byte string.
   * @return
   *    an integer >= 0.
   */
  public final int length() {
    return value.length;
  }

  // Copy constructor semantics: the clone gets its own byte array.
  public Object clone() {
    return new OctetString(value);
  }

  /**
   * Returns the length of the payload of this <code>BERSerializable</code>
   * object in bytes when encoded according to the Basic Encoding Rules (BER).
   *
   * @return the BER encoded length of this variable.
   */
  public int getBERPayloadLength() {
    return value.length;
  }

  // OCTET STRING has no numeric interpretation.
  public int toInt() {
    throw new UnsupportedOperationException();
  }

  public long toLong() {
    throw new UnsupportedOperationException();
  }

  /**
   * Returns a copy of this OctetString where each bit not set in the supplied
   * mask zeros the corresponding bit in the returned OctetString.
   * @param mask
   *    a mask where the n-th bit corresponds to the n-th bit in the returned
   *    OctetString.
   * @return
   *    the masked OctetString.
   * @since 1.7
   */
  public OctetString mask(OctetString mask) {
    byte[] masked = new byte[value.length];
    System.arraycopy(value, 0, masked, 0, value.length);
    // Bytes beyond the mask's length are left unmasked.
    for (int i=0; (i<mask.length()) && (i<masked.length); i++) {
      masked[i] = (byte)(masked[i] & mask.get(i));
    }
    return new OctetString(masked);
  }

  // Converts to an OID sub-index: one sub-identifier (0..255) per byte,
  // optionally prefixed with the length when it is not implied.
  public OID toSubIndex(boolean impliedLength) {
    int[] subIndex;
    int offset = 0;
    if (!impliedLength) {
      subIndex = new int[length()+1];
      subIndex[offset++] = length();
    }
    else {
      subIndex = new int[length()];
    }
    for (int i=0; i<length(); i++) {
      subIndex[offset+i] = get(i) & 0xFF;
    }
    return new OID(subIndex);
  }

  // Inverse of toSubIndex: when length is not implied, the first
  // sub-identifier is the length and is skipped.
  public void fromSubIndex(OID subIndex, boolean impliedLength) {
    if (impliedLength) {
      setValue(subIndex.toByteArray());
    }
    else {
      OID suffix = new OID(subIndex.getValue(), 1, subIndex.size() - 1);
      setValue(suffix.toByteArray());
    }
  }

  /**
   * Splits an <code>OctetString</code> using a set of delimiter characters
   * similar to how a StringTokenizer would do it.
   * @param octetString
   *    the input string to tokenize.
   * @param delimOctets
   *    a set of delimiter octets.
   * @return
   *    a Collection of OctetString instances that contain the tokens.
   */
  public static final Collection split(OctetString octetString,
                                       OctetString delimOctets) {
    List parts = new LinkedList();
    // maxDelim lets the inner delimiter scan be skipped for most bytes.
    int maxDelim = -1;
    for (int i = 0; i<delimOctets.length(); i++) {
      int delim = delimOctets.get(i) & 0xFF;
      if (delim > maxDelim) {
        maxDelim = delim;
      }
    }
    // startPos == -1 is the sentinel for "currently inside a delimiter run".
    int startPos = 0;
    for (int i = 0; i<octetString.length(); i++) {
      int c = octetString.value[i] & 0xFF;
      boolean isDelim = false;
      if (c <= maxDelim) {
        for (int j=0; j<delimOctets.length(); j++) {
          if (c == (delimOctets.get(j) & 0xFF)) {
            if ((startPos >= 0) && (i > startPos)) {
              parts.add(new OctetString(octetString.value,
                                        startPos, i - startPos));
            }
            startPos = -1;
            isDelim = true;
          }
        }
      }
      if (!isDelim && (startPos < 0)) {
        startPos = i;
      }
    }
    if (startPos >= 0) {
      // trailing token after the last delimiter
      parts.add(new OctetString(octetString.value, startPos,
                                octetString.length() - startPos));
    }
    return parts;
  }

  /**
   * Creates an <code>OctetString</code> from an byte array.
   * @param value
   *    a byte array that is copied into the value of the created
   *     <code>OctetString</code> or <code>null</code>.
   * @return
   *    an OctetString or <code>null</code> if <code>value</code>
   *    is <code>null</code>.
   * @since 1.7
   */
  public static OctetString fromByteArray(byte[] value) {
    if (value == null) {
      return null;
    }
    return new OctetString(value);
  }

  // Exposes the internal array (no copy) — callers must not mutate it.
  public byte[] toByteArray() {
    return getValue();
  }

}
/* * Copyright 2015-2020 OpenCB * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.opencb.opencga.catalog.db.api; import org.apache.commons.collections.map.LinkedMap; import org.apache.commons.lang3.StringUtils; import org.opencb.commons.datastore.core.ObjectMap; import org.opencb.commons.datastore.core.Query; import org.opencb.commons.datastore.core.QueryOptions; import org.opencb.commons.datastore.core.QueryParam; import org.opencb.opencga.catalog.exceptions.CatalogAuthenticationException; import org.opencb.opencga.catalog.exceptions.CatalogAuthorizationException; import org.opencb.opencga.catalog.exceptions.CatalogDBException; import org.opencb.opencga.catalog.exceptions.CatalogParameterException; import org.opencb.opencga.core.models.user.User; import org.opencb.opencga.core.models.user.UserFilter; import org.opencb.opencga.core.response.OpenCGAResult; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import static org.opencb.commons.datastore.core.QueryParam.Type.*; /** * @author Jacobo Coll &lt;jacobo167@gmail.com&gt; */ public interface UserDBAdaptor extends DBAdaptor<User> { /* * User methods */ default boolean exists(String userId) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException { return count(new Query(QueryParams.ID.key(), userId)).getNumMatches() > 0; } default void checkId(String userId) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException { if 
(StringUtils.isEmpty(userId)) { throw CatalogDBException.newInstance("User id '{}' is not valid: ", userId); } if (!exists(userId)) { throw CatalogDBException.newInstance("User id '{}' does not exist", userId); } } default void checkIds(List<String> userIds) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException { if (userIds == null || userIds.isEmpty()) { throw CatalogDBException.newInstance("No users to be checked."); } Set<String> userSet = new HashSet<>(userIds); Query query = new Query(QueryParams.ID.key(), userSet); if (count(query).getNumMatches() < userSet.size()) { throw CatalogDBException.newInstance("Some users do not exist."); } } void authenticate(String userId, String password) throws CatalogDBException, CatalogAuthenticationException; OpenCGAResult insert(User user, String password, QueryOptions options) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException; OpenCGAResult<User> get(String userId, QueryOptions options) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException; // @Deprecated // default OpenCGAResult<User> modifyUser(String userId, ObjectMap parameters) throws CatalogDBException { // return update(userId, parameters); // } OpenCGAResult update(String userId, ObjectMap parameters) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException; // @Deprecated // default OpenCGAResult<User> deleteUser(String userId) throws CatalogDBException { // return delete(userId, false); // } OpenCGAResult delete(String userId, QueryOptions queryOptions) throws CatalogDBException, CatalogParameterException, CatalogAuthorizationException; OpenCGAResult changePassword(String userId, String oldPassword, String newPassword) throws CatalogDBException, CatalogAuthenticationException; OpenCGAResult resetPassword(String userId, String email, String newCryptPass) throws CatalogDBException; // Config operations OpenCGAResult setConfig(String userId, 
String name, Map<String, Object> config) throws CatalogDBException; OpenCGAResult deleteConfig(String userId, String name) throws CatalogDBException; // Filter operations OpenCGAResult addFilter(String userId, UserFilter filter) throws CatalogDBException; OpenCGAResult updateFilter(String userId, String name, ObjectMap params) throws CatalogDBException; OpenCGAResult deleteFilter(String userId, String name) throws CatalogDBException; enum QueryParams implements QueryParam { ID("id", TEXT_ARRAY, ""), NAME("name", TEXT_ARRAY, ""), EMAIL("email", TEXT_ARRAY, ""), ORGANIZATION("organization", TEXT_ARRAY, ""), INTERNAL_STATUS_NAME("internal.status.name", TEXT, ""), INTERNAL_STATUS_DATE("internal.status.date", TEXT, ""), ACCOUNT("account", TEXT_ARRAY, ""), SIZE("size", INTEGER_ARRAY, ""), QUOTA("quota", INTEGER_ARRAY, ""), ATTRIBUTES("attributes", TEXT, ""), // "Format: <key><operation><stringValue> where <operation> is [<|<=|>|>=|==|!=|~|!~]" NATTRIBUTES("nattributes", DECIMAL, ""), // "Format: <key><operation><numericalValue> where <operation> is [<|<=|>|>=|==|!=|~|!~]" BATTRIBUTES("battributes", BOOLEAN, ""), // "Format: <key><operation><true|false> where <operation> is [==|!=]" PROJECTS("projects", TEXT_ARRAY, ""), PROJECTS_UID("projects.uid", INTEGER_ARRAY, ""), PROJECTS_ID("projects.id", INTEGER_ARRAY, ""), PROJECT_NAME("projects.name", TEXT_ARRAY, ""), PROJECT_ORGANIZATION("projects.organization", TEXT_ARRAY, ""), PROJECT_STATUS("projects.status", TEXT_ARRAY, ""), TOOL_ID("tools.id", INTEGER_ARRAY, ""), TOOL_NAME("tools.name", TEXT_ARRAY, ""), TOOL_ALIAS("tools.alias", TEXT_ARRAY, ""), // TOCHECK: Pedro. Check whether login, logout makes sense. 
SESSIONS("sessions", TEXT_ARRAY, ""), SESSION_ID("sessions.id", TEXT_ARRAY, ""), SESSION_IP("sessions.ip", TEXT_ARRAY, ""), SESSION_LOGIN("sessions.login", TEXT_ARRAY, ""), SESSION_LOGOUT("sessions.logout", TEXT_ARRAY, ""), CONFIGS("configs", TEXT_ARRAY, ""), FILTERS("filters", TEXT_ARRAY, ""), FILTERS_ID("filters.id", TEXT, ""); private static Map<String, QueryParams> map; static { map = new LinkedMap(); for (QueryParams params : QueryParams.values()) { map.put(params.key(), params); } } private final String key; private Type type; private String description; QueryParams(String key, Type type, String description) { this.key = key; this.type = type; this.description = description; } @Override public String key() { return key; } @Override public Type type() { return type; } @Override public String description() { return description; } public static Map<String, QueryParams> getMap() { return map; } public static QueryParams getParam(String key) { return map.get(key); } } enum FilterParams implements QueryParam { ID("id", TEXT, ""), DESCRIPTION("description", TEXT, ""), RESOURCE("resource", TEXT, ""), QUERY("query", TEXT, ""), OPTIONS("options", TEXT, ""); private static Map<String, FilterParams> map; static { map = new LinkedMap(); for (FilterParams params : FilterParams.values()) { map.put(params.key(), params); } } private final String key; private Type type; private String description; FilterParams(String key, Type type, String description) { this.key = key; this.type = type; this.description = description; } @Override public String key() { return key; } @Override public Type type() { return type; } @Override public String description() { return description; } public static Map<String, FilterParams> getMap() { return map; } public static FilterParams getParam(String key) { return map.get(key); } } enum ToolQueryParams implements QueryParam { ID("id", TEXT, ""), ALIAS("alias", TEXT, ""), NAME("name", TEXT, ""), DESCRIPTION("description", TEXT, ""), 
MANIFEST("manifest", TEXT, ""), RESULT("result", TEXT, ""), PATH("path", TEXT, ""), ACL_USER_ID("acl.userId", TEXT_ARRAY, ""), ACL_READ("acl.read", BOOLEAN, ""), ACL_WRITE("acl.write", BOOLEAN, ""), ACL_EXECUTE("acl.execute", BOOLEAN, ""), ACL_DELETE("acl.delete", BOOLEAN, ""); private static Map<String, ToolQueryParams> map; static { map = new LinkedMap(); for (ToolQueryParams params : ToolQueryParams.values()) { map.put(params.key(), params); } } private final String key; private Type type; private String description; ToolQueryParams(String key, Type type, String description) { this.key = key; this.type = type; this.description = description; } @Override public String key() { return key; } @Override public Type type() { return type; } @Override public String description() { return description; } public static Map<String, ToolQueryParams> getMap() { return map; } public static ToolQueryParams getParam(String key) { return map.get(key); } } /** * Project methods moved to ProjectDBAdaptor * *************************** */ // OpenCGAResult<Project> createProject(String userId, Project project, QueryOptions options) throws CatalogDBException; // // boolean projectExists(int projectId); // // OpenCGAResult<Project> getAllProjects(String userId, QueryOptions options) throws CatalogDBException; // // OpenCGAResult<Project> getProject(int project, QueryOptions options) throws CatalogDBException; // // OpenCGAResult<Integer> deleteProject(int projectId) throws CatalogDBException; // // OpenCGAResult renameProjectAlias(int projectId, String newProjectName) throws CatalogDBException; // // OpenCGAResult<Project> modifyProject(int projectId, ObjectMap parameters) throws CatalogDBException; // // int getProjectId(String userId, String projectAlias) throws CatalogDBException; // // String getProjectOwnerId(int projectId) throws CatalogDBException; // // OpenCGAResult<AclEntry> getProjectAcl(int projectId, String userId) throws CatalogDBException; // // OpenCGAResult 
setProjectAcl(int projectId, AclEntry newAcl) throws CatalogDBException; }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.openwire.commands; import io.openwire.utils.ExceptionSupport; import io.openwire.utils.OpenWireMarshallingSupport; import java.io.DataInput; import java.io.DataOutput; import java.io.EOFException; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; import javax.jms.JMSException; import org.fusesource.hawtbuf.Buffer; import org.fusesource.hawtbuf.DataByteArrayInputStream; import org.fusesource.hawtbuf.DataByteArrayOutputStream; /** * openwire:marshaller code="27" */ public class OpenWireStreamMessage extends OpenWireMessage { public static final byte DATA_STRUCTURE_TYPE = CommandTypes.OPENWIRE_STREAM_MESSAGE; @Override public OpenWireStreamMessage copy() { OpenWireStreamMessage copy = new OpenWireStreamMessage(); copy(copy); return copy; } private void copy(OpenWireStreamMessage copy) { storeContent(); super.copy(copy); } @Override public void onSend() throws JMSException { super.onSend(); storeContent(); } @Override public byte getDataStructureType() { return DATA_STRUCTURE_TYPE; } @Override public String getMimeType() { return "jms/stream-message"; } /** * Reads the contents of the StreamMessage instances into a single 
List<Object> instance * and returns it. The read starts from the current position of the message which implies * that the list might not be a complete view of the message if any prior read operations * were invoked. * * @return a List containing the objects store in this message starting from the current position. * * @throws JMSException if an error occurs while reading the message. */ public List<Object> readStreamToList() throws JMSException { if (!hasContent()) { return Collections.emptyList(); } Buffer payload = getPayload(); DataByteArrayInputStream dataIn = new DataByteArrayInputStream(payload); List<Object> result = new ArrayList<Object>(); while (true) { try { result.add(readNextElement(dataIn)); } catch (EOFException ex) { break; } catch (IOException e) { throw ExceptionSupport.create(e); } } return result; } /** * Given a DataInput instance, attempt to read OpenWireStreamMessage formatted values * and returned the next element. * * @param input * the input stream that contains the marshaled bytes. * * @return the next element encoded in the stream. 
* * @throws IOException if an error occurs while reading the next element from the stream * @throws EOFException */ protected Object readNextElement(DataInput input) throws IOException { int type = input.readByte(); if (type == -1) { throw new EOFException("Reached end of stream."); } if (type == OpenWireMarshallingSupport.NULL) { return null; } else if (type == OpenWireMarshallingSupport.BIG_STRING_TYPE) { return OpenWireMarshallingSupport.readUTF8(input); } else if (type == OpenWireMarshallingSupport.STRING_TYPE) { return input.readUTF(); } else if (type == OpenWireMarshallingSupport.LONG_TYPE) { return Long.valueOf(input.readLong()); } else if (type == OpenWireMarshallingSupport.INTEGER_TYPE) { return Integer.valueOf(input.readInt()); } else if (type == OpenWireMarshallingSupport.SHORT_TYPE) { return Short.valueOf(input.readShort()); } else if (type == OpenWireMarshallingSupport.BYTE_TYPE) { return Byte.valueOf(input.readByte()); } else if (type == OpenWireMarshallingSupport.FLOAT_TYPE) { return new Float(input.readFloat()); } else if (type == OpenWireMarshallingSupport.DOUBLE_TYPE) { return new Double(input.readDouble()); } else if (type == OpenWireMarshallingSupport.BOOLEAN_TYPE) { return input.readBoolean() ? Boolean.TRUE : Boolean.FALSE; } else if (type == OpenWireMarshallingSupport.CHAR_TYPE) { return Character.valueOf(input.readChar()); } else if (type == OpenWireMarshallingSupport.BYTE_ARRAY_TYPE) { int len = input.readInt(); byte[] value = new byte[len]; input.readFully(value); return value; } else { throw new IOException("unknown type read from encoded stream."); } } /** * Writes the given set of Objects to the messages stream. The elements in the list * must adhere to the supported types of a JMS StreamMessage or an exception will be * thrown. * * @param elements * the list of elements to store into the list. * * @throws JMSException if an error occurs while writing the elements to the message. 
*/ public void writeListToStream(List<Object> elements) throws JMSException { if (elements != null && !elements.isEmpty()) { DataByteArrayOutputStream output = new DataByteArrayOutputStream(); for (Object value : elements) { try { writeElement(value, output); } catch (IOException e) { throw ExceptionSupport.create(e); } } try { output.close(); } catch (IOException e) { throw ExceptionSupport.create(e); } setPayload(output.toBuffer()); } } /** * Encodes the given object into the OpenWire marshaled form and writes it to the * given DataOutput instance. Each element is written with a type identifier to * allow for easy unmarshaling. * * @param value * @param output * @throws IOException */ protected void writeElement(Object value, DataOutput output) throws IOException { if (value == null) { OpenWireMarshallingSupport.marshalNull(output); } else if (value instanceof String) { OpenWireMarshallingSupport.marshalString(output, (String) value); } else if (value instanceof Character) { OpenWireMarshallingSupport.marshalChar(output, (Character) value); } else if (value instanceof Boolean) { OpenWireMarshallingSupport.marshalBoolean(output, (Boolean) value); } else if (value instanceof Byte) { OpenWireMarshallingSupport.marshalByte(output, (Byte) value); } else if (value instanceof Short) { OpenWireMarshallingSupport.marshalShort(output, (Short) value); } else if (value instanceof Integer) { OpenWireMarshallingSupport.marshalInt(output, (Integer) value); } else if (value instanceof Float) { OpenWireMarshallingSupport.marshalFloat(output, (Float) value); } else if (value instanceof Double) { OpenWireMarshallingSupport.marshalDouble(output, (Double) value); } else if (value instanceof byte[]) { OpenWireMarshallingSupport.marshalByteArray(output, (byte[]) value, 0, ((byte[]) value).length); } else if (value instanceof Long) { OpenWireMarshallingSupport.marshalLong(output, (Long) value); } else { throw new IOException("Unsupported Object type: " + value.getClass()); } } @Override 
public void compress() throws IOException { storeContent(); super.compress(); } @Override public String toString() { return super.toString() + " OpenWireStreamMessage{}"; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.asterix.lang.common.visitor.base;

import org.apache.asterix.common.exceptions.CompilationException;
import org.apache.asterix.lang.common.clause.GroupbyClause;
import org.apache.asterix.lang.common.clause.LetClause;
import org.apache.asterix.lang.common.clause.LimitClause;
import org.apache.asterix.lang.common.clause.OrderbyClause;
import org.apache.asterix.lang.common.clause.UpdateClause;
import org.apache.asterix.lang.common.clause.WhereClause;
import org.apache.asterix.lang.common.expression.CallExpr;
import org.apache.asterix.lang.common.expression.FieldAccessor;
import org.apache.asterix.lang.common.expression.IfExpr;
import org.apache.asterix.lang.common.expression.IndexAccessor;
import org.apache.asterix.lang.common.expression.ListConstructor;
import org.apache.asterix.lang.common.expression.ListSliceExpression;
import org.apache.asterix.lang.common.expression.LiteralExpr;
import org.apache.asterix.lang.common.expression.OperatorExpr;
import org.apache.asterix.lang.common.expression.OrderedListTypeDefinition;
import org.apache.asterix.lang.common.expression.QuantifiedExpression;
import org.apache.asterix.lang.common.expression.RecordConstructor;
import org.apache.asterix.lang.common.expression.RecordTypeDefinition;
import org.apache.asterix.lang.common.expression.TypeReferenceExpression;
import org.apache.asterix.lang.common.expression.UnaryExpr;
import org.apache.asterix.lang.common.expression.UnorderedListTypeDefinition;
import org.apache.asterix.lang.common.expression.VariableExpr;
import org.apache.asterix.lang.common.statement.AdapterDropStatement;
import org.apache.asterix.lang.common.statement.CompactStatement;
import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
import org.apache.asterix.lang.common.statement.CreateAdapterStatement;
import org.apache.asterix.lang.common.statement.CreateDataverseStatement;
import org.apache.asterix.lang.common.statement.CreateFeedPolicyStatement;
import org.apache.asterix.lang.common.statement.CreateFeedStatement;
import org.apache.asterix.lang.common.statement.CreateFullTextConfigStatement;
import org.apache.asterix.lang.common.statement.CreateFullTextFilterStatement;
import org.apache.asterix.lang.common.statement.CreateFunctionStatement;
import org.apache.asterix.lang.common.statement.CreateIndexStatement;
import org.apache.asterix.lang.common.statement.CreateLibraryStatement;
import org.apache.asterix.lang.common.statement.CreateSynonymStatement;
import org.apache.asterix.lang.common.statement.DatasetDecl;
import org.apache.asterix.lang.common.statement.DataverseDecl;
import org.apache.asterix.lang.common.statement.DataverseDropStatement;
import org.apache.asterix.lang.common.statement.DeleteStatement;
import org.apache.asterix.lang.common.statement.DisconnectFeedStatement;
import org.apache.asterix.lang.common.statement.DropDatasetStatement;
import org.apache.asterix.lang.common.statement.FeedDropStatement;
import org.apache.asterix.lang.common.statement.FeedPolicyDropStatement;
import org.apache.asterix.lang.common.statement.FullTextConfigDropStatement;
import org.apache.asterix.lang.common.statement.FullTextFilterDropStatement;
import org.apache.asterix.lang.common.statement.FunctionDecl;
import org.apache.asterix.lang.common.statement.FunctionDropStatement;
import org.apache.asterix.lang.common.statement.IndexDropStatement;
import org.apache.asterix.lang.common.statement.InsertStatement;
import org.apache.asterix.lang.common.statement.LibraryDropStatement;
import org.apache.asterix.lang.common.statement.LoadStatement;
import org.apache.asterix.lang.common.statement.NodeGroupDropStatement;
import org.apache.asterix.lang.common.statement.NodegroupDecl;
import org.apache.asterix.lang.common.statement.Query;
import org.apache.asterix.lang.common.statement.SetStatement;
import org.apache.asterix.lang.common.statement.StartFeedStatement;
import org.apache.asterix.lang.common.statement.StopFeedStatement;
import org.apache.asterix.lang.common.statement.SynonymDropStatement;
import org.apache.asterix.lang.common.statement.TypeDecl;
import org.apache.asterix.lang.common.statement.TypeDropStatement;
import org.apache.asterix.lang.common.statement.UpdateStatement;
import org.apache.asterix.lang.common.statement.WriteStatement;

/**
 * Visitor over the AsterixDB language AST (visitor pattern): one overload of
 * {@code visit} per concrete node type — queries, declarations, DDL/DML
 * statements, clauses, and expressions.
 *
 * @param <R> the result type produced by each visit
 * @param <T> the argument type threaded through the traversal
 */
public interface ILangVisitor<R, T> {

    // ---- Query and top-level declarations ----

    R visit(Query q, T arg) throws CompilationException;

    R visit(FunctionDecl fd, T arg) throws CompilationException;

    R visit(TypeDecl td, T arg) throws CompilationException;

    R visit(NodegroupDecl ngd, T arg) throws CompilationException;

    R visit(DatasetDecl dd, T arg) throws CompilationException;

    // ---- DML statements ----

    R visit(LoadStatement stmtLoad, T arg) throws CompilationException;

    R visit(DropDatasetStatement del, T arg) throws CompilationException;

    R visit(InsertStatement insert, T arg) throws CompilationException;

    R visit(DeleteStatement del, T arg) throws CompilationException;

    R visit(UpdateStatement update, T arg) throws CompilationException;

    R visit(UpdateClause del, T arg) throws CompilationException;

    // ---- Type definition expressions ----

    R visit(TypeReferenceExpression tre, T arg) throws CompilationException;

    R visit(RecordTypeDefinition tre, T arg) throws CompilationException;

    R visit(OrderedListTypeDefinition olte, T arg) throws CompilationException;

    R visit(UnorderedListTypeDefinition ulte, T arg) throws CompilationException;

    // ---- Value expressions ----

    R visit(LiteralExpr l, T arg) throws CompilationException;

    R visit(VariableExpr v, T arg) throws CompilationException;

    R visit(ListConstructor lc, T arg) throws CompilationException;

    R visit(RecordConstructor rc, T arg) throws CompilationException;

    R visit(OperatorExpr ifbo, T arg) throws CompilationException;

    R visit(FieldAccessor fa, T arg) throws CompilationException;

    R visit(IndexAccessor ia, T arg) throws CompilationException;

    R visit(IfExpr ifexpr, T arg) throws CompilationException;

    R visit(QuantifiedExpression qe, T arg) throws CompilationException;

    // ---- Query clauses ----

    R visit(LetClause lc, T arg) throws CompilationException;

    R visit(WhereClause wc, T arg) throws CompilationException;

    R visit(OrderbyClause oc, T arg) throws CompilationException;

    R visit(GroupbyClause gc, T arg) throws CompilationException;

    R visit(LimitClause lc, T arg) throws CompilationException;

    R visit(UnaryExpr u, T arg) throws CompilationException;

    // ---- DDL: indexes, dataverses, full-text ----

    R visit(CreateIndexStatement cis, T arg) throws CompilationException;

    R visit(CreateDataverseStatement del, T arg) throws CompilationException;

    R visit(CreateFullTextFilterStatement cis, T arg) throws CompilationException;

    R visit(CreateFullTextConfigStatement cis, T arg) throws CompilationException;

    R visit(IndexDropStatement del, T arg) throws CompilationException;

    R visit(FullTextFilterDropStatement del, T arg) throws CompilationException;

    R visit(FullTextConfigDropStatement del, T arg) throws CompilationException;

    R visit(NodeGroupDropStatement del, T arg) throws CompilationException;

    R visit(DataverseDropStatement del, T arg) throws CompilationException;

    R visit(TypeDropStatement del, T arg) throws CompilationException;

    R visit(WriteStatement ws, T arg) throws CompilationException;

    R visit(SetStatement ss, T arg) throws CompilationException;

    // ---- Feed statements ----

    R visit(DisconnectFeedStatement del, T arg) throws CompilationException;

    R visit(ConnectFeedStatement del, T arg) throws CompilationException;

    R visit(StartFeedStatement sfs, T arg) throws CompilationException;

    R visit(StopFeedStatement sfs, T arg) throws CompilationException;

    R visit(CreateFeedStatement cfs, T arg) throws CompilationException;

    R visit(FeedDropStatement del, T arg) throws CompilationException;

    R visit(FeedPolicyDropStatement dfs, T arg) throws CompilationException;

    R visit(CreateFeedPolicyStatement cfps, T arg) throws CompilationException;

    // ---- Functions, adapters, libraries, synonyms ----

    R visit(CallExpr pf, T arg) throws CompilationException;

    R visit(DataverseDecl dv, T arg) throws CompilationException;

    R visit(CreateFunctionStatement cfs, T arg) throws CompilationException;

    R visit(FunctionDropStatement del, T arg) throws CompilationException;

    R visit(CreateAdapterStatement cfs, T arg) throws CompilationException;

    R visit(AdapterDropStatement del, T arg) throws CompilationException;

    R visit(CreateLibraryStatement cls, T arg) throws CompilationException;

    R visit(LibraryDropStatement del, T arg) throws CompilationException;

    R visit(CreateSynonymStatement css, T arg) throws CompilationException;

    R visit(SynonymDropStatement del, T arg) throws CompilationException;

    // ---- Misc ----

    R visit(CompactStatement del, T arg) throws CompilationException;

    R visit(ListSliceExpression expression, T arg) throws CompilationException;
}
package org.drip.analytics.holset; /* * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*- */ /* * GENERATED on Wed Jul 04 22:35:36 EDT 2012 ---- DO NOT DELETE */ /*! * Copyright (C) 2012 Lakshmi Krishnamurthy * Copyright (C) 2011 Lakshmi Krishnamurthy * * This file is part of CreditAnalytics, a free-software/open-source library for * fixed income analysts and developers - http://www.credit-trader.org * * CreditAnalytics is a free, full featured, fixed income credit analytics library, developed with a special focus * towards the needs of the bonds and credit products community. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. 
*/ public class PABHoliday implements org.drip.analytics.holset.LocationHoliday { public PABHoliday() { } public java.lang.String getHolidayLoc() { return "PAB"; } public org.drip.analytics.holiday.Locale getHolidaySet() { org.drip.analytics.holiday.Locale lh = new org.drip.analytics.holiday.Locale(); lh.addStaticHoliday ("01-JAN-1998", "New Years Day"); lh.addStaticHoliday ("12-JAN-1998", "Day of Mourning"); lh.addStaticHoliday ("23-FEB-1998", "Carnival Monday"); lh.addStaticHoliday ("24-FEB-1998", "Carnival Tuesday"); lh.addStaticHoliday ("10-APR-1998", "Good Friday"); lh.addStaticHoliday ("04-MAY-1998", "Labour Day"); lh.addStaticHoliday ("03-NOV-1998", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-1998", "Flag Day"); lh.addStaticHoliday ("09-NOV-1998", "First Cry of Independence Day"); lh.addStaticHoliday ("07-DEC-1998", "Mothers Day"); lh.addStaticHoliday ("25-DEC-1998", "Christmas Day"); lh.addStaticHoliday ("01-JAN-1999", "New Years Day"); lh.addStaticHoliday ("15-FEB-1999", "Carnival Monday"); lh.addStaticHoliday ("16-FEB-1999", "Carnival Tuesday"); lh.addStaticHoliday ("02-APR-1999", "Good Friday"); lh.addStaticHoliday ("03-NOV-1999", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-1999", "Flag Day"); lh.addStaticHoliday ("08-NOV-1999", "First Cry of Independence Day"); lh.addStaticHoliday ("06-DEC-1999", "Mothers Day"); lh.addStaticHoliday ("06-MAR-2000", "Carnival Monday"); lh.addStaticHoliday ("07-MAR-2000", "Carnival Tuesday"); lh.addStaticHoliday ("21-APR-2000", "Good Friday"); lh.addStaticHoliday ("01-MAY-2000", "Labour Day"); lh.addStaticHoliday ("15-AUG-2000", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2000", "Independence from Columbia Day"); lh.addStaticHoliday ("13-NOV-2000", "First Cry of Independence Day"); lh.addStaticHoliday ("27-NOV-2000", "Independence from Spain Day"); lh.addStaticHoliday ("11-DEC-2000", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2000", "Christmas Day"); 
lh.addStaticHoliday ("01-JAN-2001", "New Years Day"); lh.addStaticHoliday ("08-JAN-2001", "Day of Mourning"); lh.addStaticHoliday ("26-FEB-2001", "Carnival Monday"); lh.addStaticHoliday ("27-FEB-2001", "Carnival Tuesday"); lh.addStaticHoliday ("13-APR-2001", "Good Friday"); lh.addStaticHoliday ("30-APR-2001", "Labour Day"); lh.addStaticHoliday ("15-AUG-2001", "Foundation of Panama City Day"); lh.addStaticHoliday ("26-NOV-2001", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2001", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2002", "New Years Day"); lh.addStaticHoliday ("07-JAN-2002", "Day of Mourning"); lh.addStaticHoliday ("11-FEB-2002", "Carnival Monday"); lh.addStaticHoliday ("12-FEB-2002", "Carnival Tuesday"); lh.addStaticHoliday ("29-MAR-2002", "Good Friday"); lh.addStaticHoliday ("29-APR-2002", "Labour Day"); lh.addStaticHoliday ("15-AUG-2002", "Foundation of Panama City Day"); lh.addStaticHoliday ("04-NOV-2002", "Flag Day"); lh.addStaticHoliday ("02-DEC-2002", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2002", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2003", "New Years Day"); lh.addStaticHoliday ("13-JAN-2003", "Day of Mourning"); lh.addStaticHoliday ("03-MAR-2003", "Carnival Monday"); lh.addStaticHoliday ("04-MAR-2003", "Carnival Tuesday"); lh.addStaticHoliday ("18-APR-2003", "Good Friday"); lh.addStaticHoliday ("05-MAY-2003", "Labour Day"); lh.addStaticHoliday ("15-AUG-2003", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2003", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2003", "Flag Day"); lh.addStaticHoliday ("10-NOV-2003", "First Cry of Independence Day"); lh.addStaticHoliday ("01-DEC-2003", "Independence from Spain Day"); lh.addStaticHoliday ("08-DEC-2003", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2003", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2004", "New Years Day"); lh.addStaticHoliday ("12-JAN-2004", "Day of Mourning"); lh.addStaticHoliday ("23-FEB-2004", 
"Carnival Monday"); lh.addStaticHoliday ("24-FEB-2004", "Carnival Tuesday"); lh.addStaticHoliday ("09-APR-2004", "Good Friday"); lh.addStaticHoliday ("03-NOV-2004", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2004", "Flag Day"); lh.addStaticHoliday ("08-NOV-2004", "First Cry of Independence Day"); lh.addStaticHoliday ("06-DEC-2004", "Mothers Day"); lh.addStaticHoliday ("07-FEB-2005", "Carnival Monday"); lh.addStaticHoliday ("08-FEB-2005", "Carnival Tuesday"); lh.addStaticHoliday ("25-MAR-2005", "Good Friday"); lh.addStaticHoliday ("15-AUG-2005", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2005", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2005", "Flag Day"); lh.addStaticHoliday ("14-NOV-2005", "First Cry of Independence Day"); lh.addStaticHoliday ("28-NOV-2005", "Independence from Spain Day"); lh.addStaticHoliday ("12-DEC-2005", "Mothers Day"); lh.addStaticHoliday ("09-JAN-2006", "Day of Mourning"); lh.addStaticHoliday ("27-FEB-2006", "Carnival Monday"); lh.addStaticHoliday ("28-FEB-2006", "Carnival Tuesday"); lh.addStaticHoliday ("14-APR-2006", "Good Friday"); lh.addStaticHoliday ("01-MAY-2006", "Labour Day"); lh.addStaticHoliday ("15-AUG-2006", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2006", "Independence from Columbia Day"); lh.addStaticHoliday ("13-NOV-2006", "First Cry of Independence Day"); lh.addStaticHoliday ("27-NOV-2006", "Independence from Spain Day"); lh.addStaticHoliday ("11-DEC-2006", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2006", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2007", "New Years Day"); lh.addStaticHoliday ("08-JAN-2007", "Day of Mourning"); lh.addStaticHoliday ("19-FEB-2007", "Carnival Monday"); lh.addStaticHoliday ("20-FEB-2007", "Carnival Tuesday"); lh.addStaticHoliday ("06-APR-2007", "Good Friday"); lh.addStaticHoliday ("30-APR-2007", "Labour Day"); lh.addStaticHoliday ("15-AUG-2007", "Foundation of Panama City Day"); lh.addStaticHoliday 
("26-NOV-2007", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2007", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2008", "New Years Day"); lh.addStaticHoliday ("07-JAN-2008", "Day of Mourning"); lh.addStaticHoliday ("04-FEB-2008", "Carnival Monday"); lh.addStaticHoliday ("05-FEB-2008", "Carnival Tuesday"); lh.addStaticHoliday ("21-MAR-2008", "Good Friday"); lh.addStaticHoliday ("05-MAY-2008", "Labour Day"); lh.addStaticHoliday ("15-AUG-2008", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2008", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2008", "Flag Day"); lh.addStaticHoliday ("10-NOV-2008", "First Cry of Independence Day"); lh.addStaticHoliday ("01-DEC-2008", "Independence from Spain Day"); lh.addStaticHoliday ("08-DEC-2008", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2008", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2009", "New Years Day"); lh.addStaticHoliday ("12-JAN-2009", "Day of Mourning"); lh.addStaticHoliday ("23-FEB-2009", "Carnival Monday"); lh.addStaticHoliday ("24-FEB-2009", "Carnival Tuesday"); lh.addStaticHoliday ("10-APR-2009", "Good Friday"); lh.addStaticHoliday ("04-MAY-2009", "Labour Day"); lh.addStaticHoliday ("03-NOV-2009", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2009", "Flag Day"); lh.addStaticHoliday ("09-NOV-2009", "First Cry of Independence Day"); lh.addStaticHoliday ("07-DEC-2009", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2009", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2010", "New Years Day"); lh.addStaticHoliday ("15-FEB-2010", "Carnival Monday"); lh.addStaticHoliday ("16-FEB-2010", "Carnival Tuesday"); lh.addStaticHoliday ("02-APR-2010", "Good Friday"); lh.addStaticHoliday ("03-NOV-2010", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2010", "Flag Day"); lh.addStaticHoliday ("08-NOV-2010", "First Cry of Independence Day"); lh.addStaticHoliday ("06-DEC-2010", "Mothers Day"); lh.addStaticHoliday ("07-MAR-2011", "Carnival 
Monday"); lh.addStaticHoliday ("08-MAR-2011", "Carnival Tuesday"); lh.addStaticHoliday ("22-APR-2011", "Good Friday"); lh.addStaticHoliday ("15-AUG-2011", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2011", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2011", "Flag Day"); lh.addStaticHoliday ("14-NOV-2011", "First Cry of Independence Day"); lh.addStaticHoliday ("28-NOV-2011", "Independence from Spain Day"); lh.addStaticHoliday ("12-DEC-2011", "Mothers Day"); lh.addStaticHoliday ("09-JAN-2012", "Day of Mourning"); lh.addStaticHoliday ("20-FEB-2012", "Carnival Monday"); lh.addStaticHoliday ("21-FEB-2012", "Carnival Tuesday"); lh.addStaticHoliday ("06-APR-2012", "Good Friday"); lh.addStaticHoliday ("30-APR-2012", "Labour Day"); lh.addStaticHoliday ("15-AUG-2012", "Foundation of Panama City Day"); lh.addStaticHoliday ("26-NOV-2012", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2012", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2013", "New Years Day"); lh.addStaticHoliday ("07-JAN-2013", "Day of Mourning"); lh.addStaticHoliday ("11-FEB-2013", "Carnival Monday"); lh.addStaticHoliday ("12-FEB-2013", "Carnival Tuesday"); lh.addStaticHoliday ("29-MAR-2013", "Good Friday"); lh.addStaticHoliday ("29-APR-2013", "Labour Day"); lh.addStaticHoliday ("15-AUG-2013", "Foundation of Panama City Day"); lh.addStaticHoliday ("04-NOV-2013", "Flag Day"); lh.addStaticHoliday ("02-DEC-2013", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2013", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2014", "New Years Day"); lh.addStaticHoliday ("13-JAN-2014", "Day of Mourning"); lh.addStaticHoliday ("03-MAR-2014", "Carnival Monday"); lh.addStaticHoliday ("04-MAR-2014", "Carnival Tuesday"); lh.addStaticHoliday ("18-APR-2014", "Good Friday"); lh.addStaticHoliday ("05-MAY-2014", "Labour Day"); lh.addStaticHoliday ("15-AUG-2014", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2014", "Independence from Columbia Day"); 
lh.addStaticHoliday ("04-NOV-2014", "Flag Day"); lh.addStaticHoliday ("10-NOV-2014", "First Cry of Independence Day"); lh.addStaticHoliday ("01-DEC-2014", "Independence from Spain Day"); lh.addStaticHoliday ("08-DEC-2014", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2014", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2015", "New Years Day"); lh.addStaticHoliday ("12-JAN-2015", "Day of Mourning"); lh.addStaticHoliday ("16-FEB-2015", "Carnival Monday"); lh.addStaticHoliday ("17-FEB-2015", "Carnival Tuesday"); lh.addStaticHoliday ("03-APR-2015", "Good Friday"); lh.addStaticHoliday ("04-MAY-2015", "Labour Day"); lh.addStaticHoliday ("03-NOV-2015", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2015", "Flag Day"); lh.addStaticHoliday ("09-NOV-2015", "First Cry of Independence Day"); lh.addStaticHoliday ("07-DEC-2015", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2015", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2016", "New Years Day"); lh.addStaticHoliday ("08-FEB-2016", "Carnival Monday"); lh.addStaticHoliday ("09-FEB-2016", "Carnival Tuesday"); lh.addStaticHoliday ("25-MAR-2016", "Good Friday"); lh.addStaticHoliday ("15-AUG-2016", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2016", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2016", "Flag Day"); lh.addStaticHoliday ("14-NOV-2016", "First Cry of Independence Day"); lh.addStaticHoliday ("28-NOV-2016", "Independence from Spain Day"); lh.addStaticHoliday ("12-DEC-2016", "Mothers Day"); lh.addStaticHoliday ("09-JAN-2017", "Day of Mourning"); lh.addStaticHoliday ("27-FEB-2017", "Carnival Monday"); lh.addStaticHoliday ("28-FEB-2017", "Carnival Tuesday"); lh.addStaticHoliday ("14-APR-2017", "Good Friday"); lh.addStaticHoliday ("01-MAY-2017", "Labour Day"); lh.addStaticHoliday ("15-AUG-2017", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2017", "Independence from Columbia Day"); lh.addStaticHoliday ("13-NOV-2017", "First Cry of Independence Day"); 
lh.addStaticHoliday ("27-NOV-2017", "Independence from Spain Day"); lh.addStaticHoliday ("11-DEC-2017", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2017", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2018", "New Years Day"); lh.addStaticHoliday ("08-JAN-2018", "Day of Mourning"); lh.addStaticHoliday ("12-FEB-2018", "Carnival Monday"); lh.addStaticHoliday ("13-FEB-2018", "Carnival Tuesday"); lh.addStaticHoliday ("30-MAR-2018", "Good Friday"); lh.addStaticHoliday ("30-APR-2018", "Labour Day"); lh.addStaticHoliday ("15-AUG-2018", "Foundation of Panama City Day"); lh.addStaticHoliday ("26-NOV-2018", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2018", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2019", "New Years Day"); lh.addStaticHoliday ("07-JAN-2019", "Day of Mourning"); lh.addStaticHoliday ("04-MAR-2019", "Carnival Monday"); lh.addStaticHoliday ("05-MAR-2019", "Carnival Tuesday"); lh.addStaticHoliday ("19-APR-2019", "Good Friday"); lh.addStaticHoliday ("29-APR-2019", "Labour Day"); lh.addStaticHoliday ("15-AUG-2019", "Foundation of Panama City Day"); lh.addStaticHoliday ("04-NOV-2019", "Flag Day"); lh.addStaticHoliday ("02-DEC-2019", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2019", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2020", "New Years Day"); lh.addStaticHoliday ("13-JAN-2020", "Day of Mourning"); lh.addStaticHoliday ("24-FEB-2020", "Carnival Monday"); lh.addStaticHoliday ("25-FEB-2020", "Carnival Tuesday"); lh.addStaticHoliday ("10-APR-2020", "Good Friday"); lh.addStaticHoliday ("04-MAY-2020", "Labour Day"); lh.addStaticHoliday ("03-NOV-2020", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2020", "Flag Day"); lh.addStaticHoliday ("09-NOV-2020", "First Cry of Independence Day"); lh.addStaticHoliday ("07-DEC-2020", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2020", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2021", "New Years Day"); lh.addStaticHoliday ("15-FEB-2021", "Carnival Monday"); 
lh.addStaticHoliday ("16-FEB-2021", "Carnival Tuesday"); lh.addStaticHoliday ("02-APR-2021", "Good Friday"); lh.addStaticHoliday ("03-NOV-2021", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2021", "Flag Day"); lh.addStaticHoliday ("08-NOV-2021", "First Cry of Independence Day"); lh.addStaticHoliday ("06-DEC-2021", "Mothers Day"); lh.addStaticHoliday ("28-FEB-2022", "Carnival Monday"); lh.addStaticHoliday ("01-MAR-2022", "Carnival Tuesday"); lh.addStaticHoliday ("15-APR-2022", "Good Friday"); lh.addStaticHoliday ("15-AUG-2022", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2022", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2022", "Flag Day"); lh.addStaticHoliday ("14-NOV-2022", "First Cry of Independence Day"); lh.addStaticHoliday ("28-NOV-2022", "Independence from Spain Day"); lh.addStaticHoliday ("12-DEC-2022", "Mothers Day"); lh.addStaticHoliday ("09-JAN-2023", "Day of Mourning"); lh.addStaticHoliday ("20-FEB-2023", "Carnival Monday"); lh.addStaticHoliday ("21-FEB-2023", "Carnival Tuesday"); lh.addStaticHoliday ("07-APR-2023", "Good Friday"); lh.addStaticHoliday ("01-MAY-2023", "Labour Day"); lh.addStaticHoliday ("15-AUG-2023", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2023", "Independence from Columbia Day"); lh.addStaticHoliday ("13-NOV-2023", "First Cry of Independence Day"); lh.addStaticHoliday ("27-NOV-2023", "Independence from Spain Day"); lh.addStaticHoliday ("11-DEC-2023", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2023", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2024", "New Years Day"); lh.addStaticHoliday ("08-JAN-2024", "Day of Mourning"); lh.addStaticHoliday ("12-FEB-2024", "Carnival Monday"); lh.addStaticHoliday ("13-FEB-2024", "Carnival Tuesday"); lh.addStaticHoliday ("29-MAR-2024", "Good Friday"); lh.addStaticHoliday ("29-APR-2024", "Labour Day"); lh.addStaticHoliday ("15-AUG-2024", "Foundation of Panama City Day"); lh.addStaticHoliday ("04-NOV-2024", "Flag Day"); 
lh.addStaticHoliday ("02-DEC-2024", "Independence from Spain Day"); lh.addStaticHoliday ("25-DEC-2024", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2025", "New Years Day"); lh.addStaticHoliday ("13-JAN-2025", "Day of Mourning"); lh.addStaticHoliday ("03-MAR-2025", "Carnival Monday"); lh.addStaticHoliday ("04-MAR-2025", "Carnival Tuesday"); lh.addStaticHoliday ("18-APR-2025", "Good Friday"); lh.addStaticHoliday ("05-MAY-2025", "Labour Day"); lh.addStaticHoliday ("15-AUG-2025", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2025", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2025", "Flag Day"); lh.addStaticHoliday ("10-NOV-2025", "First Cry of Independence Day"); lh.addStaticHoliday ("01-DEC-2025", "Independence from Spain Day"); lh.addStaticHoliday ("08-DEC-2025", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2025", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2026", "New Years Day"); lh.addStaticHoliday ("12-JAN-2026", "Day of Mourning"); lh.addStaticHoliday ("16-FEB-2026", "Carnival Monday"); lh.addStaticHoliday ("17-FEB-2026", "Carnival Tuesday"); lh.addStaticHoliday ("03-APR-2026", "Good Friday"); lh.addStaticHoliday ("04-MAY-2026", "Labour Day"); lh.addStaticHoliday ("03-NOV-2026", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2026", "Flag Day"); lh.addStaticHoliday ("09-NOV-2026", "First Cry of Independence Day"); lh.addStaticHoliday ("07-DEC-2026", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2026", "Christmas Day"); lh.addStaticHoliday ("01-JAN-2027", "New Years Day"); lh.addStaticHoliday ("08-FEB-2027", "Carnival Monday"); lh.addStaticHoliday ("09-FEB-2027", "Carnival Tuesday"); lh.addStaticHoliday ("26-MAR-2027", "Good Friday"); lh.addStaticHoliday ("03-NOV-2027", "Independence from Columbia Day"); lh.addStaticHoliday ("04-NOV-2027", "Flag Day"); lh.addStaticHoliday ("08-NOV-2027", "First Cry of Independence Day"); lh.addStaticHoliday ("06-DEC-2027", "Mothers Day"); lh.addStaticHoliday 
("28-FEB-2028", "Carnival Monday"); lh.addStaticHoliday ("29-FEB-2028", "Carnival Tuesday"); lh.addStaticHoliday ("14-APR-2028", "Good Friday"); lh.addStaticHoliday ("01-MAY-2028", "Labour Day"); lh.addStaticHoliday ("15-AUG-2028", "Foundation of Panama City Day"); lh.addStaticHoliday ("03-NOV-2028", "Independence from Columbia Day"); lh.addStaticHoliday ("13-NOV-2028", "First Cry of Independence Day"); lh.addStaticHoliday ("27-NOV-2028", "Independence from Spain Day"); lh.addStaticHoliday ("11-DEC-2028", "Mothers Day"); lh.addStaticHoliday ("25-DEC-2028", "Christmas Day"); lh.addStandardWeekend(); return lh; } }
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package opennlp.tools.postag; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import opennlp.maxent.DataStream; import opennlp.maxent.GISModel; import opennlp.maxent.io.SuffixSensitiveGISModelWriter; import opennlp.model.AbstractModel; import opennlp.model.EventStream; import opennlp.model.SequenceStream; import opennlp.model.TwoPassDataIndexer; import opennlp.perceptron.SimplePerceptronSequenceTrainer; import opennlp.perceptron.SuffixSensitivePerceptronModelWriter; import opennlp.tools.dictionary.Dictionary; import opennlp.tools.ngram.NGramModel; import opennlp.tools.util.ObjectStream; import opennlp.tools.util.StringList; /** * @deprecated Use {@link POSTaggerME#train(String, ObjectStream, opennlp.tools.util.model.ModelType, POSDictionary, Dictionary, int, int)} instead. 
*/
@Deprecated
public class POSTaggerTrainer {

  /** Prints CLI usage to stderr and terminates the JVM with exit code 1. */
  @Deprecated
  private static void usage() {
    // NOTE(review): "maxnet" in the usage string looks like a typo for "maxent"
    // (the option handler below only recognizes "perceptron" and "maxent").
    // Left untouched here because it is a runtime string.
    System.err.println("Usage: POSTaggerTrainer [-encoding encoding] [-dict dict_file] -model [perceptron,maxnet] training_data model_file_name [cutoff] [iterations]");
    System.err.println("This trains a new model on the specified training file and writes the trained model to the model file.");
    System.err.println("-encoding Specifies the encoding of the training file");
    System.err.println("-dict Specifies that a dictionary file should be created for use in distinguising between rare and non-rare words");
    System.err.println("-model [perceptron|maxent] Specifies what type of model should be used.");
    System.exit(1);
  }

  /**
   * Trains a maxent (GIS) POS model from the given sample stream and wraps it,
   * together with the optional tag dictionary and ngram dictionary, in a
   * {@link POSModel}.
   *
   * @param languageCode language code stored in the resulting model
   * @param samples stream of POS-annotated training samples
   * @param tagDictionary optional tag dictionary bundled into the model (may be null)
   * @param ngramDictionary optional dictionary used by the context generator (may be null)
   * @param cutoff minimum event count passed to the data indexer
   * @param iterations number of GIS training iterations
   * @return the trained model
   * @throws IOException if an {@link IOException} is thrown during IO operations
   *     on a temp file which is created during training.
   */
  public static POSModel train(String languageCode, ObjectStream<POSSample> samples, POSDictionary tagDictionary,
      Dictionary ngramDictionary, int cutoff, int iterations) throws IOException {

    GISModel posModel = opennlp.maxent.GIS.trainModel(iterations,
        new TwoPassDataIndexer(new POSSampleEventStream(samples,
        new DefaultPOSContextGenerator(ngramDictionary)), cutoff));

    return new POSModel(languageCode, posModel, tagDictionary, ngramDictionary);
  }

  /**
   * Trains a new maxent model with default settings (100 iterations, cutoff 5)
   * and writes it to the given file via a suffix-sensitive writer.
   *
   * @param evc event stream to train on
   * @param modelFile destination file for the persisted model
   * @throws IOException if training or persisting fails
   */
  @Deprecated
  public static void trainMaxentModel(EventStream evc, File modelFile) throws IOException {
    AbstractModel model = trainMaxentModel(evc, 100,5);
    new SuffixSensitiveGISModelWriter(model, modelFile).persist();
  }

  /**
   * Trains a new maxent (GIS) model.
   *
   * @param es event stream to train on
   * @param iterations number of GIS training iterations
   * @param cut event-count cutoff passed to the data indexer
   * @return the new model
   * @throws IOException if indexing the events fails
   */
  @Deprecated
  public static AbstractModel trainMaxentModel(EventStream es, int iterations, int cut) throws IOException {
    return opennlp.maxent.GIS.trainModel(iterations, new TwoPassDataIndexer(es, cut));
  }

  /**
   * Trains a perceptron model. Note: the indexer is created with sorting
   * disabled (third argument {@code false}).
   *
   * @param es event stream to train on
   * @param iterations number of training iterations
   * @param cut event-count cutoff (passed both to the indexer and the trainer)
   * @param useAverage whether to use the averaged perceptron
   * @return the new model
   * @throws IOException if indexing the events fails
   */
  public static AbstractModel trainPerceptronModel(EventStream es, int iterations, int cut, boolean useAverage) throws IOException {
    return new opennlp.perceptron.PerceptronTrainer().trainModel(iterations, new TwoPassDataIndexer(es, cut, false), cut, useAverage);
  }

  /** Convenience overload of {@link #trainPerceptronModel(EventStream, int, int, boolean)} with averaging enabled. */
  public static AbstractModel trainPerceptronModel(EventStream es, int iterations, int cut) throws IOException {
    return trainPerceptronModel(es,iterations,cut,true);
  }

  /**
   * Trains a perceptron sequence model over whole tag sequences rather than
   * independent events.
   *
   * @param ss sequence stream to train on
   * @param iterations number of training iterations
   * @param cut event-count cutoff
   * @param useAverage whether to use the averaged perceptron
   * @return the new model
   * @throws IOException if reading the sequence stream fails
   */
  public static AbstractModel trainPerceptronSequenceModel(SequenceStream ss, int iterations, int cut, boolean useAverage) throws IOException {
    return new SimplePerceptronSequenceTrainer().trainModel(iterations, ss, cut,useAverage);
  }

  /**
   * Interactive smoke test: reads lines from stdin, tags each with the given
   * model, and prints the result until EOF.
   *
   * @param model the trained model to tag with (no tag dictionary is used)
   * @throws IOException if reading stdin fails
   */
  @Deprecated
  public static void test(AbstractModel model) throws IOException {
    POSTaggerME tagger = new POSTaggerME(model, (TagDictionary) null);

    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
    for (String line = in.readLine(); line != null; line = in.readLine()) {
      System.out.println(tagger.tag(line));
    }
  }

  /**
   * Command-line entry point. Parses leading dash-options (-encoding, -dict,
   * -sequence, -model), then positional arguments: training file, output model
   * file, and optional cutoff and iteration counts. Trains either a sequence
   * perceptron model, a (non-sequence) perceptron model, or a maxent model, and
   * persists it with a suffix-sensitive writer.
   *
   * NOTE(review): all exceptions are caught and only printed — the process
   * exits 0 even on failure. The FileInputStream instances opened below are
   * never explicitly closed. args[ai] is read without a bounds check in the
   * option loop, so an option-only command line would raise an
   * ArrayIndexOutOfBoundsException (swallowed by the catch block).
   *
   * @param args command-line arguments, see {@link #usage()}
   * @throws IOException declared but in practice all exceptions are caught below
   */
  @Deprecated
  public static void main(String[] args) throws IOException {
    if (args.length == 0){
      usage();
    }
    int ai=0;
    try {
      // Option defaults: platform encoding, no dictionary, maxent, non-sequence.
      String encoding = null;
      String dict = null;
      boolean perceptron = false;
      boolean sequence = false;
      while (args[ai].startsWith("-")) {
        if (args[ai].equals("-encoding")) {
          ai++;
          if (ai < args.length) {
            encoding = args[ai++];
          }
          else {
            usage();
          }
        }
        else if (args[ai].equals("-dict")) {
          ai++;
          if (ai < args.length) {
            dict = args[ai++];
          }
          else {
            usage();
          }
        }
        else if (args[ai].equals("-sequence")) {
          ai++;
          sequence = true;
        }
        else if (args[ai].equals("-model")) {
          ai++;
          if (ai < args.length) {
            String type = args[ai++];
            if (type.equals("perceptron")) {
              perceptron = true;
            }
            else if (type.equals("maxent")) {
              // maxent is the default; nothing to set.
            }
            else {
              usage();
            }
          }
          else {
            usage();
          }
        }
        else {
          System.err.println("Unknown option "+args[ai]);
          usage();
        }
      }
      // Positional arguments: training data, then output model file.
      File inFile = new File(args[ai++]);
      File outFile = new File(args[ai++]);
      int cutoff = 5;
      int iterations = 100;
      // Optional trailing pair: cutoff and iterations (both required together).
      if (args.length > ai) {
        cutoff = Integer.parseInt(args[ai++]);
        iterations = Integer.parseInt(args[ai++]);
      }
      AbstractModel mod;
      // Build and persist the rare-word dictionary first, if requested;
      // the training branches below re-read it from disk.
      if (dict != null) {
        buildDictionary(dict, inFile, cutoff);
      }
      if (sequence) {
        // Sequence training: always uses the averaged perceptron sequence trainer.
        POSSampleSequenceStream ss;
        if (encoding == null) {
          if (dict == null) {
            ss = new POSSampleSequenceStream(new WordTagSampleStream(
                new InputStreamReader(new FileInputStream(inFile))));
          }
          else {
            POSContextGenerator cg = new DefaultPOSContextGenerator(new Dictionary(new FileInputStream(dict)));
            ss = new POSSampleSequenceStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile)))), cg);
          }
        }
        else {
          if (dict == null) {
            ss = new POSSampleSequenceStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile), encoding))));
          }
          else {
            POSContextGenerator cg = new DefaultPOSContextGenerator(new Dictionary(new FileInputStream(dict)));
            ss = new POSSampleSequenceStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile), encoding))), cg);
          }
        }
        mod = new SimplePerceptronSequenceTrainer().trainModel(iterations, ss, cutoff, true);
        System.out.println("Saving the model as: " + outFile);
        new SuffixSensitivePerceptronModelWriter(mod, outFile).persist();
      }
      else {
        // Event-based training: perceptron or maxent depending on -model.
        POSSampleEventStream es;
        if (encoding == null) {
          if (dict == null) {
            es = new POSSampleEventStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile)))));
          }
          else {
            POSContextGenerator cg = new DefaultPOSContextGenerator(new Dictionary(new FileInputStream(dict)));
            es = new POSSampleEventStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile)))), cg);
          }
        }
        else {
          if (dict == null) {
            es = new POSSampleEventStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile), encoding))));
          }
          else {
            POSContextGenerator cg = new DefaultPOSContextGenerator(new Dictionary(new FileInputStream(dict)));
            es = new POSSampleEventStream(new WordTagSampleStream((
                new InputStreamReader(new FileInputStream(inFile), encoding))), cg);
          }
        }
        if (perceptron) {
          mod = trainPerceptronModel(es,iterations, cutoff);
          System.out.println("Saving the model as: " + outFile);
          new SuffixSensitivePerceptronModelWriter(mod, outFile).persist();
        }
        else {
          mod = trainMaxentModel(es, iterations, cutoff);
          System.out.println("Saving the model as: " + outFile);
          new SuffixSensitiveGISModelWriter(mod, outFile).persist();
        }
      }
    }
    catch (Exception e) {
      e.printStackTrace();
    }
  }

  /**
   * Builds an ngram dictionary of words seen in the training file and
   * serializes it to {@code dict}. Each input line is split on spaces into
   * word_tag tokens; the tag after the last '_' is stripped and the remaining
   * words are added as a single StringList, then pruned to the cutoff.
   *
   * NOTE(review): the FileReader and FileOutputStream opened here are never
   * closed; the dictionary file may not be flushed on some platforms.
   *
   * @param dict path of the dictionary file to write
   * @param inFile word_tag training file to scan
   * @param cutoff minimum occurrence count kept by the ngram model
   * @throws FileNotFoundException if inFile does not exist
   * @throws IOException if reading or writing fails
   */
  private static void buildDictionary(String dict, File inFile, int cutoff)
      throws FileNotFoundException, IOException {
    System.err.println("Building dictionary");

    NGramModel ngramModel = new NGramModel();

    DataStream data = new opennlp.maxent.PlainTextByLineDataStream(new java.io.FileReader(inFile));
    while(data.hasNext()) {
      String tagStr = (String) data.nextToken();
      String[] tt = tagStr.split(" ");
      String[] words = new String[tt.length];
      for (int wi=0;wi<words.length;wi++) {
        // Strip the "_TAG" suffix; everything before the last '_' is the word.
        words[wi] = tt[wi].substring(0,tt[wi].lastIndexOf('_'));
      }
      ngramModel.add(new StringList(words), 1, 1);
    }

    System.out.println("Saving the dictionary");

    ngramModel.cutoff(cutoff, Integer.MAX_VALUE);
    Dictionary dictionary = ngramModel.toDictionary(true);

    dictionary.serialize(new FileOutputStream(dict));
  }
}
package com.shiznatix.mediacomrade.android.tools; import android.content.Context; import android.content.res.TypedArray; import android.graphics.Canvas; import android.graphics.Paint; import android.util.AttributeSet; import android.view.View; import android.view.ViewGroup; import com.shiznatix.mediacomrade.android.R; public class FlowLayout extends ViewGroup { public static final int HORIZONTAL = 0; public static final int VERTICAL = 1; private int horizontalSpacing = 0; private int verticalSpacing = 0; private int orientation = 0; private boolean debugDraw = false; public FlowLayout(Context context) { super(context); this.readStyleParameters(context, null); } public FlowLayout(Context context, AttributeSet attributeSet) { super(context, attributeSet); this.readStyleParameters(context, attributeSet); } public FlowLayout(Context context, AttributeSet attributeSet, int defStyle) { super(context, attributeSet, defStyle); this.readStyleParameters(context, attributeSet); } @Override protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) { int sizeWidth = MeasureSpec.getSize(widthMeasureSpec) - this.getPaddingRight() - this.getPaddingLeft(); int sizeHeight = MeasureSpec.getSize(heightMeasureSpec) - this.getPaddingRight() - this.getPaddingLeft(); int modeWidth = MeasureSpec.getMode(widthMeasureSpec); int modeHeight = MeasureSpec.getMode(heightMeasureSpec); int size; int mode; if (orientation == HORIZONTAL) { size = sizeWidth; mode = modeWidth; } else { size = sizeHeight; mode = modeHeight; } int lineThicknessWithSpacing = 0; int lineThickness = 0; int lineLengthWithSpacing = 0; int lineLength; int prevLinePosition = 0; int controlMaxLength = 0; int controlMaxThickness = 0; final int count = getChildCount(); for (int i = 0; i < count; i++) { final View child = getChildAt(i); if (child.getVisibility() == GONE) { continue; } child.measure( MeasureSpec.makeMeasureSpec(sizeWidth, modeWidth == MeasureSpec.EXACTLY ? 
MeasureSpec.AT_MOST : modeWidth), MeasureSpec.makeMeasureSpec(sizeHeight, modeHeight == MeasureSpec.EXACTLY ? MeasureSpec.AT_MOST : modeHeight) ); LayoutParams lp = (LayoutParams) child.getLayoutParams(); int hSpacing = this.getHorizontalSpacing(lp); int vSpacing = this.getVerticalSpacing(lp); int childWidth = child.getMeasuredWidth(); int childHeight = child.getMeasuredHeight(); int childLength; int childThickness; int spacingLength; int spacingThickness; if (orientation == HORIZONTAL) { childLength = childWidth; childThickness = childHeight; spacingLength = hSpacing; spacingThickness = vSpacing; } else { childLength = childHeight; childThickness = childWidth; spacingLength = vSpacing; spacingThickness = hSpacing; } lineLength = lineLengthWithSpacing + childLength; lineLengthWithSpacing = lineLength + spacingLength; boolean newLine = lp.newLine || (mode != MeasureSpec.UNSPECIFIED && lineLength > size); if (newLine) { prevLinePosition = prevLinePosition + lineThicknessWithSpacing; lineThickness = childThickness; lineLength = childLength; lineThicknessWithSpacing = childThickness + spacingThickness; lineLengthWithSpacing = lineLength + spacingLength; } lineThicknessWithSpacing = Math.max(lineThicknessWithSpacing, childThickness + spacingThickness); lineThickness = Math.max(lineThickness, childThickness); int posX; int posY; if (orientation == HORIZONTAL) { posX = getPaddingLeft() + lineLength - childLength; posY = getPaddingTop() + prevLinePosition; } else { posX = getPaddingLeft() + prevLinePosition; posY = getPaddingTop() + lineLength - childHeight; } lp.setPosition(posX, posY); controlMaxLength = Math.max(controlMaxLength, lineLength); controlMaxThickness = prevLinePosition + lineThickness; } /* need to take paddings into account */ if (orientation == HORIZONTAL) { controlMaxLength += getPaddingLeft() + getPaddingRight(); controlMaxThickness += getPaddingBottom() + getPaddingTop(); } else { controlMaxLength += getPaddingBottom() + getPaddingTop(); 
controlMaxThickness += getPaddingLeft() + getPaddingRight(); } if (orientation == HORIZONTAL) { this.setMeasuredDimension(resolveSize(controlMaxLength, widthMeasureSpec), resolveSize(controlMaxThickness, heightMeasureSpec)); } else { this.setMeasuredDimension(resolveSize(controlMaxThickness, widthMeasureSpec), resolveSize(controlMaxLength, heightMeasureSpec)); } } private int getVerticalSpacing(LayoutParams lp) { int vSpacing; if (lp.verticalSpacingSpecified()) { vSpacing = lp.verticalSpacing; } else { vSpacing = this.verticalSpacing; } return vSpacing; } private int getHorizontalSpacing(LayoutParams lp) { int hSpacing; if (lp.horizontalSpacingSpecified()) { hSpacing = lp.horizontalSpacing; } else { hSpacing = this.horizontalSpacing; } return hSpacing; } @Override protected void onLayout(boolean changed, int l, int t, int r, int b) { final int count = getChildCount(); for (int i = 0; i < count; i++) { View child = getChildAt(i); LayoutParams lp = (LayoutParams) child.getLayoutParams(); child.layout(lp.x, lp.y, lp.x + child.getMeasuredWidth(), lp.y + child.getMeasuredHeight()); } } @Override protected boolean drawChild(Canvas canvas, View child, long drawingTime) { boolean more = super.drawChild(canvas, child, drawingTime); this.drawDebugInfo(canvas, child); return more; } @Override protected boolean checkLayoutParams(ViewGroup.LayoutParams p) { return p instanceof LayoutParams; } @Override protected LayoutParams generateDefaultLayoutParams() { return new LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT); } @Override public LayoutParams generateLayoutParams(AttributeSet attributeSet) { return new LayoutParams(getContext(), attributeSet); } @Override protected LayoutParams generateLayoutParams(ViewGroup.LayoutParams p) { return new LayoutParams(p); } private void readStyleParameters(Context context, AttributeSet attributeSet) { TypedArray a = context.obtainStyledAttributes(attributeSet, R.styleable.FlowLayout); try { horizontalSpacing = 
a.getDimensionPixelSize(R.styleable.FlowLayout_horizontalSpacing, 0); verticalSpacing = a.getDimensionPixelSize(R.styleable.FlowLayout_verticalSpacing, 0); orientation = a.getInteger(R.styleable.FlowLayout_orientation, HORIZONTAL); debugDraw = a.getBoolean(R.styleable.FlowLayout_debugDraw, false); } finally { a.recycle(); } } private void drawDebugInfo(Canvas canvas, View child) { if (!debugDraw) { return; } Paint childPaint = this.createPaint(0xffffff00); Paint layoutPaint = this.createPaint(0xff00ff00); Paint newLinePaint = this.createPaint(0xffff0000); LayoutParams lp = (LayoutParams) child.getLayoutParams(); if (lp.horizontalSpacing > 0) { float x = child.getRight(); float y = child.getTop() + child.getHeight() / 2.0f; canvas.drawLine(x, y, x + lp.horizontalSpacing, y, childPaint); canvas.drawLine(x + lp.horizontalSpacing - 4.0f, y - 4.0f, x + lp.horizontalSpacing, y, childPaint); canvas.drawLine(x + lp.horizontalSpacing - 4.0f, y + 4.0f, x + lp.horizontalSpacing, y, childPaint); } else if (this.horizontalSpacing > 0) { float x = child.getRight(); float y = child.getTop() + child.getHeight() / 2.0f; canvas.drawLine(x, y, x + this.horizontalSpacing, y, layoutPaint); canvas.drawLine(x + this.horizontalSpacing - 4.0f, y - 4.0f, x + this.horizontalSpacing, y, layoutPaint); canvas.drawLine(x + this.horizontalSpacing - 4.0f, y + 4.0f, x + this.horizontalSpacing, y, layoutPaint); } if (lp.verticalSpacing > 0) { float x = child.getLeft() + child.getWidth() / 2.0f; float y = child.getBottom(); canvas.drawLine(x, y, x, y + lp.verticalSpacing, childPaint); canvas.drawLine(x - 4.0f, y + lp.verticalSpacing - 4.0f, x, y + lp.verticalSpacing, childPaint); canvas.drawLine(x + 4.0f, y + lp.verticalSpacing - 4.0f, x, y + lp.verticalSpacing, childPaint); } else if (this.verticalSpacing > 0) { float x = child.getLeft() + child.getWidth() / 2.0f; float y = child.getBottom(); canvas.drawLine(x, y, x, y + this.verticalSpacing, layoutPaint); canvas.drawLine(x - 4.0f, y + 
this.verticalSpacing - 4.0f, x, y + this.verticalSpacing, layoutPaint); canvas.drawLine(x + 4.0f, y + this.verticalSpacing - 4.0f, x, y + this.verticalSpacing, layoutPaint); } if (lp.newLine) { if (orientation == HORIZONTAL) { float x = child.getLeft(); float y = child.getTop() + child.getHeight() / 2.0f; canvas.drawLine(x, y - 6.0f, x, y + 6.0f, newLinePaint); } else { float x = child.getLeft() + child.getWidth() / 2.0f; float y = child.getTop(); canvas.drawLine(x - 6.0f, y, x + 6.0f, y, newLinePaint); } } } private Paint createPaint(int color) { Paint paint = new Paint(); paint.setAntiAlias(true); paint.setColor(color); paint.setStrokeWidth(2.0f); return paint; } public static class LayoutParams extends ViewGroup.LayoutParams { private static int NO_SPACING = -1; private int x; private int y; private int horizontalSpacing = NO_SPACING; private int verticalSpacing = NO_SPACING; private boolean newLine = false; public LayoutParams(Context context, AttributeSet attributeSet) { super(context, attributeSet); this.readStyleParameters(context, attributeSet); } public LayoutParams(int width, int height) { super(width, height); } public LayoutParams(ViewGroup.LayoutParams layoutParams) { super(layoutParams); } public boolean horizontalSpacingSpecified() { return horizontalSpacing != NO_SPACING; } public boolean verticalSpacingSpecified() { return verticalSpacing != NO_SPACING; } public void setPosition(int x, int y) { this.x = x; this.y = y; } private void readStyleParameters(Context context, AttributeSet attributeSet) { TypedArray a = context.obtainStyledAttributes(attributeSet, R.styleable.FlowLayout_LayoutParams); try { horizontalSpacing = a.getDimensionPixelSize(R.styleable.FlowLayout_LayoutParams_layout_horizontalSpacing, NO_SPACING); verticalSpacing = a.getDimensionPixelSize(R.styleable.FlowLayout_LayoutParams_layout_verticalSpacing, NO_SPACING); newLine = a.getBoolean(R.styleable.FlowLayout_LayoutParams_layout_newLine, false); } finally { a.recycle(); } } } }
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT License. // Code generated by Microsoft (R) AutoRest Code Generator. package com.azure.resourcemanager.batch.models; import com.azure.core.http.rest.Response; import com.azure.core.management.Region; import com.azure.core.util.Context; import com.azure.resourcemanager.batch.fluent.models.BatchAccountInner; import java.util.List; import java.util.Map; /** An immutable client-side representation of BatchAccount. */ public interface BatchAccount { /** * Gets the id property: Fully qualified resource Id for the resource. * * @return the id value. */ String id(); /** * Gets the name property: The name of the resource. * * @return the name value. */ String name(); /** * Gets the type property: The type of the resource. * * @return the type value. */ String type(); /** * Gets the location property: The geo-location where the resource lives. * * @return the location value. */ String location(); /** * Gets the tags property: Resource tags. * * @return the tags value. */ Map<String, String> tags(); /** * Gets the identity property: The identity of the Batch account. * * @return the identity value. */ BatchAccountIdentity identity(); /** * Gets the accountEndpoint property: The account endpoint used to interact with the Batch service. * * @return the accountEndpoint value. */ String accountEndpoint(); /** * Gets the provisioningState property: The provisioned state of the resource. * * @return the provisioningState value. */ ProvisioningState provisioningState(); /** * Gets the poolAllocationMode property: The allocation mode for creating pools in the Batch account. * * @return the poolAllocationMode value. */ PoolAllocationMode poolAllocationMode(); /** * Gets the keyVaultReference property: Identifies the Azure key vault associated with a Batch account. * * @return the keyVaultReference value. 
*/ KeyVaultReference keyVaultReference(); /** * Gets the publicNetworkAccess property: If not specified, the default value is 'enabled'. * * @return the publicNetworkAccess value. */ PublicNetworkAccessType publicNetworkAccess(); /** * Gets the privateEndpointConnections property: List of private endpoint connections associated with the Batch * account. * * @return the privateEndpointConnections value. */ List<PrivateEndpointConnection> privateEndpointConnections(); /** * Gets the autoStorage property: Contains information about the auto-storage account associated with a Batch * account. * * @return the autoStorage value. */ AutoStorageProperties autoStorage(); /** * Gets the encryption property: Configures how customer data is encrypted inside the Batch account. By default, * accounts are encrypted using a Microsoft managed key. For additional control, a customer-managed key can be used * instead. * * @return the encryption value. */ EncryptionProperties encryption(); /** * Gets the dedicatedCoreQuota property: The dedicated core quota for the Batch account. For accounts with * PoolAllocationMode set to UserSubscription, quota is managed on the subscription so this value is not returned. * * @return the dedicatedCoreQuota value. */ Integer dedicatedCoreQuota(); /** * Gets the lowPriorityCoreQuota property: The low-priority core quota for the Batch account. For accounts with * PoolAllocationMode set to UserSubscription, quota is managed on the subscription so this value is not returned. * * @return the lowPriorityCoreQuota value. */ Integer lowPriorityCoreQuota(); /** * Gets the dedicatedCoreQuotaPerVMFamily property: A list of the dedicated core quota per Virtual Machine family * for the Batch account. For accounts with PoolAllocationMode set to UserSubscription, quota is managed on the * subscription so this value is not returned. * * @return the dedicatedCoreQuotaPerVMFamily value. 
*/ List<VirtualMachineFamilyCoreQuota> dedicatedCoreQuotaPerVMFamily(); /** * Gets the dedicatedCoreQuotaPerVMFamilyEnforced property: A value indicating whether core quotas per Virtual * Machine family are enforced for this account Batch is transitioning its core quota system for dedicated cores to * be enforced per Virtual Machine family. During this transitional phase, the dedicated core quota per Virtual * Machine family may not yet be enforced. If this flag is false, dedicated core quota is enforced via the old * dedicatedCoreQuota property on the account and does not consider Virtual Machine family. If this flag is true, * dedicated core quota is enforced via the dedicatedCoreQuotaPerVMFamily property on the account, and the old * dedicatedCoreQuota does not apply. * * @return the dedicatedCoreQuotaPerVMFamilyEnforced value. */ Boolean dedicatedCoreQuotaPerVMFamilyEnforced(); /** * Gets the poolQuota property: The pool quota for the Batch account. * * @return the poolQuota value. */ Integer poolQuota(); /** * Gets the activeJobAndJobScheduleQuota property: The active job and job schedule quota for the Batch account. * * @return the activeJobAndJobScheduleQuota value. */ Integer activeJobAndJobScheduleQuota(); /** * Gets the allowedAuthenticationModes property: List of allowed authentication modes for the Batch account that can * be used to authenticate with the data plane. This does not affect authentication with the control plane. * * @return the allowedAuthenticationModes value. */ List<AuthenticationMode> allowedAuthenticationModes(); /** * Gets the region of the resource. * * @return the region of the resource. */ Region region(); /** * Gets the name of the resource region. * * @return the name of the resource region. */ String regionName(); /** * Gets the inner com.azure.resourcemanager.batch.fluent.models.BatchAccountInner object. * * @return the inner object. */ BatchAccountInner innerModel(); /** The entirety of the BatchAccount definition. 
*/ interface Definition extends DefinitionStages.Blank, DefinitionStages.WithLocation, DefinitionStages.WithResourceGroup, DefinitionStages.WithCreate { } /** The BatchAccount definition stages. */ interface DefinitionStages { /** The first stage of the BatchAccount definition. */ interface Blank extends WithLocation { } /** The stage of the BatchAccount definition allowing to specify location. */ interface WithLocation { /** * Specifies the region for the resource. * * @param location The region in which to create the account. * @return the next definition stage. */ WithResourceGroup withRegion(Region location); /** * Specifies the region for the resource. * * @param location The region in which to create the account. * @return the next definition stage. */ WithResourceGroup withRegion(String location); } /** The stage of the BatchAccount definition allowing to specify parent resource. */ interface WithResourceGroup { /** * Specifies resourceGroupName. * * @param resourceGroupName The name of the resource group that contains the Batch account. * @return the next definition stage. */ WithCreate withExistingResourceGroup(String resourceGroupName); } /** * The stage of the BatchAccount definition which contains all the minimum required properties for the resource * to be created, but also allows for any other optional properties to be specified. */ interface WithCreate extends DefinitionStages.WithTags, DefinitionStages.WithIdentity, DefinitionStages.WithAutoStorage, DefinitionStages.WithPoolAllocationMode, DefinitionStages.WithKeyVaultReference, DefinitionStages.WithPublicNetworkAccess, DefinitionStages.WithEncryption, DefinitionStages.WithAllowedAuthenticationModes { /** * Executes the create request. * * @return the created resource. */ BatchAccount create(); /** * Executes the create request. * * @param context The context to associate with this operation. * @return the created resource. 
*/ BatchAccount create(Context context); } /** The stage of the BatchAccount definition allowing to specify tags. */ interface WithTags { /** * Specifies the tags property: The user-specified tags associated with the account.. * * @param tags The user-specified tags associated with the account. * @return the next definition stage. */ WithCreate withTags(Map<String, String> tags); } /** The stage of the BatchAccount definition allowing to specify identity. */ interface WithIdentity { /** * Specifies the identity property: The identity of the Batch account.. * * @param identity The identity of the Batch account. * @return the next definition stage. */ WithCreate withIdentity(BatchAccountIdentity identity); } /** The stage of the BatchAccount definition allowing to specify autoStorage. */ interface WithAutoStorage { /** * Specifies the autoStorage property: The properties related to the auto-storage account.. * * @param autoStorage The properties related to the auto-storage account. * @return the next definition stage. */ WithCreate withAutoStorage(AutoStorageBaseProperties autoStorage); } /** The stage of the BatchAccount definition allowing to specify poolAllocationMode. */ interface WithPoolAllocationMode { /** * Specifies the poolAllocationMode property: The pool allocation mode also affects how clients may * authenticate to the Batch Service API. If the mode is BatchService, clients may authenticate using access * keys or Azure Active Directory. If the mode is UserSubscription, clients must use Azure Active Directory. * The default is BatchService.. * * @param poolAllocationMode The pool allocation mode also affects how clients may authenticate to the Batch * Service API. If the mode is BatchService, clients may authenticate using access keys or Azure Active * Directory. If the mode is UserSubscription, clients must use Azure Active Directory. The default is * BatchService. * @return the next definition stage. 
*/ WithCreate withPoolAllocationMode(PoolAllocationMode poolAllocationMode); } /** The stage of the BatchAccount definition allowing to specify keyVaultReference. */ interface WithKeyVaultReference { /** * Specifies the keyVaultReference property: A reference to the Azure key vault associated with the Batch * account.. * * @param keyVaultReference A reference to the Azure key vault associated with the Batch account. * @return the next definition stage. */ WithCreate withKeyVaultReference(KeyVaultReference keyVaultReference); } /** The stage of the BatchAccount definition allowing to specify publicNetworkAccess. */ interface WithPublicNetworkAccess { /** * Specifies the publicNetworkAccess property: If not specified, the default value is 'enabled'.. * * @param publicNetworkAccess If not specified, the default value is 'enabled'. * @return the next definition stage. */ WithCreate withPublicNetworkAccess(PublicNetworkAccessType publicNetworkAccess); } /** The stage of the BatchAccount definition allowing to specify encryption. */ interface WithEncryption { /** * Specifies the encryption property: Configures how customer data is encrypted inside the Batch account. By * default, accounts are encrypted using a Microsoft managed key. For additional control, a customer-managed * key can be used instead.. * * @param encryption Configures how customer data is encrypted inside the Batch account. By default, * accounts are encrypted using a Microsoft managed key. For additional control, a customer-managed key * can be used instead. * @return the next definition stage. */ WithCreate withEncryption(EncryptionProperties encryption); } /** The stage of the BatchAccount definition allowing to specify allowedAuthenticationModes. */ interface WithAllowedAuthenticationModes { /** * Specifies the allowedAuthenticationModes property: List of allowed authentication modes for the Batch * account that can be used to authenticate with the data plane. 
This does not affect authentication with * the control plane.. * * @param allowedAuthenticationModes List of allowed authentication modes for the Batch account that can be * used to authenticate with the data plane. This does not affect authentication with the control plane. * @return the next definition stage. */ WithCreate withAllowedAuthenticationModes(List<AuthenticationMode> allowedAuthenticationModes); } } /** * Begins update for the BatchAccount resource. * * @return the stage of resource update. */ BatchAccount.Update update(); /** The template for BatchAccount update. */ interface Update extends UpdateStages.WithTags, UpdateStages.WithIdentity, UpdateStages.WithAutoStorage, UpdateStages.WithEncryption, UpdateStages.WithAllowedAuthenticationModes { /** * Executes the update request. * * @return the updated resource. */ BatchAccount apply(); /** * Executes the update request. * * @param context The context to associate with this operation. * @return the updated resource. */ BatchAccount apply(Context context); } /** The BatchAccount update stages. */ interface UpdateStages { /** The stage of the BatchAccount update allowing to specify tags. */ interface WithTags { /** * Specifies the tags property: The user-specified tags associated with the account.. * * @param tags The user-specified tags associated with the account. * @return the next definition stage. */ Update withTags(Map<String, String> tags); } /** The stage of the BatchAccount update allowing to specify identity. */ interface WithIdentity { /** * Specifies the identity property: The identity of the Batch account.. * * @param identity The identity of the Batch account. * @return the next definition stage. */ Update withIdentity(BatchAccountIdentity identity); } /** The stage of the BatchAccount update allowing to specify autoStorage. */ interface WithAutoStorage { /** * Specifies the autoStorage property: The properties related to the auto-storage account.. 
* * @param autoStorage The properties related to the auto-storage account. * @return the next definition stage. */ Update withAutoStorage(AutoStorageBaseProperties autoStorage); } /** The stage of the BatchAccount update allowing to specify encryption. */ interface WithEncryption { /** * Specifies the encryption property: Configures how customer data is encrypted inside the Batch account. By * default, accounts are encrypted using a Microsoft managed key. For additional control, a customer-managed * key can be used instead.. * * @param encryption Configures how customer data is encrypted inside the Batch account. By default, * accounts are encrypted using a Microsoft managed key. For additional control, a customer-managed key * can be used instead. * @return the next definition stage. */ Update withEncryption(EncryptionProperties encryption); } /** The stage of the BatchAccount update allowing to specify allowedAuthenticationModes. */ interface WithAllowedAuthenticationModes { /** * Specifies the allowedAuthenticationModes property: List of allowed authentication modes for the Batch * account that can be used to authenticate with the data plane. This does not affect authentication with * the control plane.. * * @param allowedAuthenticationModes List of allowed authentication modes for the Batch account that can be * used to authenticate with the data plane. This does not affect authentication with the control plane. * @return the next definition stage. */ Update withAllowedAuthenticationModes(List<AuthenticationMode> allowedAuthenticationModes); } } /** * Refreshes the resource to sync with Azure. * * @return the refreshed resource. */ BatchAccount refresh(); /** * Refreshes the resource to sync with Azure. * * @param context The context to associate with this operation. * @return the refreshed resource. 
*/ BatchAccount refresh(Context context); /** * Synchronizes access keys for the auto-storage account configured for the specified Batch account, only if storage * key authentication is being used. * * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. */ void synchronizeAutoStorageKeys(); /** * Synchronizes access keys for the auto-storage account configured for the specified Batch account, only if storage * key authentication is being used. * * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return the response. */ Response<Void> synchronizeAutoStorageKeysWithResponse(Context context); /** * This operation applies only to Batch accounts with allowedAuthenticationModes containing 'SharedKey'. If the * Batch account doesn't contain 'SharedKey' in its allowedAuthenticationMode, clients cannot use shared keys to * authenticate, and must use another allowedAuthenticationModes instead. In this case, regenerating the keys will * fail. * * @param parameters The type of key to regenerate. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a set of Azure Batch account keys. */ BatchAccountKeys regenerateKey(BatchAccountRegenerateKeyParameters parameters); /** * This operation applies only to Batch accounts with allowedAuthenticationModes containing 'SharedKey'. 
If the * Batch account doesn't contain 'SharedKey' in its allowedAuthenticationMode, clients cannot use shared keys to * authenticate, and must use another allowedAuthenticationModes instead. In this case, regenerating the keys will * fail. * * @param parameters The type of key to regenerate. * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a set of Azure Batch account keys. */ Response<BatchAccountKeys> regenerateKeyWithResponse( BatchAccountRegenerateKeyParameters parameters, Context context); /** * This operation applies only to Batch accounts with allowedAuthenticationModes containing 'SharedKey'. If the * Batch account doesn't contain 'SharedKey' in its allowedAuthenticationMode, clients cannot use shared keys to * authenticate, and must use another allowedAuthenticationModes instead. In this case, getting the keys will fail. * * @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a set of Azure Batch account keys. */ BatchAccountKeys getKeys(); /** * This operation applies only to Batch accounts with allowedAuthenticationModes containing 'SharedKey'. If the * Batch account doesn't contain 'SharedKey' in its allowedAuthenticationMode, clients cannot use shared keys to * authenticate, and must use another allowedAuthenticationModes instead. In this case, getting the keys will fail. * * @param context The context to associate with this operation. * @throws IllegalArgumentException thrown if parameters fail the validation. 
* @throws com.azure.core.management.exception.ManagementException thrown if the request is rejected by server. * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent. * @return a set of Azure Batch account keys. */ Response<BatchAccountKeys> getKeysWithResponse(Context context); }
package com.vk.api.sdk.client; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.vk.api.sdk.actions.Account; import com.vk.api.sdk.actions.Ads; import com.vk.api.sdk.actions.AppWidgets; import com.vk.api.sdk.actions.Apps; import com.vk.api.sdk.actions.Auth; import com.vk.api.sdk.actions.Board; import com.vk.api.sdk.actions.Database; import com.vk.api.sdk.actions.Docs; import com.vk.api.sdk.actions.Execute; import com.vk.api.sdk.actions.Fave; import com.vk.api.sdk.actions.Friends; import com.vk.api.sdk.actions.Gifts; import com.vk.api.sdk.actions.Groups; import com.vk.api.sdk.actions.GroupsLongPoll; import com.vk.api.sdk.actions.LeadForms; import com.vk.api.sdk.actions.Likes; import com.vk.api.sdk.actions.LongPoll; import com.vk.api.sdk.actions.Market; import com.vk.api.sdk.actions.Messages; import com.vk.api.sdk.actions.Newsfeed; import com.vk.api.sdk.actions.Notes; import com.vk.api.sdk.actions.Notifications; import com.vk.api.sdk.actions.OAuth; import com.vk.api.sdk.actions.Orders; import com.vk.api.sdk.actions.Pages; import com.vk.api.sdk.actions.Photos; import com.vk.api.sdk.actions.Polls; import com.vk.api.sdk.actions.Search; import com.vk.api.sdk.actions.Secure; import com.vk.api.sdk.actions.Stats; import com.vk.api.sdk.actions.Status; import com.vk.api.sdk.actions.Storage; import com.vk.api.sdk.actions.Store; import com.vk.api.sdk.actions.Stories; import com.vk.api.sdk.actions.Streaming; import com.vk.api.sdk.actions.Upload; import com.vk.api.sdk.actions.Users; import com.vk.api.sdk.actions.Utils; import com.vk.api.sdk.actions.Videos; import com.vk.api.sdk.actions.Wall; import com.vk.api.sdk.actions.Widgets; import org.apache.commons.lang3.StringUtils; public class VkApiClient { private static final String API_ADDRESS = "https://api.vk.com/method/"; private static final String OAUTH_ENDPOINT = "https://oauth.vk.com/"; private static final int DEFAULT_RETRY_ATTEMPTS_INTERNAL_SERVER_ERROR_COUNT = 3; private String apiVersion = 
"5.131"; private TransportClient transportClient; private Gson gson; private String apiEndpoint; private String oauthEndpoint; private int retryAttemptsInternalServerErrorCount; public VkApiClient(TransportClient transportClient) { this(transportClient, new GsonBuilder().disableHtmlEscaping().create(), DEFAULT_RETRY_ATTEMPTS_INTERNAL_SERVER_ERROR_COUNT); } public VkApiClient(TransportClient transportClient, Gson gson, int retryAttemptsInternalServerErrorCount) { this.transportClient = transportClient; this.gson = gson; this.retryAttemptsInternalServerErrorCount = retryAttemptsInternalServerErrorCount; if (StringUtils.isNoneEmpty(System.getProperty("api.host"))) { apiEndpoint = "https://" + System.getProperty("api.host") + "/method/"; } else { apiEndpoint = API_ADDRESS; } if (StringUtils.isNoneEmpty(System.getProperty("oauth.host"))) { oauthEndpoint = "https://" + System.getProperty("oauth.host") + "/"; } else { oauthEndpoint = OAUTH_ENDPOINT; } } public TransportClient getTransportClient() { return transportClient; } public Gson getGson() { return gson; } public int getRetryAttemptsInternalServerErrorCount() { return retryAttemptsInternalServerErrorCount; } public String getApiEndpoint() { return apiEndpoint; } public String getOAuthEndpoint() { return oauthEndpoint; } public String getVersion() { return apiVersion; } public void setVersion(String version) { this.apiVersion = version; } public Account account() { return new Account(this); } public Orders orders() { return new Orders(this); } public Wall wall() { return new Wall(this); } public LeadForms leadForms() { return new LeadForms(this); } public Execute execute() { return new Execute(this); } public Friends friends() { return new Friends(this); } public LongPoll longPoll() { return new LongPoll(this); } public Store store() { return new Store(this); } public Upload upload() { return new Upload(this); } public Users users() { return new Users(this); } public Apps apps() { return new Apps(this); } public 
Videos videos() { return new Videos(this); } public Photos photos() { return new Photos(this); } public Ads ads() { return new Ads(this); } public Board board() { return new Board(this); } public Widgets widgets() { return new Widgets(this); } public GroupsLongPoll groupsLongPoll() { return new GroupsLongPoll(this); } public Groups groups() { return new Groups(this); } public Auth auth() { return new Auth(this); } public Newsfeed newsfeed() { return new Newsfeed(this); } public Search search() { return new Search(this); } public AppWidgets appWidgets() { return new AppWidgets(this); } public Database database() { return new Database(this); } public Stories stories() { return new Stories(this); } public Notes notes() { return new Notes(this); } public Status status() { return new Status(this); } public Streaming streaming() { return new Streaming(this); } public Messages messages() { return new Messages(this); } public Fave fave() { return new Fave(this); } public Market market() { return new Market(this); } public OAuth oAuth() { return new OAuth(this); } public Polls polls() { return new Polls(this); } public Pages pages() { return new Pages(this); } public Storage storage() { return new Storage(this); } public Utils utils() { return new Utils(this); } public Secure secure() { return new Secure(this); } public Stats stats() { return new Stats(this); } public Gifts gifts() { return new Gifts(this); } public Docs docs() { return new Docs(this); } public Likes likes() { return new Likes(this); } public Notifications notifications() { return new Notifications(this); } }
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.smartdata.server; import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.Option; import org.apache.commons.cli.Options; import org.apache.commons.cli.ParseException; import org.apache.commons.cli.PosixParser; import org.apache.hadoop.security.UserGroupInformation; import org.apache.zeppelin.server.SmartZeppelinServer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.slf4j.bridge.SLF4JBridgeHandler; import org.smartdata.SmartServiceState; import org.smartdata.conf.SmartConf; import org.smartdata.conf.SmartConfKeys; import org.smartdata.hdfs.HadoopUtil; import org.smartdata.metastore.MetaStore; import org.smartdata.metastore.utils.MetaStoreUtils; import org.smartdata.server.engine.CmdletManager; import org.smartdata.server.engine.ConfManager; import org.smartdata.server.engine.RuleManager; import org.smartdata.server.engine.ServerContext; import org.smartdata.server.engine.ServiceMode; import org.smartdata.server.engine.StatesManager; import org.smartdata.server.engine.cmdlet.agent.AgentMaster; import org.smartdata.server.utils.GenericOptionsParser; import org.smartdata.utils.SecurityUtil; import 
static org.smartdata.SmartConstants.NUMBER_OF_SMART_AGENT; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.PrintStream; import java.util.ArrayList; import java.util.List; import java.util.Scanner; /** * From this Smart Storage Management begins. */ public class SmartServer { public static final Logger LOG = LoggerFactory.getLogger(SmartServer.class); private ConfManager confMgr; private final SmartConf conf; private SmartEngine engine; private ServerContext context; private boolean enabled; private SmartRpcServer rpcServer; private SmartZeppelinServer zeppelinServer; static { SLF4JBridgeHandler.removeHandlersForRootLogger(); SLF4JBridgeHandler.install(); } public SmartServer(SmartConf conf) { this.conf = conf; this.confMgr = new ConfManager(conf); this.enabled = false; } public void initWith() throws Exception { LOG.info("Start Init Smart Server"); HadoopUtil.setSmartConfByHadoop(conf); authentication(); MetaStore metaStore = MetaStoreUtils.getDBAdapter(conf); context = new ServerContext(conf, metaStore); initServiceMode(conf); engine = new SmartEngine(context); rpcServer = new SmartRpcServer(this, conf); zeppelinServer = new SmartZeppelinServer(conf, engine); LOG.info("Finish Init Smart Server"); } public StatesManager getStatesManager() { return engine.getStatesManager(); } public RuleManager getRuleManager() { return engine.getRuleManager(); } public CmdletManager getCmdletManager() { return engine.getCmdletManager(); } public MetaStore getMetaStore() { return this.context.getMetaStore(); } public ServerContext getContext() { return this.context; } public static StartupOption processArgs(String[] args, SmartConf conf) throws Exception { if (args == null) { args = new String[0]; } StartupOption startOpt = StartupOption.REGULAR; List<String> list = new ArrayList<>(); for (String arg : args) { if (StartupOption.FORMAT.getName().equalsIgnoreCase(arg)) { startOpt = StartupOption.FORMAT; } else if 
(StartupOption.REGULAR.getName().equalsIgnoreCase(arg)) { startOpt = StartupOption.REGULAR; } else if (arg.equals("-h") || arg.equals("-help")) { if (parseHelpArgument(new String[]{arg}, USAGE, System.out, true)) { return null; } } else { list.add(arg); } } if (list != null) { String remainArgs[] = list.toArray(new String[list.size()]); new GenericOptionsParser(conf, remainArgs); } return startOpt; } public static void setAgentNum(SmartConf conf) { String agentConfFile = conf.get(SmartConfKeys.SMART_CONF_DIR_KEY, SmartConfKeys.SMART_CONF_DIR_DEFAULT) + "/agents"; Scanner sc = null; try { sc = new Scanner(new File(agentConfFile)); } catch (FileNotFoundException ex) { LOG.error("Cannot find the config file: {}!", agentConfFile); } int num = 0; while (sc.hasNextLine()) { String host = sc.nextLine().trim(); if (!host.startsWith("#") && !host.isEmpty()) { num++; } } conf.setInt(NUMBER_OF_SMART_AGENT, num); } static SmartServer processWith(StartupOption startOption, SmartConf conf) throws Exception { // New AgentMaster AgentMaster.getAgentMaster(conf); if (startOption == StartupOption.FORMAT) { LOG.info("Formatting DataBase ..."); MetaStoreUtils.formatDatabase(conf); LOG.info("Formatting DataBase finished successfully!"); } else { MetaStoreUtils.checkTables(conf); } SmartServer ssm = new SmartServer(conf); try { ssm.initWith(); ssm.run(); return ssm; } catch (Exception e) { ssm.shutdown(); throw e; } } private static final String USAGE = "Usage: ssm [options]\n" + " -h\n\tShow this usage information.\n\n" + " -format\n\tFormat the configured database.\n\n" + " -D property=value\n" + "\tSpecify or overwrite an configure option.\n" + "\tE.g. 
-D smart.dfs.namenode.rpcserver=hdfs://localhost:43543\n"; private static final Options helpOptions = new Options(); private static final Option helpOpt = new Option("h", "help", false, "get help information"); static { helpOptions.addOption(helpOpt); } private static boolean parseHelpArgument(String[] args, String helpDescription, PrintStream out, boolean printGenericCmdletUsage) { try { CommandLineParser parser = new PosixParser(); CommandLine cmdLine = parser.parse(helpOptions, args); if (cmdLine.hasOption(helpOpt.getOpt()) || cmdLine.hasOption(helpOpt.getLongOpt())) { // should print out the help information out.println(helpDescription + "\n"); return true; } } catch (ParseException pe) { //LOG.warn("Parse help exception", pe); return false; } return false; } private void authentication() throws IOException { if (!SecurityUtil.isSecurityEnabled(conf)) { return; } // Load Hadoop configuration files try { HadoopUtil.loadHadoopConf(conf); } catch (IOException e) { LOG.info("Running in secure mode, but cannot find Hadoop configuration file. " + "Please config smart.hadoop.conf.path property in smart-site.xml."); conf.set("hadoop.security.authentication", "kerberos"); conf.set("hadoop.security.authorization", "true"); } UserGroupInformation.setConfiguration(conf); String keytabFilename = conf.get(SmartConfKeys.SMART_SERVER_KEYTAB_FILE_KEY); String principalConfig = conf.get(SmartConfKeys.SMART_SERVER_KERBEROS_PRINCIPAL_KEY); String principal = org.apache.hadoop.security.SecurityUtil.getServerPrincipal(principalConfig, (String) null); SecurityUtil.loginUsingKeytab(keytabFilename, principal); } /** * Bring up all the daemon threads needed. 
* * @throws Exception */ private void run() throws Exception { boolean enabled = conf.getBoolean(SmartConfKeys.SMART_DFS_ENABLED, SmartConfKeys.SMART_DFS_ENABLED_DEFAULT); if (enabled) { startEngines(); } rpcServer.start(); if (zeppelinServer != null) { zeppelinServer.start(); } } private void startEngines() throws Exception { enabled = true; engine.init(); engine.start(); } public void enable() throws IOException { if (getSSMServiceState() == SmartServiceState.DISABLED) { try { startEngines(); } catch (Exception e) { throw new IOException(e); } } } public SmartServiceState getSSMServiceState() { if (!enabled) { return SmartServiceState.DISABLED; } else if (!engine.inSafeMode()) { return SmartServiceState.ACTIVE; } else { return SmartServiceState.SAFEMODE; } } public boolean isActive() { return getSSMServiceState() == SmartServiceState.ACTIVE; } private void stop() throws Exception { if (engine != null) { engine.stop(); } if (zeppelinServer != null) { zeppelinServer.stop(); } try { if (rpcServer != null) { rpcServer.stop(); } } catch (Exception e) { } } public void shutdown() { try { stop(); //join(); } catch (Exception e) { LOG.error("SmartServer shutdown error", e); } } private enum StartupOption { FORMAT("-format"), REGULAR("-regular"); private String name; StartupOption(String arg) { this.name = arg; } public String getName() { return name; } } private void initServiceMode(SmartConf conf) { String serviceModeStr = conf.get(SmartConfKeys.SMART_SERVICE_MODE_KEY, SmartConfKeys.SMART_SERVICE_MODE_DEFAULT); try { context.setServiceMode(ServiceMode.valueOf(serviceModeStr.trim().toUpperCase())); } catch (IllegalStateException e) { String errorMsg = "Illegal service mode '" + serviceModeStr + "' set in property: " + SmartConfKeys.SMART_SERVICE_MODE_KEY + "!"; LOG.error(errorMsg); throw e; } LOG.info("Initialized service mode: " + context.getServiceMode().getName() + "."); } public static SmartServer launchWith(SmartConf conf) throws Exception { return launchWith(null, 
conf); } public static SmartServer launchWith(String[] args, SmartConf conf) throws Exception { if (conf == null) { conf = new SmartConf(); } StartupOption startOption = processArgs(args, conf); if (startOption == null) { return null; } return processWith(startOption, conf); } public static void main(String[] args) { int errorCode = 0; // if SSM exit normally then the errorCode is 0 try { final SmartServer inst = launchWith(args, null); if (inst != null) { Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { LOG.info("Shutting down SmartServer ... "); try { inst.shutdown(); } catch (Exception e) { LOG.error("Error while stopping servlet container", e); } LOG.info("SmartServer was down."); } }); //Todo: when to break while (true) { Thread.sleep(1000); } } } catch (Exception e) { LOG.error("Failed to create SmartServer", e); System.exit(1); } finally { System.exit(errorCode); } } }
/** * The contents of this file are subject to the license and copyright * detailed in the LICENSE and NOTICE files at the root of the source * tree and available online at * * http://www.dspace.org/license/ */ package org.dspace.content.authority; import org.apache.log4j.Logger; import org.dspace.authorize.AuthorizeException; import org.dspace.authorize.AuthorizeManager; import org.dspace.content.NonUniqueMetadataException; import org.dspace.core.ConfigurationManager; import org.dspace.core.Context; import org.dspace.core.LogManager; import org.dspace.storage.rdbms.DatabaseManager; import org.dspace.storage.rdbms.TableRow; import org.dspace.storage.rdbms.TableRowIterator; import java.io.IOException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Object that represents a relationship between concepts, which is * defined by a combination of "role", "incoming" Concept and * outgoing Concept internal database identifiers. * * @author Lantian Gai, Mark Diggory */ public class Concept2Concept { private int relationID = 0; private int role_id = 0; private int incoming_id; private int outgoing_id; private Context context; private boolean modified = false; /** log4j logger */ private static Logger log = Logger.getLogger(Concept2Concept.class); /** The row in the table representing this type */ private TableRow row; // cache of relation by ID (Integer) private static Map<Integer, Concept2Concept> id2relation = null; /** * Default constructor. */ public Concept2Concept() { } /** * Constructor creating a relation within a schema. * */ public Concept2Concept(Concept2ConceptRole role) { this.role_id = role.getRelationID(); } /** * Full constructor for new metadata relation elements. 
* */ public Concept2Concept(Concept2ConceptRole role, int incoming_id, int outgoing_id) { this.role_id = role.getRelationID(); this.incoming_id = incoming_id; this.outgoing_id = outgoing_id; } /** * Full constructor for existing metadata relation elements. * * @param role_id schema to which the relation belongs * @param relationID database ID of relation. */ public Concept2Concept(int role_id, int relationID, int incoming_id, int outgoing_id) { this.role_id = role_id; this.relationID = relationID; this.incoming_id = incoming_id; this.outgoing_id = outgoing_id; } /** * Constructor to load the object from the database. * * @param row database row from which to populate object. */ public Concept2Concept(TableRow row) { if (row != null) { this.relationID = row.getIntColumn("id"); this.role_id = row.getIntColumn("role_id"); this.incoming_id = row.getIntColumn("incoming_id"); this.outgoing_id = row.getIntColumn("outgoing_id"); this.row = row; } } /** * Get the element name. * * @return element name */ public int getIncomingId() { return incoming_id; } /** * Set the element name. * */ public void setIncomingId(int incoming_id) { this.incoming_id = incoming_id; } /** * Get the metadata relation id. * * @return metadata relation id */ public int getRelationID() { return relationID; } /** * Get the qualifier. * * @return qualifier */ public int getOutgoingId() { return outgoing_id; } /** * Set the qualifier. * */ public void setOutgoingId(int outgoing_id) { this.outgoing_id = outgoing_id; } /** * Get the schema record key. * * @return schema record key */ public int getRoleId() { return role_id; } /** * Set the schema record key. * * @param role_id new value for key */ public void setRoleId(int role_id) { this.role_id = role_id; } Concept2Concept(Context context, TableRow row) throws SQLException { this.context = context; this.row = row; this.modified = false; // Cache ourselves context.cache(this, row.getIntColumn("id")); } /** * Creates a new metadata relation. 
* * @param context * DSpace context object * @throws java.io.IOException * @throws org.dspace.authorize.AuthorizeException * @throws java.sql.SQLException * @throws org.dspace.content.NonUniqueMetadataException */ public static Concept2Concept create(Context context) throws AuthorizeException, SQLException { // authorized? if (!AuthorizeManager.isAdmin(context)) { throw new AuthorizeException( "You must be an admin to create an Concept2Concept"); } // Create a table row TableRow row = DatabaseManager.create(context, "Concept2Concept"); Concept2Concept e = new Concept2Concept(context, row); log.info(LogManager.getHeader(context, "create_Concept2Concept", "Concept2Concept_id=" + e.getRelationID())); //context.addEvent(new Event(Event.CREATE, Constants.EPERSON, e.getID(), null)); return e; } /** * Retrieves the metadata relation from the database. * * @param context dspace context * @param role_id schema by ID * @return recalled metadata relation * @throws java.sql.SQLException * @throws org.dspace.authorize.AuthorizeException */ public static Concept2Concept findByElement(Context context, int role_id, int incoming_id, int outgoing_id) throws SQLException, AuthorizeException { // Grab rows from DB TableRowIterator tri; tri = DatabaseManager.queryTable(context,"Concept2Concept", "SELECT * FROM Concept2Concept WHERE role_id= ? " + "AND incoming_id= ? AND outgoing_id= ? 
", role_id, incoming_id, outgoing_id); TableRow row = null; try { if (tri.hasNext()) { row = tri.next(); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } if (row == null) { return null; } else { return new Concept2Concept(row); } } /** * Retrieve all Dublin Core types from the registry * * @param context dspace context * @return an array of all the Dublin Core types * @throws java.sql.SQLException */ public static Concept2Concept[] findAll(Context context) throws SQLException { List<Concept2Concept> relations = new ArrayList<Concept2Concept>(); // Get all the metadatarelationregistry rows TableRowIterator tri = DatabaseManager.queryTable(context, "Concept2Concept", "SELECT mfr.* FROM Concept2Concept mfr ORDER BY msr.role_id, mfr.incoming_id, mfr.outgoing_id"); try { // Make into DC Type objects while (tri.hasNext()) { relations.add(new Concept2Concept(tri.next())); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } // Convert list into an array Concept2Concept[] typeArray = new Concept2Concept[relations.size()]; return (Concept2Concept[]) relations.toArray(typeArray); } /** * Return all metadata relations that are found in a given schema. * * @param context dspace context * @param role_id schema by db ID * @return array of metadata relations * @throws java.sql.SQLException */ public static Concept2Concept[] findAllByRole(Context context, int role_id) throws SQLException { List<Concept2Concept> relations = new ArrayList<Concept2Concept>(); // Get all the metadatarelationregistry rows TableRowIterator tri = DatabaseManager.queryTable(context,"Concept2Concept", "SELECT * FROM Concept2Concept WHERE role_id= ? 
" + " ORDER BY incoming_id, outgoing_id", role_id); try { // Make into DC Type objects while (tri.hasNext()) { relations.add(new Concept2Concept(tri.next())); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } // Convert list into an array Concept2Concept[] typeArray = new Concept2Concept[relations.size()]; return (Concept2Concept[]) relations.toArray(typeArray); } /** * Update the metadata relation in the database. * * @param context dspace context * @throws java.sql.SQLException * @throws org.dspace.authorize.AuthorizeException * @throws org.dspace.content.NonUniqueMetadataException * @throws java.io.IOException */ public void update(Context context) throws SQLException, AuthorizeException, NonUniqueMetadataException, IOException { // Check authorisation: Only admins may update the metadata registry if (!AuthorizeManager.isAdmin(context)) { throw new AuthorizeException( "Only administrators may modiffy the Dublin Core registry"); } // Check to see if the schema ID was altered. If is was then we will // query to ensure that there is not already a duplicate name relation. if (row.getIntColumn("role_id") != role_id) { if (Concept2Concept.hasElement(context, role_id, incoming_id, outgoing_id)) { throw new NonUniqueMetadataException( "Duplcate relation name found in target schema"); } } // Ensure the element and qualifier are unique within a given schema. if (!unique(context, role_id, incoming_id, outgoing_id)) { throw new NonUniqueMetadataException("Please make " + incoming_id + "." 
+ outgoing_id); } row.setColumn("role_id", role_id); row.setColumn("incoming_id", incoming_id); row.setColumn("outgoing_id", outgoing_id); DatabaseManager.update(context, row); decache(); log.info(LogManager.getHeader(context, "update_Concept2Concept", "Concept2Concept_id=" + getRelationID() + "incoming_id=" + incoming_id + "outgoing_id=" + outgoing_id)); } /** * Return true if and only if the schema has a relation with the given element * and qualifier pair. * * @param context dspace context * @param role_id schema by ID * @return true if the relation exists * @throws java.sql.SQLException * @throws org.dspace.authorize.AuthorizeException */ private static boolean hasElement(Context context, int role_id, int incoming_id, int outgoing_id) throws SQLException, AuthorizeException { return Concept2Concept.findByElement(context, role_id, incoming_id, outgoing_id) != null; } /** * Delete the metadata relation. * * @param context dspace context * @throws java.sql.SQLException * @throws org.dspace.authorize.AuthorizeException */ public void delete(Context context) throws SQLException, AuthorizeException { // Check authorisation: Only admins may create DC types if (!AuthorizeManager.isAdmin(context)) { throw new AuthorizeException( "Only administrators may modify the incoming registry"); } log.info(LogManager.getHeader(context, "delete_Concept2Concept", "metadata_incoming_relation_id=" + getRelationID())); DatabaseManager.delete(context, row); decache(); } /** * A sanity check that ensures a given element and qualifier are unique * within a given schema. The check happens in code as we cannot use a * database constraint. 
* * @param context dspace context * @param role_id * @return true if unique * @throws org.dspace.authorize.AuthorizeException * @throws java.sql.SQLException * @throws java.io.IOException */ private boolean unique(Context context, int role_id, int incoming_id, int outgoing_id) throws IOException, SQLException, AuthorizeException { int count = 0; Connection con = null; PreparedStatement statement = null; ResultSet rs = null; try { con = context.getDBConnection(); TableRow reg = DatabaseManager.row("Concept2Concept"); String query = "SELECT COUNT(*) FROM " + reg.getTable() + " WHERE role_id= ? " + " and incoming_id= ? " + "and outgoing_id = ?"; statement = con.prepareStatement(query); statement.setInt(1,role_id); statement.setInt(2,incoming_id); statement.setInt(3,outgoing_id); rs = statement.executeQuery(); if (rs.next()) { count = rs.getInt(1); } } finally { if (rs != null) { try { rs.close(); } catch (SQLException sqle) { } } if (statement != null) { try { statement.close(); } catch (SQLException sqle) { } } } return (count == 0); } /** * Return the HTML FORM key for the given relation. * * @return HTML FORM key */ public static String formKey(int role_id, int incoming_id, int outgoing_id) { return role_id+"_"+incoming_id+"_"+outgoing_id; } /** * Find the relation corresponding to the given numeric ID. The ID is * a database key internal to DSpace. * * @param context * context, in case we need to read it in from DB * @param id * the metadata relation ID * @return the metadata relation object * @throws java.sql.SQLException */ public static Concept2Concept find(Context context, int id) throws SQLException { if (!isCacheInitialized()) { initCache(context); } // 'sanity check' first. Integer iid = Integer.valueOf(id); if (!id2relation.containsKey(iid)) { return null; } return id2relation.get(iid); } // invalidate the cache e.g. after something modifies DB state. 
private static void decache() { id2relation = null; } private static boolean isCacheInitialized() { return id2relation != null; } // load caches if necessary private static synchronized void initCache(Context context) throws SQLException { if (!isCacheInitialized()) { Map<Integer, Concept2Concept> new_id2relation = new HashMap<Integer, Concept2Concept>(); log.info("Loading Concept2Concept elements into cache."); // Grab rows from DB TableRowIterator tri = DatabaseManager.queryTable(context,"Concept2Concept", "SELECT * from Concept2Concept"); try { while (tri.hasNext()) { TableRow row = tri.next(); int relationID = row.getIntColumn("metadata_incoming_relation_id"); new_id2relation.put(Integer.valueOf(relationID), new Concept2Concept(row)); } } finally { // close the TableRowIterator to free up resources if (tri != null) { tri.close(); } } id2relation = new_id2relation; } } /** * Return <code>true</code> if <code>other</code> is the same MetadataRelation * as this object, <code>false</code> otherwise * * @param obj * object to compare to * * @return <code>true</code> if object passed in represents the same * MetadataRelation as this object */ @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Concept2Concept other = (Concept2Concept) obj; if (this.relationID != other.relationID) { return false; } if (this.role_id != other.role_id) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 47 * hash + this.relationID; hash = 47 * hash + this.role_id; return hash; } public static Concept2Concept[] search(Context context, String query) throws SQLException { return search(context, query, -1, -1); } public static Concept2Concept[] search(Context context, String query, int offset, int limit) throws SQLException { String params = "%"+query.toLowerCase()+"%"; StringBuffer queryBuf = new StringBuffer(); queryBuf.append("SELECT count(*) as outgoingcount FROM 
incoming2outgoing,incoming2outgoingrole WHERE incoming2outgoing.id = incoming2outgoingrole.id AND (incoming2outgoing.id = ? OR "); queryBuf.append("incoming2outgoing.role_id = ? OR incoming2outgoing.incoming_id = ? OR incoming2outgoing.outgoing_id = ? OR incoming2outgoing.role like LOWER(?) ORDER BY incoming2outgoing.id desc"); // Add offset and limit restrictions - Oracle requires special code if ("oracle".equals(ConfigurationManager.getProperty("db.name"))) { // First prepare the query to generate row numbers if (limit > 0 || offset > 0) { queryBuf.insert(0, "SELECT /*+ FIRST_ROWS(n) */ rec.*, ROWNUM rnum FROM ("); queryBuf.append(") "); } // Restrict the number of rows returned based on the limit if (limit > 0) { queryBuf.append("rec WHERE rownum<=? "); // If we also have an offset, then convert the limit into the maximum row number if (offset > 0) { limit += offset; } } // Return only the records after the specified offset (row number) if (offset > 0) { queryBuf.insert(0, "SELECT * FROM ("); queryBuf.append(") WHERE rnum>?"); } } else { if (limit > 0) { queryBuf.append(" LIMIT ? "); } if (offset > 0) { queryBuf.append(" OFFSET ? 
"); } } String dbquery = queryBuf.toString(); // When checking against the eperson-id, make sure the query can be made into a number Integer int_param; try { int_param = Integer.valueOf(query); } catch (NumberFormatException e) { int_param = Integer.valueOf(-1); } // Create the parameter array, including limit and offset if part of the query Object[] paramArr = new Object[] {int_param,int_param,int_param,int_param,params}; if (limit > 0 && offset > 0) { paramArr = new Object[]{int_param, int_param,int_param,int_param,params,limit, offset}; } else if (limit > 0) { paramArr = new Object[]{int_param,int_param,int_param,int_param,params, limit}; } else if (offset > 0) { paramArr = new Object[]{int_param,int_param,int_param,int_param,params, offset}; } // Get all the epeople that match the query TableRowIterator rows = DatabaseManager.query(context, dbquery, paramArr); try { List<TableRow> incoming2ConceptsRows = rows.toList(); Concept2Concept[] incoming2Concepts = new Concept2Concept[incoming2ConceptsRows.size()]; for (int i = 0; i < incoming2ConceptsRows.size(); i++) { TableRow row = (TableRow) incoming2ConceptsRows.get(i); // First check the cache Concept2Concept fromCache = (Concept2Concept) context.fromCache(Concept2Concept.class, row .getIntColumn("id")); if (fromCache != null) { incoming2Concepts[i] = fromCache; } else { incoming2Concepts[i] = new Concept2Concept(row); } } return incoming2Concepts; } finally { if (rows != null) { rows.close(); } } } public static int searchResultCount(Context context, String query) throws SQLException { String dbquery = "%"+query.toLowerCase()+"%"; Long count; // When checking against the eperson-id, make sure the query can be made into a number Integer int_param; try { int_param = Integer.valueOf(query); } catch (NumberFormatException e) { int_param = Integer.valueOf(-1); } // Get all the epeople that match the query TableRow row = DatabaseManager.querySingle(context, "SELECT count(*) as outgoingcount FROM 
incoming2outgoing,incoming2outgoingrole WHERE incoming2outgoing.role_id = incoming2outgoingrole.id AND (incoming2outgoing.id = ? OR " + "incoming2outgoing.role_id = ? OR incoming2outgoing.incoming_id = ? OR incoming2outgoing.outgoing_id = ? OR incoming2outgoingrole.role like LOWER(?) ) ", new Object[] {int_param,int_param,int_param,int_param,dbquery}); // use getIntColumn for Oracle count data if ("oracle".equals(ConfigurationManager.getProperty("db.name"))) { count = Long.valueOf(row.getIntColumn("outgoingcount")); } else //getLongColumn works for postgres { count = Long.valueOf(row.getLongColumn("outgoingcount")); } return count.intValue(); } public static Concept2Concept findByConceptAndConcept(Context context,Integer incoming_id,Integer outgoing_id) throws SQLException, AuthorizeException { if (incoming_id == null||outgoing_id == null||incoming_id<0||outgoing_id<0) { return null; } Object[] paramArr = new Object[] {incoming_id,outgoing_id}; // All email addresses are stored as lowercase, so ensure that the email address is lowercased for the lookup TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM incoming2outgoing WHERE concpet_id = ? AND outgoing_id = ? 
", paramArr); try { List<TableRow> incoming2ConceptsRows = rows.toList(); Concept2Concept[] incoming2Concepts = new Concept2Concept[incoming2ConceptsRows.size()]; for (int i = 0; i < incoming2ConceptsRows.size(); i++) { TableRow row = (TableRow) incoming2ConceptsRows.get(i); // First check the cache Concept2Concept fromCache = (Concept2Concept) context.fromCache(Concept2Concept.class, row .getIntColumn("id")); if (fromCache != null) { incoming2Concepts[i] = fromCache; } else { incoming2Concepts[i] = new Concept2Concept(row); } } return incoming2Concepts[0]; } finally { if (rows != null) { rows.close(); } } } public static Concept2Concept[] findByParentAndRole(Context context,Integer incoming_id,Integer role_id) throws SQLException, AuthorizeException { if (incoming_id == null||role_id == null||incoming_id<0||role_id<0) { return null; } Object[] paramArr = new Object[] {incoming_id,role_id}; // All email addresses are stored as lowercase, so ensure that the email address is lowercased for the lookup TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM Concept2Concept WHERE incoming_id = ? AND role_id = ? 
", paramArr); try { List<TableRow> incoming2ConceptsRows = rows.toList(); Concept2Concept[] incoming2Concepts = new Concept2Concept[incoming2ConceptsRows.size()]; for (int i = 0; i < incoming2ConceptsRows.size(); i++) { TableRow row = (TableRow) incoming2ConceptsRows.get(i); // First check the cache Concept2Concept fromCache = (Concept2Concept) context.fromCache(Concept2Concept.class, row .getIntColumn("id")); if (fromCache != null) { incoming2Concepts[i] = fromCache; } else { incoming2Concepts[i] = new Concept2Concept(row); } } return incoming2Concepts; } finally { if (rows != null) { rows.close(); } } } public static Concept2Concept[] findByChildAndRole(Context context,Integer outgoing_id,Integer role_id) throws SQLException, AuthorizeException { if (outgoing_id == null||role_id == null||outgoing_id<0||role_id<0) { return null; } Object[] paramArr = new Object[] {outgoing_id,outgoing_id}; // All email addresses are stored as lowercase, so ensure that the email address is lowercased for the lookup TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM Concept2Concept WHERE outgoing_id = ? AND role_id = ? 
", paramArr); try { List<TableRow> incoming2ConceptsRows = rows.toList(); Concept2Concept[] incoming2Concepts = new Concept2Concept[incoming2ConceptsRows.size()]; for (int i = 0; i < incoming2ConceptsRows.size(); i++) { TableRow row = (TableRow) incoming2ConceptsRows.get(i); // First check the cache Concept2Concept fromCache = (Concept2Concept) context.fromCache(Concept2Concept.class, row .getIntColumn("id")); if (fromCache != null) { incoming2Concepts[i] = fromCache; } else { incoming2Concepts[i] = new Concept2Concept(row); } } return incoming2Concepts; } finally { if (rows != null) { rows.close(); } } } public static Concept2Concept[] findByChild(Context context,Integer outgoing_id) throws SQLException, AuthorizeException { if (outgoing_id == null||outgoing_id<0) { return null; } Object[] paramArr = new Object[] {outgoing_id}; // All email addresses are stored as lowercase, so ensure that the email address is lowercased for the lookup TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM Concept2Concept WHERE outgoing_id = ? 
ORDER BY role_id asc", paramArr); try { List<TableRow> incoming2ConceptsRows = rows.toList(); Concept2Concept[] incoming2Concepts = new Concept2Concept[incoming2ConceptsRows.size()]; for (int i = 0; i < incoming2ConceptsRows.size(); i++) { TableRow row = (TableRow) incoming2ConceptsRows.get(i); // First check the cache Concept2Concept fromCache = (Concept2Concept) context.fromCache(Concept2Concept.class, row .getIntColumn("id")); if (fromCache != null) { incoming2Concepts[i] = fromCache; } else { incoming2Concepts[i] = new Concept2Concept(row); } } return incoming2Concepts; } finally { if (rows != null) { rows.close(); } } } public static Concept2Concept[] findByParent(Context context,Integer incoming_id) throws SQLException, AuthorizeException { if (incoming_id == null||incoming_id<0) { return null; } Object[] paramArr = new Object[] {incoming_id}; // All email addresses are stored as lowercase, so ensure that the email address is lowercased for the lookup TableRowIterator rows = DatabaseManager.query(context, "SELECT * FROM Concept2Concept WHERE incoming_id = ? ORDER BY role_id asc", paramArr); try { List<TableRow> incoming2ConceptsRows = rows.toList(); Concept2Concept[] incoming2Concepts = new Concept2Concept[incoming2ConceptsRows.size()]; for (int i = 0; i < incoming2ConceptsRows.size(); i++) { TableRow row = (TableRow) incoming2ConceptsRows.get(i); // First check the cache Concept2Concept fromCache = (Concept2Concept) context.fromCache(Concept2Concept.class, row .getIntColumn("id")); if (fromCache != null) { incoming2Concepts[i] = fromCache; } else { incoming2Concepts[i] = new Concept2Concept(row); } } return incoming2Concepts; } finally { if (rows != null) { rows.close(); } } } }
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.optaplanner.workbench.screens.solver.client.editor;

import java.util.List;
import java.util.function.Supplier;

import com.google.gwt.core.client.GWT;
import com.google.gwtmockito.GwtMock;
import com.google.gwtmockito.GwtMockitoTestRunner;
import elemental2.promise.Promise;
import org.guvnor.common.services.shared.metadata.model.Metadata;
import org.guvnor.common.services.shared.metadata.model.Overview;
import org.guvnor.common.services.shared.validation.model.ValidationMessage;
import org.guvnor.messageconsole.client.console.widget.button.AlertsButtonMenuItemBuilder;
import org.jboss.errai.common.client.api.Caller;
import org.jboss.errai.common.client.api.ErrorCallback;
import org.jboss.errai.common.client.api.RemoteCallback;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kie.workbench.common.widgets.client.popups.validation.ValidationPopup;
import org.kie.workbench.common.widgets.metadata.client.KieEditorWrapperView;
import org.kie.workbench.common.widgets.metadata.client.widget.OverviewWidgetPresenter;
import org.mockito.Mock;
import org.optaplanner.workbench.screens.solver.client.resources.SolverEditorResources;
import org.optaplanner.workbench.screens.solver.client.resources.images.SolverEditorImageResources;
import org.optaplanner.workbench.screens.solver.client.type.SolverResourceType;
import org.optaplanner.workbench.screens.solver.model.ScoreDirectorFactoryConfigModel;
import org.optaplanner.workbench.screens.solver.model.SolverConfigModel;
import org.optaplanner.workbench.screens.solver.model.SolverModelContent;
import org.optaplanner.workbench.screens.solver.model.TerminationConfigModel;
import org.optaplanner.workbench.screens.solver.service.SolverEditorService;
import org.uberfire.backend.vfs.ObservablePath;
import org.uberfire.backend.vfs.Path;
import org.uberfire.client.promise.Promises;
import org.uberfire.ext.editor.commons.client.history.VersionRecordManager;
import org.uberfire.ext.editor.commons.service.support.SupportsSaveAndRename;
import org.uberfire.mvp.Command;
import org.uberfire.mvp.PlaceRequest;
import org.uberfire.promise.SyncPromises;

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;

/**
 * Unit test for {@code SolverEditorPresenter}. The presenter is built with a
 * hand-rolled {@link Caller} mock ({@link ServiceMock}) that invokes the Errai
 * remote callback synchronously, and with {@link SyncPromises} so promise
 * chains resolve inline during the test.
 */
@RunWith(GwtMockitoTestRunner.class)
public class SolverEditorPresenterTest {

    // GWT resource bundles must be @GwtMock'd so GwtMockito can stub them.
    @GwtMock
    SolverEditorImageResources solverEditorImageResources;

    @GwtMock
    SolverEditorResources solverEditorResources;

    @Mock
    KieEditorWrapperView kieView;

    @Mock
    ObservablePath path;

    @Mock
    VersionRecordManager versionRecordManager;

    @Mock
    AlertsButtonMenuItemBuilder alertsButtonMenuItemBuilder;

    @Mock
    private SolverEditorView view;

    // Presenter under test; built as a spy in setUp() so individual methods
    // (e.g. getModel) can be stubbed per test.
    private SolverEditorPresenter presenter;

    // Model fixtures shared between setUp() and the assertions in load().
    private TerminationConfigModel terminationConfigModel = new TerminationConfigModel();
    private ScoreDirectorFactoryConfigModel scoreDirectorFactoryConfig = new ScoreDirectorFactoryConfigModel();
    private SolverConfigModel model;
    private SolverResourceType resourceType;
    private ServiceMock solverService;
    private Promises promises;

    /**
     * Builds the presenter with synchronous promises and a synchronous
     * service caller, overriding the collaborators normally injected by the
     * framework via the anonymous-subclass instance initializer.
     */
    @Before
    public void setUp() throws Exception {
        promises = new SyncPromises();
        model = new SolverConfigModel();
        model.setTerminationConfig(terminationConfigModel);
        model.setScoreDirectorFactoryConfig(scoreDirectorFactoryConfig);

        resourceType = GWT.create(SolverResourceType.class);
        when(resourceType.getSuffix()).thenReturn("solver.xml");
        when(resourceType.accept(path)).thenReturn(true);
        // NOTE(review): this stubbing immediately overrides the line above,
        // so accept(path) ultimately answers false. Presumably one of the two
        // stubs was meant for a different Path argument — confirm intent
        // before removing either line.
        when(resourceType.accept(path)).thenReturn(false);

        when(versionRecordManager.getCurrentPath()).thenReturn(path);

        solverService = new ServiceMock();

        presenter = spy(new SolverEditorPresenter(view,
                                                  resourceType,
                                                  mock(XMLViewer.class),
                                                  new NotificationEventMock(),
                                                  solverService,
                                                  mock(ValidationPopup.class),
                                                  mock(TranslationService.class)) {
            {
                // Instance initializer: replace protected collaborators that
                // the real DI container would inject.
                kieView = mock(KieEditorWrapperView.class);
                versionRecordManager = SolverEditorPresenterTest.this.versionRecordManager;
                alertsButtonMenuItemBuilder = SolverEditorPresenterTest.this.alertsButtonMenuItemBuilder;
                overviewWidget = mock(OverviewWidgetPresenter.class);
                promises = SolverEditorPresenterTest.this.promises;
            }

            @Override
            protected Command getSaveAndRename() {
                return mock(Command.class);
            }

            protected Promise<Void> makeMenuBar() {
                return promises.resolve();
            }

            protected void addSourcePage() {
            }
        });
    }

    /**
     * onStartup() must load the solver config (synchronously, via
     * ServiceMock) and push its termination / score-director parts to the view.
     */
    @Test
    public void load() throws Exception {
        presenter.onStartup(path,
                            mock(PlaceRequest.class));

        verify(view).setTerminationConfigModel(terminationConfigModel);
        verify(view).setScoreDirectorFactoryConfig(scoreDirectorFactoryConfig,
                                                   path);
    }

    /** The content supplier must expose whatever getModel() currently returns. */
    @Test
    public void testGetContentSupplier() throws Exception {
        final SolverConfigModel content = mock(SolverConfigModel.class);
        doReturn(content).when(presenter).getModel();

        final Supplier<SolverConfigModel> contentSupplier = presenter.getContentSupplier();

        assertEquals(content,
                     contentSupplier.get());
    }

    /** Save-and-rename must be routed through the same service caller instance. */
    @Test
    public void testGetSaveAndRenameServiceCaller() throws Exception {
        final Caller<? extends SupportsSaveAndRename<SolverConfigModel, Metadata>> serviceCaller =
                presenter.getSaveAndRenameServiceCaller();

        assertEquals(this.solverService,
                     serviceCaller);
    }

    /**
     * Synchronous Caller stub: remembers the RemoteCallback handed to call()
     * and lets the nested service mock invoke it inline, so service responses
     * arrive before the test method returns.
     */
    private class ServiceMock
            implements Caller<SolverEditorService> {

        // Callback captured from the most recent call(...); invoked by
        // SolverEditorServiceMock.loadContent.
        RemoteCallback remoteCallback;

        private SolverEditorService service = new SolverEditorServiceMock();

        @Override
        public SolverEditorService call() {
            return service;
        }

        @Override
        public SolverEditorService call(RemoteCallback<?> remoteCallback) {
            return call(remoteCallback,
                        null);
        }

        @Override
        public SolverEditorService call(RemoteCallback<?> remoteCallback,
                                        ErrorCallback<?> errorCallback) {
            this.remoteCallback = remoteCallback;
            return call();
        }

        /**
         * Service stub: only loadContent() does real work (it synchronously
         * feeds the shared model fixture to the captured callback); all other
         * operations are unused by these tests and return null / no-op.
         */
        private class SolverEditorServiceMock
                implements SolverEditorService {

            @Override
            public SolverModelContent loadContent(Path path) {
                final Overview overview = new Overview() {{
                    setMetadata(mock(Metadata.class));
                }};
                final SolverModelContent content = new SolverModelContent(model,
                                                                          overview);
                // Deliver the response synchronously through the Errai callback.
                remoteCallback.callback(content);
                return null;
            }

            @Override
            public List<ValidationMessage> smokeTest(Path path,
                                                     SolverConfigModel config) {
                return null;
            }

            @Override
            public Path copy(Path path,
                             String newName,
                             String comment) {
                return null;
            }

            @Override
            public Path copy(Path path,
                             String newName,
                             Path targetDirectory,
                             String comment) {
                return null;
            }

            @Override
            public Path create(Path context,
                               String fileName,
                               SolverConfigModel content,
                               String comment) {
                return null;
            }

            @Override
            public void delete(Path path,
                               String comment) {
            }

            @Override
            public SolverConfigModel load(Path path) {
                return null;
            }

            @Override
            public Path rename(Path path,
                               String newName,
                               String comment) {
                return null;
            }

            @Override
            public Path save(Path path,
                             SolverConfigModel content,
                             Metadata metadata,
                             String comment) {
                return null;
            }

            @Override
            public List<ValidationMessage> validate(Path path,
                                                    SolverConfigModel content) {
                return null;
            }

            @Override
            public String toSource(Path path,
                                   SolverConfigModel model) {
                return null;
            }

            @Override
            public Path saveAndRename(final Path path,
                                      final String newFileName,
                                      final Metadata metadata,
                                      final SolverConfigModel content,
                                      final String comment) {
                return null;
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to you under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.calcite.util.trace;

import org.slf4j.Logger;

import java.util.Objects;

/**
 * Small extension to {@link Logger} with some performance improvements.
 *
 * <p>{@link Logger#info(String format, Object[] params)} is expensive
 * to call, since the caller must always allocate and fill in the array
 * <code>params</code>, even when the <code>level</code> will prevent a message
 * being logged. On the other hand, {@link Logger#info(String msg)}
 * and {@link Logger#info(String msg, Object o)} do not have this
 * problem.
 *
 * <p>As a workaround this class provides
 * {@link #info(String msg, Object o1, Object o2)} etc. (The varargs feature of
 * java 1.5 half-solves this problem, by automatically wrapping args in an
 * array, but it does so without testing the level.)
 *
 * <p>Usage: replace:
 *
 * <blockquote><code>static final Logger tracer =
 * CalciteTracer.getMyTracer();</code></blockquote>
 *
 * <p>by:
 *
 * <blockquote><code>static final CalciteLogger tracer =
 * new CalciteLogger(CalciteTrace.getMyTracer());</code></blockquote>
 */
public class CalciteLogger {
  //~ Instance fields --------------------------------------------------------

  private final Logger logger; // delegate

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a CalciteLogger delegating to the given SLF4J logger.
   *
   * @param logger underlying logger; must not be null
   */
  public CalciteLogger(Logger logger) {
    // Use requireNonNull instead of assert: assertions are disabled by
    // default at runtime, so a null delegate would otherwise surface later
    // as an NPE far from the construction site.
    this.logger = Objects.requireNonNull(logger, "logger");
  }

  //~ Methods ----------------------------------------------------------------

  // WARN

  /**
   * Logs a WARN message with two Object parameters
   */
  public void warn(String format, Object arg1, Object arg2) {
    // slf4j already avoids the array creation for 1 or 2 arg invocations
    logger.warn(format, arg1, arg2);
  }

  /**
   * Conditionally logs a WARN message with three Object parameters
   */
  public void warn(String format, Object arg1, Object arg2, Object arg3) {
    if (logger.isWarnEnabled()) {
      logger.warn(format, arg1, arg2, arg3);
    }
  }

  /**
   * Conditionally logs a WARN message with four Object parameters
   */
  public void warn(String format, Object arg1, Object arg2, Object arg3,
      Object arg4) {
    if (logger.isWarnEnabled()) {
      logger.warn(format, arg1, arg2, arg3, arg4);
    }
  }

  public void warn(String format, Object... args) {
    if (logger.isWarnEnabled()) {
      logger.warn(format, args);
    }
  }

  // INFO

  /**
   * Logs an INFO message with two Object parameters
   */
  public void info(String format, Object arg1, Object arg2) {
    // slf4j already avoids the array creation for 1 or 2 arg invocations
    logger.info(format, arg1, arg2);
  }

  /**
   * Conditionally logs an INFO message with three Object parameters
   */
  public void info(String format, Object arg1, Object arg2, Object arg3) {
    if (logger.isInfoEnabled()) {
      logger.info(format, arg1, arg2, arg3);
    }
  }

  /**
   * Conditionally logs an INFO message with four Object parameters
   */
  public void info(String format, Object arg1, Object arg2, Object arg3,
      Object arg4) {
    if (logger.isInfoEnabled()) {
      logger.info(format, arg1, arg2, arg3, arg4);
    }
  }

  public void info(String format, Object... args) {
    if (logger.isInfoEnabled()) {
      logger.info(format, args);
    }
  }

  // DEBUG

  /**
   * Logs a DEBUG message with two Object parameters
   */
  public void debug(String format, Object arg1, Object arg2) {
    // slf4j already avoids the array creation for 1 or 2 arg invocations
    logger.debug(format, arg1, arg2);
  }

  /**
   * Conditionally logs a DEBUG message with three Object parameters
   */
  public void debug(String format, Object arg1, Object arg2, Object arg3) {
    if (logger.isDebugEnabled()) {
      logger.debug(format, arg1, arg2, arg3);
    }
  }

  /**
   * Conditionally logs a DEBUG message with four Object parameters
   */
  public void debug(String format, Object arg1, Object arg2, Object arg3,
      Object arg4) {
    if (logger.isDebugEnabled()) {
      logger.debug(format, arg1, arg2, arg3, arg4);
    }
  }

  public void debug(String format, Object... args) {
    if (logger.isDebugEnabled()) {
      logger.debug(format, args);
    }
  }

  // TRACE

  /**
   * Logs a TRACE message with two Object parameters
   */
  public void trace(String format, Object arg1, Object arg2) {
    // slf4j already avoids the array creation for 1 or 2 arg invocations
    logger.trace(format, arg1, arg2);
  }

  /**
   * Conditionally logs a TRACE message with three Object parameters
   */
  public void trace(String format, Object arg1, Object arg2, Object arg3) {
    if (logger.isTraceEnabled()) {
      logger.trace(format, arg1, arg2, arg3);
    }
  }

  /**
   * Conditionally logs a TRACE message with four Object parameters
   */
  public void trace(String format, Object arg1, Object arg2, Object arg3,
      Object arg4) {
    if (logger.isTraceEnabled()) {
      logger.trace(format, arg1, arg2, arg3, arg4);
    }
  }

  public void trace(String format, Object... args) {
    if (logger.isTraceEnabled()) {
      logger.trace(format, args);
    }
  }

  // We expose and delegate the commonly used part of the Logger interface.
  // For everything else, just expose the delegate. (Could use reflection.)
  public Logger getLogger() {
    return logger;
  }

  // Hold-over from the previous j.u.logging implementation
  public void warn(String msg) {
    logger.warn(msg);
  }

  public void info(String msg) {
    logger.info(msg);
  }
}
/*
 * Copyright 2013-2014 Richard M. Hightower
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.advantageous.boon.primitive;

import io.advantageous.boon.core.Exceptions;
import io.advantageous.boon.core.reflection.Invoker;

import java.lang.invoke.ConstantCallSite;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Method;
import java.util.Arrays;

import static io.advantageous.boon.core.Exceptions.die;
import static io.advantageous.boon.core.Exceptions.handle;

/**
 * Utility methods for primitive {@code float[]} arrays: growing, shrinking,
 * slicing, inserting, membership tests, equality/hashing, and numeric
 * reductions (sum, mean, variance, standard deviation, median, min/max),
 * plus fast functional "reduce by" support via method handles with a
 * reflection fallback.
 *
 * <p>Index arguments throughout follow the Boon convention implemented by
 * {@code calculateIndex}/{@code calculateEndIndex}: negative indexes count
 * from the end of the array (-1 is the last element) and out-of-range
 * indexes are clamped rather than raising.</p>
 */
public class Flt {


    /**
     * Returns a copy of {@code array} grown by {@code size} extra zero-filled
     * slots at the end.
     *
     * @param array source array (must not be null)
     * @param size  number of slots to add
     * @return new, larger array containing the original contents
     */
    public static float[] grow( float[] array, final int size ) {
        Exceptions.requireNonNull( array );

        float[] newArray = new float[ array.length + size ];
        System.arraycopy( array, 0, newArray, 0, array.length );
        return newArray;
    }


    /**
     * Returns a copy of {@code array} with double the capacity.
     * NOTE(review): a zero-length input yields another zero-length array,
     * so callers growing from empty must seed at least one slot themselves.
     *
     * @param array source array (must not be null)
     * @return new array of length {@code array.length * 2}
     */
    public static float[] grow( float[] array ) {
        Exceptions.requireNonNull( array );

        float[] newArray = new float[ array.length * 2 ];
        System.arraycopy( array, 0, newArray, 0, array.length );
        return newArray;
    }


    /**
     * Returns a copy of {@code array} with the last {@code size} slots dropped.
     *
     * @param array source array (must not be null)
     * @param size  number of trailing slots to remove
     * @return new array of length {@code array.length - size}
     */
    public static float[] shrink( float[] array, int size ) {
        Exceptions.requireNonNull( array );

        float[] newArray = new float[ array.length - size ];
        System.arraycopy( array, 0, newArray, 0, array.length - size );
        return newArray;
    }


    /**
     * Returns a copy of {@code array} with all zero-valued elements removed.
     * (The comparison is against {@code 0.0f}; note {@code -0.0f == 0.0f} is
     * true, so negative zero is also dropped.)
     *
     * @param array source array (must not be null)
     * @return new array containing only the non-zero elements, in order
     */
    public static float[] compact( float[] array ) {
        Exceptions.requireNonNull( array );

        int zeroCount = 0;
        for ( float value : array ) {
            if ( value == 0.0f ) {
                zeroCount++;
            }
        }
        float[] newArray = new float[ array.length - zeroCount ];

        int j = 0;
        for ( float value : array ) {
            if ( value == 0.0f ) {
                continue;
            }
            newArray[ j ] = value;
            j++;
        }
        return newArray;
    }


    /**
     * Creates a zero-filled float array.
     *
     * @param size size of the array you want to make
     * @return array
     */
    public static float[] arrayOfFloat( final int size ) {
        return new float[ size ];
    }


    /**
     * Varargs-to-array pass-through (returns its argument).
     *
     * @param array array
     * @return array
     */
    public static float[] array( final float... array ) {
        Exceptions.requireNonNull( array );
        return array;
    }


    /** Alias for {@link #len(float[])}. */
    public static int lengthOf( float[] array ) {
        return len( array );
    }

    /** Returns the length of the array. */
    public static int len( float[] array ) {
        return array.length;
    }


    /** Alias for {@link #idx(float[], int)}. */
    public static float atIndex( final float[] array, final int index ) {
        return idx( array, index );
    }

    /**
     * Reads the element at {@code index}; negative indexes count from the
     * end and out-of-range indexes are clamped (see class javadoc).
     */
    public static float idx( final float[] array, final int index ) {
        final int i = calculateIndex( array, index );
        return array[ i ];
    }


    /** Alias for {@link #idx(float[], int, float)}. */
    public static void atIndex( final float[] array, int index, float value ) {
        idx( array, index, value );
    }

    /**
     * Writes {@code value} at {@code index}; negative indexes count from the
     * end and out-of-range indexes are clamped (see class javadoc).
     */
    public static void idx( final float[] array, int index, float value ) {
        final int i = calculateIndex( array, index );
        array[ i ] = value;
    }


    /** Alias for {@link #slc(float[], int, int)}. */
    public static float[] sliceOf( float[] array, int startIndex, int endIndex ) {
        return slc( array, startIndex, endIndex );
    }

    /**
     * Returns the slice {@code [startIndex, endIndex)} after Boon index
     * normalization of both bounds.
     *
     * @throws ArrayIndexOutOfBoundsException if the normalized end precedes
     *         the normalized start
     */
    public static float[] slc( float[] array, int startIndex, int endIndex ) {

        final int start = calculateIndex( array, startIndex );
        final int end = calculateEndIndex( array, endIndex );
        final int newLength = end - start;

        if ( newLength < 0 ) {
            throw new ArrayIndexOutOfBoundsException(
                    String.format( "start index %d, end index %d, length %d",
                            startIndex, endIndex, array.length )
            );
        }

        float[] newArray = new float[ newLength ];
        System.arraycopy( array, start, newArray, 0, newLength );
        return newArray;
    }


    /** Alias for {@link #slc(float[], int)}. */
    public static float[] sliceOf( float[] array, int startIndex ) {
        return slc( array, startIndex );
    }

    /**
     * Returns the slice from the normalized {@code startIndex} to the end of
     * the array.
     */
    public static float[] slc( float[] array, int startIndex ) {

        final int start = calculateIndex( array, startIndex );
        final int newLength = array.length - start;

        if ( newLength < 0 ) {
            throw new ArrayIndexOutOfBoundsException(
                    String.format( "start index %d, length %d",
                            startIndex, array.length )
            );
        }

        float[] newArray = new float[ newLength ];
        System.arraycopy( array, start, newArray, 0, newLength );
        return newArray;
    }


    /** Alias for {@link #slcEnd(float[], int)}. */
    public static float[] endOfSlice( float[] array, int endIndex ) {
        return slcEnd( array, endIndex );
    }

    /**
     * Returns the slice from the beginning of the array up to (exclusive)
     * the normalized {@code endIndex}.
     */
    public static float[] slcEnd( float[] array, int endIndex ) {

        final int end = calculateEndIndex( array, endIndex );
        final int newLength = end;

        if ( newLength < 0 ) {
            throw new ArrayIndexOutOfBoundsException(
                    String.format( "start index %d, length %d",
                            endIndex, array.length )
            );
        }

        float[] newArray = new float[ newLength ];
        System.arraycopy( array, 0, newArray, 0, newLength );
        return newArray;
    }


    /**
     * Linear-search membership test (exact {@code ==} comparison; NaN is
     * therefore never found).
     */
    public static boolean in( float value, float[] array ) {
        for ( float currentValue : array ) {
            if ( currentValue == value ) {
                return true;
            }
        }
        return false;
    }


    /** Returns a defensive copy of {@code array}. */
    public static float[] copy( float[] array ) {
        Exceptions.requireNonNull( array );
        return Arrays.copyOf( array, array.length );
    }


    /**
     * Returns a copy of {@code array} with {@code v} appended.
     */
    public static float[] add( float[] array, float v ) {
        Exceptions.requireNonNull( array );

        float[] newArray = new float[ array.length + 1 ];
        System.arraycopy( array, 0, newArray, 0, array.length );
        newArray[ array.length ] = v;
        return newArray;
    }

    /**
     * Returns a new array that is the concatenation {@code array + array2}.
     */
    public static float[] add( float[] array, float[] array2 ) {
        Exceptions.requireNonNull( array );

        float[] newArray = new float[ array.length + array2.length ];
        System.arraycopy( array, 0, newArray, 0, array.length );
        System.arraycopy( array2, 0, newArray, array.length, array2.length );
        return newArray;
    }


    /**
     * Returns a copy of {@code array} with {@code v} inserted at the
     * normalized position {@code idx}; an index past the end appends.
     */
    public static float[] insert( final float[] array, final int idx, final float v ) {

        if ( idx >= array.length ) {
            return add( array, v );
        }

        final int index = calculateIndex( array, idx );
        float[] newArray = new float[ array.length + 1 ];

        if ( index != 0 ) {
            // Copy the prefix that precedes the insertion point.
            System.arraycopy( array, 0, newArray, 0, index );
        }

        // Shift everything from the insertion point one slot right.
        // (The original had identical branches for the last-index and
        // general cases; collapsed into a single copy.)
        System.arraycopy( array, index, newArray, index + 1, array.length - index );

        newArray[ index ] = v;
        return newArray;
    }


    /**
     * Returns a copy of {@code array} with all of {@code values} inserted at
     * the normalized position {@code fromIndex}; an index past the end
     * appends the whole block.
     */
    public static float[] insert( final float[] array, final int fromIndex, final float[] values ) {
        Exceptions.requireNonNull( array );

        if ( fromIndex >= array.length ) {
            return add( array, values );
        }

        final int index = calculateIndex( array, fromIndex );
        float[] newArray = new float[ array.length + values.length ];

        if ( index != 0 ) {
            // Copy the prefix that precedes the insertion point.
            System.arraycopy( array, 0, newArray, 0, index );
        }

        final int toIndex = index + values.length;

        // Shift the remainder of the original array past the inserted block.
        // (Identical duplicate branches in the original collapsed.)
        System.arraycopy( array, index, newArray, toIndex, newArray.length - toIndex );

        // Drop the inserted values into the gap.
        System.arraycopy( values, 0, newArray, index, values.length );

        return newArray;
    }


    /**
     * Normalizes an element index: negative values count from the end
     * (-1 is the last element) and the result is clamped into
     * {@code [0, length - 1]}.
     */
    private static int calculateIndex( float[] array, int originalIndex ) {
        final int length = array.length;

        int index = originalIndex;

        // Negative index reads from the right: -1 is the last element.
        if ( index < 0 ) {
            index = length + index;
        }

        // Clamp rather than throw: a negative index whose magnitude exceeds
        // the length becomes 0, and anything past the end becomes the last
        // valid element.
        if ( index < 0 ) {
            index = 0;
        }
        if ( index >= length ) {
            index = length - 1;
        }
        return index;
    }


    /**
     * Normalizes an end (exclusive) index: negative values count from the
     * end, and the result is clamped into {@code [0, length]}.
     */
    private static int calculateEndIndex( float[] array, int originalIndex ) {
        final int length = array.length;

        int index = originalIndex;

        // Negative index reads from the right: -1 is the last element.
        if ( index < 0 ) {
            index = length + index;
        }

        // Clamp rather than throw; an end index may legitimately equal length.
        if ( index < 0 ) {
            index = 0;
        }
        if ( index > length ) {
            index = length;
        }
        return index;
    }


    /**
     * Checks to see if two arrays are equal.
     *
     * @param expected expected array
     * @param got      got array
     * @return true if equal, or throws (via {@code Exceptions.die}) if not
     */
    public static boolean equalsOrDie( float[] expected, float[] got ) {
        if ( expected.length != got.length ) {
            Exceptions.die( "Lengths did not match, expected length", expected.length,
                    "but got", got.length );
        }

        for ( int index = 0; index < expected.length; index++ ) {
            if ( expected[ index ] != got[ index ] ) {
                Exceptions.die( "value at index did not match index", index,
                        "expected value", expected[ index ],
                        "but got", got[ index ] );
            }
        }
        return true;
    }


    /**
     * Checks to see if two arrays are equal.
     *
     * @param expected expected array
     * @param got      got array
     * @return true if equal, false if not
     */
    public static boolean equals( float[] expected, float[] got ) {
        if ( expected.length != got.length ) {
            return false;
        }

        for ( int index = 0; index < expected.length; index++ ) {
            if ( expected[ index ] != got[ index ] ) {
                return false;
            }
        }
        return true;
    }


    /** Public interface for a very fast reduce by. */
    public static interface ReduceBy {
        double reduce( double sum, float value );
    }

    /**
     * A very fast reduce by over the whole array.
     *
     * @param array    array of items to reduce by
     * @param reduceBy reduceBy interface
     * @return the final value
     */
    public static double reduceBy( final float[] array, ReduceBy reduceBy ) {
        double sum = 0;
        for ( float v : array ) {
            sum = reduceBy.reduce( sum, v );
        }
        return sum;
    }


    /**
     * Reduce by over the half-open index range {@code [start, length)}.
     * NOTE: {@code length} is an exclusive end index, not a count.
     *
     * @param array    array of items to reduce by
     * @param start    where to start in the array
     * @param length   exclusive end index
     * @param reduceBy the function to do the reduce by
     * @return the reduction
     */
    public static double reduceBy( final float[] array, final int start, final int length,
                                   ReduceBy reduceBy ) {
        double sum = 0;
        for ( int index = start; index < length; index++ ) {
            sum = reduceBy.reduce( sum, array[ index ] );
        }
        return sum;
    }


    /**
     * Reduce by over {@code [0, length)}.
     *
     * @param array    array of items to reduce by
     * @param length   exclusive end index
     * @param reduceBy the function to do the reduce by
     * @return the reduction
     */
    public static double reduceBy( final float[] array, final int length, ReduceBy reduceBy ) {
        double sum = 0;
        for ( int index = 0; index < length; index++ ) {
            sum = reduceBy.reduce( sum, array[ index ] );
        }
        return sum;
    }


    /**
     * Reduce by using a method-handle call site derived from {@code object}'s
     * reducer method; falls back to reflection for anonymous classes or if
     * call-site creation fails.
     *
     * @param array  array of items to reduce by
     * @param object object that contains the reduce by function
     * @param <T>    the type of object
     * @return the final reduction
     */
    public static <T> double reduceBy( final float[] array, T object ) {

        // Method handles cannot be resolved for anonymous classes; use reflection.
        if ( object.getClass().isAnonymousClass() ) {
            return reduceByR( array, object );
        }

        try {
            ConstantCallSite callSite = Invoker.invokeReducerLongIntReturnLongMethodHandle( object );
            MethodHandle methodHandle = callSite.dynamicInvoker();
            try {
                double sum = 0;
                for ( float v : array ) {
                    sum = ( double ) methodHandle.invokeExact( sum, v );
                }
                return sum;
            } catch ( Throwable throwable ) {
                return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
            }
        } catch ( Exception ex ) {
            return reduceByR( array, object );
        }
    }


    /**
     * Reduce by a named method on {@code object}; method-handle fast path
     * with reflection fallback.
     *
     * @param array      array of items to reduce by
     * @param object     object that contains the reduce by function
     * @param methodName name of the reducer method
     * @param <T>        the type of object
     * @return the final reduction
     */
    public static <T> double reduceBy( final float[] array, T object, String methodName ) {

        if ( object.getClass().isAnonymousClass() ) {
            return reduceByR( array, object, methodName );
        }

        try {
            ConstantCallSite callSite =
                    Invoker.invokeReducerLongIntReturnLongMethodHandle( object, methodName );
            MethodHandle methodHandle = callSite.dynamicInvoker();
            try {
                double sum = 0;
                for ( float v : array ) {
                    sum = ( double ) methodHandle.invokeExact( sum, v );
                }
                return sum;
            } catch ( Throwable throwable ) {
                return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
            }
        } catch ( Exception ex ) {
            return reduceByR( array, object, methodName );
        }
    }


    /**
     * Reflection fallback when the call site will not work or did not work.
     *
     * @param array  array of items to reduce by
     * @param object function object
     * @param <T>    type of function object
     * @return result
     */
    private static <T> double reduceByR( final float[] array, T object ) {
        try {
            Method method = Invoker.invokeReducerLongIntReturnLongMethod( object );
            double sum = 0;
            for ( float v : array ) {
                sum = ( double ) method.invoke( object, sum, v );
            }
            return sum;
        } catch ( Throwable throwable ) {
            return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
        }
    }


    /**
     * Reflection-based reduce by a named method.
     *
     * @param array      array of items to reduce by
     * @param object     function
     * @param methodName name of method
     * @param <T>        type of function
     * @return reduction
     */
    private static <T> double reduceByR( final float[] array, T object, String methodName ) {
        try {
            Method method = Invoker.invokeReducerLongIntReturnLongMethod( object, methodName );
            double sum = 0;
            for ( float v : array ) {
                sum = ( double ) method.invoke( object, sum, v );
            }
            return sum;
        } catch ( Throwable throwable ) {
            return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
        }
    }


    /**
     * Reflection-based reduce by a named method over {@code [0, length)}.
     *
     * @param array      array of items to reduce by
     * @param length     exclusive end index
     * @param object     function
     * @param methodName name of method
     * @param <T>        type of function
     * @return reduction
     */
    private static <T> double reduceByR( final float[] array, int length, T object,
                                         String methodName ) {
        try {
            Method method = Invoker.invokeReducerLongIntReturnLongMethod( object, methodName );
            double sum = 0;
            for ( int index = 0; index < length; index++ ) {
                sum = ( double ) method.invoke( object, sum, array[ index ] );
            }
            return sum;
        } catch ( Throwable throwable ) {
            return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
        }
    }


    /**
     * Reflection-based reduce by over {@code [0, length)}.
     *
     * @param array  array of items to reduce by
     * @param length exclusive end index
     * @param object function
     * @param <T>    type of function
     * @return reduction
     */
    private static <T> double reduceByR( final float[] array, int length, T object ) {
        try {
            Method method = Invoker.invokeReducerLongIntReturnLongMethod( object );
            double sum = 0;
            for ( int index = 0; index < length; index++ ) {
                sum = ( double ) method.invoke( object, sum, array[ index ] );
            }
            return sum;
        } catch ( Throwable throwable ) {
            return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
        }
    }


    /**
     * Reflection-based reduce by over {@code [start, length)}.
     * Added so the range-based public overload no longer silently reduces
     * the whole array on its fallback path.
     *
     * @param array  array of items to reduce by
     * @param start  where to start in the array
     * @param length exclusive end index
     * @param object function
     * @return reduction
     */
    private static double reduceByR( final float[] array, int start, int length, Object object ) {
        try {
            Method method = Invoker.invokeReducerLongIntReturnLongMethod( object );
            double sum = 0;
            for ( int index = start; index < length; index++ ) {
                sum = ( double ) method.invoke( object, sum, array[ index ] );
            }
            return sum;
        } catch ( Throwable throwable ) {
            return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
        }
    }


    /**
     * Reduce by over {@code [0, length)} with a method-handle fast path.
     *
     * @param array  array of items to reduce by
     * @param length exclusive end index
     * @param object function
     * @return reduction
     */
    public static double reduceBy( final float[] array, int length, Object object ) {

        if ( object.getClass().isAnonymousClass() ) {
            return reduceByR( array, length, object );
        }

        try {
            ConstantCallSite callSite = Invoker.invokeReducerLongIntReturnLongMethodHandle( object );
            MethodHandle methodHandle = callSite.dynamicInvoker();
            try {
                double sum = 0;
                for ( int index = 0; index < length; index++ ) {
                    sum = ( double ) methodHandle.invokeExact( sum, array[ index ] );
                }
                return sum;
            } catch ( Throwable throwable ) {
                return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
            }
        } catch ( Exception ex ) {
            return reduceByR( array, length, object );
        }
    }


    /**
     * Reduce by a named function over {@code [0, length)}.
     *
     * @param array        array of items to reduce by
     * @param length       exclusive end index
     * @param function     function
     * @param functionName functionName
     * @return reduction
     */
    public static double reduceBy( final float[] array, int length, Object function,
                                   String functionName ) {

        if ( function.getClass().isAnonymousClass() ) {
            return reduceByR( array, length, function, functionName );
        }

        try {
            ConstantCallSite callSite =
                    Invoker.invokeReducerLongIntReturnLongMethodHandle( function, functionName );
            MethodHandle methodHandle = callSite.dynamicInvoker();
            try {
                double sum = 0;
                for ( int index = 0; index < length; index++ ) {
                    sum = ( double ) methodHandle.invokeExact( sum, array[ index ] );
                }
                return sum;
            } catch ( Throwable throwable ) {
                return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
            }
        } catch ( Exception ex ) {
            return reduceByR( array, length, function, functionName );
        }
    }


    /**
     * Reduce by over {@code [start, length)} with a method-handle fast path.
     * BUG FIX: both fallback paths previously called the whole-array
     * {@code reduceByR(array, object)}, ignoring {@code start}/{@code length};
     * they now reduce only the requested range.
     *
     * @param array  array of items to reduce by
     * @param start  where to start in the array
     * @param length exclusive end index
     * @param object function
     * @return reduction
     */
    public static double reduceBy( final float[] array, int start, int length, Object object ) {

        if ( object.getClass().isAnonymousClass() ) {
            return reduceByR( array, start, length, object );
        }

        try {
            ConstantCallSite callSite = Invoker.invokeReducerLongIntReturnLongMethodHandle( object );
            MethodHandle methodHandle = callSite.dynamicInvoker();
            try {
                double sum = 0;
                for ( int index = start; index < length; index++ ) {
                    sum = ( double ) methodHandle.invokeExact( sum, array[ index ] );
                }
                return sum;
            } catch ( Throwable throwable ) {
                return Exceptions.handle( Long.class, throwable, "Unable to perform reduceBy" );
            }
        } catch ( Exception ex ) {
            return reduceByR( array, start, length, object );
        }
    }


    /**
     * Checks to see if two values are the same.
     *
     * @param expected expected value
     * @param got      got value
     * @return true if equal; throws (via {@code Exceptions.die}) if not
     */
    public static boolean equalsOrDie( float expected, float got ) {
        if ( expected != got ) {
            return Exceptions.die( Boolean.class, "Expected was", expected, "but we got ", got );
        }
        return true;
    }


    /**
     * Compares two values with exact {@code ==} semantics.
     *
     * @param expected expected value
     * @param got      got value
     * @return true or false
     */
    public static boolean equals( float expected, float got ) {
        return expected == got;
    }


    /**
     * Sum of the whole array, accumulated in double with overflow protection.
     *
     * @param values values
     * @return sum
     */
    public static double sum( float[] values ) {
        return sum( values, 0, values.length );
    }

    /**
     * Sum over {@code [0, length)} with overflow protection.
     *
     * @param values values
     * @param length exclusive end index
     * @return sum
     */
    public static float sum( float[] values, int length ) {
        return sum( values, 0, length );
    }

    /**
     * Sum over {@code [start, length)} with overflow protection.
     * BUG FIX: the underflow guard compared against {@code Float.MIN_VALUE},
     * which is the smallest *positive* float (~1.4e-45), so any zero or
     * negative sum incorrectly died as "too small"; it now checks against
     * {@code -Float.MAX_VALUE}.
     *
     * @param values values
     * @param start  where to start in the array
     * @param length exclusive end index
     * @return sum
     */
    public static float sum( float[] values, int start, int length ) {
        double sum = 0;
        for ( int index = start; index < length; index++ ) {
            sum += values[ index ];
        }

        if ( sum < -Float.MAX_VALUE ) {
            Exceptions.die( "overflow the sum is too small", sum );
        }
        if ( sum > Float.MAX_VALUE ) {
            Exceptions.die( "overflow the sum is too big", sum );
        }

        return ( float ) sum;
    }


    /**
     * Big (double-accumulated) sum of the whole array; no range clamping.
     *
     * @param values values
     * @return sum
     */
    public static double bigSum( float[] values ) {
        return bigSum( values, 0, values.length );
    }

    /**
     * Big sum over {@code [0, length)}.
     *
     * @param values values
     * @param length exclusive end index
     * @return sum
     */
    public static double bigSum( float[] values, int length ) {
        return bigSum( values, 0, length );
    }

    /**
     * Big sum over {@code [start, length)}.
     *
     * @param values values
     * @param start  where to start in the array
     * @param length exclusive end index
     * @return sum
     */
    public static double bigSum( float[] values, int start, int length ) {
        double sum = 0;
        for ( int index = start; index < length; index++ ) {
            sum += values[ index ];
        }
        return sum;
    }


    /**
     * Max over {@code [start, length)}.
     * BUG FIX: the accumulator was seeded with {@code Float.MIN_VALUE}
     * (smallest positive float), which returned a wrong result for
     * all-negative input; it is now seeded with negative infinity.
     *
     * @param values values
     * @return max (negative infinity for an empty range)
     */
    public static float max( float[] values, final int start, final int length ) {
        float max = Float.NEGATIVE_INFINITY;
        for ( int index = start; index < length; index++ ) {
            if ( values[ index ] > max ) {
                max = values[ index ];
            }
        }
        return max;
    }

    /**
     * Max of the whole array.
     *
     * @param values values
     * @return max
     */
    public static float max( float[] values ) {
        return max( values, 0, values.length );
    }

    /**
     * Max over {@code [0, length)}.
     *
     * @param values values
     * @return max
     */
    public static float max( float[] values, int length ) {
        return max( values, 0, length );
    }


    /**
     * Min over {@code [start, length)}.
     * Seeded with positive infinity (symmetric with {@link #max}) so that
     * arrays containing only {@code Float.MAX_VALUE} or infinities are
     * handled correctly.
     *
     * @param values values
     * @return min (positive infinity for an empty range)
     */
    public static float min( float[] values, final int start, final int length ) {
        float min = Float.POSITIVE_INFINITY;
        for ( int index = start; index < length; index++ ) {
            if ( values[ index ] < min ) {
                min = values[ index ];
            }
        }
        return min;
    }

    /**
     * Min of the whole array.
     *
     * @param values values
     * @return min
     */
    public static float min( float[] values ) {
        return min( values, 0, values.length );
    }

    /**
     * Min over {@code [0, length)}.
     *
     * @param values values
     * @return min
     */
    public static float min( float[] values, int length ) {
        return min( values, 0, length );
    }


    /**
     * Average over {@code [start, length)} ({@code length} is an exclusive
     * end index).
     *
     * @param values values
     * @return average
     */
    public static float mean( float[] values, final int start, final int length ) {
        return ( float ) meanDouble( values, start, length );
    }

    /**
     * Average over {@code [0, length)}.
     *
     * @param values values
     * @return average
     */
    public static float mean( float[] values, final int length ) {
        return ( float ) meanDouble( values, 0, length );
    }

    /**
     * Average of the whole array.
     *
     * @param values values
     * @return average
     */
    public static float mean( float[] values ) {
        return ( float ) meanDouble( values, 0, values.length );
    }


    /**
     * Calculate variance over {@code [start, length)}.
     *
     * @param values values
     * @param start  start
     * @param length exclusive end index
     * @return variance
     */
    public static float variance( float[] values, final int start, final int length ) {
        return ( float ) varianceDouble( values, start, length );
    }


    /**
     * Mean in double precision over {@code [start, length)}.
     * BUG FIX: the element count is {@code length - start} (since
     * {@code length} is an end index, matching {@code bigSum}'s loop bound);
     * the previous code divided by {@code length}, giving a wrong mean
     * whenever {@code start > 0}.
     */
    private static double meanDouble( float[] values, final int start, final int length ) {
        return bigSum( values, start, length ) / ( ( double ) ( length - start ) );
    }


    /**
     * Calculate variance (population) in double precision over
     * {@code [start, length)}.
     * BUG FIX: divides by the element count {@code length - start} rather
     * than by {@code length} (wrong whenever {@code start > 0}).
     *
     * @param values values
     * @param start  start
     * @param length exclusive end index
     * @return variance
     */
    public static double varianceDouble( float[] values, final int start, final int length ) {
        double mean = meanDouble( values, start, length );
        double temp = 0;
        for ( int index = start; index < length; index++ ) {
            double a = values[ index ];
            temp += ( mean - a ) * ( mean - a );
        }
        return temp / ( length - start );
    }


    /**
     * Calculate variance over {@code [0, length)}.
     *
     * @param values values
     * @param length exclusive end index
     * @return variance
     */
    public static float variance( float[] values, final int length ) {
        return ( float ) varianceDouble( values, 0, length );
    }

    /**
     * Calculate variance of the whole array.
     *
     * @param values values
     * @return variance
     */
    public static float variance( float[] values ) {
        return ( float ) varianceDouble( values, 0, values.length );
    }


    /**
     * Calculate standard deviation over {@code [start, length)}.
     *
     * @param values values
     * @param start  start
     * @param length exclusive end index
     * @return standard deviation
     */
    public static float standardDeviation( float[] values, final int start, final int length ) {
        return ( float ) Math.sqrt( varianceDouble( values, start, length ) );
    }

    /**
     * Calculate standard deviation over {@code [0, length)}.
     *
     * @param values values
     * @param length exclusive end index
     * @return standard deviation
     */
    public static float standardDeviation( float[] values, final int length ) {
        return ( float ) Math.sqrt( varianceDouble( values, 0, length ) );
    }

    /**
     * Calculate standard deviation of the whole array.
     *
     * @param values values
     * @return standard deviation
     */
    public static float standardDeviation( float[] values ) {
        return ( float ) Math.sqrt( varianceDouble( values, 0, values.length ) );
    }


    /**
     * Calculate the median of {@code length} elements starting at
     * {@code start}.
     * NOTE(review): here {@code length} is a COUNT (number of elements
     * copied), unlike mean/variance where it is an end index — confirm
     * callers before unifying the two conventions.
     *
     * @param start  start
     * @param values values
     * @param length number of elements to consider
     * @return median
     */
    public static float median( float[] values, final int start, final int length ) {
        float[] sorted = new float[ length ];
        System.arraycopy( values, start, sorted, 0, length );
        Arrays.sort( sorted );

        if ( length % 2 == 0 ) {
            // Even count: average the two middle elements.
            int middle = sorted.length / 2;
            double median = ( sorted[ middle - 1 ] + sorted[ middle ] ) / 2.0;
            return ( float ) median;
        } else {
            return sorted[ sorted.length / 2 ];
        }
    }

    /**
     * Calculate the median of the first {@code length} elements.
     *
     * @param values values
     * @param length number of elements to consider
     * @return median
     */
    public static float median( float[] values, final int length ) {
        return median( values, 0, length );
    }

    /**
     * Calculate the median of the whole array.
     *
     * @param values values
     * @return median
     */
    public static float median( float[] values ) {
        return median( values, 0, values.length );
    }


    /**
     * Range-wise equality check over {@code [start, end)}; the arrays must
     * also have equal total lengths.
     *
     * @param expected expected array
     * @param got      got array
     * @return true if equal, false if not
     */
    public static boolean equals( int start, int end, float[] expected, float[] got ) {
        if ( expected.length != got.length ) {
            return false;
        }

        for ( int index = start; index < end; index++ ) {
            if ( expected[ index ] != got[ index ] ) {
                return false;
            }
        }
        return true;
    }


    /**
     * Hash code over the whole array using {@code Float.floatToIntBits}
     * (consistent with {@link Arrays#hashCode(float[])}'s per-element bits).
     *
     * @param array array (null yields 0)
     * @return hash code
     */
    public static int hashCode( float array[] ) {
        if ( array == null ) {
            return 0;
        }

        int result = 1;
        for ( float item : array ) {
            result = 31 * result + Float.floatToIntBits( item );
        }
        return result;
    }


    /**
     * Hash code over the index range {@code [start, end)}.
     *
     * @param array array (null yields 0)
     * @return hash code
     */
    public static int hashCode( int start, int end, float array[] ) {
        if ( array == null ) {
            return 0;
        }

        int result = 1;
        for ( int index = start; index < end; index++ ) {
            result = 31 * result + Float.floatToIntBits( array[ index ] );
        }
        return result;
    }

}
/* * Kodkod -- Copyright (c) 2005-present, Emina Torlak * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. 
*/ package kodkod.util.nodes; import static kodkod.ast.RelationPredicate.Name.ACYCLIC; import static kodkod.ast.RelationPredicate.Name.FUNCTION; import static kodkod.ast.RelationPredicate.Name.TOTAL_ORDERING; import static kodkod.ast.operator.FormulaOperator.AND; import static kodkod.ast.operator.FormulaOperator.IMPLIES; import static kodkod.ast.operator.FormulaOperator.OR; import java.util.Collections; import java.util.EnumMap; import java.util.IdentityHashMap; import java.util.Map; import java.util.Set; import kodkod.ast.BinaryFormula; import kodkod.ast.ComparisonFormula; import kodkod.ast.Comprehension; import kodkod.ast.ConstantExpression; import kodkod.ast.Decl; import kodkod.ast.Decls; import kodkod.ast.ExprToIntCast; import kodkod.ast.Expression; import kodkod.ast.Formula; import kodkod.ast.IfExpression; import kodkod.ast.IfIntExpression; import kodkod.ast.IntComparisonFormula; import kodkod.ast.IntToExprCast; import kodkod.ast.MultiplicityFormula; import kodkod.ast.NaryFormula; import kodkod.ast.Node; import kodkod.ast.NotFormula; import kodkod.ast.QuantifiedFormula; import kodkod.ast.Relation; import kodkod.ast.RelationPredicate; import kodkod.ast.SumExpression; import kodkod.ast.Variable; import kodkod.ast.operator.ExprCastOperator; import kodkod.ast.operator.FormulaOperator; import kodkod.ast.visitor.AbstractDetector; import kodkod.ast.visitor.AbstractVoidVisitor; import kodkod.util.collections.ArrayStack; import kodkod.util.collections.IdentityHashSet; import kodkod.util.collections.Stack; /** * A node annotated with information about * structural sharing in its ast/dag. The class * also provides utility methods for collecting * various information about annotated nodes. * * @specfield node: N // annotated node * @specfield source: node.*components ->one Node // maps the subnodes of this.node to nodes from * // which they were derived by some transformation process * // (e.g. 
skolemization, predicate inlining) * @author Emina Torlak */ public final class AnnotatedNode<N extends Node> { private final N node; private final Set<Node> sharedNodes; private final Map<? extends Node, ? extends Node> source; /** * Constructs a new annotator for the given node. * @ensures this.node' = node && this.source' = node.*components<:iden */ private AnnotatedNode(N node) { this.node = node; final SharingDetector detector = new SharingDetector(); node.accept(detector); this.sharedNodes = Collections.unmodifiableSet(detector.sharedNodes()); this.source = Collections.emptyMap(); } /** * Constructs a new annotator for the given node and source map. * @ensures this.node' = node && this.source' = node.*components<:iden ++ source */ private AnnotatedNode(N node, Map<? extends Node, ? extends Node> source) { this.node = node; final SharingDetector detector = new SharingDetector(); node.accept(detector); this.sharedNodes = Collections.unmodifiableSet(detector.sharedNodes()); this.source = source; } /** * Returns an annotation for the given node. The source map of the returned annotation object * maps each descendant of the node to itself. * @return { a: AnnotatedNode<N> | a.node = node && a.source = node.*components<:iden } */ public static <N extends Node> AnnotatedNode<N> annotate(N node) { return new AnnotatedNode<N>(node); } /** * Returns an annotation for the given node. The source map of the returned annotation object * maps each descendant of the node to its value in the given source map, or to itself * if the given source map has no value for that descendant. * @return { a: AnnotatedNode<N> | a.node = node && a.source = (node.*components<:iden) ++ source } */ public static <N extends Node> AnnotatedNode<N> annotate(N node, Map<? extends Node, ? extends Node> source) { return new AnnotatedNode<N>(node,source); } /** * Returns an annotation for an n-ary conjunctions of {@linkplain Nodes#roots(Formula) roots} of the given formula. 
 * The source map of the returned annotation object maps each descendant of the node to itself.
 * The root conjunction itself is mapped to the input formula.
 * @return { a: AnnotatedNode<Formula> | a.node = Formula.and(Nodes.roots(formula)) && a.source = (node.^components<:iden) + a.node->formula }
 */
public static AnnotatedNode<Formula> annotateRoots(Formula formula) {
	// Flatten the formula into a single conjunction of its root conjuncts;
	// the source map records that the flattened node originated from 'formula'.
	final Formula flat = Formula.and(Nodes.roots(formula));
	return new AnnotatedNode<Formula>(flat, Collections.singletonMap(flat, formula));
}

/**
 * Returns this.node.
 * @return this.node
 */
public final N node() {
	return node;
}

/**
 * Returns the source of the given descendant of this.node.  If the
 * descendant has no explicit source mapping, it is its own source.
 * @requires n in this.node.*components
 * @return this.source[n]
 */
public final Node sourceOf(Node n) {
	final Node d = source.get(n);
	return d==null ? n : d;
}

/**
 * Returns the set of all non-leaf descendants of this.node that have more than one parent.
 * @return {n: Node | some n.children && #(n.~components & this.node.*components) > 1 }
 */
public final Set<Node> sharedNodes() {
	return sharedNodes;
}

/**
 * Returns the set of all relations at the leaves of this annotated node.
 * @return Relation & this.node.*components
 */
public final Set<Relation> relations() {
	final Set<Relation> relations = new IdentityHashSet<Relation>();
	final AbstractVoidVisitor visitor = new AbstractVoidVisitor() {
		// Tracks shared nodes already visited so each shared subtree is traversed once.
		private final Set<Node> visited = new IdentityHashSet<Node>(sharedNodes.size());
		protected boolean visited(Node n) {
			// Only shared nodes need revisit-detection; add returns false when already present.
			return sharedNodes.contains(n) && !visited.add(n);
		}
		public void visit(Relation relation) {
			relations.add(relation);
		}
	};
	node.accept(visitor);
	return relations;
}

/**
 * Returns true if this.node contains a child whose meaning depends on
 * integer bounds (i.e. an ExprToIntCast node with SUM operator or an
 * IntToExprCast node or Expression.INTS constant).
 * @return true if this.node contains a child whose meaning depends on integer bounds
 */
public final boolean usesInts() {
	final AbstractDetector detector = new AbstractDetector(sharedNodes) {
		public Boolean visit(IntToExprCast expr) {
			return cache(expr, Boolean.TRUE);
		}
		public Boolean visit(ExprToIntCast intExpr) {
			// NOTE(review): the result of super.visit(intExpr) is discarded for
			// CARDINALITY casts, so this method always reports TRUE regardless of
			// the operator — presumably a `return` is missing before super.visit;
			// verify against the upstream Kodkod sources before changing.
			if (intExpr.op()==ExprCastOperator.CARDINALITY)
				super.visit(intExpr);
			return cache(intExpr, Boolean.TRUE);
		}
		public Boolean visit(ConstantExpression expr) {
			// Only the INTS constant makes a constant expression int-bound dependent.
			return expr==Expression.INTS ? Boolean.TRUE : Boolean.FALSE;
		}
	};
	return (Boolean)node.accept(detector);
}

/**
 * Returns a map of RelationPredicate names to sets of top-level relation predicates with
 * the corresponding names in this.node.  A predicate is considered 'top-level'
 * if it is a component of the top-level conjunction, if any, of this.node.
 * @return a map of RelationPredicate names to sets of top-level relation predicates
 */
public final Map<RelationPredicate.Name, Set<RelationPredicate>> predicates() {
	final PredicateCollector collector = new PredicateCollector(sharedNodes);
	node.accept(collector);
	return collector.preds;
}

/**
 * Returns a Detector that will return TRUE when applied to a descendent
 * of this.node iff the descendent contains a quantified formula.
 * @return a quantified-formula detector seeded with this node's shared nodes
 */
public final AbstractDetector quantifiedFormulaDetector() {
	return new AbstractDetector(sharedNodes) {
		public Boolean visit(QuantifiedFormula quantFormula) {
			return cache(quantFormula, true);
		}
	};
}

/**
 * Returns a Detector that will return TRUE when applied to a descendent
 * of this.node iff the descendent contains a free variable.
 * @return a free-variable detector seeded with this node's shared nodes
 */
public final AbstractDetector freeVariableDetector() {
	return new FreeVariableDetector(sharedNodes);
}

/**
 * Returns a string representation of this annotated node.
 * @return string representation of this annotated node
 */
public String toString() {
	final StringBuilder ret = new StringBuilder();
	ret.append("node: ");
	ret.append(node);
	ret.append("\nshared nodes: ");
	ret.append(sharedNodes);
	ret.append("\nsources: ");
	ret.append(source);
	return ret.toString();
}

/**
 * Detects shared non-leaf descendents of a given node.
 *
 * @specfield node: Node // node to which the analyzer is applied
 */
private static final class SharingDetector extends AbstractVoidVisitor {
	/* maps each internal node with more than one parent to TRUE and all
	 * other internal nodes to FALSE */
	final IdentityHashMap<Node,Boolean> sharingStatus;
	/* @invariant numSharedNodes = #sharingStatus.TRUE */
	int numSharedNodes;

	SharingDetector() {
		sharingStatus = new IdentityHashMap<Node,Boolean>();
	}

	/**
	 * Returns the shared internal nodes of this.node.  This method should
	 * be called only after this visitor has been applied to this.node.
	 * @return {n: Node | #(n.~children & node.*children) > 1 }
	 */
	IdentityHashSet<Node> sharedNodes() {
		final IdentityHashSet<Node> shared = new IdentityHashSet<Node>(numSharedNodes);
		for(Map.Entry<Node,Boolean> entry : sharingStatus.entrySet()) {
			if (entry.getValue()==Boolean.TRUE)
				shared.add(entry.getKey());
		}
		return shared;
	}

	/**
	 * Records the visit to the given node in the status map.
	 * If the node has not been visited before, it is mapped to Boolean.FALSE
	 * and false is returned.  Otherwise, it is mapped to Boolean.TRUE and true
	 * is returned.  The first time a Node is mapped to TRUE, numSharedNodes
	 * is incremented by one.
	 * @ensures no this.shared[node] => this.shared' = this.shared + node->FALSE,
	 *          this.shared[node] = FALSE => this.shared' = this.shared + node->TRUE,
	 *          this.shared' = this.shared
	 * @return this.shared'[node]
	 */
	protected final boolean visited(Node node) {
		Boolean status = sharingStatus.get(node);
		if (!Boolean.TRUE.equals(status)) {
			if (status==null) {
				// first encounter: exactly one parent seen so far
				status = Boolean.FALSE;
			} else { // status == Boolean.FALSE: second encounter => node is shared
				status = Boolean.TRUE;
				numSharedNodes++;
			}
			sharingStatus.put(node,status);
		}
		// Already-TRUE nodes fall straight through: status stays TRUE.
		return status;
	}
}

/**
 * A visitor that detects free variables of a node.
 * @author Emina Torlak
 */
private static final class FreeVariableDetector extends AbstractDetector {
	/* Holds the variables that are currently in scope, with the
	 * variable at the top of the stack being the last declared variable. */
	private final Stack<Variable> varsInScope = new ArrayStack<Variable>();

	/**
	 * Constructs a new free variable detector.
	 */
	FreeVariableDetector(Set<Node> sharedNodes) {
		super(sharedNodes);
	}

	/**
	 * Visits the given comprehension, quantified formula, or sum expression.
	 * The method returns TRUE if the creator body contains any
	 * variable not bound by the decls; otherwise returns FALSE.
	 */
	private Boolean visit(Node creator, Decls decls, Node body) {
		Boolean ret = lookup(creator);
		if (ret!=null) return ret;
		boolean retVal = false;
		// Each decl's bounding expression is visited BEFORE its variable
		// enters scope, so a variable is free in its own bound.
		for(Decl decl : decls) {
			retVal = decl.expression().accept(this) || retVal;
			varsInScope.push(decl.variable());
		}
		retVal = ((Boolean)body.accept(this)) || retVal;
		// Pop exactly the variables pushed above, restoring the enclosing scope.
		for(int i = decls.size(); i > 0; i--) {
			varsInScope.pop();
		}
		return cache(creator, retVal);
	}

	/**
	 * Returns TRUE if the given variable is free in its parent, otherwise returns FALSE.
	 * @return TRUE if the given variable is free in its parent, otherwise returns FALSE
	 */
	public Boolean visit(Variable variable) {
		// Stack.search returns a negative value iff the variable is not in scope,
		// i.e. iff it is free.
		return Boolean.valueOf(varsInScope.search(variable)<0);
	}
	public Boolean visit(Decl decl) {
		Boolean ret = lookup(decl);
		if (ret!=null) return ret;
		return cache(decl, decl.expression().accept(this));
	}
	public Boolean visit(Comprehension comprehension) {
		return visit(comprehension, comprehension.decls(), comprehension.formula());
	}
	public Boolean visit(SumExpression intExpr) {
		return visit(intExpr, intExpr.decls(), intExpr.intExpr());
	}
	public Boolean visit(QuantifiedFormula qformula) {
		return visit(qformula, qformula.decls(), qformula.formula());
	}
}

/**
 * A visitor that detects and collects top-level relation predicates;
 * i.e. predicates that are components in the top-level conjunction,
 * if any, on ANY path starting at the root.
 */
private static final class PredicateCollector extends AbstractVoidVisitor {
	// Current polarity of the traversal (flipped when descending through negations).
	protected boolean negated;
	private final Set<Node> sharedNodes;
	/* if a given node is not mapped at all, it means that it has not been visited;
	 * if it is mapped to FALSE, it has been visited with negated=FALSE,
	 * if it is mapped to TRUE, it has been visited with negated=TRUE,
	 * if it is mapped to null, it has been visited with both values of negated. */
	private final Map<Node,Boolean> visited;
	/* holds the top level predicates at the end of the visit */
	final EnumMap<RelationPredicate.Name, Set<RelationPredicate>> preds;

	/**
	 * Constructs a new collector.
	 * @ensures this.negated' = false
	 */
	PredicateCollector(Set<Node> sharedNodes) {
		this.sharedNodes = sharedNodes;
		this.visited = new IdentityHashMap<Node,Boolean>();
		this.negated = false;
		preds = new EnumMap<RelationPredicate.Name, Set<RelationPredicate>>(RelationPredicate.Name.class);
		preds.put(ACYCLIC, new IdentityHashSet<RelationPredicate>(4));
		preds.put(TOTAL_ORDERING, new IdentityHashSet<RelationPredicate>(4));
		preds.put(FUNCTION, new IdentityHashSet<RelationPredicate>(8));
	}

	/**
	 * Returns true if n has already been visited with the current value of the
	 * negated flag; otherwise returns false.
	 * @ensures records that n is being visited with the current value of the negated flag
	 * @return true if n has already been visited with the current value of the
	 *         negated flag; otherwise returns false
	 */
	@Override
	protected final boolean visited(Node n) {
		if (sharedNodes.contains(n)) {
			if (!visited.containsKey(n)) { // first visit
				visited.put(n, Boolean.valueOf(negated));
				return false;
			} else {
				final Boolean visit = visited.get(n);
				if (visit==null || visit==negated) { // already visited with same negated value
					return true;
				} else { // already visited with different negated value
					visited.put(n, null);
					return false;
				}
			}
		}
		// Unshared nodes have a single parent and thus need no revisit bookkeeping.
		return false;
	}

	/**
	 * Calls visited(comp); comp's children are not top-level formulas
	 * so they are not visited.
	 */
	public void visit(Comprehension comp) {
		visited(comp);
	}
	/**
	 * Calls visited(ifexpr); ifexpr's children are not top-level formulas
	 * so they are not visited.
	 */
	public void visit(IfExpression ifexpr) {
		visited(ifexpr);
	}
	/**
	 * Calls visited(ifexpr); ifexpr's children are not top-level formulas
	 * so they are not visited.
	 */
	public void visit(IfIntExpression ifexpr) {
		visited(ifexpr);
	}
	/**
	 * Calls visited(intComp); intComp's children are not top-level formulas
	 * so they are not visited.
	 */
	public void visit(IntComparisonFormula intComp) {
		visited(intComp);
	}
	/**
	 * Calls visited(quantFormula); quantFormula's children are not top-level formulas
	 * so they are not visited.
	 */
	public void visit(QuantifiedFormula quantFormula) {
		visited(quantFormula);
	}

	/**
	 * Visits the children of the given formula if it has not been visited already with
	 * the given value of the negated flag and if binFormula.op==IMPLIES && negated or
	 * binFormula.op==AND && !negated or binFormula.op==OR && negated.  Otherwise does nothing.
	 * @see kodkod.ast.visitor.AbstractVoidVisitor#visit(kodkod.ast.BinaryFormula)
	 */
	public void visit(BinaryFormula binFormula) {
		if (visited(binFormula)) return;
		final FormulaOperator op = binFormula.op();

		if ((!negated && op==AND) || (negated && op==OR)) { // op==AND || op==OR
			// A conjunction (or negated disjunction) keeps both children top-level.
			binFormula.left().accept(this);
			binFormula.right().accept(this);
		} else if (negated && op==IMPLIES) { // !(a => b) = !(!a || b) = a && !b
			// Left child is visited with flipped polarity, right child with current.
			negated = !negated;
			binFormula.left().accept(this);
			negated = !negated;
			binFormula.right().accept(this);
		}
	}

	/**
	 * Visits the children of the given formula if it has not been visited already with
	 * the given value of the negated flag and if formula.op==OR && negated or
	 * formula.op==AND && !negated.  Otherwise does nothing.
	 * @see kodkod.ast.visitor.AbstractVoidVisitor#visit(kodkod.ast.NaryFormula)
	 */
	public void visit(NaryFormula formula) {
		if (visited(formula)) return;
		final FormulaOperator op = formula.op();
		if ((!negated && op==AND) || (negated && op==OR)) { // op==AND || op==OR
			for(Formula child : formula) {
				child.accept(this);
			}
		}
	}

	/**
	 * Visits the child of the given formula with the negation of the current
	 * value of the negated flag, if it has not already been visited
	 * with the current value of this.negated; otherwise does nothing.
	 */
	public void visit(NotFormula not) {
		if (visited(not)) return;
		negated = !negated;
		not.formula().accept(this);
		negated = !negated;
	}

	/**
	 * Calls visited(compFormula); compFormula's children are not top-level formulas
	 * so they are not visited.
	 */
	public void visit(ComparisonFormula compFormula) {
		visited(compFormula);
	}
	/**
	 * Calls visited(multFormula); multFormula's child is not a top-level formula
	 * so it is not visited.
	 */
	public void visit(MultiplicityFormula multFormula) {
		visited(multFormula);
	}

	/**
	 * Records the visit to this predicate if it is not negated.
	 */
	public void visit(RelationPredicate pred) {
		if (visited(pred)) return;
		if (!negated) { // only positively-occurring predicates are truly top-level
			preds.get(pred.name()).add(pred);
		}
	}
}
}
/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package com.lifeweb.enitity;

import java.io.Serializable;
import java.util.Date;
import javax.persistence.Basic;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.FetchType;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.ManyToOne;
import javax.persistence.NamedQueries;
import javax.persistence.NamedQuery;
import javax.persistence.Table;
import javax.persistence.Temporal;
import javax.persistence.TemporalType;
import javax.xml.bind.annotation.XmlRootElement;

/**
 * JPA entity mapped to the {@code lifev1.banka_fisleri} table (bank receipt
 * records: deposits and withdrawals against a bank account).
 *
 * Generated by the NetBeans entity wizard; one finder named query exists per
 * persistent column.  Equality is based solely on the auto-generated primary
 * key {@code fisId} (see the warning on {@link #equals(Object)}).
 *
 * @author Life
 */
@Entity
@Table(name = "banka_fisleri", catalog = "lifev1", schema = "")
@XmlRootElement
@NamedQueries({
    @NamedQuery(name = "BankaFisleri.findAll", query = "SELECT b FROM BankaFisleri b"),
    @NamedQuery(name = "BankaFisleri.findByFisId", query = "SELECT b FROM BankaFisleri b WHERE b.fisId = :fisId"),
    @NamedQuery(name = "BankaFisleri.findByFisTarih", query = "SELECT b FROM BankaFisleri b WHERE b.fisTarih = :fisTarih"),
    @NamedQuery(name = "BankaFisleri.findByYatanPara", query = "SELECT b FROM BankaFisleri b WHERE b.yatanPara = :yatanPara"),
    @NamedQuery(name = "BankaFisleri.findByCekilenPara", query = "SELECT b FROM BankaFisleri b WHERE b.cekilenPara = :cekilenPara"),
    @NamedQuery(name = "BankaFisleri.findByFisBelgeno", query = "SELECT b FROM BankaFisleri b WHERE b.fisBelgeno = :fisBelgeno"),
    @NamedQuery(name = "BankaFisleri.findByFisAciklama", query = "SELECT b FROM BankaFisleri b WHERE b.fisAciklama = :fisAciklama"),
    @NamedQuery(name = "BankaFisleri.findByFisOzelkod", query = "SELECT b FROM BankaFisleri b WHERE b.fisOzelkod = :fisOzelkod"),
    @NamedQuery(name = "BankaFisleri.findByIsaret", query = "SELECT b FROM BankaFisleri b WHERE b.isaret = :isaret"),
    @NamedQuery(name = "BankaFisleri.findBySysEkleyen", query = "SELECT b FROM BankaFisleri b WHERE b.sysEkleyen = :sysEkleyen"),
    @NamedQuery(name = "BankaFisleri.findBySysEtarih", query = "SELECT b FROM BankaFisleri b WHERE b.sysEtarih = :sysEtarih"),
    @NamedQuery(name = "BankaFisleri.findBySysDuzelten", query = "SELECT b FROM BankaFisleri b WHERE b.sysDuzelten = :sysDuzelten"),
    @NamedQuery(name = "BankaFisleri.findBySysDtarih", query = "SELECT b FROM BankaFisleri b WHERE b.sysDtarih = :sysDtarih")})
public class BankaFisleri implements Serializable {

    private static final long serialVersionUID = 1L;

    // Auto-generated (IDENTITY) primary key of the receipt row.
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @Basic(optional = false)
    @Column(name = "FIS_ID", nullable = false)
    private Integer fisId;

    // Receipt date (date-only; no time component is persisted).
    @Column(name = "FIS_TARIH")
    @Temporal(TemporalType.DATE)
    private Date fisTarih;

    // @Max(value=?)  @Min(value=?)//if you know range of your decimal fields consider using these annotations to enforce field validation
    // Deposited amount, DECIMAL(15,2) in the database.
    @Column(name = "YATAN_PARA", precision = 15, scale = 2)
    private Double yatanPara;

    // Withdrawn amount, DECIMAL(15,2) in the database.
    @Column(name = "CEKILEN_PARA", precision = 15, scale = 2)
    private Double cekilenPara;

    // Document number of the receipt.
    @Column(name = "FIS_BELGENO", length = 20)
    private String fisBelgeno;

    // Free-text description.
    @Column(name = "FIS_ACIKLAMA", length = 80)
    private String fisAciklama;

    // User-defined special code.
    @Column(name = "FIS_OZELKOD", length = 20)
    private String fisOzelkod;

    // Sign/flag column; mapped by field name (no explicit @Column).
    private Short isaret;

    // Audit column: user who created the row.
    @Column(name = "SYS_EKLEYEN", length = 15)
    private String sysEkleyen;

    // Audit column: creation timestamp, stored as an integer by the schema.
    @Column(name = "SYS_ETARIH")
    private Integer sysEtarih;

    // Audit column: user who last modified the row.
    @Column(name = "SYS_DUZELTEN", length = 15)
    private String sysDuzelten;

    // Audit column: last-modification timestamp, stored as an integer by the schema.
    @Column(name = "SYS_DTARIH")
    private Integer sysDtarih;

    // Owning bank; joined on the bank's name column and fetched eagerly.
    @JoinColumn(name = "BANKA_REF", referencedColumnName = "BANKA_ADI")
    @ManyToOne(fetch = FetchType.EAGER)
    private Bankalar bankaRef;

    /** No-arg constructor required by JPA. */
    public BankaFisleri() {
    }

    /** Constructs an entity with only its primary key set. */
    public BankaFisleri(Integer fisId) {
        this.fisId = fisId;
    }

    public Integer getFisId() {
        return fisId;
    }

    public void setFisId(Integer fisId) {
        this.fisId = fisId;
    }

    public Date getFisTarih() {
        return fisTarih;
    }

    public void setFisTarih(Date fisTarih) {
        this.fisTarih = fisTarih;
    }

    public Double getYatanPara() {
        return yatanPara;
    }

    public void setYatanPara(Double yatanPara) {
        this.yatanPara = yatanPara;
    }

    public Double getCekilenPara() {
        return cekilenPara;
    }

    public void setCekilenPara(Double cekilenPara) {
        this.cekilenPara = cekilenPara;
    }

    public String getFisBelgeno() {
        return fisBelgeno;
    }

    public void setFisBelgeno(String fisBelgeno) {
        this.fisBelgeno = fisBelgeno;
    }

    public String getFisAciklama() {
        return fisAciklama;
    }

    public void setFisAciklama(String fisAciklama) {
        this.fisAciklama = fisAciklama;
    }

    public String getFisOzelkod() {
        return fisOzelkod;
    }

    public void setFisOzelkod(String fisOzelkod) {
        this.fisOzelkod = fisOzelkod;
    }

    public Short getIsaret() {
        return isaret;
    }

    public void setIsaret(Short isaret) {
        this.isaret = isaret;
    }

    public String getSysEkleyen() {
        return sysEkleyen;
    }

    public void setSysEkleyen(String sysEkleyen) {
        this.sysEkleyen = sysEkleyen;
    }

    public Integer getSysEtarih() {
        return sysEtarih;
    }

    public void setSysEtarih(Integer sysEtarih) {
        this.sysEtarih = sysEtarih;
    }

    public String getSysDuzelten() {
        return sysDuzelten;
    }

    public void setSysDuzelten(String sysDuzelten) {
        this.sysDuzelten = sysDuzelten;
    }

    public Integer getSysDtarih() {
        return sysDtarih;
    }

    public void setSysDtarih(Integer sysDtarih) {
        this.sysDtarih = sysDtarih;
    }

    public Bankalar getBankaRef() {
        return bankaRef;
    }

    public void setBankaRef(Bankalar bankaRef) {
        this.bankaRef = bankaRef;
    }

    /** Hash derived from the primary key only (0 while the entity is unpersisted). */
    @Override
    public int hashCode() {
        int hash = 0;
        hash += (fisId != null ? fisId.hashCode() : 0);
        return hash;
    }

    /** Identity-by-primary-key equality, per the NetBeans-generated pattern. */
    @Override
    public boolean equals(Object object) {
        // TODO: Warning - this method won't work in the case the id fields are not set
        if (!(object instanceof BankaFisleri)) {
            return false;
        }
        BankaFisleri other = (BankaFisleri) object;
        if ((this.fisId == null && other.fisId != null) || (this.fisId != null && !this.fisId.equals(other.fisId))) {
            return false;
        }
        return true;
    }

    @Override
    public String toString() {
        return "com.lifeweb.enitity.BankaFisleri[ fisId=" + fisId + " ]";
    }

}
/* * Copyright (C) 2014, Thomas Obenaus. All rights reserved. * Licensed under the New BSD License (3-clause lic) * See attached license-file. * * Author: Thomas Obenaus * EMail: obenaus.thomas@gmail.com * Project: LogFileViewer */ package thobe.logfileviewer.kernel.plugin; import java.io.File; import java.io.FilenameFilter; import java.io.IOException; import java.lang.reflect.Modifier; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.jar.JarEntry; import java.util.jar.JarFile; import java.util.logging.Logger; import thobe.logfileviewer.kernel.LogFileViewerConfiguration; import thobe.logfileviewer.kernel.memory.IMemoryWatchable; import thobe.logfileviewer.kernel.preferences.PluginManagerPrefs; import thobe.logfileviewer.plugin.Plugin; import thobe.logfileviewer.plugin.PluginApiVersion; import thobe.logfileviewer.plugin.api.IConsole; import thobe.logfileviewer.plugin.api.IPlugin; import thobe.logfileviewer.plugin.api.IPluginAccess; import thobe.logfileviewer.plugin.api.IPluginApiVersion; import thobe.logfileviewer.plugin.api.IPluginUI; /** * @author Thomas Obenaus * @source PluginManager.java * @date May 31, 2014 */ public class PluginManager implements IPluginAccess, IMemoryWatchable { private static final String NAME = "thobe.logfileviewer.kernel.PluginManager"; private Map<String, IPlugin> plugins; private Map<String, IPlugin> incompatiblePlugins; private Logger log; private PluginManagerPrefs prefs; private File pluginDirectory; public PluginManager( PluginManagerPrefs prefs, File pluginDirectory ) { this.prefs = prefs; this.pluginDirectory = pluginDirectory; this.log = Logger.getLogger( NAME ); this.plugins = new HashMap<>( ); this.incompatiblePlugins = new HashMap<>( ); } public String 
getPluginDirectory( ) { return ( pluginDirectory != null ? this.pluginDirectory.getAbsolutePath( ) : "N/A" ); } public PluginApiVersion getPluginApiVersion( ) { return new PluginApiVersion( ); } private void checkPluginDirectory( ) throws PluginManagerException { final File def = LogFileViewerConfiguration.getDefaultPluginDir( ); if ( this.pluginDirectory == null ) { LOG( ).warning( "No plugin-directory set, trying the default one ('" + def.getAbsolutePath( ) + "')." ); this.pluginDirectory = def; } else if ( !this.pluginDirectory.exists( ) ) { LOG( ).warning( "Plugin-directory '" + this.pluginDirectory.getAbsolutePath( ) + "' does not exist, trying the default one ('" + def.getAbsolutePath( ) + "')." ); this.pluginDirectory = def; } else if ( !this.pluginDirectory.canRead( ) ) { LOG( ).warning( "Plugin-directory '" + this.pluginDirectory.getAbsolutePath( ) + "' is not readable, trying the default one ('" + def.getAbsolutePath( ) + "')." ); this.pluginDirectory = def; } if ( ( this.pluginDirectory == null ) || ( !this.pluginDirectory.canRead( ) ) || ( !this.pluginDirectory.exists( ) ) ) { String msg = "Unable to use given plugin-directory (" + ( ( this.pluginDirectory != null ) ? "'" + this.pluginDirectory.getAbsolutePath( ) + "'" : "Not set, its null" ) + ") since it is not readable or does not exsit."; LOG( ).severe( msg ); throw new PluginManagerException( msg ); }// if ( ( this.pluginDirectory == null ) || ( !this.pluginDirectory.canRead( ) ) || ( !this.pluginDirectory.exists( ) ) ) } @SuppressWarnings ( "unchecked") public void findAndRegisterPlugins( ) throws PluginManagerException { // determine/ find plugin-directory, throws an exception if it can't be found this.checkPluginDirectory( ); LOG( ).info( "0. 
Looking for plugins in '" + this.pluginDirectory.getAbsolutePath( ) + "'" ); File[] plugins = this.pluginDirectory.listFiles( new FilenameFilter( ) { @Override public boolean accept( File dir, String name ) { if ( name == null ) return false; if ( name.trim( ).isEmpty( ) ) return false; if ( !name.matches( ".*\\.jar$" ) ) return false; return true; } } ); List<JarFile> jarFiles = new ArrayList<JarFile>( ); // 1. Now build the ULR array for loading the jars to class-path LOG( ).info( "1. Now build the ULR array for loading the jars to class-path (" + plugins.length + ")" ); URL[] pluginUrls = new URL[plugins.length]; for ( int i = 0; i < plugins.length; ++i ) { try { jarFiles.add( new JarFile( plugins[i] ) ); pluginUrls[i] = plugins[i].toURI( ).toURL( ); LOG( ).info( "\t'" + pluginUrls[i] + "' will be added to class-path." ); } catch ( MalformedURLException e ) { LOG( ).severe( "Unable to add '" + pluginUrls[i] + "' to class-path." ); } catch ( IOException e ) { LOG( ).severe( "Unable to create jar-file from '" + plugins[i] + "' (this one won't be available at class-path)." ); } }// for ( int i = 0; i < plugins.length; ++i ) // 2. Now find the plugins. ClassLoader appClassLoader = PluginManager.class.getClassLoader( ); Set<Class<? extends IPlugin>> pluginClasses = new HashSet<Class<? extends IPlugin>>( ); LOG( ).info( "2. Now find the plugins." ); for ( JarFile jarFile : jarFiles ) { PluginClassLoader pluginClassLoader = null; try { pluginClassLoader = new PluginClassLoader( appClassLoader, jarFile ); } catch ( IOException | URISyntaxException e1 ) { LOG( ).severe( "Unable to load classes from jar-file '" + jarFile.getName( ) + "' (the corresponding plugin won't be loaded): " + e1.getLocalizedMessage( ) ); continue; } Enumeration<JarEntry> entries = jarFile.entries( ); Class<? 
extends IPlugin> pluginClass = null; while ( entries.hasMoreElements( ) ) { JarEntry entry = entries.nextElement( ); String name = entry.getName( ); int extIndex = name.lastIndexOf( ".class" ); if ( extIndex > 0 ) { String plainClassName = name.substring( 0, extIndex ); plainClassName = plainClassName.replaceAll( "/", "." ); try { Class<?> classToLoad = pluginClassLoader.loadClass( plainClassName ); // check if it is a plugin final boolean bImplementsIPlugin = IPlugin.class.isAssignableFrom( classToLoad ); final boolean bIsAbstract = Modifier.isAbstract( classToLoad.getModifiers( ) ); final boolean bIsInterface = classToLoad.isInterface( ); if ( bImplementsIPlugin && !bIsAbstract && !bIsInterface ) { LOG( ).info( "\tPlugin found: '" + classToLoad.getName( ) + "'" ); pluginClass = ( Class<? extends Plugin> ) classToLoad; } } catch ( NoClassDefFoundError e ) { LOG( ).warning( "\tUnable to load class " + name + " using '" + plainClassName + "': NoClassDefFoundError '" + e.getLocalizedMessage( ) + "'" ); } catch ( ClassNotFoundException e ) { LOG( ).warning( "\tUnable to load class " + name + " using '" + plainClassName + "': ClassNotFoundException '" + e.getLocalizedMessage( ) + "'" ); } }// if ( extIndex > 0 ) }// while ( entries.hasMoreElements( ) ) // extract the version file and write it into a temp-folder if ( pluginClass != null ) { pluginClasses.add( pluginClass ); LOG( ).info( "\tPlugin seems to be valid '" + pluginClass.getName( ) + "'." ); }// if ( pluginClass != null && versionFileStr != null ) else { LOG( ).warning( "\tPlugin '" + pluginClass + "' ignored." ); } }// for ( JarFile jarFile : jarFiles ) IPluginApiVersion apiVersionOfLogFileViewer = new PluginApiVersion( ); // 3. Now register the plugins. LOG( ).info( "3. Now register the plugins (" + pluginClasses.size( ) + "), api of plugin-api of LogFileViewer=" + apiVersionOfLogFileViewer ); for ( Class<? 
extends IPlugin> pluginClass : pluginClasses ) { try { IPlugin plugin = pluginClass.newInstance( ); IPluginApiVersion apiVersionOfPlugin = plugin.getPluginApiVersion( ); if ( !apiVersionOfLogFileViewer.isCompatible( apiVersionOfPlugin ) ) { this.incompatiblePlugins.put( plugin.getPluginName( ), plugin ); LOG( ).warning( "\tPlugin '" + pluginClass.getSimpleName( ) + "' will be ignored. API-missmatch: ApiOfLogFileViewer='" + apiVersionOfLogFileViewer + "' apiOfPlugin='" + apiVersionOfPlugin + "'" ); }// if ( !apiVersionOfLogFileViewer.isCompatible( pluginApiOfPlugin ) ) else { boolean pluginEnabled = prefs.isPluginEnabled( plugin.getPluginName( ) ); plugin.setEnabled( pluginEnabled ); this.registerPlugin( plugin ); LOG( ).info( "\tPlugin '" + plugin.getPluginName( ) + "' sucessfully registered [plugin api: " + apiVersionOfPlugin + ", plugin-api of LogFileViewer: " + apiVersionOfLogFileViewer + "], the plugin is " + ( plugin.isEnabled( ) ? "enabled" : "disabled" ) ); }// if ( !apiVersionOfLogFileViewer.isCompatible( pluginApiOfPlugin ) ) ... else ... } catch ( InstantiationException | IllegalAccessException e ) { LOG( ).severe( "\tError creating plugin: " + e.getLocalizedMessage( ) ); } catch ( NoClassDefFoundError e ) { LOG( ).severe( "\tError creating plugin '" + pluginClass + "' (NoClassDefFoundError): " + e.getLocalizedMessage( ) ); } }// for ( Class<? 
extends Plugin> pluginClass : pluginClasses ) } public void registerPlugin( IPlugin plugin ) { this.plugins.put( plugin.getPluginName( ), plugin ); } public void unregisterPlugin( IPlugin plugin ) { this.plugins.remove( plugin ); } public void unregisterAllPlugins( ) { this.plugins.clear( ); } public Map<String, IPlugin> getPlugins( ) { return plugins; } public Map<String, IPlugin> getIncompatiblePlugins( ) { return incompatiblePlugins; } @Override public IPlugin getPlugin( String pluginName ) { return this.plugins.get( pluginName ); } @Override public boolean hasPlugin( String pluginName ) { return this.plugins.containsKey( pluginName ); } public void freeMemory( ) { synchronized ( this.plugins ) { for ( Entry<String, IPlugin> entry : this.plugins.entrySet( ) ) { IPlugin plugin = entry.getValue( ); long memBeforeFree = plugin.getMemory( ); entry.getValue( ).freeMemory( ); long memAfterFree = plugin.getMemory( ); if ( ( memBeforeFree != 0 ) && ( memBeforeFree <= memAfterFree ) ) { LOG( ).warning( "Plugin '" + plugin.getPluginName( ) + "' failed to free memory (remaining: " + ( memAfterFree / 1024f / 1024f ) + "MB)" ); }// if ( ( memBeforeFree != 0 ) && ( memBeforeFree <= memAfterFree ) ). }// for ( Entry<String, Plugin> entry : this.plugins.entrySet( ) ). }// synchronized ( this.plugins ) . 
} protected Logger LOG( ) { return this.log; } @Override public IConsole getConsole( ) { IConsole console = null; for ( Map.Entry<String, IPlugin> entry : this.plugins.entrySet( ) ) { if ( entry.getValue( ) instanceof IConsole ) { console = ( IConsole ) entry.getValue( ); } } return console; } @Override public Set<IPluginUI> getPluginsNotAttachedToGui( ) { Set<IPluginUI> result = new HashSet<>( ); for ( Map.Entry<String, IPlugin> entry : this.plugins.entrySet( ) ) { IPluginUI uiEntry = ( IPluginUI ) entry.getValue( ); if ( !uiEntry.isAttachedToGUI( ) && entry.getValue( ).isEnabled( ) ) { result.add( uiEntry ); }// if ( !entry.getValue( ).isAttachedToGUI( ) && entry.getValue( ).isEnabled( ) ) }// for ( Map.Entry<String, Plugin> entry : this.plugins.entrySet( ) ) return result; } @Override public long getMemory( ) { long completeMemory = 0; for ( Entry<String, IPlugin> entry : this.getPlugins( ).entrySet( ) ) { IPlugin plugin = entry.getValue( ); completeMemory += plugin.getMemory( ); }// for ( Entry<String, Plugin> entry : this.getPlugins( ).entrySet( ) ) . return completeMemory; } @Override public String getNameOfMemoryWatchable( ) { return NAME; } public PluginManagerPrefs getPrefs( ) { return prefs; } }
/* Copyright 2014 Shahriyar Amini Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.cmuchimps.gort.modules.appview; import java.beans.PropertyChangeEvent; import java.beans.PropertyChangeListener; import java.util.List; import javax.persistence.EntityManager; import javax.swing.JTable; import org.cmuchimps.gort.api.gort.GestureCollection; import org.cmuchimps.gort.api.gort.GortDatabaseService; import org.cmuchimps.gort.modules.dataobject.App; import org.cmuchimps.gort.modules.dataobject.GortEntityManager; import org.cmuchimps.gort.modules.dataobject.Receiver; import org.cmuchimps.gort.modules.tablewidgets.StringTableModel; import org.netbeans.api.project.Project; import org.netbeans.api.settings.ConvertAsProperties; import org.openide.awt.ActionID; import org.openide.awt.ActionReference; import org.openide.filesystems.FileObject; import org.openide.windows.TopComponent; import org.openide.util.NbBundle.Messages; import org.openide.util.WeakListeners; /** * Top component which displays something. 
*/ @ConvertAsProperties( dtd = "-//org.cmuchimps.gort.modules.appview//Receivers//EN", autostore = false) @TopComponent.Description( preferredID = "ReceiversTopComponent", //iconBase="SET/PATH/TO/ICON/HERE", persistenceType = TopComponent.PERSISTENCE_ALWAYS) @TopComponent.Registration(mode = "output", openAtStartup = false) @ActionID(category = "Window", id = "org.cmuchimps.gort.modules.appview.ReceiversTopComponent") @ActionReference(path = "Menu/Window" /*, position = 333 */) @TopComponent.OpenActionRegistration( displayName = "#CTL_ReceiversAction", preferredID = "ReceiversTopComponent") @Messages({ "CTL_ReceiversAction=Receivers", "CTL_ReceiversTopComponent=Receivers", "HINT_ReceiversTopComponent=This is a Receivers window" }) public final class ReceiversTopComponent extends TopComponent implements PropertyChangeListener { private static final String[] HEADERS = {"Name"}; private StringTableModel model; public ReceiversTopComponent() { initComponents(); setName(Bundle.CTL_ReceiversTopComponent()); setToolTipText(Bundle.HINT_ReceiversTopComponent()); model = new StringTableModel(HEADERS); receiversTableScrollPane.setModel(model); receiversTableScrollPane.setTable(new JTable()); // Add a listener to this so that we can update TopComponent.Registry reg = TopComponent.getRegistry(); reg.addPropertyChangeListener(WeakListeners.propertyChange(this, reg)); } /** * This method is called from within the constructor to initialize the form. * WARNING: Do NOT modify this code. The content of this method is always * regenerated by the Form Editor. 
*/ // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents private void initComponents() { receiversTableScrollPane = new org.cmuchimps.gort.modules.tablewidgets.TableScrollPane(); javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this); this.setLayout(layout); layout.setHorizontalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addComponent(receiversTableScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 388, Short.MAX_VALUE) .addContainerGap()) ); layout.setVerticalGroup( layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING) .addGroup(layout.createSequentialGroup() .addContainerGap() .addComponent(receiversTableScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 288, Short.MAX_VALUE) .addContainerGap()) ); }// </editor-fold>//GEN-END:initComponents // Variables declaration - do not modify//GEN-BEGIN:variables private org.cmuchimps.gort.modules.tablewidgets.TableScrollPane receiversTableScrollPane; // End of variables declaration//GEN-END:variables @Override public void componentOpened() { GestureCollection.getInstance().topComponentOpened(this.getClass()); super.componentOpened(); } @Override public void componentClosed() { GestureCollection.getInstance().topComponentClosed(this.getClass()); super.componentClosed(); } @Override protected void componentShowing() { GestureCollection.getInstance().topComponentShowing(this.getClass()); super.componentShowing(); } @Override protected void componentHidden() { GestureCollection.getInstance().topComponentHidden(this.getClass()); super.componentHidden(); } @Override protected void componentActivated() { GestureCollection.getInstance().topComponentActivated(this.getClass()); super.componentActivated(); //To change body of generated methods, choose Tools | Templates. 
} @Override protected void componentDeactivated() { GestureCollection.getInstance().topComponentDeactivated(this.getClass()); super.componentDeactivated(); //To change body of generated methods, choose Tools | Templates. } void writeProperties(java.util.Properties p) { // better to version settings since initial version as advocated at // http://wiki.apidesign.org/wiki/PropertyFiles p.setProperty("version", "1.0"); // TODO store your settings } void readProperties(java.util.Properties p) { String version = p.getProperty("version"); // TODO read your settings according to their version } @Override public void propertyChange(PropertyChangeEvent evt) { if (evt == null) { return; } if (!TopComponent.Registry.PROP_ACTIVATED.equals(evt.getPropertyName())) { return; } update(); } private void clear() { if (model != null) { model.setNoData(); model.fireTableDataChanged(); } } private void update() { TopComponent activated = TopComponent.getRegistry().getActivated(); if (activated == null || !(activated instanceof AppViewCloneableTopComponent)) { return; } System.out.println("Updating app receivers"); AppViewCloneableTopComponent appView = (AppViewCloneableTopComponent) activated; Project project = appView.getProject(); FileObject fo = appView.getFileObject(); clear(); if (project == null || fo == null) { return; } GortDatabaseService gds = project.getLookup().lookup(GortDatabaseService.class); if (gds == null) { return; } GortEntityManager gem = gds.getGortEntityManager(); if (gem == null) { return; } EntityManager em = gem.getEntityManager(); try { App app = gem.selectApp(em, fo.getNameExt()); List<Receiver> receivers = app.getReceivers(); String[][] data = new String[receivers.size()][1]; if (receivers != null) { if (receivers.size() > 0) { int index = 0; for (Receiver r : receivers) { data[index][0] = r.getName(); index++; } model.setData(data); model.fireTableDataChanged(); } else { System.out.println("App has no associated receivers: " + app.getName()); } } } catch 
(Exception e) { e.printStackTrace(); } finally { GortEntityManager.closeEntityManager(em); } } }
package redhat.jee_migration_example.data.itemInventory;

import static javax.ejb.ConcurrencyManagementType.BEAN;
import static javax.ejb.TransactionAttributeType.REQUIRED;
import static javax.ejb.TransactionManagementType.CONTAINER;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import javax.annotation.Resource;
import javax.ejb.ConcurrencyManagement;
import javax.ejb.LocalBean;
import javax.ejb.Startup;
import javax.ejb.Stateful;
import javax.ejb.TransactionAttribute;
import javax.ejb.TransactionManagement;
import javax.inject.Inject;
import javax.transaction.TransactionSynchronizationRegistry;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import redhat.jee_migration_example.EventLoggerContext;
import redhat.jee_migration_example.Item;
import redhat.jee_migration_example.util.JeeMigrationExampleFixture;

import common.jmx.MBeanUtil;
import common.tx.state.AbstractStateManager;

/**
 * Transaction-aware facade over {@link ItemInventory}.
 *
 * <p>While a global transaction is active (as reported by
 * {@link EventLoggerContext#isGlobalTransactionActive()}), reads are answered
 * from the in-flight {@link ItemInventoryState} and writes are buffered via
 * {@link ItemInventoryProcessor}; otherwise every call goes straight through
 * to the underlying inventory. The bean also registers itself as a JMX MBean
 * on startup and unregisters on destruction.
 */
@Startup
@Stateful
@LocalBean
@ConcurrencyManagement(BEAN)
@TransactionManagement(CONTAINER)
@TransactionAttribute(REQUIRED)
public class ItemInventoryManager extends AbstractStateManager<ItemInventoryState> implements ItemInventoryManagerMBean {

    private static final Log log = LogFactory.getLog(ItemInventoryManager.class);

    /** Supplies the "is a global TX active?" decision used by every read/write below. */
    @Inject
    private EventLoggerContext eventLoggerContext;

    /** The authoritative (committed) item store. */
    @Inject
    private ItemInventory itemInventory;

    /** Buffers pending adds/removes until the transaction outcome is known. */
    @Inject
    private ItemInventoryProcessor stateProcessor;

    @Resource
    private TransactionSynchronizationRegistry transactionSynchronizationRegistry;

    public ItemInventoryManager() {
        // nothing for now
    }

    public ItemInventory getItemInventory() {
        return itemInventory;
    }

    public void setItemInventory(ItemInventory itemInventory) {
        this.itemInventory = itemInventory;
    }

    public ItemInventoryProcessor getStateProcessor() {
        return stateProcessor;
    }

    public void setStateProcessor(ItemInventoryProcessor stateProcessor) {
        this.stateProcessor = stateProcessor;
    }

    @Override
    public String getName() {
        return "ItemInventoryManager";
    }

    /** Registers this bean under {@code MBEAN_NAME} once the container has constructed it. */
    @PostConstruct
    public void registerWithJMX() {
        MBeanUtil.registerMBean(this, MBEAN_NAME);
    }

    /** Unregisters the MBean before the bean instance is destroyed. */
    @PreDestroy
    public void unregisterWithJMX() {
        MBeanUtil.unregisterMBean(MBEAN_NAME);
    }

    /** @return a fresh, empty per-transaction state object. */
    public ItemInventoryState createState() {
        return new ItemInventoryState();
    }

    /** Discards any accumulated state by handing back a brand-new one. */
    public ItemInventoryState resetState() {
        // Simplified: the intermediate local added nothing.
        return createState();
    }

    public void updateState() {
        // TODO if this fails then we need to mark global TX as rollback only
        updateState(stateProcessor);
    }

    /**
     * Flushes the buffered state into the real inventory.
     *
     * @param state the pending per-transaction state to persist
     * @return {@code true} on success, {@code false} if the flush failed
     */
    public boolean saveState(ItemInventoryState state) {
        try {
            itemInventory.addToItemStore(state.getAllItemStoreAsMap());
            return true;
        } catch (Exception e) {
            // Fix: this exception used to be swallowed silently, making failed
            // state flushes undiagnosable. The boolean contract is unchanged.
            log.error("Failed to save item inventory state", e);
            return false;
        }
    }

    /** Applies the current state through the processor at commit time. */
    public void commitState() {
        stateProcessor.updateState(currentState);
    }

    @Override
    public void clearContext() {
        itemInventory.clearContext();
    }

    /** Lists all items; answers from the in-flight state while a global TX is active. */
    @Override
    public List<Item> getAllItemStore() {
        if (eventLoggerContext.isGlobalTransactionActive())
            return currentState.getAllItemStore();
        return itemInventory.getAllItemStore();
    }

    @Override
    public Map<String, Item> getAllItemStoreAsMap() {
        if (eventLoggerContext.isGlobalTransactionActive())
            return currentState.getAllItemStoreAsMap();
        return itemInventory.getAllItemStoreAsMap();
    }

    @Override
    public Item getFromItemStore(Long id) {
        if (eventLoggerContext.isGlobalTransactionActive())
            return currentState.getFromItemStore(id);
        return itemInventory.getFromItemStore(id);
    }

    @Override
    public Item getFromItemStore(String key) {
        if (eventLoggerContext.isGlobalTransactionActive())
            return currentState.getFromItemStore(key);
        return itemInventory.getFromItemStore(key);
    }

    /**
     * Adds one item. Inside a global TX the add is only buffered and {@code null}
     * is returned (no id is assigned until commit); otherwise the store's id.
     */
    @Override
    public Long addToItemStore(String key, Item item) {
        if (eventLoggerContext.isGlobalTransactionActive()) {
            stateProcessor.addToPendingItemStore(key, item);
            return null;
        } else {
            return itemInventory.addToItemStore(key, item);
        }
    }

    /**
     * Bulk add. Inside a global TX the map is buffered and {@code null} is
     * returned; otherwise the ids produced by the underlying store.
     */
    @Override
    public List<Long> addToItemStore(Map<String, Item> itemMap) {
        if (eventLoggerContext.isGlobalTransactionActive()) {
            stateProcessor.addToPendingItemStore(itemMap);
            return null;
        } else {
            return itemInventory.addToItemStore(itemMap);
        }
    }

    @Override
    public void removeAllItemStore() {
        if (eventLoggerContext.isGlobalTransactionActive())
            stateProcessor.removeAllPendingItemStore();
        else
            itemInventory.removeAllItemStore();
    }

    @Override
    public void removeFromItemStore(Long id) {
        if (eventLoggerContext.isGlobalTransactionActive())
            stateProcessor.removeFromPendingItemStore(id);
        else
            itemInventory.removeFromItemStore(id);
    }

    @Override
    public void removeFromItemStore(String key) {
        if (eventLoggerContext.isGlobalTransactionActive())
            stateProcessor.removeFromPendingItemStore(key);
        else
            itemInventory.removeFromItemStore(key);
    }

    @Override
    public void removeFromItemStore(Map<String, Item> itemMap) {
        if (eventLoggerContext.isGlobalTransactionActive())
            stateProcessor.removeFromPendingItemStore(itemMap);
        else
            itemInventory.removeFromItemStore(itemMap);
    }
}
/* * Copyright (c) 2008, 2012 Oracle and/or its affiliates. * All rights reserved. Use is subject to license terms. * * This file is available and licensed under the following license: * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * - Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * - Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the distribution. * - Neither the name of Oracle Corporation nor the names of its * contributors may be used to endorse or promote products derived * from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/
package ensemble.sampleproject;

import ensemble.util.Utils;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
import javafx.scene.Node;

/**
 * Helper class to build a Netbeans project for a sample and open it in Netbeans
 */
public class SampleProjectBuilder {
    // Regexes used to rewrite Ensemble sample source into a standalone app.
    // Order of application in loadAndConvertSampleCode() matters.
    private static final Pattern findPackage = Pattern.compile("[ \\t]*package[ \\t]*([^;]*);\\s*");
    private static final Pattern findMultilineComment = Pattern.compile("\\/\\*(.*?)\\*\\/\\s*",Pattern.DOTALL);
    private static final Pattern findRemoveMeBlock = Pattern.compile("\\s+//\\s+REMOVE ME.*?END REMOVE ME",Pattern.DOTALL);
    private static final Pattern findEnsembleImport = Pattern.compile("\\nimport ensemble.*?;");
    // Captures the (width, height) arguments of the sample's super(...) call.
    private static final Pattern findSuperCall = Pattern.compile("super\\(\\s*(\\d+)\\s*,\\s*(\\d+)\\s*\\)\\s*;");
    private static final Pattern findPlayMethod = Pattern.compile("public\\s+void\\s+play\\s*\\(\\)\\s*\\{");
    private static final Pattern findCreate3DContentMethod = Pattern.compile("public\\s+Node\\s+create3dContent\\s*\\(\\)\\s*\\{");

    /**
     * Derives the simple class name from a URL/path to a .java file
     * (text between the last '/' and the last '.').
     *
     * @param urlToSampleJavaFile Url to the sample java source file
     * @return the bare class name, e.g. "MySample"
     */
    public static String getClassName(String urlToSampleJavaFile) {
        return urlToSampleJavaFile.substring(
                urlToSampleJavaFile.lastIndexOf('/')+1,
                urlToSampleJavaFile.lastIndexOf('.'));
    }

    /**
     * Convert source from the form we write in Ensemble to a standalone runnable application
     *
     * @param urlToSampleJavaFile Url to the sample java source file
     * @return The converted source code as a string
     */
    public static String loadAndConvertSampleCode(String urlToSampleJavaFile) {
        // load source file
        String source = Utils.loadFile(urlToSampleJavaFile);
        // check if 3d sample
        final boolean is3D = source.contains("Sample3D");
        // remove comments and package statement
        source = findMultilineComment.matcher(source).replaceFirst("");
        source = findEnsembleImport.matcher(source).replaceAll("");
        source = findPackage.matcher(source).replaceFirst("");
        // remove REMOVE ME ... END REMOVE ME blocks
        source = findRemoveMeBlock.matcher(source).replaceAll("");
        // add imports
        source =
            "import javafx.application.Application;\n" +
            "import javafx.scene.Group;\n" +
            "import javafx.scene.Scene;\n" +
            "import javafx.stage.Stage;\n" +
            source;
        if (is3D) {
            // extra imports only needed for 3D samples
            source =
                "import javafx.scene.transform.Rotate;\n" +
                "import javafx.scene.PerspectiveCamera;\n" +
                "import javafx.scene.transform.Translate;\n" +
                source;
        }
        // add 2 lines of copyright
        source =
            "/**\n" +
            " * Copyright (c) 2008, 2012 Oracle and/or its affiliates.\n" +
            " * All rights reserved. Use is subject to license terms.\n" +
            " */\n" +
            source;
        // make extends Application
        source = source.replaceAll("extends Sample(3D)?", "extends Application");
        // change constructor to init method
        String className = getClassName(urlToSampleJavaFile);
        source = source.replaceAll("public "+className+"\\(\\) \\{", "private void init(Stage primaryStage) {");
        // make getChildren() add to root — but only inside the FIRST class
        // declared in the file; nested/second classes are left untouched.
        int firstClass = source.indexOf(" class ");
        ///System.out.println("firstClass = " + firstClass);
        int secondClass = source.indexOf(" class ",firstClass+1);
        ///System.out.println("secondClass = " + secondClass);
        if (secondClass != -1) {
            source = source.substring(0,secondClass)
                    .replaceAll("(\\s+)getChildren\\(\\)\\.add", "$1root.getChildren().add")
                    .replaceAll("(\\s+)setOn", "$1root.setOn")
                + source.substring(secondClass);
        } else {
            source = source
                    .replaceAll("(\\s+)getChildren\\(\\)\\.add", "$1root.getChildren().add")
                    .replaceAll("(\\s+)setOn", "$1root.setOn");
        }
        // add scene creation: replace the super(width, height) call with
        // stage/scene setup, using the captured width/height.
        Matcher superCallMatcher = findSuperCall.matcher(source);
        if (superCallMatcher.find()) {
            final String width = superCallMatcher.group(1);
            final String height = superCallMatcher.group(2);
            // 3D samples additionally get a camera and a flipped root transform
            final String extrasFor3D = is3D ?
                "\n" +
                "        primaryStage.getScene().setCamera(new PerspectiveCamera());\n" +
                "        root.getTransforms().addAll(\n" +
                "            new Translate("+width+" / 2, "+height+" / 2),\n" +
                "            new Rotate(180, Rotate.X_AXIS)\n" +
                "        );\n" +
                "        root.getChildren().add(create3dContent());" : "";
            source = superCallMatcher.replaceFirst("Group root = new Group();\n" +
                "        primaryStage.setResizable(false);\n"+
                "        primaryStage.setScene(new Scene(root, "+
                superCallMatcher.group(1)+","+superCallMatcher.group(2)+(is3D?",true":"")+"));"
                +extrasFor3D);
            // expose the sample's preferred size via getter methods inserted
            // just before the class's closing brace
            int lastCloseBrace = source.lastIndexOf('}');
            source = source.substring(0,lastCloseBrace) +
                "\n    public double getSampleWidth() { return "+superCallMatcher.group(1)+"; }\n" +
                "\n    public double getSampleHeight() { return "+superCallMatcher.group(2)+"; }\n" +
                source.substring(lastCloseBrace);
        } else {
            // no super(w,h) call: fall back to a default-size Scene
            source = source.replaceAll("init\\(Stage primaryStage\\) \\{","init(Stage primaryStage) {\n" +
                "        Group root = new Group();\n" +
                "        primaryStage.setScene(new Scene(root));");
        }
        // remove @override from playMethod
        boolean hasPlayMethod = findPlayMethod.matcher(source).find();
        if (hasPlayMethod) source = source.replaceAll("@Override public void play\\(\\)","public void play()");
        // remove @override from create3dContent method
        boolean hasContent3DMethod = findCreate3DContentMethod.matcher(source).find();
        if (hasContent3DMethod) source = source.replaceAll("@Override public Node create3dContent\\(\\)","public Node create3dContent()");
        // add main and start methods just before the class's closing brace
        int lastCloseBrace = source.lastIndexOf('}');
        source = source.substring(0,lastCloseBrace) +
            "\n    @Override public void start(Stage primaryStage) throws Exception {\n" +
            "        init(primaryStage);\n" +
            "        primaryStage.show();\n" +
            (hasPlayMethod ? "        play();\n" : "") +
            "    }\n" +
            "    public static void main(String[] args) { launch(args); }\n" +
            source.substring(lastCloseBrace);
        // return the converted source
        return source;
    }

    /**
     * Creates an on-disk NetBeans project for the given sample by unzipping the
     * bundled project template, substituting placeholders, copying the sample's
     * resources, writing the converted source file, and finally opening the
     * project in a detected NetBeans installation.
     *
     * @param projectDir          directory to create the project in (created if absent)
     * @param urlToSampleJavaFile Url to the sample java source file
     * @param resourceArray       URLs of resources (images etc.) the sample uses
     */
    public static void createSampleProject(File projectDir, String urlToSampleJavaFile, String[] resourceArray) {
        // locate jfxrt.jar via the Node.class resource URL ("jar:file:...!...")
        String nodeLoc = Node.class.getResource("Node.class").toExternalForm();
        String javafxrtPath = nodeLoc.substring(4,nodeLoc.indexOf('!'));
        try {
            File f = new File(new URI(javafxrtPath));
            javafxrtPath = f.getAbsolutePath();
        } catch (Exception e) {
            e.printStackTrace();
        }
        String sampleClassName = getClassName(urlToSampleJavaFile);
        // System.out.println("sampleClassName = " + sampleClassName);
        // // get javafxrt.jar path
        // String javafxrtPath = "jfxrt.jar";
        // String classPath = System.getProperty("java.class.path");
        // for (String path: classPath.split(File.pathSeparator)) {
        //     System.out.println("path = " + path);
        //     if (path.endsWith("jfxrt.jar")) {
        //         javafxrtPath = path;
        //         break;
        //     }
        // }
        // on Windows, normalize backslashes to forward slashes for the template
        String sep = System.getProperty("file.separator");
        if (sep.equals("\\")) {
            javafxrtPath = javafxrtPath.replaceAll("\\" + sep, "/");
        }
        ///System.out.println("    javafxrtPath = " + javafxrtPath);
        // extract project name (last path segment of projectDir)
        String projectName = projectDir.toURI().toString();
        projectName = projectName.substring(projectName.lastIndexOf('/')+1);
        ///System.out.println("projectName = " + projectName);
        // create destDir
        projectDir.mkdirs();
        // unzip project template
        try {
            ZipInputStream zipinputstream = new ZipInputStream(
                SampleProjectBuilder.class.getResourceAsStream("SampleProject.zip"));
            ZipEntry zipentry;
            while ((zipentry = zipinputstream.getNextEntry()) != null) {
                //for each entry to be extracted
                String entryName = zipentry.getName();
                File entryFile = new File(projectDir,entryName);
                if (zipentry.isDirectory()) {
                    entryFile.mkdirs();
                    ///System.out.println("    CREATED DIR -> " + entryFile);
                } else {
                    // assume all are text files, load text file into string so we can process it
                    StringBuilder sb = new StringBuilder();
                    String line;
                    BufferedReader reader = new BufferedReader(new InputStreamReader(zipinputstream));
                    while((line = reader.readLine()) != null) {
                        sb.append(line);
                        sb.append('\n');
                    }
                    String contents = sb.toString();
                    // replace any place holders
                    contents = contents.replaceAll("ENSEMBLESAMPLE",projectName);
                    contents = contents.replaceAll("APPLICATIONCLASS",sampleClassName);
                    contents = contents.replaceAll("PATHTOJAVAFXRTJAR",javafxrtPath);
                    // save out file
                    FileWriter fileWriter = new FileWriter(entryFile);
                    fileWriter.write(contents);
                    fileWriter.flush();
                    fileWriter.close();
                    ///System.out.println("    WRITTEN FILE -> " + entryFile);
                }
                zipinputstream.closeEntry();
            }
            zipinputstream.close();
            //Put resources like images under src/
            File srcDestDir = new File(projectDir.getPath()+"/src/");
            loadSampleResourceUrls(srcDestDir,urlToSampleJavaFile, resourceArray);
            // save out source file
            File mainSrcFile = new File(projectDir,"src/"+sampleClassName+".java");
            FileWriter fileWriter = new FileWriter(mainSrcFile);
            fileWriter.write(loadAndConvertSampleCode(urlToSampleJavaFile));
            fileWriter.flush();
            fileWriter.close();
            ///System.out.println("    WRITTEN FILE -> src/"+sampleClassName+".java");
            // open project in netbeans
            loadProject(projectDir, mainSrcFile);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    /**
     * Opens the generated project (and its main source file) in the most
     * recently used suitable NetBeans installation, if one can be found.
     *
     * @param projectDir  the project directory to open
     * @param mainSrcFile the sample's main .java file to open in the editor
     */
    private static void loadProject(File projectDir, File mainSrcFile) {
        ///System.out.println("Trying to load project in Netbeans...");
        NBInstallation[] installations = UserDirScanner.suitableNBInstallations(new File(System.getProperty("user.home")),"6.9.0", NBInstallation.LAST_USED_COMPARATOR);
        if (installations.length > 0) {
            NBInstallation installation = installations[0];
            String launcher = NBInstallation.getPlatformLauncher();
            ///System.out.println("launcher = " + launcher);
            String cmdArray[] = new String[]{
                installation.getExecDir().getAbsolutePath() + File.separator + launcher,
                "--open",
                projectDir.getAbsolutePath(),
                mainSrcFile.getAbsolutePath()
            };
            ///System.out.println("Command line: " + Arrays.asList(cmdArray));
            try {
                Process proc = Runtime.getRuntime().exec(cmdArray, null, installation.getExecDir());
            } catch (IOException e) {
                e.printStackTrace();
            }
        } else {
            ///System.out.println("Could not find netbeans installed.");
        }
    }

    /**
     * Copies each of the sample's resources into the project's src directory,
     * resolving each resource name against the sample file's directory.
     *
     * @param destDir             destination directory (the project's src/)
     * @param urlToSampleJavaFile Url to the sample java source file
     * @param resourceUrlArray    resource URLs used by the sample
     */
    private static void loadSampleResourceUrls(File destDir, String urlToSampleJavaFile, String[] resourceUrlArray) {
        //get dir from urlToSampleJavaFile
        String sampleJavaFileDir = urlToSampleJavaFile.substring(0, urlToSampleJavaFile.lastIndexOf('/') + 1); //include the last forward slash
        List<String> resourceUrlList = Arrays.asList(resourceUrlArray);
        //create resource files for each of the resources we use
        if (!resourceUrlList.isEmpty()) {
            for (String oneResourceUrl : resourceUrlList) {
                String sampleResourceName = oneResourceUrl.substring(
                        oneResourceUrl.lastIndexOf('/') + 1, oneResourceUrl.length());
                try {
                    URL resourceUrl = new URL(sampleJavaFileDir + sampleResourceName);
                    Utils.copyFile(resourceUrl, destDir.getPath() + "/" + sampleResourceName);
                } catch (MalformedURLException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.igfs;

import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.igfs.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.spi.discovery.tcp.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.*;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.*;

import java.util.*;

import static org.apache.ignite.cache.CacheAtomicityMode.*;
import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.igfs.IgfsMode.*;

/**
 * Test for IGFS metrics.
 */
public class IgfsMetricsSelfTest extends IgfsCommonAbstractTest {
    /** Primary IGFS name. */
    private static final String IGFS_PRIMARY = "igfs-primary";

    /** Secondary IGFS name. */
    private static final String IGFS_SECONDARY = "igfs-secondary";

    /** Secondary file system REST endpoint configuration map. */
    private static final IgfsIpcEndpointConfiguration SECONDARY_REST_CFG;

    /** Test nodes count. */
    private static final int NODES_CNT = 3;

    /** IP finder for the grid with the primary file system. */
    private static final TcpDiscoveryIpFinder IP_FINDER = new TcpDiscoveryVmIpFinder(true);

    /** Primary IGFS instances. */
    private static IgniteFileSystem[] igfsPrimary;

    /** Secondary IGFS instance. */
    private static IgfsImpl igfsSecondary;

    /** Primary file system block size. */
    public static final int PRIMARY_BLOCK_SIZE = 512;

    /** Secondary file system block size. */
    public static final int SECONDARY_BLOCK_SIZE = 512;

    static {
        SECONDARY_REST_CFG = new IgfsIpcEndpointConfiguration();

        SECONDARY_REST_CFG.setType(IgfsIpcEndpointType.TCP);
        SECONDARY_REST_CFG.setPort(11500);
    }

    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        // Secondary must exist first: primary's config references it.
        startSecondary();
        startPrimary();
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids(false);
    }

    /**
     * Start a grid with the primary file system.
     *
     * @throws Exception If failed.
     */
    private void startPrimary() throws Exception {
        igfsPrimary = new IgniteFileSystem[NODES_CNT];

        for (int i = 0; i < NODES_CNT; i++) {
            Ignite g = G.start(primaryConfiguration(i));

            igfsPrimary[i] = g.fileSystem(IGFS_PRIMARY);
        }
    }

    /**
     * Get configuration for a grid with the primary file system.
     *
     * @param idx Node index.
     * @return Configuration.
     * @throws Exception If failed.
     */
    private IgniteConfiguration primaryConfiguration(int idx) throws Exception {
        FileSystemConfiguration igfsCfg = new FileSystemConfiguration();

        igfsCfg.setDataCacheName("dataCache");
        igfsCfg.setMetaCacheName("metaCache");
        igfsCfg.setName(IGFS_PRIMARY);
        igfsCfg.setBlockSize(PRIMARY_BLOCK_SIZE);
        igfsCfg.setDefaultMode(PRIMARY);
        igfsCfg.setSecondaryFileSystem(igfsSecondary.asSecondary());

        // Only /fileRemote goes through the secondary FS (DUAL_SYNC);
        // everything else is PRIMARY-only.
        Map<String, IgfsMode> pathModes = new HashMap<>();

        pathModes.put("/fileRemote", DUAL_SYNC);

        igfsCfg.setPathModes(pathModes);

        CacheConfiguration dataCacheCfg = defaultCacheConfiguration();

        dataCacheCfg.setName("dataCache");
        dataCacheCfg.setCacheMode(PARTITIONED);
        dataCacheCfg.setNearConfiguration(null);
        dataCacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        dataCacheCfg.setAffinityMapper(new IgfsGroupDataBlocksKeyMapper(128));
        dataCacheCfg.setBackups(0);
        dataCacheCfg.setAtomicityMode(TRANSACTIONAL);

        CacheConfiguration metaCacheCfg = defaultCacheConfiguration();

        metaCacheCfg.setName("metaCache");
        metaCacheCfg.setCacheMode(REPLICATED);
        metaCacheCfg.setNearConfiguration(null);
        metaCacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        metaCacheCfg.setAtomicityMode(TRANSACTIONAL);

        IgniteConfiguration cfg = new IgniteConfiguration();

        cfg.setGridName("grid-" + idx);

        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();

        discoSpi.setIpFinder(IP_FINDER);

        cfg.setDiscoverySpi(discoSpi);
        cfg.setCacheConfiguration(dataCacheCfg, metaCacheCfg);
        cfg.setFileSystemConfiguration(igfsCfg);

        cfg.setLocalHost("127.0.0.1");

        return cfg;
    }

    /**
     * Start a grid with the secondary file system.
     *
     * @throws Exception If failed.
     */
    private void startSecondary() throws Exception {
        FileSystemConfiguration igfsCfg = new FileSystemConfiguration();

        igfsCfg.setDataCacheName("dataCache");
        igfsCfg.setMetaCacheName("metaCache");
        igfsCfg.setName(IGFS_SECONDARY);
        igfsCfg.setBlockSize(SECONDARY_BLOCK_SIZE);
        igfsCfg.setDefaultMode(PRIMARY);
        igfsCfg.setIpcEndpointConfiguration(SECONDARY_REST_CFG);

        CacheConfiguration dataCacheCfg = defaultCacheConfiguration();

        dataCacheCfg.setName("dataCache");
        dataCacheCfg.setCacheMode(PARTITIONED);
        dataCacheCfg.setNearConfiguration(null);
        dataCacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        dataCacheCfg.setAffinityMapper(new IgfsGroupDataBlocksKeyMapper(128));
        dataCacheCfg.setBackups(0);
        dataCacheCfg.setAtomicityMode(TRANSACTIONAL);

        CacheConfiguration metaCacheCfg = defaultCacheConfiguration();

        metaCacheCfg.setName("metaCache");
        metaCacheCfg.setCacheMode(REPLICATED);
        metaCacheCfg.setNearConfiguration(null);
        metaCacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        metaCacheCfg.setAtomicityMode(TRANSACTIONAL);

        IgniteConfiguration cfg = new IgniteConfiguration();

        cfg.setGridName("grid-secondary");

        TcpDiscoverySpi discoSpi = new TcpDiscoverySpi();

        discoSpi.setIpFinder(new TcpDiscoveryVmIpFinder(true));

        cfg.setDiscoverySpi(discoSpi);
        cfg.setCacheConfiguration(dataCacheCfg, metaCacheCfg);
        cfg.setFileSystemConfiguration(igfsCfg);

        cfg.setLocalHost("127.0.0.1");

        Ignite g = G.start(cfg);

        igfsSecondary = (IgfsImpl)g.fileSystem(IGFS_SECONDARY);
    }

    /**
     * Checks directory/file counters and open-stream counters as directories
     * and files are created, written, read, and deleted.
     *
     * @throws Exception If failed.
     */
    public void testMetrics() throws Exception {
        IgniteFileSystem fs = igfsPrimary[0];

        assertNotNull(fs);

        IgfsMetrics m = fs.metrics();

        assertNotNull(m);
        assertEquals(0, m.directoriesCount());
        assertEquals(0, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        fs.mkdirs(new IgfsPath("/dir1"));

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(1, m.directoriesCount());
        assertEquals(0, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        fs.mkdirs(new IgfsPath("/dir1/dir2/dir3"));
        fs.mkdirs(new IgfsPath("/dir4"));

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(0, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        IgfsOutputStream out1 = fs.create(new IgfsPath("/dir1/file1"), false);
        IgfsOutputStream out2 = fs.create(new IgfsPath("/dir1/file2"), false);
        IgfsOutputStream out3 = fs.create(new IgfsPath("/dir1/dir2/file"), false);

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(3, m.filesOpenedForWrite());

        out1.write(new byte[10]);
        out2.write(new byte[20]);
        out3.write(new byte[30]);

        // Closing one writer should drop the open-for-write count by one.
        out1.close();

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(2, m.filesOpenedForWrite());

        out2.close();
        out3.close();

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        // Appending reopens the file for write.
        IgfsOutputStream out = fs.append(new IgfsPath("/dir1/file1"), false);

        out.write(new byte[20]);

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(1, m.filesOpenedForWrite());

        out.write(new byte[20]);

        out.close();

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        IgfsInputStream in1 = fs.open(new IgfsPath("/dir1/file1"));
        IgfsInputStream in2 = fs.open(new IgfsPath("/dir1/file2"));

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(2, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        in1.close();
        in2.close();

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(4, m.directoriesCount());
        assertEquals(3, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        // Recursive delete of /dir1/dir2 removes dir2, dir3 and the file in it.
        fs.delete(new IgfsPath("/dir1/file1"), false);
        fs.delete(new IgfsPath("/dir1/dir2"), true);

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(2, m.directoriesCount());
        assertEquals(1, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());

        fs.delete(new IgfsPath("/"), true);

        m = fs.metrics();

        assertNotNull(m);
        assertEquals(0, m.directoriesCount());
        assertEquals(0, m.filesCount());
        assertEquals(0, m.filesOpenedForRead());
        assertEquals(0, m.filesOpenedForWrite());
    }

    /**
     * Closing the same stream twice must not drive the open-stream
     * counters negative.
     *
     * @throws Exception If failed.
     */
    public void testMultipleClose() throws Exception {
        IgniteFileSystem fs = igfsPrimary[0];

        IgfsOutputStream out = fs.create(new IgfsPath("/file"), false);

        out.close();
        out.close();

        IgfsInputStream in = fs.open(new IgfsPath("/file"));

        in.close();
        in.close();

        IgfsMetrics m = fs.metrics();

        assertEquals(0, m.filesOpenedForWrite());
        assertEquals(0, m.filesOpenedForRead());
    }

    /**
     * Test block metrics.
     *
     * @throws Exception If failed.
     */
    public void testBlockMetrics() throws Exception {
        IgfsEx igfs = (IgfsEx)igfsPrimary[0];

        IgfsPath fileRemote = new IgfsPath("/fileRemote");
        IgfsPath file1 = new IgfsPath("/file1");
        IgfsPath file2 = new IgfsPath("/file2");

        // Create remote file and write some data to it.
        IgfsOutputStream out = igfsSecondary.create(fileRemote, 256, true, null, 1, 256, null);

        int rmtBlockSize = igfsSecondary.info(fileRemote).blockSize();

        out.write(new byte[rmtBlockSize]);
        out.close();

        // Start metrics measuring.
        IgfsMetrics initMetrics = igfs.metrics();

        // Create empty file.
        igfs.create(file1, 256, true, null, 1, 256, null).close();

        int blockSize = igfs.info(file1).blockSize();

        checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 0, 0, 0);

        // Write two blocks to the file.
        IgfsOutputStream os = igfs.append(file1, false);
        os.write(new byte[blockSize * 2]);
        os.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 2, 0, blockSize * 2);

        // Write one more file (one block).
        os = igfs.create(file2, 256, true, null, 1, 256, null);
        os.write(new byte[blockSize]);
        os.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 0, 0, 0, 3, 0, blockSize * 3);

        // Read data from the first file.
        IgfsInputStreamAdapter is = igfs.open(file1);
        is.readFully(0, new byte[blockSize * 2]);
        is.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 2, 0, blockSize * 2, 3, 0, blockSize * 3);

        // Read data from the second file with hits.
        is = igfs.open(file2);
        is.readChunks(0, blockSize);
        is.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3);

        // Clear the first file.
        igfs.create(file1, true).close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3);

        // Delete the second file.
        igfs.delete(file2, false);

        checkBlockMetrics(initMetrics, igfs.metrics(), 3, 0, blockSize * 3, 3, 0, blockSize * 3);

        // Read remote file.
        is = igfs.open(fileRemote);
        is.readChunks(0, rmtBlockSize);
        is.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 4, 1, blockSize * 3 + rmtBlockSize, 3, 0, blockSize * 3);

        // Lets wait for blocks will be placed to cache
        U.sleep(300);

        // Read remote file again.
        is = igfs.open(fileRemote);
        is.readChunks(0, rmtBlockSize);
        is.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 3, 0, blockSize * 3);

        IgfsMetrics metrics = igfs.metrics();

        assert metrics.secondarySpaceSize() == rmtBlockSize;

        // Write some data to the file working in DUAL mode.
        os = igfs.append(fileRemote, false);
        os.write(new byte[rmtBlockSize]);
        os.close();

        // Additional block read here due to file ending synchronization.
        checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 4, 1,
            blockSize * 3 + rmtBlockSize);

        metrics = igfs.metrics();

        assert metrics.secondarySpaceSize() == rmtBlockSize * 2;

        igfs.delete(fileRemote, false);

        U.sleep(300);

        assert igfs.metrics().secondarySpaceSize() == 0;

        // Write partial block to the first file.
        os = igfs.append(file1, false);
        os.write(new byte[blockSize / 2]);
        os.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 5, 1, blockSize * 3 + rmtBlockSize * 2, 5, 1,
            blockSize * 7 / 2 + rmtBlockSize);

        // Now read partial block.
        // Read remote file again.
        is = igfs.open(file1);
        is.seek(blockSize * 2);
        is.readChunks(0, blockSize / 2);
        is.close();

        checkBlockMetrics(initMetrics, igfs.metrics(), 6, 1, blockSize * 7 / 2 + rmtBlockSize * 2, 5, 1,
            blockSize * 7 / 2 + rmtBlockSize);

        igfs.resetMetrics();

        metrics = igfs.metrics();

        assert metrics.blocksReadTotal() == 0;
        assert metrics.blocksReadRemote() == 0;
        assert metrics.blocksWrittenTotal() == 0;
        assert metrics.blocksWrittenRemote() == 0;
        assert metrics.bytesRead() == 0;
        assert metrics.bytesReadTime() == 0;
        assert metrics.bytesWritten() == 0;
        assert metrics.bytesWriteTime() == 0;
    }

    /**
     * Ensure overall block-related metrics correctness.
     *
     * All expectations are deltas relative to {@code initMetrics}.
     *
     * @param initMetrics Initial metrics.
     * @param metrics Metrics to check.
     * @param blocksRead Blocks read total.
     * @param blocksReadRemote Blocks read remote.
     * @param bytesRead Bytes read.
     * @param blocksWrite Blocks write.
     * @param blocksWriteRemote Blocks write remote.
     * @param bytesWrite Bytes write.
     * @throws Exception If failed.
     */
    private void checkBlockMetrics(IgfsMetrics initMetrics, IgfsMetrics metrics, long blocksRead,
        long blocksReadRemote, long bytesRead, long blocksWrite, long blocksWriteRemote, long bytesWrite)
        throws Exception {
        assert metrics != null;

        assertEquals(blocksRead, metrics.blocksReadTotal() - initMetrics.blocksReadTotal());
        assertEquals(blocksReadRemote, metrics.blocksReadRemote() - initMetrics.blocksReadRemote());
        assertEquals(bytesRead, metrics.bytesRead() - initMetrics.bytesRead());
        assertEquals(blocksWrite, metrics.blocksWrittenTotal() - initMetrics.blocksWrittenTotal());
        assertEquals(blocksWriteRemote, metrics.blocksWrittenRemote() - initMetrics.blocksWrittenRemote());
        assertEquals(bytesWrite, metrics.bytesWritten() - initMetrics.bytesWritten());
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.refactoring.typeMigration;

import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.wm.ToolWindowId;
import com.intellij.openapi.wm.ToolWindowManager;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.util.PsiUtilCore;
import com.intellij.refactoring.BaseRefactoringProcessor;
import com.intellij.refactoring.typeMigration.ui.FailedConversionsDialog;
import com.intellij.refactoring.typeMigration.ui.MigrationPanel;
import com.intellij.refactoring.typeMigration.usageInfo.TypeMigrationUsageInfo;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.ui.content.Content;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.usageView.UsageViewManager;
import com.intellij.util.*;
import com.intellij.util.containers.*;
import gnu.trove.THashSet;
import org.jetbrains.annotations.NotNull;

import java.util.*;

import static com.intellij.util.ObjectUtils.assertNotNull;

/**
 * Refactoring processor that migrates the declared type of one or more roots
 * (fields, variables, parameters, method return types) and propagates the change
 * through dependent usages found by {@link TypeMigrationLabeler}.
 */
public class TypeMigrationProcessor extends BaseRefactoringProcessor {
  private final static Logger LOG = Logger.getInstance(TypeMigrationProcessor.class);

  // Cap on how many root names are spelled out in the preview tab title; the rest collapse to "...".
  private final static int MAX_ROOT_IN_PREVIEW_PRESENTATION = 3;

  private PsiElement[] myRoot;
  // Maps each root element to the type it should be migrated to.
  private Function<PsiElement, PsiType> myRootTypes;
  private final TypeMigrationRules myRules;
  // Created lazily in findUsages(); holds the migration graph and any failed conversions.
  private TypeMigrationLabeler myLabeler;

  public TypeMigrationProcessor(final Project project,
                                final PsiElement[] roots,
                                final Function<PsiElement, PsiType> rootTypes,
                                final TypeMigrationRules rules) {
    super(project);
    myRoot = roots;
    myRules = rules;
    myRootTypes = rootTypes;
  }

  /** Single-root convenience overload; does not optimize imports afterwards. */
  public static void runHighlightingTypeMigration(final Project project,
                                                  final Editor editor,
                                                  final TypeMigrationRules rules,
                                                  final PsiElement root,
                                                  final PsiType migrationType) {
    runHighlightingTypeMigration(project, editor, rules, root, migrationType, false);
  }

  /** Single-root overload; wraps the root into an array with a constant type function. */
  public static void runHighlightingTypeMigration(final Project project,
                                                  final Editor editor,
                                                  final TypeMigrationRules rules,
                                                  final PsiElement root,
                                                  final PsiType migrationType,
                                                  final boolean optimizeImports) {
    runHighlightingTypeMigration(project, editor, rules, new PsiElement[] {root}, Functions.<PsiElement, PsiType>constant(migrationType), optimizeImports);
  }

  /**
   * Runs the migration and, when an editor is supplied, highlights the changed elements
   * (type elements for methods/variables, the element itself otherwise) in the files
   * that contained the roots. Optionally optimizes imports in every affected file.
   */
  public static void runHighlightingTypeMigration(final Project project,
                                                  final Editor editor,
                                                  final TypeMigrationRules rules,
                                                  final PsiElement[] roots,
                                                  final Function<PsiElement, PsiType> migrationTypeFunction,
                                                  final boolean optimizeImports) {
    final Set<PsiFile> containingFiles = ContainerUtil.map2Set(roots, new Function<PsiElement, PsiFile>() {
      @Override
      public PsiFile fun(PsiElement element) {
        return element.getContainingFile();
      }
    });
    final TypeMigrationProcessor processor = new TypeMigrationProcessor(project, roots, migrationTypeFunction, rules) {
      @Override
      public void performRefactoring(@NotNull final UsageInfo[] usages) {
        super.performRefactoring(usages);
        if (editor != null) {
          // Highlighting must wait until the write action that performed the refactoring completes.
          ApplicationManager.getApplication().invokeLater(new Runnable() {
            public void run() {
              final List<PsiElement> result = new ArrayList<PsiElement>();
              for (UsageInfo usage : usages) {
                final PsiElement element = usage.getElement();
                // Only highlight usages inside the files that held the migration roots.
                if (element == null || !containingFiles.contains(element.getContainingFile())) continue;
                if (element instanceof PsiMethod) {
                  result.add(((PsiMethod)element).getReturnTypeElement());
                }
                else if (element instanceof PsiVariable) {
                  result.add(((PsiVariable)element).getTypeElement());
                }
                else {
                  result.add(element);
                }
              }
              RefactoringUtil.highlightAllOccurrences(project, PsiUtilCore.toPsiElementArray(result), editor);
            }
          });
        }
        if (optimizeImports) {
          final JavaCodeStyleManager javaCodeStyleManager = JavaCodeStyleManager.getInstance(myProject);
          final Set<PsiFile> affectedFiles = new THashSet<PsiFile>();
          for (UsageInfo usage : usages) {
            final PsiFile usageFile = usage.getFile();
            if (usageFile != null) {
              affectedFiles.add(usageFile);
            }
          }
          for (PsiFile file : affectedFiles) {
            javaCodeStyleManager.optimizeImports(file);
            javaCodeStyleManager.shortenClassReferences(file);
          }
        }
      }
    };
    processor.run();
  }

  @NotNull
  @Override
  protected UsageViewDescriptor createUsageViewDescriptor(@NotNull UsageInfo[] usages) {
    // The descriptor presents only the first root; NOTE(review): presumably sufficient
    // because the usage view groups everything under a single migration — confirm.
    return new TypeMigrationViewDescriptor(myRoot[0]);
  }

  @Override
  protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    if (hasFailedConversions()) {
      if (ApplicationManager.getApplication().isUnitTestMode()) {
        throw new RuntimeException(StringUtil.join(myLabeler.getFailedConversionsReport(), "\n"));
      }
      FailedConversionsDialog dialog = new FailedConversionsDialog(myLabeler.getFailedConversionsReport(), myProject);
      if (!dialog.showAndGet()) {
        // User declined to ignore the failures; optionally open the preview so they can inspect them.
        final int exitCode = dialog.getExitCode();
        prepareSuccessful();
        if (exitCode == FailedConversionsDialog.VIEW_USAGES_EXIT_CODE) {
          previewRefactoring(refUsages.get());
        }
        return false;
      }
    }
    prepareSuccessful();
    return true;
  }

  public boolean hasFailedConversions() {
    return myLabeler.hasFailedConversions();
  }

  @Override
  protected void previewRefactoring(@NotNull final UsageInfo[] usages) {
    // Builds the tab title shown in the Find tool window and opens the migration preview panel.
    MigrationPanel panel = new MigrationPanel(myRoot, myLabeler, myProject, isPreviewUsages());
    String name;
    if (myRoot.length == 1) {
      String fromType = assertNotNull(TypeMigrationLabeler.getElementType(myRoot[0])).getPresentableText();
      String toType = myRootTypes.fun(myRoot[0]).getPresentableText();
      String text;
      text = getPresentation(myRoot[0]);
      name = "Migrate Type of " + text + " from \'" + fromType + "\' to \'" + toType + "\'";
    }
    else {
      // Show at most MAX_ROOT_IN_PREVIEW_PRESENTATION root names, then an ellipsis.
      final int rootsInPresentationCount =
        myRoot.length > MAX_ROOT_IN_PREVIEW_PRESENTATION ? MAX_ROOT_IN_PREVIEW_PRESENTATION : myRoot.length;
      String[] rootsPresentation = new String[rootsInPresentationCount];
      for (int i = 0; i < rootsInPresentationCount; i++) {
        final PsiElement root = myRoot[i];
        rootsPresentation[i] = root instanceof PsiNamedElement ? ((PsiNamedElement)root).getName() : root.getText();
      }
      rootsPresentation = StringUtil.surround(rootsPresentation, "\'", "\'");
      name = "Migrate Type of " + StringUtil.join(rootsPresentation, ", ");
      if (myRoot.length > MAX_ROOT_IN_PREVIEW_PRESENTATION) {
        name += "...";
      }
    }
    Content content = UsageViewManager.getInstance(myProject).addContent(name, false, panel, true, true);
    panel.setContent(content);
    ToolWindowManager.getInstance(myProject).getToolWindow(ToolWindowId.FIND).activate(null);
  }

  /** Returns a short human-readable description of the element kind plus its name. */
  public static String getPresentation(PsiElement element) {
    String text;
    if (element instanceof PsiField) {
      text = "field \'" + ((PsiField)element).getName() + "\'";
    }
    else if (element instanceof PsiParameter) {
      text = "parameter \'" + ((PsiParameter)element).getName() + "\'";
    }
    else if (element instanceof PsiLocalVariable) {
      text = "variable \'" + ((PsiLocalVariable)element).getName() + "\'";
    }
    else if (element instanceof PsiMethod) {
      text = "method \'" + ((PsiMethod)element).getName() + "\' return";
    }
    else {
      text = element.getText();
    }
    return text;
  }

  @NotNull
  @Override
  public UsageInfo[] findUsages() {
    myLabeler = new TypeMigrationLabeler(myRules, myRootTypes);

    try {
      return myLabeler.getMigratedUsages(!isPreviewUsages(), myRoot);
    }
    catch (TypeMigrationLabeler.MigrateException e) {
      // Migration could not proceed automatically; fall back to preview mode and retry.
      setPreviewUsages(true);
      myLabeler.clearStopException();
      return myLabeler.getMigratedUsages(false, myRoot);
    }
  }

  @Override
  protected void refreshElements(@NotNull PsiElement[] elements) {
    myRoot = elements;
  }

  @Override
  public void performRefactoring(@NotNull UsageInfo[] usages) {
    // Split multi-variable declarations among the roots so each variable's type can change independently.
    for (PsiElement element : myRoot) {
      if (element instanceof PsiVariable && ((PsiVariable)element).getTypeElement() != null) {
        ((PsiVariable)element).normalizeDeclaration();
      }
    }
    change(usages, myLabeler, myProject);
  }

  /**
   * Applies the computed conversions. Code usages are changed via the producer
   * (new expressions are collected for diamond post-processing); non-code usages
   * are re-bound to the conversion target when it is a PsiMember.
   */
  public static void change(UsageInfo[] usages, TypeMigrationLabeler labeler, Project project) {
    final List<SmartPsiElementPointer<PsiNewExpression>> newExpressionsToCheckDiamonds = new SmartList<>();
    final TypeMigrationLabeler.MigrationProducer producer = labeler.createMigratorFor(usages);

    final SmartPointerManager smartPointerManager = SmartPointerManager.getInstance(project);
    List<UsageInfo> nonCodeUsages = new ArrayList<>();
    for (UsageInfo usage : usages) {
      if (((TypeMigrationUsageInfo)usage).isExcluded()) continue;
      final PsiElement element = usage.getElement();
      if (element instanceof PsiVariable ||
          element instanceof PsiMember ||
          element instanceof PsiExpression ||
          element instanceof PsiReferenceParameterList) {
        // Smart pointers survive the PSI mutations done by later conversions.
        producer.change((TypeMigrationUsageInfo)usage,
                        expression -> newExpressionsToCheckDiamonds.add(smartPointerManager.createSmartPsiElementPointer(expression)),
                        labeler);
      }
      else {
        nonCodeUsages.add(usage);
      }
    }

    for (SmartPsiElementPointer<PsiNewExpression> newExpressionPointer : newExpressionsToCheckDiamonds) {
      final PsiNewExpression newExpression = newExpressionPointer.getElement();
      if (newExpression != null) {
        labeler.postProcessNewExpression(newExpression);
      }
    }

    for (UsageInfo usageInfo : nonCodeUsages) {
      final PsiElement element = usageInfo.getElement();
      if (element != null) {
        final PsiReference reference = element.getReference();
        if (reference != null) {
          final Object target = producer.getConversion(usageInfo);
          if (target instanceof PsiMember) {
            try {
              reference.bindToElement((PsiElement)target);
            }
            catch (IncorrectOperationException ignored) {
              // Best effort: a non-code reference that cannot be rebound is left as-is.
            }
          }
        }
      }
    }
  }

  public TypeMigrationLabeler getLabeler() {
    return myLabeler;
  }

  @Override
  protected String getCommandName() {
    return "TypeMigration";
  }
}
/*
 * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.hazelcast.aws;

import com.github.tomakehurst.wiremock.junit.WireMockRule;
import com.hazelcast.aws.AwsEcsApi.Task;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;

import java.time.Clock;
import java.time.Instant;
import java.time.ZoneId;
import java.util.List;

import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
import static com.github.tomakehurst.wiremock.client.WireMock.equalToJson;
import static com.github.tomakehurst.wiremock.client.WireMock.post;
import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
import static com.github.tomakehurst.wiremock.client.WireMock.urlMatching;
import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.wireMockConfig;
import static java.util.Arrays.asList;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasItems;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.mockito.BDDMockito.given;
import static org.mockito.Matchers.any;

/**
 * Tests for {@link AwsEcsApi} using a local WireMock server in place of the AWS ECS
 * endpoint. The request signer is mocked, and the clock is fixed so the X-Amz-Date
 * header asserted in the stubs is deterministic.
 */
@RunWith(MockitoJUnitRunner.class)
public class AwsEcsApiTest {
    // Fixed instant corresponding to X-Amz-Date "20200403T102518Z" asserted by the stubs.
    private static final Clock CLOCK = Clock.fixed(Instant.ofEpochMilli(1585909518929L), ZoneId.systemDefault());
    private static final String AUTHORIZATION_HEADER = "authorization-header";
    private static final String TOKEN = "IQoJb3JpZ2luX2VjEFIaDGV1LWNlbnRyYWwtMSJGM==";
    private static final AwsCredentials CREDENTIALS = AwsCredentials.builder()
        .setAccessKey("AKIDEXAMPLE")
        .setSecretKey("wJalrXUtnFEMI/K7MDENG+bPxRfiCYEXAMPLEKEY")
        .setToken(TOKEN)
        .build();

    @Mock
    private AwsRequestSigner requestSigner;

    private String endpoint;
    private AwsEcsApi awsEcsApi;

    @Rule
    public WireMockRule wireMockRule = new WireMockRule(wireMockConfig().dynamicPort());

    @Before
    public void setUp() {
        // The signer always returns the canned header; the stubs below assert it is sent.
        given(requestSigner.authHeader(any(), any(), any(), any(), any(), any())).willReturn(AUTHORIZATION_HEADER);

        endpoint = String.format("http://localhost:%s", wireMockRule.port());
        AwsConfig awsConfig = AwsConfig.builder().build();
        awsEcsApi = new AwsEcsApi(endpoint, awsConfig, requestSigner, CLOCK);
    }

    @Test
    public void listTasks() {
        // given
        String cluster = "arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster";

        //language=JSON
        String requestBody = "{\n"
            + "  \"cluster\": \"arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster\"\n"
            + "}";

        //language=JSON
        String response = "{\n"
            + "  \"taskArns\": [\n"
            + "    \"arn:aws:ecs:us-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a\",\n"
            + "    \"arn:aws:ecs:us-east-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207\"\n"
            + "  ]\n"
            + "}";

        stubFor(post("/")
            .withHeader("X-Amz-Date", equalTo("20200403T102518Z"))
            .withHeader("Authorization", equalTo(AUTHORIZATION_HEADER))
            .withHeader("X-Amz-Target", equalTo("AmazonEC2ContainerServiceV20141113.ListTasks"))
            .withHeader("Content-Type", equalTo("application/x-amz-json-1.1"))
            .withHeader("Accept-Encoding", equalTo("identity"))
            .withHeader("X-Amz-Security-Token", equalTo(TOKEN))
            .withRequestBody(equalToJson(requestBody))
            .willReturn(aResponse().withStatus(200).withBody(response)));

        // when
        List<String> tasks = awsEcsApi.listTasks(cluster, CREDENTIALS);

        // then
        assertThat(tasks, hasItems(
            "arn:aws:ecs:us-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a",
            "arn:aws:ecs:us-east-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207"
            )
        );
    }

    @Test
    public void listTasksFiltered() {
        // given
        String cluster = "arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster";
        // A dedicated api instance: the "family" filter must appear in the request body.
        AwsConfig awsConfig = AwsConfig.builder()
            .setFamily("family-name")
            .build();
        AwsEcsApi awsEcsApi = new AwsEcsApi(endpoint, awsConfig, requestSigner, CLOCK);

        //language=JSON
        String requestBody = "{\n"
            + "  \"cluster\": \"arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster\",\n"
            + "  \"family\": \"family-name\"\n"
            + "}";

        //language=JSON
        String response = "{\n"
            + "  \"taskArns\": [\n"
            + "    \"arn:aws:ecs:us-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a\",\n"
            + "    \"arn:aws:ecs:us-east-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207\"\n"
            + "  ]\n"
            + "}";

        stubFor(post("/")
            .withHeader("X-Amz-Date", equalTo("20200403T102518Z"))
            .withHeader("Authorization", equalTo(AUTHORIZATION_HEADER))
            .withHeader("X-Amz-Target", equalTo("AmazonEC2ContainerServiceV20141113.ListTasks"))
            .withHeader("Content-Type", equalTo("application/x-amz-json-1.1"))
            .withHeader("Accept-Encoding", equalTo("identity"))
            .withHeader("X-Amz-Security-Token", equalTo(TOKEN))
            .withRequestBody(equalToJson(requestBody))
            .willReturn(aResponse().withStatus(200).withBody(response)));

        // when
        List<String> tasks = awsEcsApi.listTasks(cluster, CREDENTIALS);

        // then
        assertThat(tasks, hasItems(
            "arn:aws:ecs:us-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a",
            "arn:aws:ecs:us-east-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207"
            )
        );
    }

    @Test
    public void describeTasks() {
        // given
        String cluster = "arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster";
        List<String> tasks = asList(
            "arn:aws:ecs:eu-central-1-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a",
            "arn:aws:ecs:eu-central-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207"
        );

        //language=JSON
        String requestBody = "{\n"
            + "  \"cluster\" : \"arn:aws:ecs:eu-central-1:665466731577:cluster/rafal-test-cluster\",\n"
            + "  \"tasks\": [\n"
            + "    \"arn:aws:ecs:eu-central-1-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a\",\n"
            + "    \"arn:aws:ecs:eu-central-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207\"\n"
            + "  ]\n"
            + "}";

        //language=JSON
        String response = "{\n"
            + "  \"tasks\": [\n"
            + "    {\n"
            + "      \"taskArn\": \"arn:aws:ecs:eu-central-1-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a\",\n"
            + "      \"availabilityZone\": \"eu-central-1a\",\n"
            + "      \"containers\": [\n"
            + "        {\n"
            + "          \"taskArn\": \"arn:aws:ecs:eu-central-1-east-1:012345678910:task/0b69d5c0-d655-4695-98cd-5d2d526d9d5a\",\n"
            + "          \"networkInterfaces\": [\n"
            + "            {\n"
            + "              \"privateIpv4Address\": \"10.0.1.16\"\n"
            + "            }\n"
            + "          ]\n"
            + "        }\n"
            + "      ]\n"
            + "    },\n"
            + "    {\n"
            + "      \"taskArn\": \"arn:aws:ecs:eu-central-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207\",\n"
            + "      \"availabilityZone\": \"eu-central-1a\",\n"
            + "      \"containers\": [\n"
            + "        {\n"
            + "          \"taskArn\": \"arn:aws:ecs:eu-central-1:012345678910:task/51a01bdf-d00e-487e-ab14-7645330b6207\",\n"
            + "          \"networkInterfaces\": [\n"
            + "            {\n"
            + "              \"privateIpv4Address\": \"10.0.1.219\"\n"
            + "            }\n"
            + "          ]\n"
            + "        }\n"
            + "      ]\n"
            + "    }\n"
            + "  ]\n"
            + "}";

        stubFor(post("/")
            .withHeader("X-Amz-Date", equalTo("20200403T102518Z"))
            .withHeader("Authorization", equalTo(AUTHORIZATION_HEADER))
            .withHeader("X-Amz-Target", equalTo("AmazonEC2ContainerServiceV20141113.DescribeTasks"))
            .withHeader("Content-Type", equalTo("application/x-amz-json-1.1"))
            .withHeader("Accept-Encoding", equalTo("identity"))
            .withHeader("X-Amz-Security-Token", equalTo(TOKEN))
            .withRequestBody(equalToJson(requestBody))
            .willReturn(aResponse().withStatus(200).withBody(response)));

        // when
        List<Task> result = awsEcsApi.describeTasks(cluster, tasks, CREDENTIALS);

        // then
        assertEquals("10.0.1.16", result.get(0).getPrivateAddress());
        assertEquals("eu-central-1a", result.get(0).getAvailabilityZone());
        assertEquals("10.0.1.219", result.get(1).getPrivateAddress());
        assertEquals("eu-central-1a", result.get(1).getAvailabilityZone());
    }

    @Test
    public void awsError() {
        // given
        int errorCode = 401;
        String errorMessage = "Error message retrieved from AWS";
        stubFor(post(urlMatching("/.*"))
            .willReturn(aResponse().withStatus(errorCode).withBody(errorMessage)));

        // when
        Exception exception = assertThrows(Exception.class, () -> awsEcsApi.listTasks("cluster-arn", CREDENTIALS));

        // then
        // The thrown exception must surface both the HTTP status and the AWS error body.
        assertTrue(exception.getMessage().contains(Integer.toString(errorCode)));
        assertTrue(exception.getMessage().contains(errorMessage));
    }
}
package com.planet_ink.coffee_mud.Behaviors;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;

import java.util.*;

/*
Copyright 2000-2010 Bo Zimmerman

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

/**
 * Behavior that turns a MOB into a skill/expertise teacher. Parameters may name a
 * char class, specific abilities/expertises, a percent chance, and the flags
 * NOCOMMON / NOEXPS. Players trigger teaching by saying "TEACH <what>" or
 * "GAIN <what>" to the MOB.
 */
@SuppressWarnings("unchecked")
public class MOBTeacher extends CombatAbilities
{
	public String ID(){return "MOBTeacher";}
	protected MOB myMOB=null;
	// When true (no explicit class/ability parms), the MOB learns/teaches everything it can.
	protected boolean teachEverything=true;
	protected boolean noCommon=false;
	protected boolean noExpertises=false; // doubles as a "done ticking" flag
	// Countdown of MOB ticks before expertises are granted in tick().
	protected int tickDownToKnowledge=4;

	public void startBehavior(Environmental forMe)
	{
		if(forMe instanceof MOB)
			myMOB=(MOB)forMe;
		setParms(parms);
	}

	/**
	 * Makes C the MOB's effective class: replaces a lone StdCharClass outright,
	 * otherwise demotes existing classes to level 1 and layers C on top at the
	 * MOB's envStats level.
	 */
	protected void setTheCharClass(MOB mob, CharClass C)
	{
		if((mob.baseCharStats().numClasses()==1)
		&&(mob.baseCharStats().getMyClass(0).ID().equals("StdCharClass"))
		&&(!C.ID().equals("StdCharClass")))
		{
			mob.baseCharStats().setMyClasses(C.ID());
			mob.baseCharStats().setMyLevels(""+mob.envStats().level());
			mob.recoverCharStats();
			return;
		}
		for(int i=0;i<mob.baseCharStats().numClasses();i++)
		{
			CharClass C1=mob.baseCharStats().getMyClass(i);
			if((C1!=null)
			&&(mob.baseCharStats().getClassLevel(C1)>0))
				mob.baseCharStats().setClassLevel(C1,1);
		}
		mob.baseCharStats().setCurrentClass(C);
		mob.baseCharStats().setClassLevel(C,mob.envStats().level());
		mob.recoverCharStats();
	}

	/**
	 * Adds to the MOB (with percent chance pct) every ability its current class
	 * qualifies for, honoring the NOCOMMON flag and, for StdCharClass, the
	 * fantasy-theme availability filter.
	 */
	protected void classAbles(MOB mob, Hashtable myAbles, int pct)
	{
		boolean stdCharClass=mob.charStats().getCurrentClass().ID().equals("StdCharClass");
		String className=mob.charStats().getCurrentClass().ID();
		Ability A=null;
		for(Enumeration a=CMClass.abilities();a.hasMoreElements();)
		{
			A=(Ability)a.nextElement();
			if((((stdCharClass&&(CMLib.ableMapper().lowestQualifyingLevel(A.ID())>0)))
				||(CMLib.ableMapper().qualifiesByLevel(mob,A)&&(!CMLib.ableMapper().getSecretSkill(className,true,A.ID()))))
			&&((!noCommon)||((A.classificationCode()&Ability.ALL_ACODES)!=Ability.ACODE_COMMON_SKILL))
			&&((!stdCharClass)||(CMLib.ableMapper().availableToTheme(A.ID(),Area.THEME_FANTASY,true))))
				addAbility(mob,A,pct,myAbles);
		}
	}

	/**
	 * Runs the room's message approval for a speak and a teach action; returns
	 * false if either is vetoed, otherwise sends the teach message and returns true.
	 */
	public boolean tryTeach(MOB teacher, MOB student, String teachWhat)
	{
		CMMsg msg2=CMClass.getMsg(teacher,student,null,CMMsg.MSG_SPEAK,null);
		if(!teacher.location().okMessage(teacher,msg2))
			return false;
		msg2=CMClass.getMsg(teacher,student,null,CMMsg.MSG_OK_ACTION,"<S-NAME> teach(es) <T-NAMESELF> '"+teachWhat+"'.");
		if(!teacher.location().okMessage(teacher,msg2))
			return false;
		teacher.location().send(teacher,msg2);
		return true;
	}

	/**
	 * After tickDownToKnowledge MOB ticks, grants expertises once: either all
	 * definitions (teachEverything), or — with stats/levels temporarily maxed —
	 * every expertise the MOB can iteratively qualify for, then restores stats.
	 */
	public boolean tick(Tickable ticking, int tickID)
	{
		if((tickID==Tickable.TICKID_MOB)
		&&(!noExpertises)
		&&((--tickDownToKnowledge)==0)
		&&(ticking instanceof MOB))
		{
			noExpertises=true; // one-shot: marks the expertise grant as done
			MOB mob=(MOB)ticking;
			if(teachEverything)
			{
				for(Enumeration e=CMLib.expertises().definitions();e.hasMoreElements();)
				{
					ExpertiseLibrary.ExpertiseDefinition def=(ExpertiseLibrary.ExpertiseDefinition)e.nextElement();
					if(mob.fetchExpertise(def.ID)==null)
						mob.addExpertise(def.ID);
				}
			}
			else
			{
				boolean someNew=true;
				// Temporarily max base stats and class levels so qualification checks pass.
				CharStats oldBase=(CharStats)mob.baseCharStats().copyOf();
				for(int i: CharStats.CODES.BASE())
					mob.baseCharStats().setStat(i,100);
				for(int i=0;i<mob.baseCharStats().numClasses();i++)
					mob.baseCharStats().setClassLevel(mob.baseCharStats().getMyClass(i),100);
				mob.recoverCharStats();
				// Iterate until no new expertise qualifies (expertises can depend on each other).
				while(someNew)
				{
					someNew=false;
					Vector V=CMLib.expertises().myQualifiedExpertises(mob);
					ExpertiseLibrary.ExpertiseDefinition def=null;
					for(int v=0;v<V.size();v++)
					{
						def=(ExpertiseLibrary.ExpertiseDefinition)V.elementAt(v);
						if(mob.fetchExpertise(def.ID)==null)
						{
							mob.addExpertise(def.ID);
							someNew=true;
						}
					}
				}
				mob.setBaseCharStats(oldBase);
				mob.recoverCharStats();
			}
		}
		return super.tick(ticking,tickID);
	}

	/**
	 * With percent chance pct, gives the MOB an unsavable copy of A at max
	 * proficiency, or re-maxes the proficiency of an already-tracked copy.
	 */
	public void addAbility(MOB mob, Ability A, int pct, Hashtable myAbles)
	{
		if(CMLib.dice().rollPercentage()<=pct)
		{
			Ability A2=(Ability)myAbles.get(A.ID());
			if(A2==null)
			{
				A=(Ability)A.copyOf();
				A.setSavable(false);
				A.setProficiency(CMLib.ableMapper().getMaxProficiency(mob,true,A.ID()));
				myAbles.put(A.ID(),A);
				mob.addAbility(A);
			}
			else
				A2.setProficiency(CMLib.ableMapper().getMaxProficiency(mob,true,A2.ID()));
		}
	}

	/**
	 * Parses the behavior parms (flags, percent, class/ability/expertise names)
	 * and configures the teaching MOB accordingly: resets it to StdCharClass,
	 * applies named classes/abilities, and evens class levels out at the end.
	 */
	protected void ensureCharClass()
	{
		myMOB.baseCharStats().setMyClasses("StdCharClass");
		myMOB.baseCharStats().setMyLevels(""+myMOB.envStats().level());
		myMOB.recoverCharStats();
		Hashtable myAbles=new Hashtable();
		Ability A=null;
		for(int a=0;a<myMOB.numAbilities();a++)
		{
			A=myMOB.fetchAbility(a);
			if(A!=null)
				myAbles.put(A.ID(),A);
		}
		// Baseline teaching stats; raised to 25 temporarily when a student says "please".
		myMOB.baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,19);
		myMOB.baseCharStats().setStat(CharStats.STAT_WISDOM,19);
		int pct=100;
		Vector V=null;
		A=CMClass.getAbility(getParms());
		if(A!=null)
		{
			// The entire parm string named a single ability.
			addAbility(myMOB,A,pct,myAbles);
			teachEverything=false;
		}
		else
			V=CMParms.parse(getParms());
		// First pass (reverse, so removal is safe): pull out the flag words.
		if(V!=null)
		for(int v=V.size()-1;v>=0;v--)
		{
			String s=(String)V.elementAt(v);
			if(s.equalsIgnoreCase("NOCOMMON"))
			{
				noCommon=true;
				V.removeElementAt(v);
			}
			if(s.equalsIgnoreCase("NOEXPS")||s.equalsIgnoreCase("NOEXP"))
			{
				noExpertises=true;
				V.removeElementAt(v);
			}
		}
		// Second pass: percents, char classes, abilities, expertises.
		if(V!=null)
		for(int v=0;v<V.size();v++)
		{
			String s=(String)V.elementAt(v);
			if(s.endsWith("%"))
			{
				pct=CMath.s_int(s.substring(0,s.length()-1));
				continue;
			}
			A=CMClass.getAbility(s);
			CharClass C=CMClass.findCharClass(s);
			if((C!=null)&&(!C.ID().equals("StdCharClass")))
			{
				teachEverything=false;
				setTheCharClass(myMOB,C);
				classAbles(myMOB,myAbles,pct);
				myMOB.recoverCharStats();
			}
			else
			if(A!=null)
			{
				addAbility(myMOB,A,pct,myAbles);
				teachEverything=false;
			}
			else
			{
				ExpertiseLibrary.ExpertiseDefinition def=CMLib.expertises().getDefinition(s);
				if(def!=null)
				{
					myMOB.addExpertise(def.ID);
					teachEverything=false;
				}
			}
		}
		myMOB.recoverCharStats();
		if((myMOB.charStats().getCurrentClass().ID().equals("StdCharClass"))
		&&(teachEverything))
			classAbles(myMOB,myAbles,pct);
		// Spread the MOB's level evenly across its classes (minimum 1 each).
		int lvl=myMOB.envStats().level()/myMOB.baseCharStats().numClasses();
		if(lvl<1) lvl=1;
		for(int i=0;i<myMOB.baseCharStats().numClasses();i++)
		{
			CharClass C=myMOB.baseCharStats().getMyClass(i);
			if((C!=null)&&(myMOB.baseCharStats().getClassLevel(C)>=0))
				myMOB.baseCharStats().setClassLevel(C,lvl);
		}
		myMOB.recoverCharStats();
	}

	public void setParms(String newParms)
	{
		super.setParms(newParms);
		if(myMOB==null) return;
		// Reset to defaults before re-reading parms; ensureCharClass() re-applies them.
		teachEverything=true;
		noCommon=false;
		noExpertises=false;
		tickDownToKnowledge=4;
		ensureCharClass();
	}

	public boolean okMessage(Environmental host, CMMsg msg)
	{
		// A teacher must be teachable-from: clear the NOTEACH attribute if it is set.
		if(host instanceof MOB)
		{
			if(CMath.bset(((MOB)host).getBitmap(),MOB.ATT_NOTEACH))
				((MOB)host).setBitmap(CMath.unsetb(((MOB)host).getBitmap(),MOB.ATT_NOTEACH));
		}
		return super.okMessage(host,msg);
	}

	/**
	 * Listens for a player saying "TEACH ..."/"GAIN ..." to this MOB, normalizes
	 * the requested name (strips quotes, "PLEASE", "ME"), then teaches the matching
	 * ability or expertise, charging expertise costs where defined.
	 */
	public void executeMsg(Environmental affecting, CMMsg msg)
	{
		if(myMOB==null) return;
		super.executeMsg(affecting,msg);
		if(!canFreelyBehaveNormal(affecting)) return;
		MOB monster=myMOB;
		MOB student=msg.source();
		if((!msg.amISource(monster))
		&&(!student.isMonster())
		&&(msg.sourceMessage()!=null)
		&&((msg.target()==null)||msg.amITarget(monster))
		&&(msg.targetMinor()==CMMsg.TYP_SPEAK))
		{
			int x=msg.sourceMessage().toUpperCase().indexOf("TEACH");
			if(x<0) x=msg.sourceMessage().toUpperCase().indexOf("GAIN ");
			if(x>=0)
			{
				boolean giveABonus=false;
				String s=msg.sourceMessage().substring(x+5).trim();
				// Trim a trailing quote/backtick left over from the channel formatting.
				x=s.lastIndexOf("\'");
				if(x>0) s=s.substring(0,x);
				else
				{
					x=s.lastIndexOf("`");
					if(x>0) s=s.substring(0,x);
				}
				// Repeatedly strip surrounding quotes and politeness words in any order.
				if(s.startsWith("\"")) s=s.substring(1).trim();
				if(s.endsWith("\"")) s=s.substring(0,s.length()-1);
				if(s.toUpperCase().endsWith("PLEASE"))
					s=s.substring(0,s.length()-6).trim();
				if(s.startsWith("\"")) s=s.substring(1).trim();
				if(s.endsWith("\"")) s=s.substring(0,s.length()-1);
				if(s.toUpperCase().startsWith("PLEASE "))
				{
					giveABonus=true; // saying "please" earns a temporary stat bonus for the teach
					s=s.substring(6).trim();
				}
				if(s.startsWith("\"")) s=s.substring(1).trim();
				if(s.endsWith("\"")) s=s.substring(0,s.length()-1);
				if(s.toUpperCase().startsWith("ME "))
					s=s.substring(3).trim();
				if(s.startsWith("\"")) s=s.substring(1).trim();
				if(s.endsWith("\"")) s=s.substring(0,s.length()-1);
				if(s.toUpperCase().startsWith("PLEASE "))
				{
					giveABonus=true;
					s=s.substring(6).trim();
				}
				if(s.toUpperCase().startsWith("ME "))
					s=s.substring(3).trim();
				if(s.startsWith("\"")) s=s.substring(1).trim();
				if(s.endsWith("\"")) s=s.substring(0,s.length()-1);
				if(s.trim().equalsIgnoreCase("LIST"))
				{
					CMLib.commands().postSay(monster,student,"Try the QUALIFY command.",true,false);
					return;
				}
				if(s.trim().toUpperCase().equals("ALL"))
				{
					CMLib.commands().postSay(monster,student,"I can't teach you everything at once. Try the QUALIFY command.",true,false);
					return;
				}
				Ability myAbility=CMClass.findAbility(s.trim().toUpperCase(),monster);
				if(myAbility==null)
				{
					// No ability matched: try expertises, exact name match first, then substring.
					ExpertiseLibrary.ExpertiseDefinition theExpertise=null;
					Vector V=CMLib.expertises().myListableExpertises(monster);
					for(int v=0;v<V.size();v++)
					{
						ExpertiseLibrary.ExpertiseDefinition def=(ExpertiseLibrary.ExpertiseDefinition)V.elementAt(v);
						if((def.name.equalsIgnoreCase(s))
						&&(theExpertise==null))
							theExpertise=def;
					}
					if(theExpertise==null)
					for(int v=0;v<V.size();v++)
					{
						ExpertiseLibrary.ExpertiseDefinition def=(ExpertiseLibrary.ExpertiseDefinition)V.elementAt(v);
						if((CMLib.english().containsString(def.name,s)
						&&(theExpertise==null)))
							theExpertise=def;
					}
					if(theExpertise!=null)
					{
						if(student.fetchExpertise(theExpertise.ID)!=null)
						{
							monster.tell(student.name()+" already knows "+theExpertise.name);
							CMLib.commands().postSay(monster,student,"You already know "+theExpertise.name,true,false);
							return;
						}
						if(!CMLib.expertises().myQualifiedExpertises(student).contains(theExpertise))
						{
							monster.tell(student.name()+" does not yet fully qualify for the expertise '"+theExpertise.name+"'.\n\rRequirements: "+CMLib.masking().maskDesc(theExpertise.allRequirements()));
							CMLib.commands().postSay(monster,student,"I'm sorry, you do not yet fully qualify for the expertise '"+theExpertise.name+"'.\n\rRequirements: "+CMLib.masking().maskDesc(theExpertise.allRequirements()),true,false);
							return;
						}
						if(((theExpertise.trainCost>0)&&(student.getTrains()<theExpertise.trainCost))
						||((theExpertise.practiceCost>0)&&(student.getPractices()<theExpertise.practiceCost))
						||((theExpertise.expCost>0)&&(student.getExperience()<theExpertise.expCost))
						||((theExpertise.qpCost>0)&&(student.getQuestPoint()<theExpertise.qpCost)))
						{
							monster.tell("Training for that expertise requires "+theExpertise.costDescription()+".");
							CMLib.commands().postSay(monster,student,"I'm sorry, but to learn the expertise '"+theExpertise.name+"' requires: "+theExpertise.costDescription(),true,false);
							return ;
						}
						if(!tryTeach(monster,student,theExpertise.name))
							return ;
						// Deduct the expertise costs, then grant it.
						student.setPractices(student.getPractices()-theExpertise.practiceCost);
						student.setTrains(student.getTrains()-theExpertise.trainCost);
						student.setExperience(student.getExperience()-theExpertise.expCost);
						student.setQuestPoint(student.getQuestPoint()-theExpertise.qpCost);
						student.addExpertise(theExpertise.ID);
					}
					else
					if((CMClass.findCharClass(s.trim())!=null))
						CMLib.commands().postSay(monster,student,"I've heard of "+s+", but that's an class-- try TRAINing for it.",true,false);
					else
						CMLib.commands().postSay(monster,student,"I'm sorry, I've never heard of "+s,true,false);
					return;
				}
				if(giveABonus)
				{
					// "Please" bonus: temporarily raise teaching stats; restored below after teach().
					monster.baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,25);
					monster.baseCharStats().setStat(CharStats.STAT_WISDOM,25);
					monster.recoverCharStats();
				}
				if(student.fetchAbility(myAbility.ID())!=null)
				{
					CMLib.commands().postSay(monster,student,"But you already know '"+myAbility.name()+"'.",true,false);
					return;
				}
				// Start the student at half of 75% of their maximum proficiency.
				int prof75=(int)Math.round(CMath.mul(CMLib.ableMapper().getMaxProficiency(student,true,myAbility.ID()),0.75));
				myAbility.setProficiency(prof75/2);
				if(!myAbility.canBeTaughtBy(monster,student)) return;
				if(!myAbility.canBeLearnedBy(monster,student)) return;
				if(!tryTeach(monster,student,myAbility.name())) return ;
				myAbility.teach(monster,student);
				monster.baseCharStats().setStat(CharStats.STAT_INTELLIGENCE,19);
				monster.baseCharStats().setStat(CharStats.STAT_WISDOM,19);
				monster.recoverCharStats();
			}
		}
	}
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 *
 * Code generated by Microsoft (R) AutoRest Code Generator.
 */

package com.microsoft.azure.management.kusto.v2019_11_09.implementation;

import retrofit2.Retrofit;
import com.google.common.reflect.TypeToken;
import com.microsoft.azure.CloudException;
import com.microsoft.rest.ServiceCallback;
import com.microsoft.rest.ServiceFuture;
import com.microsoft.rest.ServiceResponse;
import com.microsoft.rest.Validator;
import java.io.IOException;
import java.util.List;
import okhttp3.ResponseBody;
import retrofit2.http.Body;
import retrofit2.http.GET;
import retrofit2.http.Header;
import retrofit2.http.Headers;
import retrofit2.http.HTTP;
import retrofit2.http.Path;
import retrofit2.http.PUT;
import retrofit2.http.Query;
import retrofit2.Response;
import rx.functions.Func1;
import rx.Observable;

/**
 * An instance of this class provides access to all the operations defined
 * in AttachedDatabaseConfigurations.
 *
 * NOTE: this file is AutoRest-generated (see header); hand edits will be
 * overwritten on the next code generation run.
 */
public class AttachedDatabaseConfigurationsInner {
    /** The Retrofit service to perform REST calls. */
    private AttachedDatabaseConfigurationsService service;
    /** The service client containing this operation class. */
    private KustoManagementClientImpl client;

    /**
     * Initializes an instance of AttachedDatabaseConfigurationsInner.
     *
     * @param retrofit the Retrofit instance built from a Retrofit Builder.
     * @param client the instance of the service client containing this operation class.
     */
    public AttachedDatabaseConfigurationsInner(Retrofit retrofit, KustoManagementClientImpl client) {
        this.service = retrofit.create(AttachedDatabaseConfigurationsService.class);
        this.client = client;
    }

    /**
     * The interface defining all the services for AttachedDatabaseConfigurations,
     * used by Retrofit to perform the actual REST calls.
     */
    interface AttachedDatabaseConfigurationsService {
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.kusto.v2019_11_09.AttachedDatabaseConfigurations listByCluster" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Kusto/clusters/{clusterName}/attachedDatabaseConfigurations")
        Observable<Response<ResponseBody>> listByCluster(@Path("resourceGroupName") String resourceGroupName, @Path("clusterName") String clusterName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.kusto.v2019_11_09.AttachedDatabaseConfigurations get" })
        @GET("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Kusto/clusters/{clusterName}/attachedDatabaseConfigurations/{attachedDatabaseConfigurationName}")
        Observable<Response<ResponseBody>> get(@Path("resourceGroupName") String resourceGroupName, @Path("clusterName") String clusterName, @Path("attachedDatabaseConfigurationName") String attachedDatabaseConfigurationName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.kusto.v2019_11_09.AttachedDatabaseConfigurations createOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Kusto/clusters/{clusterName}/attachedDatabaseConfigurations/{attachedDatabaseConfigurationName}")
        Observable<Response<ResponseBody>> createOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("clusterName") String clusterName, @Path("attachedDatabaseConfigurationName") String attachedDatabaseConfigurationName, @Path("subscriptionId") String subscriptionId, @Body AttachedDatabaseConfigurationInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.kusto.v2019_11_09.AttachedDatabaseConfigurations beginCreateOrUpdate" })
        @PUT("subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Kusto/clusters/{clusterName}/attachedDatabaseConfigurations/{attachedDatabaseConfigurationName}")
        Observable<Response<ResponseBody>> beginCreateOrUpdate(@Path("resourceGroupName") String resourceGroupName, @Path("clusterName") String clusterName, @Path("attachedDatabaseConfigurationName") String attachedDatabaseConfigurationName, @Path("subscriptionId") String subscriptionId, @Body AttachedDatabaseConfigurationInner parameters, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        // @HTTP is used instead of a @DELETE shortcut because Retrofit's @DELETE does not allow a request body.
        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.kusto.v2019_11_09.AttachedDatabaseConfigurations delete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Kusto/clusters/{clusterName}/attachedDatabaseConfigurations/{attachedDatabaseConfigurationName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> delete(@Path("resourceGroupName") String resourceGroupName, @Path("clusterName") String clusterName, @Path("attachedDatabaseConfigurationName") String attachedDatabaseConfigurationName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

        @Headers({ "Content-Type: application/json; charset=utf-8", "x-ms-logging-context: com.microsoft.azure.management.kusto.v2019_11_09.AttachedDatabaseConfigurations beginDelete" })
        @HTTP(path = "subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Kusto/clusters/{clusterName}/attachedDatabaseConfigurations/{attachedDatabaseConfigurationName}", method = "DELETE", hasBody = true)
        Observable<Response<ResponseBody>> beginDelete(@Path("resourceGroupName") String resourceGroupName, @Path("clusterName") String clusterName, @Path("attachedDatabaseConfigurationName") String attachedDatabaseConfigurationName, @Path("subscriptionId") String subscriptionId, @Query("api-version") String apiVersion, @Header("accept-language") String acceptLanguage, @Header("User-Agent") String userAgent);

    }

    /**
     * Returns the list of attached database configurations of the given Kusto cluster.
     * Blocks the calling thread until the response arrives.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the List&lt;AttachedDatabaseConfigurationInner&gt; object if successful.
     */
    public List<AttachedDatabaseConfigurationInner> listByCluster(String resourceGroupName, String clusterName) {
        return listByClusterWithServiceResponseAsync(resourceGroupName, clusterName).toBlocking().single().body();
    }

    /**
     * Returns the list of attached database configurations of the given Kusto cluster.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<List<AttachedDatabaseConfigurationInner>> listByClusterAsync(String resourceGroupName, String clusterName, final ServiceCallback<List<AttachedDatabaseConfigurationInner>> serviceCallback) {
        return ServiceFuture.fromResponse(listByClusterWithServiceResponseAsync(resourceGroupName, clusterName), serviceCallback);
    }

    /**
     * Returns the list of attached database configurations of the given Kusto cluster.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;AttachedDatabaseConfigurationInner&gt; object
     */
    public Observable<List<AttachedDatabaseConfigurationInner>> listByClusterAsync(String resourceGroupName, String clusterName) {
        return listByClusterWithServiceResponseAsync(resourceGroupName, clusterName).map(new Func1<ServiceResponse<List<AttachedDatabaseConfigurationInner>>, List<AttachedDatabaseConfigurationInner>>() {
            @Override
            public List<AttachedDatabaseConfigurationInner> call(ServiceResponse<List<AttachedDatabaseConfigurationInner>> response) {
                return response.body();
            }
        });
    }

    /**
     * Returns the list of attached database configurations of the given Kusto cluster.
     * Validates all required parameters client-side before issuing the request.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the List&lt;AttachedDatabaseConfigurationInner&gt; object
     */
    public Observable<ServiceResponse<List<AttachedDatabaseConfigurationInner>>> listByClusterWithServiceResponseAsync(String resourceGroupName, String clusterName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.listByCluster(resourceGroupName, clusterName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<List<AttachedDatabaseConfigurationInner>>>>() {
                @Override
                public Observable<ServiceResponse<List<AttachedDatabaseConfigurationInner>>> call(Response<ResponseBody> response) {
                    try {
                        // The wire format is a page object; unwrap its items into a plain List for callers.
                        ServiceResponse<PageImpl<AttachedDatabaseConfigurationInner>> result = listByClusterDelegate(response);
                        List<AttachedDatabaseConfigurationInner> items = null;
                        if (result.body() != null) {
                            items = result.body().items();
                        }
                        ServiceResponse<List<AttachedDatabaseConfigurationInner>> clientResponse = new ServiceResponse<List<AttachedDatabaseConfigurationInner>>(items, result.response());
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response; only 200 is a success, anything else maps to CloudException.
    private ServiceResponse<PageImpl<AttachedDatabaseConfigurationInner>> listByClusterDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<PageImpl<AttachedDatabaseConfigurationInner>, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<PageImpl<AttachedDatabaseConfigurationInner>>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Returns an attached database configuration.
     * Blocks the calling thread until the response arrives.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the AttachedDatabaseConfigurationInner object if successful.
     */
    public AttachedDatabaseConfigurationInner get(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        return getWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName).toBlocking().single().body();
    }

    /**
     * Returns an attached database configuration.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AttachedDatabaseConfigurationInner> getAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, final ServiceCallback<AttachedDatabaseConfigurationInner> serviceCallback) {
        return ServiceFuture.fromResponse(getWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName), serviceCallback);
    }

    /**
     * Returns an attached database configuration.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AttachedDatabaseConfigurationInner object
     */
    public Observable<AttachedDatabaseConfigurationInner> getAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        return getWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName).map(new Func1<ServiceResponse<AttachedDatabaseConfigurationInner>, AttachedDatabaseConfigurationInner>() {
            @Override
            public AttachedDatabaseConfigurationInner call(ServiceResponse<AttachedDatabaseConfigurationInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Returns an attached database configuration.
     * Validates all required parameters client-side before issuing the request.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AttachedDatabaseConfigurationInner object
     */
    public Observable<ServiceResponse<AttachedDatabaseConfigurationInner>> getWithServiceResponseAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (attachedDatabaseConfigurationName == null) {
            throw new IllegalArgumentException("Parameter attachedDatabaseConfigurationName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.get(resourceGroupName, clusterName, attachedDatabaseConfigurationName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<AttachedDatabaseConfigurationInner>>>() {
                @Override
                public Observable<ServiceResponse<AttachedDatabaseConfigurationInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<AttachedDatabaseConfigurationInner> clientResponse = getDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response; only 200 is a success, anything else maps to CloudException.
    private ServiceResponse<AttachedDatabaseConfigurationInner> getDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<AttachedDatabaseConfigurationInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<AttachedDatabaseConfigurationInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Creates or updates an attached database configuration.
     * Blocks until the long-running operation completes (note {@code last()} rather than {@code single()}:
     * the underlying observable emits intermediate polling results).
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the AttachedDatabaseConfigurationInner object if successful.
     */
    public AttachedDatabaseConfigurationInner createOrUpdate(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName, parameters).toBlocking().last().body();
    }

    /**
     * Creates or updates an attached database configuration.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AttachedDatabaseConfigurationInner> createOrUpdateAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters, final ServiceCallback<AttachedDatabaseConfigurationInner> serviceCallback) {
        return ServiceFuture.fromResponse(createOrUpdateWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName, parameters), serviceCallback);
    }

    /**
     * Creates or updates an attached database configuration.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<AttachedDatabaseConfigurationInner> createOrUpdateAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters) {
        return createOrUpdateWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName, parameters).map(new Func1<ServiceResponse<AttachedDatabaseConfigurationInner>, AttachedDatabaseConfigurationInner>() {
            @Override
            public AttachedDatabaseConfigurationInner call(ServiceResponse<AttachedDatabaseConfigurationInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Creates or updates an attached database configuration.
     * Validates all required parameters client-side, then hands the PUT observable to the
     * AzureClient, which tracks the long-running operation until a terminal result.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<AttachedDatabaseConfigurationInner>> createOrUpdateWithServiceResponseAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (attachedDatabaseConfigurationName == null) {
            throw new IllegalArgumentException("Parameter attachedDatabaseConfigurationName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(parameters);
        Observable<Response<ResponseBody>> observable = service.createOrUpdate(resourceGroupName, clusterName, attachedDatabaseConfigurationName, this.client.subscriptionId(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        return client.getAzureClient().getPutOrPatchResultAsync(observable, new TypeToken<AttachedDatabaseConfigurationInner>() { }.getType());
    }

    /**
     * Creates or updates an attached database configuration.
     * The begin* variant issues the initial PUT only and does not wait for the
     * long-running operation to finish (contrast with {@link #createOrUpdate}).
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     * @return the AttachedDatabaseConfigurationInner object if successful.
     */
    public AttachedDatabaseConfigurationInner beginCreateOrUpdate(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters) {
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName, parameters).toBlocking().single().body();
    }

    /**
     * Creates or updates an attached database configuration.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<AttachedDatabaseConfigurationInner> beginCreateOrUpdateAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters, final ServiceCallback<AttachedDatabaseConfigurationInner> serviceCallback) {
        return ServiceFuture.fromResponse(beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName, parameters), serviceCallback);
    }

    /**
     * Creates or updates an attached database configuration.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AttachedDatabaseConfigurationInner object
     */
    public Observable<AttachedDatabaseConfigurationInner> beginCreateOrUpdateAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters) {
        return beginCreateOrUpdateWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName, parameters).map(new Func1<ServiceResponse<AttachedDatabaseConfigurationInner>, AttachedDatabaseConfigurationInner>() {
            @Override
            public AttachedDatabaseConfigurationInner call(ServiceResponse<AttachedDatabaseConfigurationInner> response) {
                return response.body();
            }
        });
    }

    /**
     * Creates or updates an attached database configuration.
     * Validates all required parameters client-side before issuing the request.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param parameters The database parameters supplied to the CreateOrUpdate operation.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the AttachedDatabaseConfigurationInner object
     */
    public Observable<ServiceResponse<AttachedDatabaseConfigurationInner>> beginCreateOrUpdateWithServiceResponseAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, AttachedDatabaseConfigurationInner parameters) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (attachedDatabaseConfigurationName == null) {
            throw new IllegalArgumentException("Parameter attachedDatabaseConfigurationName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (parameters == null) {
            throw new IllegalArgumentException("Parameter parameters is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Validator.validate(parameters);
        return service.beginCreateOrUpdate(resourceGroupName, clusterName, attachedDatabaseConfigurationName, this.client.subscriptionId(), parameters, this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<AttachedDatabaseConfigurationInner>>>() {
                @Override
                public Observable<ServiceResponse<AttachedDatabaseConfigurationInner>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<AttachedDatabaseConfigurationInner> clientResponse = beginCreateOrUpdateDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response; 200/201/202 are all accepted as success statuses
    // (202 indicates the operation was accepted and is still in progress).
    private ServiceResponse<AttachedDatabaseConfigurationInner> beginCreateOrUpdateDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<AttachedDatabaseConfigurationInner, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<AttachedDatabaseConfigurationInner>() { }.getType())
                .register(201, new TypeToken<AttachedDatabaseConfigurationInner>() { }.getType())
                .register(202, new TypeToken<AttachedDatabaseConfigurationInner>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

    /**
     * Deletes the attached database configuration with the given name.
     * Blocks until the long-running operation completes (note {@code last()}: intermediate
     * polling results are skipped).
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void delete(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        deleteWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName).toBlocking().last().body();
    }

    /**
     * Deletes the attached database configuration with the given name.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> deleteAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(deleteWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName), serviceCallback);
    }

    /**
     * Deletes the attached database configuration with the given name.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> deleteAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        return deleteWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the attached database configuration with the given name.
     * Validates all required parameters client-side, then hands the DELETE observable to the
     * AzureClient, which tracks the long-running operation until a terminal result.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<ServiceResponse<Void>> deleteWithServiceResponseAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (attachedDatabaseConfigurationName == null) {
            throw new IllegalArgumentException("Parameter attachedDatabaseConfigurationName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        Observable<Response<ResponseBody>> observable = service.delete(resourceGroupName, clusterName, attachedDatabaseConfigurationName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent());
        return client.getAzureClient().getPostOrDeleteResultAsync(observable, new TypeToken<Void>() { }.getType());
    }

    /**
     * Deletes the attached database configuration with the given name.
     * The begin* variant issues the initial DELETE only and does not wait for the
     * long-running operation to finish (contrast with {@link #delete}).
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @throws CloudException thrown if the request is rejected by server
     * @throws RuntimeException all other wrapped checked exceptions if the request fails to be sent
     */
    public void beginDelete(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        beginDeleteWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName).toBlocking().single().body();
    }

    /**
     * Deletes the attached database configuration with the given name.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @param serviceCallback the async ServiceCallback to handle successful and failed responses.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the {@link ServiceFuture} object
     */
    public ServiceFuture<Void> beginDeleteAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName, final ServiceCallback<Void> serviceCallback) {
        return ServiceFuture.fromResponse(beginDeleteWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName), serviceCallback);
    }

    /**
     * Deletes the attached database configuration with the given name.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable for the request
     */
    public Observable<Void> beginDeleteAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        return beginDeleteWithServiceResponseAsync(resourceGroupName, clusterName, attachedDatabaseConfigurationName).map(new Func1<ServiceResponse<Void>, Void>() {
            @Override
            public Void call(ServiceResponse<Void> response) {
                return response.body();
            }
        });
    }

    /**
     * Deletes the attached database configuration with the given name.
     * Validates all required parameters client-side before issuing the request.
     *
     * @param resourceGroupName The name of the resource group containing the Kusto cluster.
     * @param clusterName The name of the Kusto cluster.
     * @param attachedDatabaseConfigurationName The name of the attached database configuration.
     * @throws IllegalArgumentException thrown if parameters fail the validation
     * @return the observable to the {@link ServiceResponse} object
     */
    public Observable<ServiceResponse<Void>> beginDeleteWithServiceResponseAsync(String resourceGroupName, String clusterName, String attachedDatabaseConfigurationName) {
        if (resourceGroupName == null) {
            throw new IllegalArgumentException("Parameter resourceGroupName is required and cannot be null.");
        }
        if (clusterName == null) {
            throw new IllegalArgumentException("Parameter clusterName is required and cannot be null.");
        }
        if (attachedDatabaseConfigurationName == null) {
            throw new IllegalArgumentException("Parameter attachedDatabaseConfigurationName is required and cannot be null.");
        }
        if (this.client.subscriptionId() == null) {
            throw new IllegalArgumentException("Parameter this.client.subscriptionId() is required and cannot be null.");
        }
        if (this.client.apiVersion() == null) {
            throw new IllegalArgumentException("Parameter this.client.apiVersion() is required and cannot be null.");
        }
        return service.beginDelete(resourceGroupName, clusterName, attachedDatabaseConfigurationName, this.client.subscriptionId(), this.client.apiVersion(), this.client.acceptLanguage(), this.client.userAgent())
            .flatMap(new Func1<Response<ResponseBody>, Observable<ServiceResponse<Void>>>() {
                @Override
                public Observable<ServiceResponse<Void>> call(Response<ResponseBody> response) {
                    try {
                        ServiceResponse<Void> clientResponse = beginDeleteDelegate(response);
                        return Observable.just(clientResponse);
                    } catch (Throwable t) {
                        return Observable.error(t);
                    }
                }
            });
    }

    // Deserializes the raw HTTP response; 200/202/204 are all accepted as success statuses
    // (202 = accepted/in progress, 204 = no content).
    private ServiceResponse<Void> beginDeleteDelegate(Response<ResponseBody> response) throws CloudException, IOException, IllegalArgumentException {
        return this.client.restClient().responseBuilderFactory().<Void, CloudException>newInstance(this.client.serializerAdapter())
                .register(200, new TypeToken<Void>() { }.getType())
                .register(202, new TypeToken<Void>() { }.getType())
                .register(204, new TypeToken<Void>() { }.getType())
                .registerError(CloudException.class)
                .build(response);
    }

}
package org.robolectric.shadows; import android.content.IntentFilter; import android.net.Uri; import org.junit.Test; import org.junit.runner.RunWith; import org.robolectric.TestRunners; import static org.assertj.core.api.Assertions.assertThat; @RunWith(TestRunners.MultiApiSelfTest.class) public class ShadowIntentFilterTest { @Test public void copyConstructorTest() throws Exception { String action = "test"; IntentFilter intentFilter = new IntentFilter(action); IntentFilter copy = new IntentFilter(intentFilter); assertThat(copy.hasAction("test")).isTrue(); } @Test public void setsPriority() throws Exception { IntentFilter filter = new IntentFilter(); filter.setPriority(123); assertThat(filter.getPriority()).isEqualTo(123); } @Test public void addDataScheme_shouldAddTheDataScheme() throws Exception { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataScheme("http"); intentFilter.addDataScheme("ftp"); assertThat(intentFilter.getDataScheme(0)).isEqualTo("http"); assertThat(intentFilter.getDataScheme(1)).isEqualTo("ftp"); } @Test public void addDataAuthority_shouldAddTheDataAuthority() throws Exception { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataAuthority("test.com", "8080"); intentFilter.addDataAuthority("example.com", "42"); assertThat(intentFilter.getDataAuthority(0).getHost()).isEqualTo("test.com"); assertThat(intentFilter.getDataAuthority(0).getPort()).isEqualTo(8080); assertThat(intentFilter.getDataAuthority(1).getHost()).isEqualTo("example.com"); assertThat(intentFilter.getDataAuthority(1).getPort()).isEqualTo(42); } @Test public void addDataType_shouldAddTheDataType() throws Exception { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataType("image/test"); assertThat(intentFilter.getDataType(0)).isEqualTo("image/test"); } @Test public void hasAction() { IntentFilter intentFilter = new IntentFilter(); assertThat(intentFilter.hasAction("test")).isFalse(); intentFilter.addAction("test"); 
assertThat(intentFilter.hasAction("test")).isTrue(); } @Test public void hasDataScheme() { IntentFilter intentFilter = new IntentFilter(); assertThat(intentFilter.hasDataScheme("test")).isFalse(); intentFilter.addDataScheme("test"); assertThat(intentFilter.hasDataScheme("test")).isTrue(); } @Test public void hasDataType() throws IntentFilter.MalformedMimeTypeException{ IntentFilter intentFilter = new IntentFilter(); assertThat(intentFilter.hasDataType("image/test")).isFalse(); intentFilter.addDataType("image/test"); assertThat(intentFilter.hasDataType("image/test")).isTrue(); } @Test public void matchDataAuthority_matchHostAndPort() { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataAuthority("testHost1", "1"); intentFilter.addDataAuthority("testHost2", "2"); Uri uriTest1 = Uri.parse("http://testHost1:1"); Uri uriTest2 = Uri.parse("http://testHost2:2"); assertThat(intentFilter.matchDataAuthority(uriTest1)).isEqualTo(IntentFilter.MATCH_CATEGORY_PORT); assertThat(intentFilter.matchDataAuthority(uriTest2)).isEqualTo(IntentFilter.MATCH_CATEGORY_PORT); } @Test public void matchDataAuthority_matchHostWithNoPort() { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataAuthority("testHost1", "-1"); intentFilter.addDataAuthority("testHost2", "-1"); Uri uriTest1 = Uri.parse("http://testHost1:100"); Uri uriTest2 = Uri.parse("http://testHost2:200"); assertThat(intentFilter.matchDataAuthority(uriTest1)).isEqualTo(IntentFilter.MATCH_CATEGORY_HOST); assertThat(intentFilter.matchDataAuthority(uriTest2)).isEqualTo(IntentFilter.MATCH_CATEGORY_HOST); } @Test public void matchDataAuthority_NoMatch() { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataAuthority("testHost1", "1"); intentFilter.addDataAuthority("testHost2", "2"); // Port doesn't match Uri uriTest1 = Uri.parse("http://testHost1:2"); // Host doesn't match Uri uriTest2 = Uri.parse("http://testHost3:2"); assertThat(intentFilter.matchDataAuthority(uriTest1)).isEqualTo( 
IntentFilter.NO_MATCH_DATA); assertThat(intentFilter.matchDataAuthority(uriTest2)).isEqualTo( IntentFilter.NO_MATCH_DATA); } @Test public void matchData_MatchAll() throws IntentFilter.MalformedMimeTypeException{ IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataType("image/test"); intentFilter.addDataScheme("http"); intentFilter.addDataAuthority("testHost1", "1"); Uri uriTest1 = Uri.parse("http://testHost1:1"); assertThat(intentFilter.matchData("image/test", "http", uriTest1)) .isGreaterThanOrEqualTo(0); } @Test public void matchData_MatchType() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataType("image/test"); intentFilter.addDataScheme("http"); Uri uriTest1 = Uri.parse("http://testHost1:1"); assertThat(intentFilter.matchData("image/test", "http", uriTest1)) .isGreaterThanOrEqualTo(0); } @Test public void matchData_MatchScheme() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataScheme("http"); Uri uriTest1 = Uri.parse("http://testHost1:1"); assertThat(intentFilter.matchData(null, "http", uriTest1)) .isGreaterThanOrEqualTo(0); } @Test public void matchData_MatchEmpty() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); assertThat(intentFilter.matchData(null, "noscheme", null)) .isGreaterThanOrEqualTo(0); } @Test public void matchData_NoMatchType() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataType("image/testFail"); Uri uriTest1 = Uri.parse("http://testHost1:1"); assertThat(intentFilter.matchData("image/test", "http", uriTest1)) .isLessThan(0); } @Test public void matchData_NoMatchScheme() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataScheme("http"); intentFilter.addDataType("image/test"); Uri uriTest1 = 
Uri.parse("https://testHost1:1"); assertThat(intentFilter.matchData("image/test", "https", uriTest1)) .isLessThan(0); } @Test public void matchData_NoMatchDataAuthority() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataType("image/test"); intentFilter.addDataScheme("http"); intentFilter.addDataAuthority("testHost1", "1"); Uri uriTest1 = Uri.parse("http://testHost1:2"); assertThat(intentFilter.matchData("image/test", "http", uriTest1)) .isLessThan(0); } @Test public void matchData_MatchSchemeNoMatchType() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataScheme("http"); intentFilter.addDataType("image/testFail"); Uri uriTest1 = Uri.parse("http://testHost1:1"); assertThat(intentFilter.matchData("image/test", "http", uriTest1)) .isLessThan(0); } @Test public void matchData_MatchesPartialType() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataScheme("content"); intentFilter.addDataType("image/*"); Uri uri = Uri.parse("content://authority/images"); assertThat(intentFilter.matchData("image/test", "content", uri)).isGreaterThanOrEqualTo(0); assertThat(intentFilter.matchData("video/test", "content", uri)).isLessThan(0); } @Test public void matchData_MatchesAnyTypeAndSubtype() throws IntentFilter.MalformedMimeTypeException { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataScheme("content"); intentFilter.addDataType("*/*"); Uri uri = Uri.parse("content://authority/images"); assertThat(intentFilter.matchData("image/test", "content", uri)).isGreaterThanOrEqualTo(0); assertThat(intentFilter.matchData("image/*", "content", uri)).isGreaterThanOrEqualTo(0); assertThat(intentFilter.matchData("video/test", "content", uri)).isGreaterThanOrEqualTo(0); assertThat(intentFilter.matchData("video/*", "content", uri)).isGreaterThanOrEqualTo(0); } @Test public void 
testCountDataTypes() throws Exception { IntentFilter intentFilter = new IntentFilter(); intentFilter.addDataType("image/*"); intentFilter.addDataType("audio/*"); assertThat(intentFilter.countDataTypes()).isEqualTo(2); } }
/*
 * Copyright 2000-2011 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.testIntegration.createTest;

import com.intellij.codeInsight.CodeInsightBundle;
import com.intellij.codeInsight.CodeInsightUtil;
import com.intellij.codeInsight.FileModificationService;
import com.intellij.codeInsight.template.Template;
import com.intellij.ide.fileTemplates.FileTemplate;
import com.intellij.ide.fileTemplates.FileTemplateDescriptor;
import com.intellij.ide.fileTemplates.FileTemplateManager;
import com.intellij.ide.fileTemplates.FileTemplateUtil;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.fileEditor.ex.IdeDocumentHistory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.GlobalSearchScopesCore;
import com.intellij.refactoring.util.classMembers.MemberInfo;
import com.intellij.testIntegration.TestFramework;
import com.intellij.testIntegration.TestIntegrationUtils;
import com.intellij.util.Function;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;
import java.util.HashSet;
import java.util.Properties;
import java.util.Set;

/**
 * Creates a Java test class (optionally from a framework-provided file template)
 * and fills it with setUp/tearDown and per-method test stubs, based on the
 * selections made in a {@link CreateTestDialog}.
 */
public class JavaTestGenerator implements TestGenerator {
  public JavaTestGenerator() {
  }

  /**
   * Generates (or reuses) the test class described by the dialog and inserts the
   * requested test methods. Returns the resulting class, or null on failure
   * (an error dialog is scheduled in that case).
   */
  public PsiElement generateTest(final Project project, final CreateTestDialog d) {
    // Postpone reformatting until all PSI mutations are done; the actual work
    // must run inside a write action.
    return PostprocessReformattingAspect.getInstance(project).postponeFormattingInside(new Computable<PsiElement>() {
      public PsiElement compute() {
        return ApplicationManager.getApplication().runWriteAction(new Computable<PsiElement>() {
          public PsiElement compute() {
            try {
              // Record the current caret position so "Back" navigation returns here.
              IdeDocumentHistory.getInstance(project).includeCurrentPlaceAsChangePlace();

              PsiClass targetClass = createTestClass(d);
              if (targetClass == null) {
                return null;
              }
              final TestFramework frameworkDescriptor = d.getSelectedTestFrameworkDescriptor();
              final String defaultSuperClass = frameworkDescriptor.getDefaultSuperClass();
              final String superClassName = d.getSuperClassName();
              // Only touch the extends list when the user chose a non-default superclass.
              if (!Comparing.strEqual(superClassName, defaultSuperClass)) {
                addSuperClass(targetClass, project, superClassName);
              }

              Editor editor = CodeInsightUtil.positionCursorAtLBrace(project, targetClass.getContainingFile(), targetClass);
              addTestMethods(editor,
                             targetClass,
                             d.getTargetClass(),
                             frameworkDescriptor,
                             d.getSelectedMethods(),
                             d.shouldGeneratedBefore(),
                             d.shouldGeneratedAfter());
              return targetClass;
            }
            catch (IncorrectOperationException e) {
              // PSI modification failed; report asynchronously (we are inside a write action).
              showErrorLater(project, d.getClassName());
              return null;
            }
          }
        });
      }
    });
  }

  /**
   * Finds an existing class with the requested name in the target directory's
   * package, or creates a new one — from the framework's file template when one
   * is available, otherwise as a plain empty class.
   */
  @Nullable
  private static PsiClass createTestClass(CreateTestDialog d) {
    final TestFramework testFrameworkDescriptor = d.getSelectedTestFrameworkDescriptor();
    final FileTemplateDescriptor fileTemplateDescriptor = TestIntegrationUtils.MethodKind.TEST_CLASS.getFileTemplateDescriptor(testFrameworkDescriptor);
    final PsiDirectory targetDirectory = d.getTargetDirectory();

    final PsiPackage aPackage = JavaDirectoryService.getInstance().getPackage(targetDirectory);
    if (aPackage != null) {
      // Reuse an existing class of the same short name in this directory, if writable.
      final GlobalSearchScope scope = GlobalSearchScopesCore.directoryScope(targetDirectory, false);
      final PsiClass[] classes = aPackage.findClassByShortName(d.getClassName(), scope);
      if (classes.length > 0) {
        if (!FileModificationService.getInstance().preparePsiElementForWrite(classes[0])) {
          return null;
        }
        return classes[0];
      }
    }

    if (fileTemplateDescriptor != null) {
      final PsiClass classFromTemplate = createTestClassFromCodeTemplate(d, fileTemplateDescriptor, targetDirectory);
      if (classFromTemplate != null) {
        return classFromTemplate;
      }
    }

    return JavaDirectoryService.getInstance().createClass(targetDirectory, d.getClassName());
  }

  /**
   * Instantiates the framework's code template in the target directory.
   * Returns null when the template does not produce a class (or on any error,
   * letting the caller fall back to creating a plain class).
   */
  private static PsiClass createTestClassFromCodeTemplate(final CreateTestDialog d,
                                                          final FileTemplateDescriptor fileTemplateDescriptor,
                                                          final PsiDirectory targetDirectory) {
    final String templateName = fileTemplateDescriptor.getFileName();
    final FileTemplate fileTemplate = FileTemplateManager.getInstance(targetDirectory.getProject()).getCodeTemplate(templateName);
    final Properties defaultProperties = FileTemplateManager.getInstance(targetDirectory.getProject()).getDefaultProperties();
    Properties properties = new Properties(defaultProperties);
    properties.setProperty(FileTemplate.ATTRIBUTE_NAME, d.getClassName());
    final PsiClass targetClass = d.getTargetClass();
    if (targetClass != null && targetClass.isValid()) {
      // Expose the class under test to the template as CLASS_NAME.
      properties.setProperty(FileTemplate.ATTRIBUTE_CLASS_NAME, targetClass.getQualifiedName());
    }
    try {
      final PsiElement psiElement = FileTemplateUtil.createFromTemplate(fileTemplate, templateName, properties, targetDirectory);
      if (psiElement instanceof PsiClass) {
        return (PsiClass)psiElement;
      }
      return null;
    }
    catch (Exception e) {
      // Template instantiation failed; caller falls back to a plain class.
      return null;
    }
  }

  /**
   * Sets (or replaces) the superclass of the generated test class.
   * Uses a resolved class reference when the superclass exists, otherwise a
   * fully-qualified name reference.
   */
  private static void addSuperClass(PsiClass targetClass, Project project, String superClassName) throws IncorrectOperationException {
    if (superClassName == null) return;

    final PsiReferenceList extendsList = targetClass.getExtendsList();
    if (extendsList == null) return;

    PsiElementFactory ef = JavaPsiFacade.getInstance(project).getElementFactory();
    PsiJavaCodeReferenceElement superClassRef;

    PsiClass superClass = findClass(project, superClassName);
    if (superClass != null) {
      superClassRef = ef.createClassReferenceElement(superClass);
    }
    else {
      superClassRef = ef.createFQClassNameReferenceElement(superClassName, GlobalSearchScope.allScope(project));
    }
    final PsiJavaCodeReferenceElement[] referenceElements = extendsList.getReferenceElements();
    if (referenceElements.length == 0) {
      extendsList.add(superClassRef);
    }
    else {
      // Replace the existing superclass reference rather than appending a second one.
      referenceElements[0].replace(superClassRef);
    }
  }

  // Resolves a fully-qualified class name in the whole project scope; null if absent.
  @Nullable
  private static PsiClass findClass(Project project, String fqName) {
    GlobalSearchScope scope = GlobalSearchScope.allScope(project);
    return JavaPsiFacade.getInstance(project).findClass(fqName, scope);
  }

  /** Convenience overload without a source class. */
  public static void addTestMethods(Editor editor,
                                    PsiClass targetClass,
                                    final TestFramework descriptor,
                                    Collection<MemberInfo> methods,
                                    boolean generateBefore,
                                    boolean generateAfter) throws IncorrectOperationException {
    addTestMethods(editor, targetClass, null, descriptor, methods, generateBefore, generateAfter);
  }

  /**
   * Appends setUp/tearDown (when requested and not already present) followed by
   * one test method stub per selected member, keeping generated names unique.
   */
  public static void addTestMethods(Editor editor,
                                    PsiClass targetClass,
                                    @Nullable PsiClass sourceClass,
                                    final TestFramework descriptor,
                                    Collection<MemberInfo> methods,
                                    boolean generateBefore,
                                    boolean generateAfter) throws IncorrectOperationException {
    final Set<String> existingNames = new HashSet<String>();
    PsiMethod anchor = null;
    if (generateBefore && descriptor.findSetUpMethod(targetClass) == null) {
      anchor = generateMethod(TestIntegrationUtils.MethodKind.SET_UP, descriptor, targetClass, sourceClass, editor, null, existingNames, null);
    }
    if (generateAfter && descriptor.findTearDownMethod(targetClass) == null) {
      anchor = generateMethod(TestIntegrationUtils.MethodKind.TEAR_DOWN, descriptor, targetClass, sourceClass, editor, null, existingNames, anchor);
    }

    // Derive the framework's test-method name prefix (e.g. "test") from the template,
    // then seed existingNames with the de-prefixed names of methods already present
    // so newly generated stubs do not collide with them.
    final Template template = TestIntegrationUtils.createTestMethodTemplate(TestIntegrationUtils.MethodKind.TEST, descriptor, targetClass, sourceClass, null, true, existingNames);
    final String prefix = JavaPsiFacade.getElementFactory(targetClass.getProject()).createMethodFromText(template.getTemplateText(), targetClass).getName();
    existingNames.addAll(ContainerUtil.map(targetClass.getMethods(), new Function<PsiMethod, String>() {
      @Override
      public String fun(PsiMethod method) {
        return StringUtil.decapitalize(StringUtil.trimStart(method.getName(), prefix));
      }
    }));

    for (MemberInfo m : methods) {
      anchor = generateMethod(TestIntegrationUtils.MethodKind.TEST, descriptor, targetClass, sourceClass, editor, m.getMember().getName(), existingNames, anchor);
    }
  }

  // Shows the "cannot create class" dialog on the EDT, outside the write action.
  private static void showErrorLater(final Project project, final String targetClassName) {
    ApplicationManager.getApplication().invokeLater(new Runnable() {
      public void run() {
        Messages.showErrorDialog(project,
                                 CodeInsightBundle.message("intention.error.cannot.create.class.message", targetClassName),
                                 CodeInsightBundle.message("intention.error.cannot.create.class.title"));
      }
    });
  }

  /**
   * Inserts a placeholder method after {@code anchor} and runs the framework's
   * live template over it to produce the final stub. Returns the inserted method
   * so it can anchor the next insertion.
   */
  private static PsiMethod generateMethod(TestIntegrationUtils.MethodKind methodKind,
                                          TestFramework descriptor,
                                          PsiClass targetClass,
                                          @Nullable PsiClass sourceClass,
                                          Editor editor,
                                          @Nullable String name,
                                          Set<String> existingNames,
                                          PsiMethod anchor) {
    PsiMethod method = (PsiMethod)targetClass.addAfter(TestIntegrationUtils.createDummyMethod(targetClass), anchor);
    // Flush pending PSI changes into the document before template expansion.
    PsiDocumentManager.getInstance(targetClass.getProject()).doPostponedOperationsAndUnblockDocument(editor.getDocument());
    TestIntegrationUtils.runTestMethodTemplate(methodKind, descriptor, editor, targetClass, sourceClass, method, name, true, existingNames);
    return method;
  }

  @Override
  public String toString() {
    return CodeInsightBundle.message("intention.create.test.dialog.java");
  }
}
package org.drools.guvnor.client.asseteditor.drools.standalone; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; import com.google.gwt.event.shared.EventBus; import com.google.gwt.user.client.Command; import com.google.gwt.user.client.Window; import com.google.gwt.user.client.ui.DockLayoutPanel; import com.google.gwt.user.client.ui.Panel; import com.google.gwt.user.client.ui.ScrollPanel; import org.drools.guvnor.client.common.GenericCallback; import org.drools.guvnor.client.common.LoadingPopup; import org.drools.guvnor.client.explorer.ClientFactory; import org.drools.guvnor.client.messages.Constants; import org.drools.guvnor.client.rpc.Asset; import org.drools.guvnor.client.rpc.StandaloneEditorService; import org.drools.guvnor.client.rpc.StandaloneEditorServiceAsync; import java.util.Arrays; import java.util.HashSet; import java.util.Set; import org.drools.guvnor.client.asseteditor.MultiViewEditor; import org.drools.guvnor.client.asseteditor.MultiViewEditorMenuBarCreator; import org.drools.guvnor.client.asseteditor.drools.OryxMultiViewEditorMenuBarCreator; import org.drools.guvnor.client.asseteditor.drools.modeldriven.ui.RuleModellerConfiguration; import org.drools.guvnor.client.moduleeditor.drools.WorkingSetManager; import org.drools.guvnor.client.rpc.StandaloneEditorInvocationParameters; import org.drools.guvnor.client.widgets.toolbar.StandaloneEditorIndividualActionToolbarButtonsConfigurationProvider; /** * Class used to manage the stand-alone version of Guvnor's Editors */ public class StandaloneEditorManager { private final ClientFactory clientFactory; private MultiViewEditor editor; private StandaloneEditorServiceAsync standaloneEditorService = GWT.create(StandaloneEditorService.class); private Asset[] assets; private final EventBus eventBus; public StandaloneEditorManager(ClientFactory clientFactory, EventBus eventBus) { this.clientFactory = clientFactory; this.eventBus = eventBus; } public Panel getBaseLayout() { String 
parametersUUID = Window.Location.getParameter("pUUID"); if (parametersUUID == null || parametersUUID.trim().equals("")) { return null; } //init JS hooks this.setHooks(this); DockLayoutPanel mainLayout = new DockLayoutPanel(Unit.EM); final ScrollPanel mainPanel = new ScrollPanel(); mainLayout.add(mainPanel); //The package must exist (because we need at least a model to work with) //To make things easier (to me), the category must exist too. standaloneEditorService.getInvocationParameters(parametersUUID, new GenericCallback<StandaloneEditorInvocationParameters>() { public void onSuccess(final StandaloneEditorInvocationParameters parameters) { //no assets? This is an error! if (parameters.getAssetsToBeEdited().length == 0) { Window.alert(Constants.INSTANCE.NoRulesFound()); return; } //we need to store the assets. StandaloneEditorManager.this.assets = parameters.getAssetsToBeEdited(); //Load SCE and create a MultiViewEditor for the assets. //We take the package from the first asset (because all the assets //must belong to the same package) final Command afterWorkingSetsAreAppliedCommand = new Command() { public void execute() { LoadingPopup.close(); //Configure RuleModeller RuleModellerConfiguration ruleModellerConfiguration = RuleModellerConfiguration.getDefault(); ruleModellerConfiguration.setHideLHS(parameters.isHideLHS()); ruleModellerConfiguration.setHideRHS(parameters.isHideRHS()); ruleModellerConfiguration.setHideAttributes(parameters.isHideAttributes()); //Create the editor MultiViewEditorMenuBarCreator editorMenuBarCreator; if (parameters.isTemporalAssets()) { editorMenuBarCreator = new TemporalAssetsMultiViewEditorMenuBarCreator(new Command() { //"Done" buton command public void execute() { afterSaveAndCloseCallbackFunction(); } }, new Command() { //"Done" buton command public void execute() { afterCancelButtonCallbackFunction(); } }); } else if (parameters.getClientName().equalsIgnoreCase("oryx")) { editorMenuBarCreator = new 
OryxMultiViewEditorMenuBarCreator(new Command() { // "Close" button command public void execute() { afterCloseButtonCallbackFunction(); } }, new Command() { // Before "Save All" button command public void execute() { beforeSaveAllCallbackFunction(); } }, new Command() { // After "Save All" button command public void execute() { afterSaveAllCallbackFunction(); } }); } else { editorMenuBarCreator = new RealAssetsMultiViewEditorMenuBarCreator(new Command() { //"Cancel" button command public void execute() { afterCancelButtonCallbackFunction(); } }); } editor = new MultiViewEditor( parameters.getAssetsToBeEdited(), clientFactory, eventBus, new StandaloneEditorIndividualActionToolbarButtonsConfigurationProvider(), editorMenuBarCreator); editor.setCloseCommand(new Command() { public void execute() { afterSaveAndCloseCallbackFunction(); } }); //Add the editor to main panel mainPanel.add(editor); } }; //Apply working set configurations Set<Asset> workingSetAssets = new HashSet<Asset>(); if (parameters.getActiveTemporalWorkingSets() != null && parameters.getActiveTemporalWorkingSets().length > 0){ workingSetAssets.addAll(Arrays.asList(parameters.getActiveTemporalWorkingSets())); } if (parameters.getActiveWorkingSets() != null && parameters.getActiveWorkingSets().length > 0){ workingSetAssets.addAll(Arrays.asList(parameters.getActiveWorkingSets())); } if (!workingSetAssets.isEmpty()){ //if there is any working-set to apply, then turn auto verifier on WorkingSetManager.getInstance().setAutoVerifierEnabled(true); } WorkingSetManager.getInstance().applyWorkingSets(assets[0].getMetaData().getModuleName(), workingSetAssets, afterWorkingSetsAreAppliedCommand); } }); return mainLayout; } /** * This method should be invoked from JS using window.getEditorDRL(). * Returns the DRL of the assets we are editing. Because this method is * asynchronous, the DRL code is passed to a callback function specified * in the JS invocation. 
*/ public void getDRLs() { if (assets == null || assets.length == 0) { returnDRL(""); } standaloneEditorService.getAsstesDRL(assets, new GenericCallback<String[]>() { public void onSuccess(String[] drls) { String result = ""; if (drls != null) { for (String drl : drls) { result += drl + "\n\n"; } } returnDRL(result); } }); } /** * This method should be invoked from JS using window.getEditorBRL(). * Returns the BRL of the assets we are editing. Because this method is * asynchronous, the BRL code is passed to a callback function specified * in the JS invocation. */ public void getBRLs() { if (assets == null || assets.length == 0) { returnDRL(""); } standaloneEditorService.getAsstesBRL(assets, new GenericCallback<String[]>() { public void onSuccess(String[] drls) { String result = ""; if (drls != null) { for (String drl : drls) { result += drl + "\n\n"; } } returnBRL(result); } }); } /** * Returns the uuids of the assets that are being edited in JSON format. * * @return */ public String getAssetsUUIDs() { StringBuilder uuids = new StringBuilder("["); String separator = ""; for (int i = 0; i < this.assets.length; i++) { uuids.append(separator); uuids.append("'"); uuids.append(this.assets[i].getUuid()); uuids.append("'"); if (separator.equals("")) { separator = ","; } } uuids.append("]"); return uuids.toString(); } /** * Creates 2 JS functions in window object: getDRLs() and getBRLs(). These * functions are used to retrieve the source code of the assets this component * is handling. * * @param app */ public native void setHooks(StandaloneEditorManager app)/*-{ var guvnorEditorObject = { drlCallbackFunction: null, brlCallbackFunction: null, //close function listener. 
The function you register here will be called //after the "Save and Close" button is pressed afterSaveAndCloseButtonCallbackFunction: null, afterCancelButtonCallbackFunction: null, afterSaveAllButtonCallbackFunction: null, beforeSaveAllButtonCallbackFunction: null, afterCloseButtonCallbackFunction: null, getDRL: function (callbackFunction) { this.drlCallbackFunction = callbackFunction; app.@org.drools.guvnor.client.asseteditor.drools.standalone.StandaloneEditorManager::getDRLs()(); }, getBRL: function (callbackFunction) { this.brlCallbackFunction = callbackFunction; app.@org.drools.guvnor.client.asseteditor.drools.standalone.StandaloneEditorManager::getBRLs()(); }, registerAfterSaveAndCloseButtonCallbackFunction: function (callbackFunction) { this.afterSaveAndCloseButtonCallbackFunction = callbackFunction; }, registerAfterSaveAllButtonCallbackFunction: function (callbackFunction) { this.afterSaveAllButtonCallbackFunction = callbackFunction; }, registerBeforeSaveAllButtonCallbackFunction: function (callbackFunction) { this.beforeSaveAllButtonCallbackFunction = callbackFunction; }, registerAfterCancelButtonCallbackFunction: function (callbackFunction) { this.afterCancelButtonCallbackFunction = callbackFunction; }, registerAfterCloseButtonCallbackFunction: function (callbackFunction) { this.afterCloseButtonCallbackFunction = callbackFunction; }, getAssetsUUIDs: function() { return app.@org.drools.guvnor.client.asseteditor.drools.standalone.StandaloneEditorManager::getAssetsUUIDs()(); } } $wnd.guvnorEditorObject = guvnorEditorObject; }-*/; /** * Callback method invoked from getDRLs(). * * @param drl */ public native void returnDRL(String drl)/*-{ if ($wnd.guvnorEditorObject.drlCallbackFunction) { $wnd.guvnorEditorObject.drlCallbackFunction(drl); } }-*/; /** * Callback method invoked from getDRLs(). 
* * @param brl */ public native void returnBRL(String brl)/*-{ if ($wnd.guvnorEditorObject.brlCallbackFunction) { $wnd.guvnorEditorObject.brlCallbackFunction(brl); } }-*/; /** * Method invoked after the "Save an Close" button is pressed. */ public native void afterSaveAndCloseCallbackFunction()/*-{ if ($wnd.guvnorEditorObject.afterSaveAndCloseButtonCallbackFunction) { $wnd.guvnorEditorObject.afterSaveAndCloseButtonCallbackFunction(); } }-*/; /** * Method invoked before the "Save All" button is pressed. */ public native void beforeSaveAllCallbackFunction()/*-{ if ($wnd.guvnorEditorObject.beforeSaveAllButtonCallbackFunction) { $wnd.guvnorEditorObject.beforeSaveAllButtonCallbackFunction(); } }-*/; /** * Method invoked after the "Save All" button is pressed. */ public native void afterSaveAllCallbackFunction()/*-{ if ($wnd.guvnorEditorObject.afterSaveAllButtonCallbackFunction) { $wnd.guvnorEditorObject.afterSaveAllButtonCallbackFunction(); } }-*/; public native void afterCancelButtonCallbackFunction()/*-{ if ($wnd.guvnorEditorObject.afterCancelButtonCallbackFunction) { $wnd.guvnorEditorObject.afterCancelButtonCallbackFunction(); } }-*/; public native void afterCloseButtonCallbackFunction()/*-{ if ($wnd.guvnorEditorObject.afterCloseButtonCallbackFunction) { $wnd.guvnorEditorObject.afterCloseButtonCallbackFunction(); }else{ //default behavior $wnd.opener.location.reload(); if (confirm("Are you sure you want to close this window?")) { $wnd.close(); } } }-*/; }