repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
ropik/error-prone
core/src/test/resources/com/google/errorprone/bugpatterns/ArrayToStringCompoundAssignmentPositiveCases.java
989
/* * Copyright 2013 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import java.util.*; /** * @author adgar@google.com (Mike Edgar) */ public class ArrayToStringCompoundAssignmentPositiveCases { private static final int[] a = {1, 2, 3}; public void stringVariableAddsArrayAndAssigns() { String b = "a string"; // BUG: Diagnostic contains: += Arrays.toString(a) b += a; } }
apache-2.0
kasungayan/product-is
modules/samples/oauth2/playground2/src/main/org/wso2/sample/identity/oauth2/OAuthPKCEAuthenticationRequestBuilder.java
1456
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.sample.identity.oauth2; import org.apache.oltu.oauth2.client.request.OAuthClientRequest; /** * This extends the default <code>OAuthClientRequest.AuthenticationRequestBuilder</code> to support PKCE as apache * oltu doesn't support pkce as of now. */ public class OAuthPKCEAuthenticationRequestBuilder extends OAuthClientRequest.AuthenticationRequestBuilder { public OAuthPKCEAuthenticationRequestBuilder(String url) { super(url); } public OAuthPKCEAuthenticationRequestBuilder setPKCECodeChallenge(String codeChallenge, String method) { this.parameters.put(OAuth2Constants.OAUTH2_PKCE_CODE_CHALLENGE, codeChallenge); this.parameters.put(OAuth2Constants.OAUTH2_PKCE_CODE_CHALLENGE_METHOD, method); return this; } }
apache-2.0
wjw465150/jodd
jodd-vtor/src/test/java/jodd/vtor/data/Zoo.java
1682
// Copyright (c) 2003-present, Jodd Team (http://jodd.org) // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, // this list of conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. package jodd.vtor.data; import jodd.vtor.constraint.MinLength; public class Zoo { @MinLength(value = 5, profiles = {"p1"}) String aaa = "a"; @MinLength(value = 5, profiles = {"p1", "p2"}) String bbb = "b"; @MinLength(value = 5) String ccc = "c"; }
bsd-2-clause
luttero/Maud
src/org/la4j/linear/LeastNormSolver.java
1815
/* * Copyright 2011-2013, by Vladimir Kostyukov and Contributors. * * This file is part of la4j project (http://la4j.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * You may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * Contributor(s): Maxim Samoylov * */ package org.la4j.linear; import org.la4j.LinearAlgebra; import org.la4j.Matrix; import org.la4j.Vector; /** * This class provides solution of "fat" linear system with least euclidean norm. * See details * <p> * <a href="http://see.stanford.edu/materials/lsoeldsee263/08-min-norm.pdf">here.</a> * </p> */ public class LeastNormSolver extends AbstractSolver implements LinearSystemSolver { protected LeastNormSolver(Matrix a) { super(a); } @Override public Vector solve(Vector b) { ensureRHSIsCorrect(b); Matrix temp = self().multiply(self().rotate()); Matrix pseudoInverse = self().rotate().multiply(temp.withInverter(LinearAlgebra.InverterFactory.GAUSS_JORDAN).inverse()); return pseudoInverse.multiply(b); } @Override public boolean applicableTo(Matrix matrix) { //TODO: we need to think about how to improve the speed here. //Note: Matrix.rank() works for O(N^3) which is quite slow. int r = matrix.rank(); return (r == matrix.rows() || r == matrix.columns()); } }
bsd-3-clause
dballesteros7/hackzurich2014
src/com/qualcomm/vuforia/samples/SampleApplication/SampleApplicationException.java
1593
/*=============================================================================== Copyright (c) 2012-2014 Qualcomm Connected Experiences, Inc. All Rights Reserved. Vuforia is a trademark of QUALCOMM Incorporated, registered in the United States and other countries. Trademarks of QUALCOMM Incorporated are used with permission. ===============================================================================*/ package com.qualcomm.vuforia.samples.SampleApplication; // Used to send back to the activity any error during vuforia processes public class SampleApplicationException extends Exception { private static final long serialVersionUID = 2L; public static final int INITIALIZATION_FAILURE = 0; public static final int VUFORIA_ALREADY_INITIALIZATED = 1; public static final int TRACKERS_INITIALIZATION_FAILURE = 2; public static final int LOADING_TRACKERS_FAILURE = 3; public static final int UNLOADING_TRACKERS_FAILURE = 4; public static final int TRACKERS_DEINITIALIZATION_FAILURE = 5; public static final int CAMERA_INITIALIZATION_FAILURE = 6; public static final int SET_FOCUS_MODE_FAILURE = 7; public static final int ACTIVATE_FLASH_FAILURE = 8; private int mCode = -1; private String mString = ""; public SampleApplicationException(int code, String description) { super(description); mCode = code; mString = description; } public int getCode() { return mCode; } public String getString() { return mString; } }
mit
rokn/Count_Words_2015
testing/openjdk2/jdk/src/share/classes/sun/util/resources/fr/TimeZoneNames_fr.java
60096
/* * Copyright (c) 1997, 2013, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * (C) Copyright Taligent, Inc. 1996, 1997 - All Rights Reserved * (C) Copyright IBM Corp. 1996 - 1998 - All Rights Reserved * * The original version of this source code and documentation * is copyrighted and owned by Taligent, Inc., a wholly-owned * subsidiary of IBM. These materials are provided under terms * of a License Agreement between Taligent and Sun. This technology * is protected by multiple US and International patents. * * This notice and attribution to Taligent may not be removed. * Taligent is a registered trademark of Taligent, Inc. 
* */ package sun.util.resources.fr; import sun.util.resources.TimeZoneNamesBundle; public final class TimeZoneNames_fr extends TimeZoneNamesBundle { protected final Object[][] getContents() { String ACT[] = new String[] {"Heure de l'Acre", "ACT", "Heure d'\u00e9t\u00e9 de l'Acre", "ACST", "Heure de l'Acre", "ACT"}; String ADELAIDE[] = new String[] {"Heure standard d'Australie centrale (Australie du sud)", "CST", "Heure d'\u00E9t\u00E9 d'Australie centrale (Australie du sud)", "CST", "Centre (Australie-M\u00E9ridionale)", "CT"}; String AGT[] = new String[] {"Heure D'Argentine", "ART", "Heure d'\u00e9t\u00e9 D'Argentine", "ARST", "Heure d'Argentine", "ART"} ; String AKST[] = new String[] {"Heure normale d'Alaska", "AKST", "Heure avanc\u00e9e d'Alaska", "AKDT", "Alaska", "AKT"} ; String AMT[] = new String[] {"Heure normale d'Amazonie", "AMT", "Heure d'\u00e9t\u00e9 d'Amazonie", "AMST", "Heure d'Amazonie", "AMT"} ; String ARAST[] = new String[] {"Heure normale d'Arabie", "AST", "Heure avanc\u00e9e d'Arabie", "ADT", "Arabie", "AT"} ; String ARMT[] = new String[] {"Heure d'Arm\u00e9nie", "AMT", "Heure d'\u00e9t\u00e9 d'Arm\u00e9nie", "AMST", "Heure d'Arm\u00E9nie", "AMT"} ; String AST[] = new String[] {"Heure normale de l'Atlantique", "AST", "Heure avanc\u00e9e de l'Atlantique", "ADT", "Atlantique", "AT"} ; String BDT[] = new String[] {"Heure du Bangladesh", "BDT", "Heure d'\u00e9t\u00e9 du Bangladesh", "BDST", "Heure du Bangladesh", "BDT"} ; String BRISBANE[] = new String[] {"Heure standard d'Australie orientale (Queensland)", "EST", "Heure d'\u00E9t\u00E9 d'Australie orientale (Queensland)", "EST", "C\u00F4te Est (Queensland)", "ET"}; String BROKEN_HILL[] = new String[] {"Heure standard d'Australie centrale (Australie du sud/Nouvelle-Galles du sud)", "CST", "Heure d'\u00E9t\u00E9 d'Australie centrale (Australie du sud/Nouvelle-Galles du sud)", "CST", "Centre (Australie-M\u00E9ridionale/Nouvelle-Galles du Sud)", "CT"}; String BRT[] = new String[] {"Heure du 
Br\u00e9sil", "BRT", "Heure d'\u00e9t\u00e9 du Br\u00e9sil", "BRST", "Heure du Br\u00E9sil", "BRT"} ; String BTT[] = new String[] {"Heure du Bhoutan", "BTT", "Heure d'\u00e9t\u00e9 du Bhoutan", "BTST", "Heure du Bhoutan", "BTT"} ; String CAT[] = new String[] {"Heure d'Afrique centrale", "CAT", "Heure d'\u00e9t\u00e9 d'Afrique centrale", "CAST", "Heure d'Afrique centrale", "CAT"} ; String CET[] = new String[] {"Heure d'Europe centrale", "CET", "Heure d'\u00e9t\u00e9 d'Europe centrale", "CEST", "Heure d'Europe centrale", "CET"} ; String CHAST[] = new String[] {"Heure standard de Chatham", "CHAST", "Heure avanc\u00e9e de Chatham", "CHADT", "Chatham", "CHAT"}; String CHUT[] = new String[] {"Heure de Chuuk", "CHUT", "Heure d'\u00E9t\u00E9 de Chuuk", "CHUST", "Heure de Chuuk", "CHUT"}; String CIT[] = new String[] {"Heure d'Indon\u00e9sie centrale", "WITA", "Heure d'\u00e9t\u00e9 d'Indon\u00e9sie centrale", "CIST", "Heure d'Indon\u00E9sie centrale", "WITA"}; String CLT[] = new String[] {"Heure du Chili", "CLT", "Heure d'\u00e9t\u00e9 du Chili", "CLST", "Heure du Chili", "CLT"} ; String CST[] = new String[] {"Heure normale du Centre", "CST", "Heure avanc\u00e9e du Centre", "CDT", "Centre", "CT"} ; String CTT[] = new String[] {"Heure normale de Chine", "CST", "Heure avanc\u00e9e de Chine", "CDT", "Chine", "CT"} ; String CUBA[] = new String[] {"Heure standard de Cuba", "CST", "Heure d'\u00e9t\u00e9 de Cuba", "CDT", "Heure de Cuba", "CT"}; String DARWIN[] = new String[] {"Heure standard d'Australie centrale (Territoire du Nord)", "CST", "Heure d'\u00E9t\u00E9 d'Australie centrale (Territoire du Nord)", "CST", "Centre (Territoire du Nord)", "CT"}; String DUBLIN[] = new String[] {"Heure du m\u00e9ridien de Greenwich", "GMT", "Heure d'\u00e9t\u00e9 irlandaise", "IST", "Heure irlandaise", "IT"}; String EAT[] = new String[] {"Heure d'Afrique de l'Est", "EAT", "Heure d'\u00e9t\u00e9 d'Afrique de l'Est", "EAST", "Heure d'Afrique de l'Est", "EAT"} ; String EASTER[] = new String[] 
{"Heure de l'Ile de P\u00e2ques", "EAST", "Heure d'\u00e9t\u00e9 de l'Ile de P\u00e2ques", "EASST", "Heure de l'Ile de P\u00E2ques", "EAST"}; String EET[] = new String[] {"Heure d'Europe de l'Est", "EET", "Heure d'\u00e9t\u00e9 d'Europe de l'Est", "EEST", "Heure d'Europe de l'Est", "EET"} ; String EGT[] = new String[] {"Heure du Groenland de l'Est", "EGT", "Heure d'\u00e9t\u00e9 du Groenland de l'Est", "EGST", "Heure du Groenland de l'Est", "EGT"} ; String EST[] = new String[] {"Heure normale de l'Est", "EST", "Heure avanc\u00e9e de l'Est", "EDT", "C\u00F4te Est", "ET"} ; String EST_NSW[] = new String[] {"Heure normale de l'Est (Nouvelle-Galles du Sud)", "EST", "Heure d'\u00E9t\u00E9 de l'Est (Nouvelle-Galles du Sud)", "EST", "C\u00F4te Est (Nouvelle-Galles du Sud)", "ET"} ; String FET[] = new String[] {"Heure d'Europe de l'Est UTC+3", "FET", "Heure d'\u00E9t\u00E9 d'Europe de l'Est UTC+3", "FEST", "Heure d'Europe de l'Est UTC+3", "FET"}; String GHMT[] = new String[] {"Heure du Ghana", "GMT", "Heure d'\u00e9t\u00e9 du Ghana", "GHST", "Heure du Ghana", "GMT"}; String GAMBIER[] = new String[] {"Heure de Gambi", "GAMT", "Heure d'\u00e9t\u00e9 de Gambi", "GAMST", "Heure de Gambi", "GAMT"}; String GMT[] = new String[] {"Heure de Greenwich", "GMT", "Heure de Greenwich", "GMT", "Heure de Greenwich", "GMT"} ; String GMTBST[] = new String[] {"Heure du m\u00e9ridien de Greenwich", "GMT", "Heure d'\u00e9t\u00e9 britannique", "BST", "Heure britannique", "BT"}; String GST[] = new String[] {"Heure normale du Golfe", "GST", "Heure avanc\u00e9e du Golfe", "GDT", "Golfe", "GT"} ; String HAST[] = new String[] {"Heure normale d'Hawa\u00ef-Al\u00e9outiennes", "HAST", "Heure avanc\u00e9e d'Hawa\u00ef-Al\u00e9outiennes", "HADT", "Hawa\u00EF-Iles Al\u00E9outiennes", "HAT"} ; String HKT[] = new String[] {"Heure de Hong Kong", "HKT", "Heure d'\u00e9t\u00e9 de Hong Kong", "HKST", "Heure de Hong-Kong", "HKT"}; String HST[] = new String[] {"Heure normale d'Hawa\u00ef", "HST", "Heure 
avanc\u00e9e d'Hawa\u00ef", "HDT", "Hawa\u00EF", "HT"} ; String ICT[] = new String[] {"Heure d'Indochine", "ICT", "Heure d'\u00e9t\u00e9 d'Indochine", "ICST", "Heure d'Indochine", "ICT"} ; String IRT[] = new String[] {"Heure normale d'Iran", "IRST", "Heure avanc\u00e9e d'Iran", "IRDT", "Heure d'Iran", "IRT"} ; String ISRAEL[] = new String[] {"Heure standard d'Isra\u00ebl", "IST", "Heure avanc\u00e9e d'Isra\u00ebl", "IDT", "Isra\u00EBl", "IT"}; String IST[] = new String[] {"Heure normale d'Inde", "IST", "Heure avanc\u00e9e d'Inde", "IDT", "Inde", "IT"} ; String JST[] = new String[] {"Heure normale du Japon", "JST", "Heure avanc\u00e9e du Japon", "JDT", "Japon", "JT"} ; String KST[] = new String[] {"Heure normale de Cor\u00e9e", "KST", "Heure avanc\u00e9e de Cor\u00e9e", "KDT", "Cor\u00E9e", "KT"} ; String LORD_HOWE[] = new String[] {"Heure standard de Lord Howe", "LHST", "Heure d'\u00e9t\u00e9 de Lord Howe", "LHST", "Heure de Lord Howe", "LHT"}; String MHT[] = new String[] {"Heure des Iles Marshall", "MHT", "Heure d'\u00e9t\u00e9 des Iles Marshall", "MHST", "Heure des Iles Marshall", "MHT"}; String MSK[] = new String[] {"Heure standard de Moscou", "MSK", "Heure avanc\u00e9e de Moscou", "MSD", "Moscou", "MT"}; String MST[] = new String[] {"Heure normale des Rocheuses", "MST", "Heure avanc\u00e9e des Rocheuses", "MDT", "Rocheuses", "MT"} ; String MYT[] = new String[] {"Heure de Malaisie", "MYT", "Heure d'\u00e9t\u00e9 de Malaisie", "MYST", "Heure de Malaisie", "MYT"}; String NORONHA[] = new String[] {"Heure de Fernando de Noronha", "FNT", "Heure d'\u00e9t\u00e9 de Fernando de Noronha", "FNST", "Heure de Fernando de Noronha", "FNT"}; String NOVT[] = new String[] {"Heure de Novossibirsk", "NOVT", "Heure d'\u00e9t\u00e9 de Novossibirsk", "NOVST", "Heure de Novossibirsk", "NOVT"}; String NPT[] = new String[] {"Heure du N\u00e9pal", "NPT", "Heure d'\u00e9t\u00e9 du N\u00e9pal", "NPST", "Heure du N\u00E9pal", "NPT"}; String NST[] = new String[] {"Heure normale de 
Terre-Neuve", "NST", "Heure avanc\u00e9e de Terre-Neuve", "NDT", "Terre-Neuve", "NT"} ; String NZST[] = new String[] {"Heure normale de Nouvelle-Z\u00e9lande", "NZST", "Heure avanc\u00e9e de Nouvelle-Z\u00e9lande", "NZDT", "Nouvelle-Z\u00E9lande", "NZT"} ; String PITCAIRN[] = new String[] {"Heure standard des Pitcairn", "PST", "heure avanc\u00e9e des Pitcairn", "PDT", "Pitcairn", "PT"}; String PKT[] = new String[] {"Heure du Pakistan", "PKT", "Heure d'\u00e9t\u00e9 du Pakistan", "PKST", "Heure du Pakistan", "PKT"} ; String PONT[] = new String[] {"Heure de Pohnpei", "PONT", "Heure d'\u00E9t\u00E9 de Pohnpei", "PONST", "Ponape", "PONT"}; String PST[] = new String[] {"Heure normale du Pacifique", "PST", "Heure avanc\u00e9e du Pacifique", "PDT", "Pacifique", "PT"} ; String SAST[] = new String[] {"Heure normale d'Afrique du Sud", "SAST", "Heure d'\u00e9t\u00e9 d'Afrique du Sud", "SAST", "Afrique du Sud", "SAT"} ; String SBT[] = new String[] {"Heure des \u00celes Salomon", "SBT", "Heure d'\u00e9t\u00e9 des \u00celes Salomon", "SBST", "Heure des Iles Salomon", "SBT"} ; String SGT[] = new String[] {"Heure de Singapour", "SGT", "Heure d'\u00e9t\u00e9 de Singapour", "SGST", "Heure de Singapour", "SGT"}; String SLST[] = new String[] {"Heure du m\u00e9ridien de Greenwich", "GMT", "Heure d'\u00e9t\u00e9 de Sierra Leone", "SLST", "Heure de Sierra Leone", "SLT"}; String TASMANIA[] = new String[] {"Heure standard d'Australie orientale (Tasmanie)", "EST", "Heure d'\u00E9t\u00E9 d'Australie orientale (Tasmanie)", "EST", "C\u00F4te Est (Tasmanie)", "ET"}; String TMT[] = new String[] {"Heure du Turkm\u00e9nistan", "TMT", "Heure d'\u00e9t\u00e9 du Turkm\u00e9nistan", "TMST", "Heure du Turkm\u00E9nistan", "TMT"} ; String ULAT[]= new String[] {"Heure de l'Ulaanbaatar", "ULAT", "Heure d'\u00e9t\u00e9 de l'Ulaanbaatar", "ULAST", "Heure de l'Ulaanbaatar", "ULAT"} ; String WART[] = new String[] {"Heure D'Argentine de l'Ouest", "WART", "Heure d'\u00e9t\u00e9 D'Argentine de l'Ouest", "WARST"} 
; String WAT[] = new String[] {"Heure d'Afrique de l'Ouest", "WAT", "Heure d'\u00e9t\u00e9 d'Afrique de l'Ouest", "WAST", "Heure d'Afrique de l'Ouest", "WAT"} ; String WET[] = new String[] {"Heure d'Europe de l'Ouest", "WET", "Heure d'\u00e9t\u00e9 d'Europe de l'Ouest", "WEST", "Heure d'Europe de l'Ouest", "WET"} ; String WIT[] = new String[] {"Heure de l'Indon\u00e9sie occidentale", "WIB", "Heure d'\u00e9t\u00e9 de l'Indon\u00e9sie occidentale", "WIST", "Heure de l'Indon\u00E9sie occidentale", "WIB"}; String WST_AUS[] = new String[] {"Heure normale de l'Ouest (Australie)", "WST", "Heure d'\u00E9t\u00E9 de l'Ouest (Australie)", "WST", "Ouest (Australie)", "WT"} ; String SAMOA[] = new String[] {"Heure standard de Samoa", "SST", "Heure avanc\u00e9e de Samoa", "SDT", "Samoa", "ST"}; String WST_SAMOA[] = new String[] {"Heure des Samoas occidentales", "WST", "Heure d'\u00e9t\u00e9 des Samoas occidentales", "WSDT", "Heure des Samoas occidentales", "WST"} ; String ChST[] = new String[] {"Heure normale des \u00eeles Mariannes", "ChST", "Heure d'\u00e9t\u00e9 des \u00eeles Mariannes", "ChDT", "Chamorro", "ChT"}; String VICTORIA[] = new String[] {"Heure standard d'Australie orientale (Victoria)", "EST", "Heure d'\u00E9t\u00E9 d'Australie orientale (Victoria)", "EST", "C\u00F4te Est (Victoria)", "ET"}; String UTC[] = new String[] {"Temps universel coordonn\u00e9", "UTC", "Temps universel coordonn\u00e9", "UTC", "Temps universel coordonn\u00E9", "UTC"}; String UZT[] = new String[] {"Heure de l'Ouzb\u00e9kistan", "UZT", "Heure d'\u00e9t\u00e9 de l'Ouzb\u00e9kistan", "UZST", "Heure de l'Ouzb\u00E9kistan", "UZT"}; return new Object[][] { {"America/Los_Angeles", PST}, {"PST", PST}, {"America/Denver", MST}, {"MST", MST}, {"America/Phoenix", MST}, {"PNT", MST}, {"America/Chicago", CST}, {"CST", CST}, {"America/New_York", EST}, {"EST", EST}, {"America/Indianapolis", EST}, {"IET", EST}, {"Pacific/Honolulu", HST}, {"HST", HST}, {"America/Anchorage", AKST}, {"AST", AKST}, 
{"America/Halifax", AST}, {"America/Sitka", AKST}, {"America/St_Johns", NST}, {"CNT", NST}, {"Europe/Paris", CET}, {"ECT", CET}, {"GMT", GMT}, {"Africa/Casablanca", WET}, {"Asia/Jerusalem", ISRAEL}, {"Asia/Tokyo", JST}, {"JST", JST}, {"Europe/Bucharest", EET}, {"Asia/Shanghai", CTT}, {"CTT", CTT}, /* Don't change the order of the above zones * to keep compatibility with the previous version. */ {"ACT", DARWIN}, {"AET", EST_NSW}, {"AGT", AGT}, {"ART", EET}, {"Africa/Abidjan", GMT}, {"Africa/Accra", GHMT}, {"Africa/Addis_Ababa", EAT}, {"Africa/Algiers", CET}, {"Africa/Asmara", EAT}, {"Africa/Asmera", EAT}, {"Africa/Bamako", GMT}, {"Africa/Bangui", WAT}, {"Africa/Banjul", GMT}, {"Africa/Bissau", GMT}, {"Africa/Blantyre", CAT}, {"Africa/Brazzaville", WAT}, {"Africa/Bujumbura", CAT}, {"Africa/Cairo", EET}, {"Africa/Ceuta", CET}, {"Africa/Conakry", GMT}, {"Africa/Dakar", GMT}, {"Africa/Dar_es_Salaam", EAT}, {"Africa/Djibouti", EAT}, {"Africa/Douala", WAT}, {"Africa/El_Aaiun", WET}, {"Africa/Freetown", SLST}, {"Africa/Gaborone", CAT}, {"Africa/Harare", CAT}, {"Africa/Johannesburg", SAST}, {"Africa/Juba", EAT}, {"Africa/Kampala", EAT}, {"Africa/Khartoum", EAT}, {"Africa/Kigali", CAT}, {"Africa/Kinshasa", WAT}, {"Africa/Lagos", WAT}, {"Africa/Libreville", WAT}, {"Africa/Lome", GMT}, {"Africa/Luanda", WAT}, {"Africa/Lubumbashi", CAT}, {"Africa/Lusaka", CAT}, {"Africa/Malabo", WAT}, {"Africa/Maputo", CAT}, {"Africa/Maseru", SAST}, {"Africa/Mbabane", SAST}, {"Africa/Mogadishu", EAT}, {"Africa/Monrovia", GMT}, {"Africa/Nairobi", EAT}, {"Africa/Ndjamena", WAT}, {"Africa/Niamey", WAT}, {"Africa/Nouakchott", GMT}, {"Africa/Ouagadougou", GMT}, {"Africa/Porto-Novo", WAT}, {"Africa/Sao_Tome", GMT}, {"Africa/Timbuktu", GMT}, {"Africa/Tripoli", EET}, {"Africa/Tunis", CET}, {"Africa/Windhoek", WAT}, {"America/Adak", HAST}, {"America/Anguilla", AST}, {"America/Antigua", AST}, {"America/Araguaina", BRT}, {"America/Argentina/Buenos_Aires", AGT}, {"America/Argentina/Catamarca", AGT}, 
{"America/Argentina/ComodRivadavia", AGT}, {"America/Argentina/Cordoba", AGT}, {"America/Argentina/Jujuy", AGT}, {"America/Argentina/La_Rioja", AGT}, {"America/Argentina/Mendoza", AGT}, {"America/Argentina/Rio_Gallegos", AGT}, {"America/Argentina/Salta", AGT}, {"America/Argentina/San_Juan", AGT}, {"America/Argentina/San_Luis", AGT}, {"America/Argentina/Tucuman", AGT}, {"America/Argentina/Ushuaia", AGT}, {"America/Aruba", AST}, {"America/Asuncion", new String[] {"Heure du Paraguay", "PYT", "Heure d'\u00e9t\u00e9 du Paraguay", "PYST", "Heure du Paraguay", "PYT"}}, {"America/Atikokan", EST}, {"America/Atka", HAST}, {"America/Bahia", BRT}, {"America/Bahia_Banderas", CST}, {"America/Barbados", AST}, {"America/Belem", BRT}, {"America/Belize", CST}, {"America/Blanc-Sablon", AST}, {"America/Boa_Vista", AMT}, {"America/Bogota", new String[] {"Heure de Colombie", "COT", "Heure d'\u00e9t\u00e9 de Colombie", "COST", "Heure de Colombie", "COT"}}, {"America/Boise", MST}, {"America/Buenos_Aires", AGT}, {"America/Cambridge_Bay", MST}, {"America/Campo_Grande", AMT}, {"America/Cancun", CST}, {"America/Caracas", new String[] {"Heure du Venezuela", "VET", "Heure d'\u00e9t\u00e9 du Venezuela", "VEST", "Heure du Venezuela", "VET"}}, {"America/Catamarca", AGT}, {"America/Cayenne", new String[] {"Heure de Guyane fran\u00e7aise", "GFT", "Heure d'\u00e9t\u00e9 de Guyane fran\u00e7aise", "GFST", "Heure de Guyane fran\u00E7aise", "GFT"}}, {"America/Cayman", EST}, {"America/Chihuahua", MST}, {"America/Creston", MST}, {"America/Coral_Harbour", EST}, {"America/Cordoba", AGT}, {"America/Costa_Rica", CST}, {"America/Cuiaba", AMT}, {"America/Curacao", AST}, {"America/Danmarkshavn", GMT}, {"America/Dawson", PST}, {"America/Dawson_Creek", MST}, {"America/Detroit", EST}, {"America/Dominica", AST}, {"America/Edmonton", MST}, {"America/Eirunepe", ACT}, {"America/El_Salvador", CST}, {"America/Ensenada", PST}, {"America/Fort_Wayne", EST}, {"America/Fortaleza", BRT}, {"America/Glace_Bay", AST}, 
{"America/Godthab", new String[] {"Heure du Groenland de l'Ouest", "WGT", "Heure d'\u00e9t\u00e9 du Groenland de l'Ouest", "WGST", "Heure du Groenland de l'Ouest", "WGT"}}, {"America/Goose_Bay", AST}, {"America/Grand_Turk", EST}, {"America/Grenada", AST}, {"America/Guadeloupe", AST}, {"America/Guatemala", CST}, {"America/Guayaquil", new String[] {"Heure de l'\u00c9quateur", "ECT", "Heure d'\u00e9t\u00e9 de l'\u00c9quateur", "ECST", "Heure de l'Equateur", "ECT"}}, {"America/Guyana", new String[] {"Heure de Guyana", "GYT", "Heure d'\u00e9t\u00e9 de Guyana", "GYST", "Heure de Guyana", "GYT"}}, {"America/Havana", CUBA}, {"America/Hermosillo", MST}, {"America/Indiana/Indianapolis", EST}, {"America/Indiana/Knox", CST}, {"America/Indiana/Marengo", EST}, {"America/Indiana/Petersburg", EST}, {"America/Indiana/Tell_City", CST}, {"America/Indiana/Vevay", EST}, {"America/Indiana/Vincennes", EST}, {"America/Indiana/Winamac", EST}, {"America/Inuvik", MST}, {"America/Iqaluit", EST}, {"America/Jamaica", EST}, {"America/Jujuy", AGT}, {"America/Juneau", AKST}, {"America/Kentucky/Louisville", EST}, {"America/Kentucky/Monticello", EST}, {"America/Knox_IN", CST}, {"America/Kralendijk", AST}, {"America/La_Paz", new String[] {"Heure de Bolivie", "BOT", "Heure d'\u00e9t\u00e9 de Bolivie", "BOST", "Heure de Bolivie", "BOT"}}, {"America/Lima", new String[] {"Heure du P\u00e9rou", "PET", "Heure d'\u00e9t\u00e9 du P\u00e9rou", "PEST", "Heure du P\u00E9rou", "PET"}}, {"America/Louisville", EST}, {"America/Lower_Princes", AST}, {"America/Maceio", BRT}, {"America/Managua", CST}, {"America/Manaus", AMT}, {"America/Marigot", AST}, {"America/Martinique", AST}, {"America/Matamoros", CST}, {"America/Mazatlan", MST}, {"America/Mendoza", AGT}, {"America/Menominee", CST}, {"America/Merida", CST}, {"America/Metlakatla", new String[] {"Heure normale de Metlakatla", "MeST", "Heure avanc\u00E9e de Metlakatla", "MeDT", "Heure de Metlakatla", "MeT"}}, {"America/Mexico_City", CST}, {"America/Miquelon", new 
String[] {"Heure normale de Saint-Pierre et Miquelon", "PMST", "Heure avanc\u00e9e de Saint-Pierre et Miquelon", "PMDT", "Saint-Pierre-et-Miquelon", "PMT"}}, {"America/Moncton", AST}, {"America/Montevideo", new String[] {"Heure de l'Uruguay", "UYT", "Heure d'\u00e9t\u00e9 de l'Uruguay", "UYST", "Heure de l'Uruguay", "UYT"}}, {"America/Monterrey", CST}, {"America/Montreal", EST}, {"America/Montserrat", AST}, {"America/Nassau", EST}, {"America/Nipigon", EST}, {"America/Nome", AKST}, {"America/Noronha", NORONHA}, {"America/North_Dakota/Beulah", CST}, {"America/North_Dakota/Center", CST}, {"America/North_Dakota/New_Salem", CST}, {"America/Ojinaga", MST}, {"America/Panama", EST}, {"America/Pangnirtung", EST}, {"America/Paramaribo", new String[] {"Heure du Surinam", "SRT", "Heure d'\u00e9t\u00e9 du Surinam", "SRST", "Heure du Surinam", "SRT"}}, {"America/Port-au-Prince", EST}, {"America/Port_of_Spain", AST}, {"America/Porto_Acre", ACT}, {"America/Porto_Velho", AMT}, {"America/Puerto_Rico", AST}, {"America/Rainy_River", CST}, {"America/Rankin_Inlet", CST}, {"America/Recife", BRT}, {"America/Regina", CST}, {"America/Resolute", CST}, {"America/Rio_Branco", ACT}, {"America/Rosario", AGT}, {"America/Santa_Isabel", PST}, {"America/Santarem", BRT}, {"America/Santiago", CLT}, {"America/Santo_Domingo", AST}, {"America/Sao_Paulo", BRT}, {"America/Scoresbysund", EGT}, {"America/Shiprock", MST}, {"America/St_Barthelemy", AST}, {"America/St_Kitts", AST}, {"America/St_Lucia", AST}, {"America/St_Thomas", AST}, {"America/St_Vincent", AST}, {"America/Swift_Current", CST}, {"America/Tegucigalpa", CST}, {"America/Thule", AST}, {"America/Thunder_Bay", EST}, {"America/Tijuana", PST}, {"America/Toronto", EST}, {"America/Tortola", AST}, {"America/Vancouver", PST}, {"America/Virgin", AST}, {"America/Whitehorse", PST}, {"America/Winnipeg", CST}, {"America/Yakutat", AKST}, {"America/Yellowknife", MST}, {"Antarctica/Casey", WST_AUS}, {"Antarctica/Davis", new String[] {"Heure de Davis", "DAVT", 
"Heure d'\u00e9t\u00e9 de Davis", "DAVST", "Heure de Davis", "DAVT"}}, {"Antarctica/DumontDUrville", new String[] {"Heure de Dumont-d'Urville", "DDUT", "Heure d'\u00e9t\u00e9 de Dumont-d'Urville", "DDUST", "Heure de Dumont-d'Urville", "DDUT"}}, {"Antarctica/Macquarie", new String[] {"Heure de l'Ile Macquarie", "MIST", "Heure d'\u00E9t\u00E9 de l'Ile Macquarie", "MIST", "Heure de l'Ile Macquarie", "MIST"}}, {"Antarctica/Mawson", new String[] {"Heure de Mawson", "MAWT", "Heure d'\u00e9t\u00e9 de Mawson", "MAWST", "Heure de Mawson", "MAWT"}}, {"Antarctica/McMurdo", NZST}, {"Antarctica/Palmer", CLT}, {"Antarctica/Rothera", new String[] {"Heure de Rothera", "ROTT", "Heure d'\u00e9t\u00e9 de Rothera", "ROTST", "Heure de Rothera", "ROTT"}}, {"Antarctica/South_Pole", NZST}, {"Antarctica/Syowa", new String[] {"Heure de Syowa", "SYOT", "Heure d'\u00e9t\u00e9 de Syowa", "SYOST", "Heure de Syowa", "SYOT"}}, {"Antarctica/Vostok", new String[] {"Heure de Vostok", "VOST", "Heure d'\u00e9t\u00e9 de Vostok", "VOSST", "Heure de Vostok", "VOST"}}, {"Arctic/Longyearbyen", CET}, {"Asia/Aden", ARAST}, {"Asia/Almaty", new String[] {"Heure d'Alma-Ata", "ALMT", "Heure d'\u00e9t\u00e9 d'Alma-Ata", "ALMST", "Heure d'Alma-Ata", "ALMT"}}, {"Asia/Amman", ARAST}, {"Asia/Anadyr", new String[] {"Heure d'Anadyr", "ANAT", "Heure d'\u00e9t\u00e9 d'Anadyr", "ANAST", "Heure d'Anadyr", "ANAT"}}, {"Asia/Aqtau", new String[] {"Heure d'Aqtau", "AQTT", "Heure d'\u00e9t\u00e9 d'Aqtau", "AQTST", "Heure d'Aqtau", "AQTT"}}, {"Asia/Aqtobe", new String[] {"Heure d'Aqtobe", "AQTT", "Heure d'\u00e9t\u00e9 d'Aqtobe", "AQTST", "Heure d'Aqtobe", "AQTT"}}, {"Asia/Ashgabat", TMT}, {"Asia/Ashkhabad", TMT}, {"Asia/Baghdad", ARAST}, {"Asia/Bahrain", ARAST}, {"Asia/Baku", new String[] {"Heure d'Azerba\u00efdjan", "AZT", "Heure d'\u00e9t\u00e9 d'Azerba\u00efdjan", "AZST", "Heure d'Azerba\u00EFdjan", "AZT"}}, {"Asia/Bangkok", ICT}, {"Asia/Beirut", EET}, {"Asia/Bishkek", new String[] {"Heure du Kirghizistan", "KGT", "Heure 
d'\u00e9t\u00e9 du Kirghizistan", "KGST", "Heure du Kirghizistan", "KGT"}}, {"Asia/Brunei", new String[] {"Heure du Brunei", "BNT", "Heure d'\u00e9t\u00e9 du Brunei", "BNST", "Heure du Brunei", "BNT"}}, {"Asia/Calcutta", IST}, {"Asia/Choibalsan", new String[] {"Heure de Choibalsan", "CHOT", "Heure d'\u00e9t\u00e9 de Choibalsan", "CHOST", "Heure de Choibalsan", "CHOT"}}, {"Asia/Chongqing", CTT}, {"Asia/Chungking", CTT}, {"Asia/Colombo", IST}, {"Asia/Dacca", BDT}, {"Asia/Dhaka", BDT}, {"Asia/Dili", new String[] {"Heure de Timor-Leste", "TLT", "Heure d'\u00e9t\u00e9 de Timor-Leste", "TLST", "Heure de Timor-Leste", "TLT"}}, {"Asia/Damascus", EET}, {"Asia/Dubai", GST}, {"Asia/Dushanbe", new String[] {"Heure du Tadjikistan", "TJT", "Heure d'\u00e9t\u00e9 du Tadjikistan", "TJST", "Heure du Tadjikistan", "TJT"}}, {"Asia/Gaza", EET}, {"Asia/Harbin", CTT}, {"Asia/Hebron", EET}, {"Asia/Ho_Chi_Minh", ICT}, {"Asia/Hong_Kong", HKT}, {"Asia/Hovd", new String[] {"Heure de Hovd", "HOVT", "Heure d'\u00e9t\u00e9 de Hovd", "HOVST", "Heure de Hovd", "HOVT"}}, {"Asia/Irkutsk", new String[] {"Heure d'Irkutsk", "IRKT", "Heure d'\u00e9t\u00e9 d'Irkutsk", "IRKST", "Heure d'Irkutsk", "IRKT"}}, {"Asia/Istanbul", EET}, {"Asia/Jakarta", WIT}, {"Asia/Jayapura", new String[] {"Heure d'Indon\u00e9sie orientale", "WIT", "Heure d'\u00e9t\u00e9 d'Indon\u00e9sie orientale", "EIST", "Heure d'Indon\u00E9sie orientale", "WIT"}}, {"Asia/Kabul", new String[] {"Heure d'Afghanistan", "AFT", "Heure d'\u00e9t\u00e9 d'Afghanistan", "AFST", "Heure d'Afghanistan", "AFT"}}, {"Asia/Kamchatka", new String[] {"Heure de Petropavlovsk-Kamchatski", "PETT", "Heure d'\u00e9t\u00e9 de Petropavlovsk-Kamchatski", "PETST", "Heure de Petropavlovsk-Kamchatski", "PETT"}}, {"Asia/Karachi", PKT}, {"Asia/Kashgar", CTT}, {"Asia/Kathmandu", NPT}, {"Asia/Katmandu", NPT}, {"Asia/Khandyga", new String[] {"Heure de Khandyga", "YAKT", "Heure d'\u00E9t\u00E9 de Khandyga", "YAKST", "Heure de Khandyga", "YAKT"}}, {"Asia/Kolkata", IST}, 
{"Asia/Krasnoyarsk", new String[] {"Heure de Krasno\u00efarsk", "KRAT", "Heure d'\u00e9t\u00e9 de Krasno\u00efarsk", "KRAST", "Heure de Krasno\u00EFarsk", "KRAT"}}, {"Asia/Kuala_Lumpur", MYT}, {"Asia/Kuching", MYT}, {"Asia/Kuwait", ARAST}, {"Asia/Macao", CTT}, {"Asia/Macau", CTT}, {"Asia/Magadan", new String[] {"Heure de Magadan", "MAGT", "Heure d'\u00e9t\u00e9 de Magadan", "MAGST", "Heure de Magadan", "MAGT"}}, {"Asia/Makassar", CIT}, {"Asia/Manila", new String[] {"Heure des Philippines", "PHT", "Heure d'\u00e9t\u00e9 des Philippines", "PHST", "Heure des Philippines", "PHT"}}, {"Asia/Muscat", GST}, {"Asia/Nicosia", EET}, {"Asia/Novokuznetsk", NOVT}, {"Asia/Novosibirsk", NOVT}, {"Asia/Oral", new String[] {"Heure d'Oral", "ORAT", "Heure d'\u00e9t\u00e9 d'Oral", "ORAST", "Heure d'Oral", "ORAT"}}, {"Asia/Omsk", new String[] {"Heure d'Omsk", "OMST", "Heure d'\u00e9t\u00e9 d'Omsk", "OMSST", "Heure d'Omsk", "OMST"}}, {"Asia/Phnom_Penh", ICT}, {"Asia/Pontianak", WIT}, {"Asia/Pyongyang", KST}, {"Asia/Qatar", ARAST}, {"Asia/Qyzylorda", new String[] {"Heure de Kyzylorda", "QYZT", "Heure d'\u00e9t\u00e9 de Kyzylorda", "QYZST", "Heure de Kyzylorda", "QYZT"}}, {"Asia/Rangoon", new String[] {"Heure de Myanmar", "MMT", "Heure d'\u00e9t\u00e9 de Myanmar", "MMST", "Heure de Myanmar", "MMT"}}, {"Asia/Riyadh", ARAST}, {"Asia/Saigon", ICT}, {"Asia/Sakhalin", new String[] {"Heure de Sakhalin", "SAKT", "Heure d'\u00e9t\u00e9 de Sakhalin", "SAKST", "Heure de Sakhalin", "SAKT"}}, {"Asia/Samarkand", UZT}, {"Asia/Seoul", KST}, {"Asia/Singapore", SGT}, {"Asia/Taipei", CTT}, {"Asia/Tel_Aviv", ISRAEL}, {"Asia/Tashkent", UZT}, {"Asia/Tbilisi", new String[] {"Heure de G\u00e9orgie", "GET", "Heure d'\u00e9t\u00e9 de G\u00e9orgie", "GEST", "Heure de G\u00E9orgie", "GET"}}, {"Asia/Tehran", IRT}, {"Asia/Thimbu", BTT}, {"Asia/Thimphu", BTT}, {"Asia/Ujung_Pandang", CIT}, {"Asia/Ulaanbaatar", ULAT}, {"Asia/Ulan_Bator", ULAT}, {"Asia/Urumqi", CTT}, {"Asia/Ust-Nera", new String[] {"Heure d'Ust-Nera", 
"VLAT", "Heure d'\u00E9t\u00E9 d'Ust-Nera", "VLAST", "Heure d'Ust-Nera", "VLAT"}}, {"Asia/Vientiane", ICT}, {"Asia/Vladivostok", new String[] {"Heure de Vladivostok", "VLAT", "Heure d'\u00e9t\u00e9 de Vladivostok", "VLAST", "Heure de Vladivostok", "VLAT"}}, {"Asia/Yakutsk", new String[] {"Heure du Iakoutsk", "YAKT", "Heure d'\u00e9t\u00e9 du Iakoutsk", "YAKST", "Heure du Iakoutsk", "YAKT"}}, {"Asia/Yekaterinburg", new String[] {"Heure de Yekaterinburg", "YEKT", "Heure d'\u00e9t\u00e9 de Yekaterinburg", "YEKST", "Heure de Yekaterinburg", "YEKT"}}, {"Asia/Yerevan", ARMT}, {"Atlantic/Azores", new String[] {"Heure des A\u00e7ores", "AZOT", "Heure d'\u00e9t\u00e9 des A\u00e7ores", "AZOST", "Heure des A\u00E7ores", "AZOT"}}, {"Atlantic/Bermuda", AST}, {"Atlantic/Canary", WET}, {"Atlantic/Cape_Verde", new String[] {"Heure de Cap-Vert", "CVT", "Heure d'\u00e9t\u00e9 de Cap-Vert", "CVST", "Heure de Cap-Vert", "CVT"}}, {"Atlantic/Faeroe", WET}, {"Atlantic/Faroe", WET}, {"Atlantic/Jan_Mayen", CET}, {"Atlantic/Madeira", WET}, {"Atlantic/Reykjavik", GMT}, {"Atlantic/South_Georgia", new String[] {"Heure normale de G\u00e9orgie du Sud", "GST", "Heure avanc\u00e9e de G\u00e9orgie du Sud", "GDT", "G\u00E9orgie du Sud", "GT"}}, {"Atlantic/St_Helena", GMT}, {"Atlantic/Stanley", new String[] {"Heure des \u00eeles Falkland", "FKT", "Heure d'\u00e9t\u00e9 des \u00eeles Falkland", "FKST", "Heure des \u00EEles Falkland", "FKT"}}, {"Australia/ACT", EST_NSW}, {"Australia/Adelaide", ADELAIDE}, {"Australia/Brisbane", BRISBANE}, {"Australia/Broken_Hill", BROKEN_HILL}, {"Australia/Canberra", EST_NSW}, {"Australia/Currie", EST_NSW}, {"Australia/Darwin", DARWIN}, {"Australia/Eucla", new String[] {"Heure standard de l'Australie occidentale (centre)", "CWST", "Heure d'\u00E9t\u00E9 de l'Australie occidentale (centre)", "CWST", "Heure de l'Australie occidentale (centre)", "CWT"}}, {"Australia/Hobart", TASMANIA}, {"Australia/LHI", LORD_HOWE}, {"Australia/Lindeman", BRISBANE}, {"Australia/Lord_Howe", 
LORD_HOWE}, {"Australia/Melbourne", VICTORIA}, {"Australia/North", DARWIN}, {"Australia/NSW", EST_NSW}, {"Australia/Perth", WST_AUS}, {"Australia/Queensland", BRISBANE}, {"Australia/South", ADELAIDE}, {"Australia/Sydney", EST_NSW}, {"Australia/Tasmania", TASMANIA}, {"Australia/Victoria", VICTORIA}, {"Australia/West", WST_AUS}, {"Australia/Yancowinna", BROKEN_HILL}, {"BET", BRT}, {"BST", BDT}, {"Brazil/Acre", ACT}, {"Brazil/DeNoronha", NORONHA}, {"Brazil/East", BRT}, {"Brazil/West", AMT}, {"Canada/Atlantic", AST}, {"Canada/Central", CST}, {"Canada/East-Saskatchewan", CST}, {"Canada/Eastern", EST}, {"Canada/Mountain", MST}, {"Canada/Newfoundland", NST}, {"Canada/Pacific", PST}, {"Canada/Yukon", PST}, {"Canada/Saskatchewan", CST}, {"CAT", CAT}, {"CET", CET}, {"Chile/Continental", CLT}, {"Chile/EasterIsland", EASTER}, {"CST6CDT", CST}, {"Cuba", CUBA}, {"EAT", EAT}, {"EET", EET}, {"Egypt", EET}, {"Eire", DUBLIN}, {"EST5EDT", EST}, {"Etc/Greenwich", GMT}, {"Etc/UCT", UTC}, {"Etc/Universal", UTC}, {"Etc/UTC", UTC}, {"Etc/Zulu", UTC}, {"Europe/Amsterdam", CET}, {"Europe/Andorra", CET}, {"Europe/Athens", EET}, {"Europe/Belfast", GMTBST}, {"Europe/Belgrade", CET}, {"Europe/Berlin", CET}, {"Europe/Bratislava", CET}, {"Europe/Brussels", CET}, {"Europe/Budapest", CET}, {"Europe/Busingen", CET}, {"Europe/Chisinau", EET}, {"Europe/Copenhagen", CET}, {"Europe/Dublin", DUBLIN}, {"Europe/Gibraltar", CET}, {"Europe/Guernsey", GMTBST}, {"Europe/Helsinki", EET}, {"Europe/Isle_of_Man", GMTBST}, {"Europe/Istanbul", EET}, {"Europe/Jersey", GMTBST}, {"Europe/Kaliningrad", FET}, {"Europe/Kiev", EET}, {"Europe/Lisbon", WET}, {"Europe/Ljubljana", CET}, {"Europe/London", GMTBST}, {"Europe/Luxembourg", CET}, {"Europe/Madrid", CET}, {"Europe/Malta", CET}, {"Europe/Mariehamn", EET}, {"Europe/Minsk", FET}, {"Europe/Monaco", CET}, {"Europe/Moscow", MSK}, {"Europe/Nicosia", EET}, {"Europe/Oslo", CET}, {"Europe/Podgorica", CET}, {"Europe/Prague", CET}, {"Europe/Riga", EET}, {"Europe/Rome", CET}, 
{"Europe/Samara", new String[] {"Heure normale de Samara", "SAMT", "Heure d'\u00e9t\u00e9 de Samara", "SAMST", "Heure de Samara", "SAMT"}}, {"Europe/San_Marino", CET}, {"Europe/Sarajevo", CET}, {"Europe/Simferopol", EET}, {"Europe/Skopje", CET}, {"Europe/Sofia", EET}, {"Europe/Stockholm", CET}, {"Europe/Tallinn", EET}, {"Europe/Tirane", CET}, {"Europe/Tiraspol", EET}, {"Europe/Uzhgorod", EET}, {"Europe/Vaduz", CET}, {"Europe/Vatican", CET}, {"Europe/Vienna", CET}, {"Europe/Vilnius", EET}, {"Europe/Volgograd", new String[] {"Heure de Volgograd", "VOLT", "Heure d'\u00e9t\u00e9 de Volgograd", "VOLST", "Heure de Volgograd", "VOLT"}}, {"Europe/Warsaw", CET}, {"Europe/Zagreb", CET}, {"Europe/Zaporozhye", EET}, {"Europe/Zurich", CET}, {"GB", GMTBST}, {"GB-Eire", GMTBST}, {"Greenwich", GMT}, {"Hongkong", HKT}, {"Iceland", GMT}, {"Iran", IRT}, {"IST", IST}, {"Indian/Antananarivo", EAT}, {"Indian/Chagos", new String[] {"Heure de l'oc\u00e9an Indien", "IOT", "Heure d'\u00e9t\u00e9 de l'oc\u00e9an Indien", "IOST", "Heure de l'oc\u00E9an Indien", "IOT"}}, {"Indian/Christmas", new String[] {"Heure de l'\u00cele Christmas", "CXT", "Heure d'\u00e9t\u00e9 de l'\u00cele Christmas", "CXST", "Heure de l'Ile Christmas", "CIT"}}, {"Indian/Cocos", new String[] {"Heure des \u00celes Cocos", "CCT", "Heure d'\u00e9t\u00e9 des \u00celes Cocos", "CCST", "Heure des Iles Cocos", "CCT"}}, {"Indian/Comoro", EAT}, {"Indian/Kerguelen", new String[] {"Heure des Terres australes antartiques fran\u00e7aises", "TFT", "Heure d'\u00e9t\u00e9 des Terres australes antartiques fran\u00e7aises", "TFST", "Heure des Terres australes antarctiques fran\u00E7aises", "TFT"}}, {"Indian/Mahe", new String[] {"Heure des Seychelles", "SCT", "Heure d'\u00e9t\u00e9 des Seychelles", "SCST", "Heure des Seychelles", "SCT"}}, {"Indian/Maldives", new String[] {"Heure des Maldives", "MVT", "Heure d'\u00e9t\u00e9 des Maldives", "MVST", "Heure des Maldives", "MVT"}}, {"Indian/Mauritius", new String[] {"Heure de Maurice", "MUT", 
"Heure d'\u00e9t\u00e9 de Maurice", "MUST", "Heure de Maurice", "MUT"}}, {"Indian/Mayotte", EAT}, {"Indian/Reunion", new String[] {"Heure de la R\u00e9union", "RET", "Heure d'\u00e9t\u00e9 de la R\u00e9union", "REST", "Heure de la R\u00E9union", "RET"}}, {"Israel", ISRAEL}, {"Jamaica", EST}, {"Japan", JST}, {"Kwajalein", MHT}, {"Libya", EET}, {"MET", new String[] {"Heure de l'Europe centrale", "MET", "Heure d'\u00e9t\u00e9 de l'Europe centrale", "MEST", "MET", "MET"}}, {"Mexico/BajaNorte", PST}, {"Mexico/BajaSur", MST}, {"Mexico/General", CST}, {"MIT", WST_SAMOA}, {"MST7MDT", MST}, {"Navajo", MST}, {"NET", ARMT}, {"NST", NZST}, {"NZ", NZST}, {"NZ-CHAT", CHAST}, {"PLT", PKT}, {"Portugal", WET}, {"PRT", AST}, {"Pacific/Apia", WST_SAMOA}, {"Pacific/Auckland", NZST}, {"Pacific/Chatham", CHAST}, {"Pacific/Chuuk", CHUT}, {"Pacific/Easter", EASTER}, {"Pacific/Efate", new String[] {"Heure du Vanuatu", "VUT", "Heure d'\u00e9t\u00e9 du Vanuatu", "VUST", "Heure du Vanuatu", "VUT"}}, {"Pacific/Enderbury", new String[] {"Heure de l'\u00cele de Phoenix", "PHOT", "Heure d'\u00e9t\u00e9 de l'\u00cele de Phoenix", "PHOST", "Heure de l'Ile de Phoenix", "PHOT"}}, {"Pacific/Fakaofo", new String[] {"Heure de Tokelau", "TKT", "Heure d'\u00e9t\u00e9 de Tokelau", "TKST", "Heure de Tokelau", "TKT"}}, {"Pacific/Fiji", new String[] {"Heure de Fidji", "FJT", "Heure d'\u00e9t\u00e9 de Fidji", "FJST", "Heure de Fidji", "FJT"}}, {"Pacific/Funafuti", new String[] {"Heure de Tuvalu", "TVT", "Heure d'\u00e9t\u00e9 de Tuvalu", "TVST", "Heure de Tuvalu", "TVT"}}, {"Pacific/Galapagos", new String[] {"Heure des Galapagos", "GALT", "Heure d'\u00e9t\u00e9 des Galapagos", "GALST", "Heure des Galapagos", "GALT"}}, {"Pacific/Gambier", GAMBIER}, {"Pacific/Guadalcanal", SBT}, {"Pacific/Guam", ChST}, {"Pacific/Johnston", HST}, {"Pacific/Kiritimati", new String[] {"Heure de l'\u00cele de Line", "LINT", "Heure d'\u00e9t\u00e9 de l'\u00cele de Line", "LINST", "Heure de l'Ile de Line", "LINT"}}, {"Pacific/Kosrae", 
new String[] {"Heure de Kusaie", "KOST", "Heure d'\u00e9t\u00e9 de Kusaie", "KOSST", "Heure de Kusaie", "KOST"}}, {"Pacific/Kwajalein", MHT}, {"Pacific/Majuro", MHT}, {"Pacific/Marquesas", new String[] {"Heure des Marquises", "MART", "Heure d'\u00e9t\u00e9 des Marquises", "MARST", "Heure des Marquises", "MART"}}, {"Pacific/Midway", SAMOA}, {"Pacific/Nauru", new String[] {"Heure de Nauru", "NRT", "Heure d'\u00e9t\u00e9 de Nauru", "NRST", "Heure de Nauru", "NRT"}}, {"Pacific/Niue", new String[] {"Heure de Niue", "NUT", "Heure d'\u00e9t\u00e9 de Niue", "NUST", "Heure de Niue", "NUT"}}, {"Pacific/Norfolk", new String[] {"Heure de Norfolk", "NFT", "Heure d'\u00e9t\u00e9 de Norfolk", "NFST", "Heure de Norfolk", "NFT"}}, {"Pacific/Noumea", new String[] {"Heure de Nouvelle-Cal\u00e9donie", "NCT", "Heure d'\u00e9t\u00e9 de Nouvelle-Cal\u00e9donie", "NCST", "Heure de Nouvelle-Cal\u00E9donie", "NCT"}}, {"Pacific/Pago_Pago", SAMOA}, {"Pacific/Palau", new String[] {"Heure de Palaos", "PWT", "Heure d'\u00e9t\u00e9 de Palaos", "PWST", "Heure de Palaos", "PWT"}}, {"Pacific/Pitcairn", PITCAIRN}, {"Pacific/Pohnpei", PONT}, {"Pacific/Ponape", PONT}, {"Pacific/Port_Moresby", new String[] {"Heure de Papouasie-Nouvelle-Guin\u00e9e", "PGT", "Heure d'\u00e9t\u00e9 de de Papouasie-Nouvelle-Guin\u00e9e", "PGST", "Heure de Papouasie-Nouvelle-Guin\u00E9e", "PGT"}}, {"Pacific/Rarotonga", new String[] {"Heure des \u00celes Cook", "CKT", "Heure d'\u00e9t\u00e9 des \u00celes Cook", "CKHST", "Heure des Iles Cook", "CKT"}}, {"Pacific/Saipan", ChST}, {"Pacific/Samoa", SAMOA}, {"Pacific/Tahiti", new String[] {"Heure de Tahiti", "TAHT", "Heure d'\u00e9t\u00e9 de Tahiti", "TAHST", "Heure de Tahiti", "TAHT"}}, {"Pacific/Tarawa", new String[] {"Heure de Kiribati", "GILT", "Heure d'\u00e9t\u00e9 de Kiribati", "GILST", "Heure de Kiribati", "GILT"}}, {"Pacific/Tongatapu", new String[] {"Heure de Tonga", "TOT", "Heure d'\u00e9t\u00e9 de Tonga", "TOST", "Heure de Tonga", "TOT"}}, {"Pacific/Truk", CHUT}, 
{"Pacific/Wake", new String[] {"Heure de Wake", "WAKT", "Heure d'\u00e9t\u00e9 de Wake", "WAKST", "Heure de Wake", "WAKT"}}, {"Pacific/Wallis", new String[] {"Heure de Wallis et Futuna", "WFT", "Heure d'\u00e9t\u00e9 de Wallis et Futuna", "WFST", "Heure de Wallis-et-Futuna", "WFT"}}, {"Pacific/Yap", CHUT}, {"Poland", CET}, {"PRC", CTT}, {"PST8PDT", PST}, {"ROK", KST}, {"Singapore", SGT}, {"SST", SBT}, {"SystemV/AST4", AST}, {"SystemV/AST4ADT", AST}, {"SystemV/CST6", CST}, {"SystemV/CST6CDT", CST}, {"SystemV/EST5", EST}, {"SystemV/EST5EDT", EST}, {"SystemV/HST10", HST}, {"SystemV/MST7", MST}, {"SystemV/MST7MDT", MST}, {"SystemV/PST8", PST}, {"SystemV/PST8PDT", PST}, {"SystemV/YST9", AKST}, {"SystemV/YST9YDT", AKST}, {"Turkey", EET}, {"UCT", UTC}, {"Universal", UTC}, {"US/Alaska", AKST}, {"US/Aleutian", HAST}, {"US/Arizona", MST}, {"US/Central", CST}, {"US/Eastern", EST}, {"US/Hawaii", HST}, {"US/Indiana-Starke", CST}, {"US/East-Indiana", EST}, {"US/Michigan", EST}, {"US/Mountain", MST}, {"US/Pacific", PST}, {"US/Pacific-New", PST}, {"US/Samoa", SAMOA}, {"UTC", UTC}, {"VST", ICT}, {"W-SU", MSK}, {"WET", WET}, {"Zulu", UTC}, }; } }
mit
robertoandrade/cyclos
src/nl/strohalm/cyclos/scheduling/tasks/AccountFeeScheduledTask.java
1510
/* This file is part of Cyclos (www.cyclos.org).
 A project of the Social Trade Organisation (www.socialtrade.org).

 Cyclos is free software; you can redistribute it and/or modify
 it under the terms of the GNU General Public License as published by
 the Free Software Foundation; either version 2 of the License, or
 (at your option) any later version.

 Cyclos is distributed in the hope that it will be useful,
 but WITHOUT ANY WARRANTY; without even the implied warranty of
 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 GNU General Public License for more details.

 You should have received a copy of the GNU General Public License
 along with Cyclos; if not, write to the Free Software Foundation,
 Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
 */
package nl.strohalm.cyclos.scheduling.tasks;

import java.util.Calendar;

import nl.strohalm.cyclos.services.accountfees.AccountFeeServiceLocal;

/**
 * Scheduled task that triggers the charging of account fees by delegating to
 * the account fee service.
 * @author luis
 */
public class AccountFeeScheduledTask extends BaseScheduledTask {

    /** Service that performs the actual fee charging; injected via setter. */
    private AccountFeeServiceLocal accountFeeService;

    public AccountFeeScheduledTask() {
        // Task name and boolean flag preserved exactly from the original
        // implementation; the flag's meaning is defined by BaseScheduledTask
        // (not visible here) — TODO confirm.
        super("Account fee charging", true);
    }

    /**
     * Runs the task for the given time: charges all fees scheduled up to it.
     * @param time the reference time passed to the scheduler
     */
    @Override
    protected void doRun(final Calendar time) {
        // All logic lives in the service layer; this task only delegates.
        accountFeeService.chargeScheduledFees(time);
    }

    /**
     * Injects the account fee service used by {@link #doRun(Calendar)}.
     * @param service the local account fee service
     */
    public void setAccountFeeServiceLocal(final AccountFeeServiceLocal service) {
        accountFeeService = service;
    }
}
gpl-2.0
md-5/jdk10
test/hotspot/jtreg/vmTestbase/nsk/jdi/ArrayReference/setValues_ilii/setvaluesilii001/TestDescription.java
4877
/* * Copyright (c) 2018, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * * @summary converted from VM Testbase nsk/jdi/ArrayReference/setValues_ilii/setvaluesilii001. * VM Testbase keywords: [jpda, jdi] * VM Testbase readme: * DESCRIPTION * This test checks the setValues(int, List, int, int) method of ArrayReference * interface of com.sun.jdi package. * The method spec: * public void setValues(int index, List values, int srcIndex, int length) * throws InvalidTypeException, ClassNotLoadedException * Replaces a range of array components with other values. * Object values must be assignment compatible with the component type (This * implies that the component type must be loaded through the enclosing class's * class loader). Primitive values must be either assignment compatible with * the component type or must be convertible to the component type without loss * of information. See JLS section 5.2 for more information on assignment * compatibility. * Parameters: index - the index of the first component to set. 
 * values - a list of Value objects to be placed in this array. * srcIndex - the index of the first source value to use. * length - the number of components to set, or -1 to set all * components to the end of this array. * Throws: InvalidTypeException - if any element of values is not compatible * with the declared type of array components. * java.lang.IndexOutOfBoundsException - if srcIndex + length is beyond * the end of this array or if values is smaller inside than the * given range. * ObjectCollectedException - if this object or any of the new values * has been garbage collected. * VMMismatchException - if a Mirror argument and this object do not * belong to the same VirtualMachine. * nsk/jdi/ArrayReference/setValues_ilii/setvaluesilii001 checks assertion: * public void setValues(int index, List values, int srcIndex, int length) * 1. Replaces a range of array components with other values. Array has * components of primitive types only. Index and srcIndex are in array's * bounds. * Debuggee defines eight sample array fields. One for each primitive type. * Also, it defines tested array fields. All arrays have 14 elements. * Debugger gets each sample field from debuggee by name and gets its * Value, casts it to ArrayReference and gets list of its Values. After that * the test gets tested array fields by name, gets their values, casts to * ArrayReference types. For i field the method * setValues(0, List, 0, LENGTH - i) is invoked to set LENGTH - i elements of * sample array from index 0 to ArrayReference from index 0. * After that the test gets all Values of the array and checks them. Debugger * determines component's type (by field's name), gets each element of the * list, casts it to the corresponding PrimitiveType and then gets its primitive * value. Then the test compares returned and expected primitive values. 
* COMMENTS * 4419982: JDI: two StackFrame methods return incorrect values for double * * @library /vmTestbase * /test/lib * @run driver jdk.test.lib.FileInstaller . . * @build nsk.jdi.ArrayReference.setValues_ilii.setvaluesilii001 * nsk.jdi.ArrayReference.setValues_ilii.setvaluesilii001a * @run main/othervm PropertyResolvingWrapper * nsk.jdi.ArrayReference.setValues_ilii.setvaluesilii001 * -verbose * -arch=${os.family}-${os.simpleArch} * -waittime=5 * -debugee.vmkind=java * -transport.address=dynamic * "-debugee.vmkeys=${test.vm.opts} ${test.java.opts}" */
gpl-2.0
AlexSussex/Dissertation---ONE
src/routing/MaxPropRouter.java
20036
/* * Copyright 2010 Aalto University, ComNet * Released under GPLv3. See LICENSE.txt for details. */ package routing; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import routing.maxprop.MaxPropDijkstra; import routing.maxprop.MeetingProbabilitySet; import routing.util.RoutingInfo; import util.Tuple; import core.Connection; import core.DTNHost; import core.Message; import core.Settings; /** * Implementation of MaxProp router as described in * <I>MaxProp: Routing for Vehicle-Based Disruption-Tolerant Networks</I> by * John Burgess et al. * @version 1.0 * * Extension of the protocol by adding a parameter alpha (default 1) * By new connection, the delivery likelihood is increased by alpha * and divided by 1+alpha. Using the default results in the original * algorithm. Refer to Karvo and Ott, <I>Time Scales and Delay-Tolerant Routing * Protocols</I> Chants, 2008 */ public class MaxPropRouter extends ActiveRouter { /** Router's setting namespace ({@value})*/ public static final String MAXPROP_NS = "MaxPropRouter"; /** * Meeting probability set maximum size -setting id ({@value}). * The maximum amount of meeting probabilities to store. 
*/ public static final String PROB_SET_MAX_SIZE_S = "probSetMaxSize"; /** Default value for the meeting probability set maximum size ({@value}).*/ public static final int DEFAULT_PROB_SET_MAX_SIZE = 50; private static int probSetMaxSize; /** probabilities of meeting hosts */ private MeetingProbabilitySet probs; /** meeting probabilities of all hosts from this host's point of view * mapped using host's network address */ private Map<Integer, MeetingProbabilitySet> allProbs; /** the cost-to-node calculator */ private MaxPropDijkstra dijkstra; /** IDs of the messages that are known to have reached the final dst */ private Set<String> ackedMessageIds; /** mapping of the current costs for all messages. This should be set to * null always when the costs should be updated (a host is met or a new * message is received) */ private Map<Integer, Double> costsForMessages; /** From host of the last cost calculation */ private DTNHost lastCostFrom; /** Map of which messages have been sent to which hosts from this host */ private Map<DTNHost, Set<String>> sentMessages; /** Over how many samples the "average number of bytes transferred per * transfer opportunity" is taken */ public static int BYTES_TRANSFERRED_AVG_SAMPLES = 10; private int[] avgSamples; private int nextSampleIndex = 0; /** current value for the "avg number of bytes transferred per transfer * opportunity" */ private int avgTransferredBytes = 0; /** The alpha parameter string*/ public static final String ALPHA_S = "alpha"; /** The alpha variable, default = 1;*/ private double alpha; /** The default value for alpha */ public static final double DEFAULT_ALPHA = 1.0; /** * Constructor. Creates a new prototype router based on the settings in * the given Settings object. 
* @param settings The settings object */ public MaxPropRouter(Settings settings) { super(settings); Settings maxPropSettings = new Settings(MAXPROP_NS); if (maxPropSettings.contains(ALPHA_S)) { alpha = maxPropSettings.getDouble(ALPHA_S); } else { alpha = DEFAULT_ALPHA; } Settings mpSettings = new Settings(MAXPROP_NS); if (mpSettings.contains(PROB_SET_MAX_SIZE_S)) { probSetMaxSize = mpSettings.getInt(PROB_SET_MAX_SIZE_S); } else { probSetMaxSize = DEFAULT_PROB_SET_MAX_SIZE; } } /** * Copy constructor. Creates a new router based on the given prototype. * @param r The router prototype where setting values are copied from */ protected MaxPropRouter(MaxPropRouter r) { super(r); this.alpha = r.alpha; this.probs = new MeetingProbabilitySet(probSetMaxSize, this.alpha); this.allProbs = new HashMap<Integer, MeetingProbabilitySet>(); this.dijkstra = new MaxPropDijkstra(this.allProbs); this.ackedMessageIds = new HashSet<String>(); this.avgSamples = new int[BYTES_TRANSFERRED_AVG_SAMPLES]; this.sentMessages = new HashMap<DTNHost, Set<String>>(); } @Override public void changedConnection(Connection con) { super.changedConnection(con); if (con.isUp()) { // new connection this.costsForMessages = null; // invalidate old cost estimates if (con.isInitiator(getHost())) { /* initiator performs all the actions on behalf of the * other node too (so that the meeting probs are updated * for both before exchanging them) */ DTNHost otherHost = con.getOtherNode(getHost()); MessageRouter mRouter = otherHost.getRouter(); assert mRouter instanceof MaxPropRouter : "MaxProp only works "+ " with other routers of same type"; MaxPropRouter otherRouter = (MaxPropRouter)mRouter; /* exchange ACKed message data */ this.ackedMessageIds.addAll(otherRouter.ackedMessageIds); otherRouter.ackedMessageIds.addAll(this.ackedMessageIds); deleteAckedMessages(); otherRouter.deleteAckedMessages(); /* update both meeting probabilities */ probs.updateMeetingProbFor(otherHost.getAddress()); 
otherRouter.probs.updateMeetingProbFor(getHost().getAddress()); /* exchange the transitive probabilities */ this.updateTransitiveProbs(otherRouter.allProbs); otherRouter.updateTransitiveProbs(this.allProbs); this.allProbs.put(otherHost.getAddress(), otherRouter.probs.replicate()); otherRouter.allProbs.put(getHost().getAddress(), this.probs.replicate()); } } else { /* connection went down, update transferred bytes average */ updateTransferredBytesAvg(con.getTotalBytesTransferred()); } } /** * Updates transitive probability values by replacing the current * MeetingProbabilitySets with the values from the given mapping * if the given sets have more recent updates. * @param p Mapping of the values of the other host */ private void updateTransitiveProbs(Map<Integer, MeetingProbabilitySet> p) { for (Map.Entry<Integer, MeetingProbabilitySet> e : p.entrySet()) { MeetingProbabilitySet myMps = this.allProbs.get(e.getKey()); if (myMps == null || e.getValue().getLastUpdateTime() > myMps.getLastUpdateTime() ) { this.allProbs.put(e.getKey(), e.getValue().replicate()); } } } /** * Deletes the messages from the message buffer that are known to be ACKed */ private void deleteAckedMessages() { for (String id : this.ackedMessageIds) { if (this.hasMessage(id) && !isSending(id)) { this.deleteMessage(id, false); } } } @Override public Message messageTransferred(String id, DTNHost from) { this.costsForMessages = null; // new message -> invalidate costs Message m = super.messageTransferred(id, from); /* was this node the final recipient of the message? */ if (isDeliveredMessage(m)) { this.ackedMessageIds.add(id); } return m; } /** * Method is called just before a transfer is finalized * at {@link ActiveRouter#update()}. MaxProp makes book keeping of the * delivered messages so their IDs are stored. 
* @param con The connection whose transfer was finalized */ @Override protected void transferDone(Connection con) { Message m = con.getMessage(); String id = m.getId(); DTNHost recipient = con.getOtherNode(getHost()); Set<String> sentMsgIds = this.sentMessages.get(recipient); /* was the message delivered to the final recipient? */ if (m.getTo() == recipient) { this.ackedMessageIds.add(m.getId()); // yes, add to ACKed messages this.deleteMessage(m.getId(), false); // delete from buffer } /* update the map of where each message is already sent */ if (sentMsgIds == null) { sentMsgIds = new HashSet<String>(); this.sentMessages.put(recipient, sentMsgIds); } sentMsgIds.add(id); } /** * Updates the average estimate of the number of bytes transferred per * transfer opportunity. * @param newValue The new value to add to the estimate */ private void updateTransferredBytesAvg(int newValue) { int realCount = 0; int sum = 0; this.avgSamples[this.nextSampleIndex++] = newValue; if(this.nextSampleIndex >= BYTES_TRANSFERRED_AVG_SAMPLES) { this.nextSampleIndex = 0; } for (int i=0; i < BYTES_TRANSFERRED_AVG_SAMPLES; i++) { if (this.avgSamples[i] > 0) { // only values above zero count realCount++; sum += this.avgSamples[i]; } } if (realCount > 0) { this.avgTransferredBytes = sum / realCount; } else { // no samples or all samples are zero this.avgTransferredBytes = 0; } } /** * Returns the next message that should be dropped, according to MaxProp's * message ordering scheme (see {@link MaxPropTupleComparator}). 
* @param excludeMsgBeingSent If true, excludes message(s) that are * being sent from the next-to-be-dropped check (i.e., if next message to * drop is being sent, the following message is returned) * @return The oldest message or null if no message could be returned * (no messages in buffer or all messages in buffer are being sent and * exludeMsgBeingSent is true) */ @Override protected Message getNextMessageToRemove(boolean excludeMsgBeingSent) { Collection<Message> messages = this.getMessageCollection(); List<Message> validMessages = new ArrayList<Message>(); for (Message m : messages) { if (excludeMsgBeingSent && isSending(m.getId())) { continue; // skip the message(s) that router is sending } validMessages.add(m); } Collections.sort(validMessages, new MaxPropComparator(this.calcThreshold())); return validMessages.get(validMessages.size()-1); // return last message } @Override public void update() { super.update(); if (!canStartTransfer() ||isTransferring()) { return; // nothing to transfer or is currently transferring } // try messages that could be delivered to final recipient if (exchangeDeliverableMessages() != null) { return; } tryOtherMessages(); } /** * Returns the message delivery cost between two hosts from this host's * point of view. If there is no path between "from" and "to" host, * Double.MAX_VALUE is returned. Paths are calculated only to hosts * that this host has messages to. 
* @param from The host where a message is coming from * @param to The host where a message would be destined to * @return The cost of the cheapest path to the destination or * Double.MAX_VALUE if such a path doesn't exist */ public double getCost(DTNHost from, DTNHost to) { /* check if the cached values are OK */ if (this.costsForMessages == null || lastCostFrom != from) { /* cached costs are invalid -> calculate new costs */ this.allProbs.put(getHost().getAddress(), this.probs); int fromIndex = from.getAddress(); /* calculate paths only to nodes we have messages to * (optimization) */ Set<Integer> toSet = new HashSet<Integer>(); for (Message m : getMessageCollection()) { toSet.add(m.getTo().getAddress()); } this.costsForMessages = dijkstra.getCosts(fromIndex, toSet); this.lastCostFrom = from; // store source host for caching checks } if (costsForMessages.containsKey(to.getAddress())) { return costsForMessages.get(to.getAddress()); } else { /* there's no known path to the given host */ return Double.MAX_VALUE; } } /** * Tries to send all other messages to all connected hosts ordered by * hop counts and their delivery probability * @return The return value of {@link #tryMessagesForConnected(List)} */ private Tuple<Message, Connection> tryOtherMessages() { List<Tuple<Message, Connection>> messages = new ArrayList<Tuple<Message, Connection>>(); Collection<Message> msgCollection = getMessageCollection(); /* for all connected hosts that are not transferring at the moment, * collect all the messages that could be sent */ for (Connection con : getConnections()) { DTNHost other = con.getOtherNode(getHost()); MaxPropRouter othRouter = (MaxPropRouter)other.getRouter(); Set<String> sentMsgIds = this.sentMessages.get(other); if (othRouter.isTransferring()) { continue; // skip hosts that are transferring } for (Message m : msgCollection) { /* skip messages that the other host has or that have * passed the other host */ if (othRouter.hasMessage(m.getId()) || 
m.getHops().contains(other)) { continue; } /* skip message if this host has already sent it to the other host (regardless of if the other host still has it) */ if (sentMsgIds != null && sentMsgIds.contains(m.getId())) { continue; } /* message was a good candidate for sending */ messages.add(new Tuple<Message, Connection>(m,con)); } } if (messages.size() == 0) { return null; } /* sort the message-connection tuples according to the criteria * defined in MaxPropTupleComparator */ Collections.sort(messages, new MaxPropTupleComparator(calcThreshold())); return tryMessagesForConnected(messages); } /** * Calculates and returns the current threshold value for the buffer's split * based on the average number of bytes transferred per transfer opportunity * and the hop counts of the messages in the buffer. Method is public only * to make testing easier. * @return current threshold value (hop count) for the buffer's split */ public int calcThreshold() { /* b, x and p refer to respective variables in the paper's equations */ int b = this.getBufferSize(); int x = this.avgTransferredBytes; int p; if (x == 0) { /* can't calc the threshold because there's no transfer data */ return 0; } /* calculates the portion (bytes) of the buffer selected for priority */ if (x < b/2) { p = x; } else if (b/2 <= x && x < b) { p = Math.min(x, b-x); } else { return 0; // no need for the threshold } /* creates a copy of the messages list, sorted by hop count */ ArrayList<Message> msgs = new ArrayList<Message>(); msgs.addAll(getMessageCollection()); if (msgs.size() == 0) { return 0; // no messages -> no need for threshold } /* anonymous comparator class for hop count comparison */ Comparator<Message> hopCountComparator = new Comparator<Message>() { public int compare(Message m1, Message m2) { return m1.getHopCount() - m2.getHopCount(); } }; Collections.sort(msgs, hopCountComparator); /* finds the first message that is beyond the calculated portion */ int i=0; for (int n=msgs.size(); i<n && p>0; i++) 
{ p -= msgs.get(i).getSize(); } i--; // the last round moved i one index too far if (i < 0) { return 0; } /* now i points to the first packet that exceeds portion p; * the threshold is that packet's hop count + 1 (so that packet and * perhaps some more are included in the priority part) */ return msgs.get(i).getHopCount() + 1; } /** * Message comparator for the MaxProp routing module. * Messages that have a hop count smaller than the given * threshold are given priority and they are ordered by their hop count. * Other messages are ordered by their delivery cost. */ private class MaxPropComparator implements Comparator<Message> { private int threshold; private DTNHost from1; private DTNHost from2; /** * Constructor. Assumes that the host where all the costs are calculated * from is this router's host. * @param treshold Messages with the hop count smaller than this * value are transferred first (and ordered by the hop count) */ public MaxPropComparator(int treshold) { this.threshold = treshold; this.from1 = this.from2 = getHost(); } /** * Constructor. * @param treshold Messages with the hop count smaller than this * value are transferred first (and ordered by the hop count) * @param from1 The host where the cost of msg1 is calculated from * @param from2 The host where the cost of msg2 is calculated from */ public MaxPropComparator(int treshold, DTNHost from1, DTNHost from2) { this.threshold = treshold; this.from1 = from1; this.from2 = from2; } /** * Compares two messages and returns -1 if the first given message * should be first in order, 1 if the second message should be first * or 0 if message order can't be decided. If both messages' hop count * is less than the threshold, messages are compared by their hop count * (smaller is first). If only other's hop count is below the threshold, * that comes first. If both messages are below the threshold, the one * with smaller cost (determined by * {@link MaxPropRouter#getCost(DTNHost, DTNHost)}) is first. 
*/ public int compare(Message msg1, Message msg2) { double p1, p2; int hopc1 = msg1.getHopCount(); int hopc2 = msg2.getHopCount(); if (msg1 == msg2) { return 0; } /* if one message's hop count is above and the other one's below the * threshold, the one below should be sent first */ if (hopc1 < threshold && hopc2 >= threshold) { return -1; // message1 should be first } else if (hopc2 < threshold && hopc1 >= threshold) { return 1; // message2 -"- } /* if both are below the threshold, one with lower hop count should * be sent first */ if (hopc1 < threshold && hopc2 < threshold) { return hopc1 - hopc2; } /* both messages have more than threshold hops -> cost of the * message path is used for ordering */ p1 = getCost(from1, msg1.getTo()); p2 = getCost(from2, msg2.getTo()); /* the one with lower cost should be sent first */ if (p1-p2 == 0) { /* if costs are equal, hop count breaks ties. If even hop counts are equal, the queue ordering is used */ if (hopc1 == hopc2) { return compareByQueueMode(msg1, msg2); } else { return hopc1 - hopc2; } } else if (p1-p2 < 0) { return -1; // msg1 had the smaller cost } else { return 1; // msg2 had the smaller cost } } } /** * Message-Connection tuple comparator for the MaxProp routing * module. Uses {@link MaxPropComparator} on the messages of the tuples * setting the "from" host for that message to be the one in the connection * tuple (i.e., path is calculated starting from the host on the other end * of the connection). */ private class MaxPropTupleComparator implements Comparator <Tuple<Message, Connection>> { private int threshold; public MaxPropTupleComparator(int threshold) { this.threshold = threshold; } /** * Compares two message-connection tuples using the * {@link MaxPropComparator#compare(Message, Message)}. 
*/ public int compare(Tuple<Message, Connection> tuple1, Tuple<Message, Connection> tuple2) { MaxPropComparator comp; DTNHost from1 = tuple1.getValue().getOtherNode(getHost()); DTNHost from2 = tuple2.getValue().getOtherNode(getHost()); comp = new MaxPropComparator(threshold, from1, from2); return comp.compare(tuple1.getKey(), tuple2.getKey()); } } @Override public RoutingInfo getRoutingInfo() { RoutingInfo top = super.getRoutingInfo(); RoutingInfo ri = new RoutingInfo(probs.getAllProbs().size() + " meeting probabilities"); /* show meeting probabilities for this host */ for (Map.Entry<Integer, Double> e : probs.getAllProbs().entrySet()) { Integer host = e.getKey(); Double value = e.getValue(); ri.addMoreInfo(new RoutingInfo(String.format("host %d : %.6f", host, value))); } top.addMoreInfo(ri); top.addMoreInfo(new RoutingInfo("Avg transferred bytes: " + this.avgTransferredBytes)); return top; } @Override public MessageRouter replicate() { MaxPropRouter r = new MaxPropRouter(this); return r; } }
gpl-3.0
HossainKhademian/Studio3
plugins/com.aptana.core.epl/src/com/aptana/projects/primary/natures/IPrimaryNatureContributor.java
1523
/** * Aptana Studio * Copyright (c) 2012 by Appcelerator, Inc. All Rights Reserved. * Licensed under the terms of the GNU Public License (GPL) v3 (with exceptions). * Please see the license.html included with this distribution for details. * Any modifications to this file must keep this entire header intact. */ package com.aptana.projects.primary.natures; import org.eclipse.core.resources.IProject; import org.eclipse.core.runtime.CoreException; import org.eclipse.core.runtime.IPath; /** * @author pinnamuri */ public interface IPrimaryNatureContributor { /** * Indicates not an eligible one for being Primary */ public int NOT_PRIMARY = 0; /** * Indicates eligible for being primary */ public int CAN_BE_PRIMARY = 1; /** * Indicates it has to be primary nature for the given project */ public int IS_PRIMARY = 2; /** * Gets the primary nature rank based on the project type, current perspective and may be, other conditions. * * @param projectPath * @return */ public int getPrimaryNatureRank(IPath projectPath); /** * Perform extra configuration tasks on a project that is being created/imported using this nature contributor. * * @param project * @throws CoreException */ public void configure(IProject project) throws CoreException; /** * Returns the absolute location of the folder that contains the primary library contents for the project type. * * @param projectPath * @return */ public IPath getLibraryContainerPath(IPath projectPath); }
gpl-3.0
tdefilip/opennms
opennms-webapp/src/main/java/org/opennms/web/event/filter/DescriptionSubstringFilter.java
2717
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2002-2014 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <license@opennms.org> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.web.event.filter; import org.opennms.web.filter.SubstringFilter; /** * <p>DescriptionSubstringFilter class.</p> * * @author ranger * @version $Id: $ * @since 1.8.1 */ public class DescriptionSubstringFilter extends SubstringFilter { /** Constant <code>TYPE="descsub"</code> */ public static final String TYPE = "descsub"; /** * <p>Constructor for DescriptionSubstringFilter.</p> * * @param substring a {@link java.lang.String} object. */ public DescriptionSubstringFilter(String substring) { super(TYPE, "EVENTDESCR", "eventDescr", substring); } /** * <p>getTextDescription</p> * * @return a {@link java.lang.String} object. */ @Override public String getTextDescription() { return ("description containing \"" + getValue() + "\""); } /** * <p>toString</p> * * @return a {@link java.lang.String} object. 
*/ @Override public String toString() { return ("<DescriptionSubstringFilter: " + this.getDescription() + ">"); } /** * <p>getSubstring</p> * * @return a {@link java.lang.String} object. */ public String getSubstring() { return getValue(); } /** {@inheritDoc} */ @Override public boolean equals(Object obj) { if (obj == null) return false; if (!(obj instanceof DescriptionSubstringFilter)) return false; return (this.toString().equals(obj.toString())); } }
agpl-3.0
moliva/proactive
src/Tests/functionalTests/component/collectiveitf/reduction/primitive/NonReduction2.java
1711
/* * ################################################################ * * ProActive Parallel Suite(TM): The Java(TM) library for * Parallel, Distributed, Multi-Core Computing for * Enterprise Grids & Clouds * * Copyright (C) 1997-2012 INRIA/University of * Nice-Sophia Antipolis/ActiveEon * Contact: proactive@ow2.org or contact@activeeon.com * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Affero General Public License * as published by the Free Software Foundation; version 3 of * the License. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 * USA * * If needed, contact us to obtain a release under GPL Version 2 or 3 * or a different license than the AGPL. * * Initial developer(s): The ProActive Team * http://proactive.inria.fr/team_members.htm * Contributor(s): * * ################################################################ * $$PROACTIVE_INITIAL_DEV$$ */ package functionalTests.component.collectiveitf.reduction.primitive; import org.objectweb.proactive.core.util.wrapper.IntWrapper; public interface NonReduction2 { public IntWrapper doIt(); public IntWrapper doItInt(IntWrapper val); public void voidDoIt(); }
agpl-3.0
kuali/kfs
kfs-bc/src/main/java/org/kuali/kfs/module/bc/document/service/BudgetConstructionAccountSummaryReportService.java
2129
/* * The Kuali Financial System, a comprehensive financial management system for higher education. * * Copyright 2005-2014 The Kuali Foundation * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.kuali.kfs.module.bc.document.service; import java.util.Collection; import org.kuali.kfs.module.bc.businessobject.BudgetConstructionOrgAccountSummaryReport; /** * This interface defines the methods for BudgetConstructionAccountSummaryReports */ public interface BudgetConstructionAccountSummaryReportService { /** * updates account summary table. * * @param principalName */ public void updateReportsAccountSummaryTable(String principalName); /** * updates account summary table when users choose consolidation. * * @param principalName */ public void updateReportsAccountSummaryTableWithConsolidation(String principalName); /** * updates account summary table. * * @param principalName - user requesting the report * @param consolidated - whether to produce a consolidate report */ public void updateReportsAccountSummaryTable(String principalName, boolean consolidated); /** * * builds BudgetConstructionAccountSummaryReports * * @param universityFiscalYear * @param accountSummaryList */ public Collection<BudgetConstructionOrgAccountSummaryReport> buildReports(Integer universityFiscalYear, String principalName, boolean consolidated); }
agpl-3.0
tdefilip/opennms
core/lib/src/main/java/org/opennms/core/utils/Base64.java
7452
/******************************************************************************* * This file is part of OpenNMS(R). * * Copyright (C) 2002-2014 The OpenNMS Group, Inc. * OpenNMS(R) is Copyright (C) 1999-2014 The OpenNMS Group, Inc. * * OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. * * OpenNMS(R) is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as published * by the Free Software Foundation, either version 3 of the License, * or (at your option) any later version. * * OpenNMS(R) is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with OpenNMS(R). If not, see: * http://www.gnu.org/licenses/ * * For more information contact: * OpenNMS(R) Licensing <license@opennms.org> * http://www.opennms.org/ * http://www.opennms.com/ *******************************************************************************/ package org.opennms.core.utils; /** * Performs base 64 encoding and decoding on byte arrays. * * @author <A HREF="mailto:weave@oculan.com">Brian Weaver </A> */ public final class Base64 { /** * <P> * The base64 encoding map. Using 6-bit values it is possible to map 24-bits * into 4 characters. If there are not sufficent amount of bits to makeup * six then it is padded with BASE64_PAD. 
* </P> */ private static final char[] BASE64_CHARS = { 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '/' }; // 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 private static final byte[] BASE64_VALUES = { -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* 16 - 31 */-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, /* 32 - 47 */-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 62, -1, -1, -1, 63, /* 48 - 63 */52, 53, 54, 55, 56, 57, 58, 59, 60, 61, -1, -1, -1, 0, -1, -1, /* 64 - 79 */-1, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, /* 80 - 95 */15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, -1, -1, -1, -1, -1, /* 96 - 111 */-1, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, /* 112 - 127 */41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, -1, -1, -1, -1, -1 }; /** * The base64 padding character */ private static final char BASE64_PAD = '='; /** * <P> * Encodes the passed byte array using the base64 rules. The base64 encoding * schema is performed by grouping the bytes in to 6-bit quantities and then * encoding them. * </P> * * <P> * For more information see RFC1341 for the format used for base64 encoding. * </P> * * @param data * The input byte array * @return The converted data in a character stream. */ public static char[] encodeBase64(byte[] data) { int destlen = ((data.length + 2) / 3) * 4; char[] dest = new char[destlen]; int destndx = 0; for (int i = 0; i < data.length; i += 3) { int quantum = 0; int pad = 0; int bytes = data.length - i; if (bytes >= 1) { quantum = (data[i] < 0 ? (256 + (int) data[i]) : (int) data[i]); pad = 2; } quantum <<= 8; if (bytes >= 2) { quantum |= (data[i + 1] < 0 ? 
(256 + (int) data[i + 1]) : (int) data[i + 1]); pad = 1; } quantum <<= 8; if (bytes > 2) { quantum |= (data[i + 2] < 0 ? (256 + (int) data[i + 2]) : (int) data[i + 2]); pad = 0; } for (int j = 3; j >= pad; j--) { int ndx = (quantum >> (j * 6)) & 0x3f; dest[destndx++] = BASE64_CHARS[ndx]; } for (int j = pad; j > 0; j--) { dest[destndx++] = BASE64_PAD; } } return dest; } /** * <P> * Decodes a character array into the corresponding byte array. The buffer * must be an intergral number of 4 character. I.E. size mod 4 is equal to * zero or an exception will be thrown. Likewise, if there is an invalid * character in the input array then an exception will be thrown. * </P> * * @param data * The data stream to be filtered. * @return The coverted array of bytes. * @exception java.lang.IllegalArgumentException * Thrown if an invalid buffer that cannot be decoded is * passed. */ public static byte[] decodeBase64(char[] data) { //. If the data is zero length just return a zero length byte array if (data.length == 0) { return new byte[0]; } // // check the length, it must be an integral number of 4 characters. // if ((data.length % 4) != 0) throw new IllegalArgumentException("Invalid base64 encoding, improper length"); // // get the raw length and check for // the appended padding characters // if any. // int rawlen = (data.length / 4) * 3; for (int i = 1; i <= 2; i++) { if (data[data.length - i] == BASE64_PAD) --rawlen; } // // allocate the new buffer // byte[] rawdata = new byte[rawlen]; int rawndx = 0; // // convert the character array into // a byte array. // int quantum = 0; for (int i = 0; i < data.length; i++) { if ((i % 4) == 0 && i > 0) { int c = ((quantum >> 16) & 0xff); rawdata[rawndx++] = (byte) (c > 127 ? c - 256 : c); c = ((quantum >> 8) & 0xff); rawdata[rawndx++] = (byte) (c > 127 ? c - 256 : c); c = quantum & 0xff; rawdata[rawndx++] = (byte) (c > 127 ? 
c - 256 : c); quantum = 0; } quantum <<= 6; char c = data[i]; if ((int) c >= BASE64_VALUES.length || BASE64_VALUES[(int) c] == -1) throw new IllegalArgumentException("Invalid character in decode stream"); quantum |= BASE64_VALUES[(int) c]; } // // hand the last byte(s) of data // int c = ((quantum >> 16) & 0xff); rawdata[rawndx++] = (byte) (c > 127 ? c - 256 : c); if (rawndx < rawlen) { c = ((quantum >> 8) & 0xff); rawdata[rawndx++] = (byte) (c > 127 ? c - 256 : c); } if (rawndx < rawlen) { c = quantum & 0xff; rawdata[rawndx++] = (byte) (c > 127 ? c - 256 : c); } // // return the raw data // return rawdata; } /** Empty, private constructor so this object will not be instantiated. */ private Base64() { } }
agpl-3.0
mahak/hbase
hbase-zookeeper/src/main/java/org/apache/hadoop/hbase/zookeeper/SnapshotCleanupTracker.java
4304
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.zookeeper; import java.io.IOException; import org.apache.hadoop.hbase.Abortable; import org.apache.hadoop.hbase.exceptions.DeserializationException; import org.apache.hadoop.hbase.util.Bytes; import org.apache.yetus.audience.InterfaceAudience; import org.apache.zookeeper.KeeperException; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotCleanupProtos; /** * Tracks status of snapshot auto cleanup based on TTL */ @InterfaceAudience.Private public class SnapshotCleanupTracker extends ZKNodeTracker { /** * Constructs a new ZK node tracker. * * <p>After construction, use {@link #start} to kick off tracking. * * @param watcher reference to the {@link ZKWatcher} which also contains configuration and * constants * @param abortable used to abort if a fatal error occurs */ public SnapshotCleanupTracker(ZKWatcher watcher, Abortable abortable) { super(watcher, watcher.getZNodePaths().snapshotCleanupZNode, abortable); } /** * Returns the current state of the snapshot auto cleanup based on TTL * * @return <code>true</code> if the snapshot auto cleanup is enabled, * <code>false</code> otherwise. 
*/ public boolean isSnapshotCleanupEnabled() { byte[] snapshotCleanupZNodeData = super.getData(false); try { // if data in ZK is null, use default of on. return snapshotCleanupZNodeData == null || parseFrom(snapshotCleanupZNodeData).getSnapshotCleanupEnabled(); } catch (DeserializationException dex) { LOG.error("ZK state for Snapshot Cleanup could not be parsed " + Bytes.toStringBinary(snapshotCleanupZNodeData), dex); // return false to be safe. return false; } } /** * Set snapshot auto clean on/off * * @param snapshotCleanupEnabled true if the snapshot auto cleanup should be on, * false otherwise * @throws KeeperException if ZooKeeper operation fails */ public void setSnapshotCleanupEnabled(final boolean snapshotCleanupEnabled) throws KeeperException { byte [] snapshotCleanupZNodeData = toByteArray(snapshotCleanupEnabled); try { ZKUtil.setData(watcher, watcher.getZNodePaths().snapshotCleanupZNode, snapshotCleanupZNodeData); } catch(KeeperException.NoNodeException nne) { ZKUtil.createAndWatch(watcher, watcher.getZNodePaths().snapshotCleanupZNode, snapshotCleanupZNodeData); } super.nodeDataChanged(watcher.getZNodePaths().snapshotCleanupZNode); } private byte[] toByteArray(final boolean isSnapshotCleanupEnabled) { SnapshotCleanupProtos.SnapshotCleanupState.Builder builder = SnapshotCleanupProtos.SnapshotCleanupState.newBuilder(); builder.setSnapshotCleanupEnabled(isSnapshotCleanupEnabled); return ProtobufUtil.prependPBMagic(builder.build().toByteArray()); } private SnapshotCleanupProtos.SnapshotCleanupState parseFrom(final byte[] pbBytes) throws DeserializationException { ProtobufUtil.expectPBMagicPrefix(pbBytes); SnapshotCleanupProtos.SnapshotCleanupState.Builder builder = SnapshotCleanupProtos.SnapshotCleanupState.newBuilder(); try { int magicLen = ProtobufUtil.lengthOfPBMagic(); ProtobufUtil.mergeFrom(builder, pbBytes, magicLen, pbBytes.length - magicLen); } catch (IOException e) { throw new DeserializationException(e); } return builder.build(); } }
apache-2.0
dlnufox/ignite
examples/src/test/java8/org/apache/ignite/java8/examples/HibernateL2CacheExampleSelfTest.java
1531
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.java8.examples; //import org.apache.ignite.examples.java8.datagrid.hibernate.*; import org.apache.ignite.testframework.junits.common.GridAbstractExamplesTest; /** * Tests the {@link org.apache.ignite.examples.java8.datagrid.hibernate.HibernateL2CacheExample}. */ public class HibernateL2CacheExampleSelfTest extends GridAbstractExamplesTest { /** * TODO: IGNITE-711 next example(s) should be implemented for java 8 * or testing method(s) should be removed if example(s) does not applicable for java 8. * * @throws Exception If failed. */ // public void testHibernateL2CacheExample() throws Exception { // HibernateL2CacheExample.main(EMPTY_ARGS); // } }
apache-2.0
etirelli/drools-wb
drools-wb-extensions/drools-wb-drl-extension/drools-wb-drl-extension-server/src/test/java/org/kie/workbench/common/services/datamodel/backend/server/DSLExtensionTest.java
3800
/* * Copyright 2017 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.services.datamodel.backend.server; import java.util.List; import org.drools.workbench.models.datamodel.oracle.DSLActionSentence; import org.drools.workbench.models.datamodel.oracle.DSLConditionSentence; import org.junit.Test; import org.kie.soup.project.datamodel.oracle.ExtensionKind; import org.kie.workbench.common.services.datamodel.backend.server.cache.DSLExtension; import org.kie.workbench.common.services.datamodel.spi.DataModelExtension.ExtensionMapping; import static junit.framework.Assert.assertEquals; public class DSLExtensionTest { private DSLExtension extension = new DSLExtension(); @Test public void testAddConditionDSLSentence() { List<ExtensionMapping<?>> mappings = extension.getExtensions(null, "[when]There is a Smurf=Smurf()"); assertEquals(1, totalValues(mappings)); assertEquals(1, valuesOfKind(mappings, DSLConditionSentence.INSTANCE)); } @Test public void testAddActionDSLSentence() { List<ExtensionMapping<?>> mappings = extension.getExtensions(null, "[then]Greet Smurf=System.out.println(\"Hello Smurf\");"); assertEquals(1, totalValues(mappings)); assertEquals(1, valuesOfKind(mappings, DSLActionSentence.INSTANCE)); } @Test public void testAddMultipleConditionDSLSentence() { List<ExtensionMapping<?>> mappings = extension.getExtensions(null, "[when]There is a Smurf=Smurf()\n" + "[when]There is Happy Smurf=Smurf( nature = HAPPY 
)"); assertEquals(2, totalValues(mappings)); assertEquals(2, valuesOfKind(mappings, DSLConditionSentence.INSTANCE)); } @Test public void testAddMultipleActionDSLSentence() { List<ExtensionMapping<?>> mappings = extension.getExtensions(null, "[then]Report Smurfs=System.out.println(\"There is a Smurf\");\n" + "[then]Greet Happy Smurf=System.out.println(\"Hello Happy Smurf\");"); assertEquals(2, totalValues(mappings)); assertEquals(2, valuesOfKind(mappings, DSLActionSentence.INSTANCE)); } private static int totalValues(final List<ExtensionMapping<?>> mappings) { return (int) mappings .stream() .flatMap(em -> em.getValues().stream()) .count(); } private static int valuesOfKind(final List<ExtensionMapping<?>> mappings, final ExtensionKind kind) { return (int) mappings .stream() .filter(em -> kind.equals(em.getKind())) .flatMap(em -> em.getValues().stream()) .count(); } }
apache-2.0
facebook/presto
presto-hive/src/main/java/com/facebook/presto/hive/HiveBucketHandle.java
2339
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonProperty; import com.google.common.collect.ImmutableList; import java.util.List; import static com.facebook.presto.hive.HiveColumnHandle.pathColumnHandle; import static java.util.Objects.requireNonNull; public class HiveBucketHandle { private final List<HiveColumnHandle> columns; // Number of buckets in the table, as specified in table metadata private final int tableBucketCount; // Number of buckets the table will appear to have when the Hive connector // presents the table to the engine for read. 
private final int readBucketCount; @JsonCreator public HiveBucketHandle( @JsonProperty("columns") List<HiveColumnHandle> columns, @JsonProperty("tableBucketCount") int tableBucketCount, @JsonProperty("readBucketCount") int readBucketCount) { this.columns = requireNonNull(columns, "columns is null"); this.tableBucketCount = tableBucketCount; this.readBucketCount = readBucketCount; } @JsonProperty public List<HiveColumnHandle> getColumns() { return columns; } @JsonProperty public int getTableBucketCount() { return tableBucketCount; } @JsonProperty public int getReadBucketCount() { return readBucketCount; } public boolean isVirtuallyBucketed() { return columns.size() == 1 && columns.get(0).equals(pathColumnHandle()); } public static HiveBucketHandle createVirtualBucketHandle(int virtualBucketCount) { return new HiveBucketHandle(ImmutableList.of(pathColumnHandle()), virtualBucketCount, virtualBucketCount); } }
apache-2.0
PGer/incubator-hawq
pxf/pxf-hdfs/src/main/java/com/pivotal/pxf/plugins/hdfs/utilities/DataSchemaException.java
1397
package com.pivotal.pxf.plugins.hdfs.utilities; /** * Thrown when there is a data schema problem detected by any plugin that * requires a schema. * {@link DataSchemaException.MessageFmt#SCHEMA_NOT_ON_CLASSPATH} when the specified schema is missing from the CLASSPATH. * {@link DataSchemaException.MessageFmt#SCHEMA_NOT_INDICATED} when a schema was required but was not specified in the pxf uri. */ public class DataSchemaException extends RuntimeException { public static enum MessageFmt { SCHEMA_NOT_INDICATED("%s requires a data schema to be specified in the "+ "pxf uri, but none was found. Please supply it" + "using the DATA-SCHEMA option "), SCHEMA_NOT_ON_CLASSPATH("schema resource \"%s\" is not located on the classpath"); String format; MessageFmt(String format) { this.format = format; } public String getFormat() { return format; } } private MessageFmt msgFormat; /** * Constructs a DataSchemaException. * * @param msgFormat the message format * @param msgArgs the message arguments */ public DataSchemaException(MessageFmt msgFormat, String... msgArgs) { super(String.format(msgFormat.getFormat(), (Object[]) msgArgs)); this.msgFormat = msgFormat; } public MessageFmt getMsgFormat() { return msgFormat; } }
apache-2.0
AndreasAbdi/jackrabbit-oak
oak-core/src/test/java/org/apache/jackrabbit/oak/spi/security/user/action/PasswordValidationActionTest.java
8081
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.spi.security.user.action;

import java.util.ArrayList;
import java.util.List;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jcr.RepositoryException;
import javax.jcr.nodetype.ConstraintViolationException;

import com.google.common.collect.ImmutableList;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.oak.AbstractSecurityTest;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.security.SecurityProviderImpl;
import org.apache.jackrabbit.oak.security.user.UserConfigurationImpl;
import org.apache.jackrabbit.oak.spi.security.ConfigurationParameters;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.principal.PrincipalProvider;
import org.apache.jackrabbit.oak.spi.security.user.UserConfiguration;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.security.user.util.PasswordUtil;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for {@code PasswordValidationAction}: the action is configured with a
 * regexp constraint (min length 8, at least one lower- and one upper-case
 * letter) and must reject non-conforming passwords both on user creation and
 * on password change. A secondary {@code TestAction} counts invocations to
 * verify the action pipeline fires at all.
 */
public class PasswordValidationActionTest extends AbstractSecurityTest {

    // Action under test; re-initialized with the regexp constraint in before().
    private PasswordValidationAction pwAction = new PasswordValidationAction();
    // Counting action used only to assert that onCreate/onPasswordChange fire.
    private TestAction testAction = new TestAction();

    // Admin user resolved in before(); target of direct onPasswordChange calls.
    private User user;
    // Created per-test and removed in after() (may stay null if a test creates none).
    private User testUser;

    @Before
    public void before() throws Exception {
        super.before();
        user = (User) getUserManager(root).getAuthorizable(adminSession.getAuthInfo().getUserID());
        // Reset counters before each test so assertions see only this test's calls.
        testAction.reset();
        // Constraint: at least 8 chars, at least one lower-case and one upper-case letter.
        pwAction.init(getSecurityProvider(), ConfigurationParameters.of(
                PasswordValidationAction.CONSTRAINT, "^.*(?=.{8,})(?=.*[a-z])(?=.*[A-Z]).*"));
    }

    @After
    public void after() throws Exception {
        // Clean up the per-test user before tearing down the repository.
        if (testUser != null) {
            testUser.remove();
            root.commit();
        }
        root = null;
        super.after();
    }

    /**
     * Lazily builds a {@link TestSecurityProvider} so that both actions are
     * wired into the user configuration used by this test.
     */
    @Override
    protected SecurityProvider getSecurityProvider() {
        if (securityProvider == null) {
            securityProvider = new TestSecurityProvider();
        }
        return securityProvider;
    }

    /**
     * Verifies the action pipeline fires once per create and once per
     * password change (both 1-arg and 2-arg change variants).
     */
    @Test
    public void testActionIsCalled() throws Exception {
        testUser = getUserManager(root).createUser("testUser", "testUser12345");
        root.commit();
        assertEquals(1, testAction.onCreateCalled);

        testUser.changePassword("pW12345678");
        assertEquals(1, testAction.onPasswordChangeCalled);

        testUser.changePassword("pW1234567890", "pW12345678");
        assertEquals(2, testAction.onPasswordChangeCalled);
    }

    /**
     * Drives onPasswordChange directly: every password violating the regexp
     * must raise ConstraintViolationException; every conforming one must pass.
     */
    @Test
    public void testPasswordValidationAction() throws Exception {
        List<String> invalid = new ArrayList<String>();
        invalid.add("pw1");               // too short
        invalid.add("only6C");            // too short
        invalid.add("12345678");          // no letters
        invalid.add("WITHOUTLOWERCASE");
        invalid.add("withoutuppercase");

        for (String pw : invalid) {
            try {
                pwAction.onPasswordChange(user, pw, root, NamePathMapper.DEFAULT);
                fail("should throw constraint violation");
            } catch (ConstraintViolationException e) {
                // success
            }
        }

        List<String> valid = new ArrayList<String>();
        valid.add("abCDefGH");
        valid.add("Abbbbbbbbbbbb");
        valid.add("cDDDDDDDDDDDDDDDDD");
        valid.add("gH%%%%%%%%%%%%%%%%^^");
        valid.add("&)(*&^%23qW");

        for (String pw : valid) {
            // must not throw
            pwAction.onPasswordChange(user, pw, root, NamePathMapper.DEFAULT);
        }
    }

    /**
     * Creating a user with an already-hashed password must store that hash
     * verbatim (not re-hash or reject it).
     */
    @Test
    public void testPasswordValidationActionOnCreate() throws Exception {
        String hashed = PasswordUtil.buildPasswordHash("DWkej32H");
        testUser = getUserManager(root).createUser("testuser", hashed);
        root.commit();

        String pwValue = root.getTree(testUser.getPath()).getProperty(UserConstants.REP_PASSWORD).getValue(Type.STRING);
        assertFalse(PasswordUtil.isPlainTextPassword(pwValue));
        assertTrue(PasswordUtil.isSame(pwValue, hashed));
    }

    /**
     * Changing a password to a pre-hashed value must still go through
     * validation — hashing must not be usable as a constraint bypass.
     */
    @Test
    public void testPasswordValidationActionOnChange() throws Exception {
        testUser = getUserManager(root).createUser("testuser", "testPw123456");
        root.commit();
        try {
            // Tighten the constraint so the plain text "abc"'s hash cannot match it.
            pwAction.init(getSecurityProvider(), ConfigurationParameters.of(PasswordValidationAction.CONSTRAINT, "abc"));

            String hashed = PasswordUtil.buildPasswordHash("abc");
            testUser.changePassword(hashed);

            fail("Password change must always enforce password validation.");
        } catch (ConstraintViolationException e) {
            // success
        }
    }

    //--------------------------------------------------------------------------

    /**
     * Minimal action that just counts how often the create / password-change
     * hooks are invoked.
     */
    private class TestAction extends AbstractAuthorizableAction {

        private int onCreateCalled = 0;
        private int onPasswordChangeCalled = 0;

        // Zero the counters; called from before() so each test starts clean.
        void reset() {
            onCreateCalled = 0;
            onPasswordChangeCalled = 0;
        }

        @Override
        public void onCreate(@Nonnull User user, @Nullable String password, @Nonnull Root root, @Nonnull NamePathMapper namePathMapper) throws RepositoryException {
            onCreateCalled++;
        }

        @Override
        public void onPasswordChange(@Nonnull User user, @Nullable String newPassword, @Nonnull Root root, @Nonnull NamePathMapper namePathMapper) throws RepositoryException {
            onPasswordChangeCalled++;
        }
    }

    /**
     * Security provider whose user configuration exposes both test actions via
     * {@code PARAM_AUTHORIZABLE_ACTION_PROVIDER}, so they run on user
     * management operations performed through the repository.
     */
    private class TestSecurityProvider extends SecurityProviderImpl {

        private final AuthorizableActionProvider actionProvider;

        private TestSecurityProvider() {
            actionProvider = new AuthorizableActionProvider() {
                @Nonnull
                @Override
                public List<? extends AuthorizableAction> getAuthorizableActions(@Nonnull SecurityProvider securityProvider) {
                    return ImmutableList.of(pwAction, testAction);
                }
            };
        }

        @Nonnull
        public <T> T getConfiguration(@Nonnull Class<T> configClass) {
            if (UserConfiguration.class == configClass) {
                // Wrap the default user configuration, injecting the action
                // provider and disabling the user principal provider.
                return (T) new UserConfigurationImpl(this) {
                    @Nonnull
                    @Override
                    public ConfigurationParameters getParameters() {
                        return ConfigurationParameters.of(super.getParameters(), ConfigurationParameters.of(UserConstants.PARAM_AUTHORIZABLE_ACTION_PROVIDER, actionProvider));
                    }

                    @Nullable
                    @Override
                    public PrincipalProvider getUserPrincipalProvider(@Nonnull Root root, @Nonnull NamePathMapper namePathMapper) {
                        return null;
                    }
                };
            } else {
                return super.getConfiguration(configClass);
            }
        }
    }
}
apache-2.0
SupunS/carbon-identity
components/identity/org.wso2.carbon.identity.entitlement/src/main/java/org/wso2/carbon/identity/entitlement/PDPConstants.java
10724
/*
 * Copyright (c) WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.carbon.identity.entitlement;

/**
 * Entitlement and XACML related constant values.
 */
public class PDPConstants {

    // ---- policy metadata / registry property names -------------------------
    public static final String POLICY_TYPE = "policyType";
    public static final String POLICY_EDITOR_TYPE = "policyEditor";
    public static final String BASIC_POLICY_EDITOR_META_DATA = "basicPolicyEditorMetaData";
    public static final String BASIC_POLICY_EDITOR_META_DATA_AMOUNT = "NoOfBasicPolicyEditorMetaData";
    public static final String ACTIVE_POLICY = "isActive";
    public static final String PROMOTED_POLICY = "promoted";
    public static final String POLICY_VERSION = "version";
    public static final String LAST_MODIFIED_TIME = "lastModifiedTime";
    public static final String LAST_MODIFIED_USER = "lastModifiedUser";
    public static final String POLICY_LIFE_CYCLE = "policyLifeStatus";
    public static final String POLICY_ORDER = "policyOrder";
    public static final String MAX_POLICY_ORDER = "maxPolicyOrder";

    // ---- XACML element names -----------------------------------------------
    public static final String POLICY_ELEMENT = "Policy";
    public static final String POLICY_REFERENCE = "policyIdReferences";
    public static final String POLICY_SET_REFERENCE = "policySetIdReferences";
    public static final String APPLY_ELEMENT = "Apply";
    public static final String MATCH_ELEMENT = "Match";
    public static final String SUBJECT_ELEMENT = "Subject";
    public static final String ACTION_ELEMENT = "Action";
    public static final String RESOURCE_ELEMENT = "Resource";
    public static final String ENVIRONMENT_ELEMENT = "Environment";

    // ---- XACML attribute category ids and URIs -----------------------------
    public static final String SUBJECT_CATEGORY_ID = "Subject";
    public static final String ACTION_CATEGORY_ID = "Action";
    public static final String RESOURCE_CATEGORY_ID = "Resource";
    public static final String ENVIRONMENT_CATEGORY_ID = "Environment";
    public static final String ANY_OF = "AnyOf";
    public static final String ALL_OF = "AllOf";
    public static final String RESOURCE_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
            "attribute-category:resource";
    public static final String SUBJECT_CATEGORY_URI = "urn:oasis:names:tc:xacml:1.0:" +
            "subject-category:access-subject";
    public static final String ACTION_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
            "attribute-category:action";
    public static final String ENVIRONMENT_CATEGORY_URI = "urn:oasis:names:tc:xacml:3.0:" +
            "attribute-category:environment";

    // ---- XACML policy structure elements -----------------------------------
    public static final String TARGET_ELEMENT = "Target";
    public static final String RULE_ELEMENT = "Rule";
    public static final String CONDITION_ELEMENT = "Condition";
    public static final String FUNCTION_ELEMENT = "Function";
    public static final String ATTRIBUTE_SELECTOR = "AttributeSelector";
    public static final String ATTRIBUTE_VALUE = "AttributeValue";
    public static final String FUNCTION = "Function";
    public static final String VARIABLE_REFERENCE = "VariableReference";
    public static final String ATTRIBUTE_DESIGNATOR = "AttributeDesignator";
    public static final String ATTRIBUTE_ID = "AttributeId";
    public static final String ATTRIBUTE = "Attribute";
    public static final String DATA_TYPE = "DataType";
    public static final String CATEGORY = "Category";
    public static final String REQUEST_CONTEXT_PATH = "RequestContextPath";

    // ---- well-known XACML attribute ids ------------------------------------
    public static final String SUBJECT_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:subject:subject-id";
    public static final String SUBJECT_CATEGORY_DEFAULT =
            "urn:oasis:names:tc:xacml:1.0:subject-category:access-subject";
    public static final String SUBJECT_ID_ROLE = "http://wso2.org/claims/role";
    public static final String RULE_EFFECT_PERMIT = "Permit";
    public static final String RULE_EFFECT_DENY = "Deny";
    public static final String RESPONSE_RESULT = "Result";
    public static final String RESPONSE_DECISION = "Decision";
    public static final String RESPONSE_RESOURCE_ID = "ResourceId";
    public static final String POLICY_META_DATA = "policyMetaData";
    public static final int POLICY_META_DATA_ARRAY_LENGTH = 4;
    public static final String AUTHORIZATION_PERMISSION = "/permission/admin/configure";

    // ---- cache names --------------------------------------------------------
    public static final String ENTITLEMENT_CACHE_MANAGER = "ENTITLEMENT_CACHE_MANAGER";
    public static final String PIP_RESOURCE_CACHE = "PIP_RESOURCE_CACHE";
    public static final String PDP_DECISION_CACHE = "PDP_DECISION_CACHE";
    public static final String PDP_SIMPLE_DECISION_CACHE = "PDP_SIMPLE_DECISION_CACHE";
    public static final String PDP_DECISION_INVALIDATION_CACHE = "PDP_DECISION_INVALIDATION_CACHE";
    public static final String PIP_ABSTRACT_INVALIDATION_CACHE = "PIP_ABSTRACT_INVALIDATION_CACHE";
    public static final String POLICY_SEARCH_CACHE = "POLICY_SEARCH_CACHE";
    public static final String PIP_ABSTRACT_RESOURCE_CACHE = "PIP_ABSTRACT_RESOURCE_CACHE";
    public static final String PIP_ATTRIBUTE_CACHE = "PIP_ATTRIBUTE_CACHE";
    public static final String PIP_ABSTRACT_ATTRIBUTE_CACHE = "PIP_ABSTRACT_ATTRIBUTE_CACHE";
    public static final String ENTITLEMENT_POLICY_INVALIDATION_CACHE =
            "ENTITLEMENT_POLICY_INVALIDATION_CACHE";

    public static final int DEFAULT_ITEMS_PER_PAGE = 10;
    public static final String UNKNOWN = "UNKNOWN";

    // ---- XACML request/response constants -----------------------------------
    public static final String REQUEST_ELEMENT = "Request";
    public static final String REQ_RES_CONTEXT = "urn:oasis:names:tc:xacml:2.0:context:schema:os";
    public static final String REQ_SCHEME = "http://www.w3.org/2001/XMLSchema-instance";
    public static final String STRING_DATA_TYPE = "http://www.w3.org/2001/XMLSchema#string";
    public static final String RESOURCE_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:resource:resource-id";
    public static final String ACTION_ID_DEFAULT = "urn:oasis:names:tc:xacml:1.0:action:action-id";
    public static final String ENVIRONMENT_ID_DEFAULT =
            "urn:oasis:names:tc:xacml:1.0:environment:environment-id";
    public static final String RESOURCE_SCOPE_ID = "urn:oasis:names:tc:xacml:1.0:resource:scope";
    public static final String RESOURCE_DESCENDANTS = "Descendants";
    public static final String RESOURCE_CHILDREN = "Children";
    public static final String ATTRIBUTE_SEPARATOR = ",";

    // ---- advanced-search warning / error messages ---------------------------
    public static final String SEARCH_WARNING_MESSAGE1 = "Attribute values are not defined directly";
    public static final String SEARCH_WARNING_MESSAGE2 = "No Attributes are defined";
    public static final String SEARCH_WARNING_MESSAGE3 = "Attribute Selector Element is contained " +
            "with Xpath expression";
    public static final String SEARCH_WARNING_MESSAGE4 = "Apply Element is not contained within Condition Element";
    public static final String SEARCH_ERROR = "Search_Error";
    public static final String SEARCH_ERROR_MESSAGE = "Therefore Advance Search can not be proceeded. " +
            "Please de-active this policy, If policy is not" + " relevant for the search";

    // ---- XACML schema namespaces and bundled schema files -------------------
    public static final String XACML_3_POLICY_XMLNS = "urn:oasis:names:tc:xacml:3.0:core:schema:wd-17";
    public static final String XACML_2_POLICY_XMLNS = "urn:oasis:names:tc:xacml:2.0:policy:schema:os";
    public static final String XACML_1_POLICY_XMLNS = "urn:oasis:names:tc:xacml:1.0:policy";
    public static final String XACML_3_POLICY_SCHEMA_FILE = "xacml3.xsd";
    public static final String XACML_2_POLICY_SCHEMA_FILE = "xacml2.xsd";
    public static final String XACML_1_POLICY_SCHEMA_FILE = "xacml1.xsd";

    // ---- registry paths ------------------------------------------------------
    public static final String ENTITLEMENT_POLICY_PUBLISHER =
            "/repository/identity/entitlement/publisher/";
    public static final String ENTITLEMENT_POLICY_PUBLISHER_VERIFICATION =
            "/repository/identity/entitlement/publisher/verification/";
    public static final String ENTITLEMENT_POLICY_VERSION =
            "/repository/identity/entitlement/policy/version/";
    public static final String ENTITLEMENT_POLICY_DATA =
            "/repository/identity/entitlement/policy/data/";
    public static final String ENTITLEMENT_POLICY_PAP =
            "/repository/identity/entitlement/policy/pap/";

    // ---- entitlement.properties file configuration keys ----------------------
    // NOTE(review): "OnDemang" appears misspelled, but these are released
    // configuration keys — renaming them would break existing deployments'
    // entitlement.properties files, so they are kept as-is.
    public static final String ON_DEMAND_POLICY_LOADING = "PDP.OnDemangPolicyLoading.Enable";
    public static final String ON_DEMAND_POLICY_MAX_POLICY_ENTRIES =
            "PDP.OnDemangPolicyLoading.MaxInMemoryPolicies";
    public static final String MAX_POLICY_REFERENCE_ENTRIES = "PDP.References.MaxPolicyEntries";
    public static final int MAX_NO_OF_IN_MEMORY_POLICIES = 10;
    public static final String DECISION_CACHING = "PDP.DecisionCaching.Enable";
    public static final String DECISION_CACHING_INTERVAL = "PDP.DecisionCaching.CachingInterval";
    public static final String ATTRIBUTE_CACHING = "PDP.AttributeCaching.Enable";
    public static final String ATTRIBUTE_CACHING_INTERVAL = "PDP.AttributeCaching.CachingInterval";
    public static final String RESOURCE_CACHING = "PDP.ResourceCaching.Enable";
    // NOTE(review): this reuses the DecisionCaching interval key — it looks like a
    // copy/paste slip ("PDP.ResourceCaching.CachingInterval" would be expected),
    // but deployed configs may depend on the current behavior; confirm before changing.
    public static final String RESOURCE_CACHING_INTERVAL = "PDP.DecisionCaching.CachingInterval";
    public static final String PDP_ENABLE = "PDP.Enable";
    public static final String PAP_ENABLE = "PAP.Enable";
    public static final String BALANA_CONFIG_ENABLE = "PDP.Balana.Config.Enable";
    public static final String MULTIPLE_DECISION_PROFILE_ENABLE =
            "PDP.Multiple.Decision.Profile.Enable";
    public static final String FILESYSTEM_POLICY_PATH = "PAP.Policy.Add.Start.Policy.File.Path";
    public static final String START_UP_POLICY_ADDING = "PAP.Policy.Add.Start.Enable";
    public static final String POLICY_ID_REGEXP_PATTERN = "PAP.Policy.Id.Regexp.Pattern";
    public static final String ENTITLEMENT_ITEMS_PER_PAGE = "PAP.Items.Per.Page";
    public static final String PDP_GLOBAL_COMBINING_ALGORITHM = "PDP.Global.Policy.Combining.Algorithm";

    public static final String REGISTRY_MEDIA_TYPE = "application/xacml-policy+xml";

    /** Constants-only holder; not meant to be instantiated. */
    private PDPConstants() {
    }
}
apache-2.0
izrik/blueflood
blueflood-http/src/test/java/com/rackspacecloud/blueflood/inputs/handlers/TestIndexHandler.java
1247
package com.rackspacecloud.blueflood.inputs.handlers; import com.rackspacecloud.blueflood.io.SearchResult; import com.rackspacecloud.blueflood.outputs.handlers.HttpMetricsIndexHandler; import junit.framework.Assert; import org.junit.Test; import java.util.ArrayList; import java.util.List; public class TestIndexHandler { @Test public void testSearchResultToJSON() { List<SearchResult> results = new ArrayList<SearchResult>(); results.add(new SearchResult("tenant0", "a.b.c.d.foo", "parsecs")); results.add(new SearchResult("tenant1", "a.b.c.d.bar", "furlongs")); String searchResultsJson = HttpMetricsIndexHandler.getSerializedJSON(results); Assert.assertFalse("[]".equals(searchResultsJson)); Assert.assertTrue(searchResultsJson.contains("unit")); } @Test public void testNullUnitsDontGetAdded() { List<SearchResult> results = new ArrayList<SearchResult>(); results.add(new SearchResult("tenant0", "a.b.c.d.foo", null)); String searchResultsJson = HttpMetricsIndexHandler.getSerializedJSON(results); Assert.assertTrue(searchResultsJson.contains("a.b.c.d.foo")); Assert.assertFalse(searchResultsJson.contains("unit")); } }
apache-2.0
asurve/arvind-sysml
src/test/java/org/apache/sysml/test/integration/functions/unary/matrix/EigenFactorizeTest.java
3807
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.test.integration.functions.unary.matrix;

import org.junit.Test;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.runtime.matrix.MatrixCharacteristics;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;

/**
 * Integration test for the eigen() builtin: runs the "eigen" DML script over
 * a random dense matrix on each runtime platform and checks that the script's
 * output "D" matches an all-zero expected matrix within 1e-8 (the script
 * presumably emits residuals/differences for the checked eigenvalues — see
 * eigen.dml for the exact contract).
 */
public class EigenFactorizeTest extends AutomatedTestBase
{
    private final static String TEST_NAME1 = "eigen";
    private final static String TEST_DIR = "functions/unary/matrix/";
    private static final String TEST_CLASS_DIR = TEST_DIR + EigenFactorizeTest.class.getSimpleName() + "/";

    // Two matrix sizes: a small and a "large" square input.
    private final static int rows1 = 500;
    private final static int rows2 = 1000;
    private final static double sparsity = 0.9;
    // How many eigenvalues the DML script is asked to check (rows of output D).
    private final static int numEigenValuesToEvaluate = 15;

    @Override
    public void setUp()
    {
        addTestConfiguration(
            TEST_NAME1, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "D" }) );
    }

    @Test
    public void testEigenFactorizeDenseCP()
    {
        runTestEigenFactorize( rows1, RUNTIME_PLATFORM.SINGLE_NODE );
    }

    @Test
    public void testEigenFactorizeDenseSP()
    {
        // Spark variant only runs when the suite is already on Spark.
        if(rtplatform == RUNTIME_PLATFORM.SPARK)
            runTestEigenFactorize( rows1, RUNTIME_PLATFORM.SPARK );
    }

    @Test
    public void testEigenFactorizeDenseMR()
    {
        runTestEigenFactorize( rows1, RUNTIME_PLATFORM.HADOOP );
    }

    @Test
    public void testEigenFactorizeDenseHybrid()
    {
        runTestEigenFactorize( rows1, RUNTIME_PLATFORM.HYBRID );
    }

    @Test
    public void testLargeEigenFactorizeDenseCP()
    {
        runTestEigenFactorize( rows2, RUNTIME_PLATFORM.SINGLE_NODE );
    }

    @Test
    public void testLargeEigenFactorizeDenseSP()
    {
        // Spark variant only runs when the suite is already on Spark.
        if(rtplatform == RUNTIME_PLATFORM.SPARK)
            runTestEigenFactorize( rows2, RUNTIME_PLATFORM.SPARK );
    }

    @Test
    public void testLargeEigenFactorizeDenseMR()
    {
        runTestEigenFactorize( rows2, RUNTIME_PLATFORM.HADOOP );
    }

    @Test
    public void testLargeEigenFactorizeDenseHybrid()
    {
        runTestEigenFactorize( rows2, RUNTIME_PLATFORM.HYBRID );
    }

    /**
     * Runs the eigen DML script on a random rows x rows matrix on the given
     * platform and compares output "D" against an all-zero expected matrix.
     * Restores the previous runtime platform even if the run fails.
     */
    private void runTestEigenFactorize( int rows, RUNTIME_PLATFORM rt)
    {
        RUNTIME_PLATFORM rtold = rtplatform;
        rtplatform = rt;

        try
        {
            getAndLoadTestConfiguration(TEST_NAME1);

            String HOME = SCRIPT_DIR + TEST_DIR;
            fullDMLScriptName = HOME + TEST_NAME1 + ".dml";
            programArgs = new String[]{"-args", input("A"),
                Integer.toString(numEigenValuesToEvaluate), output("D") };

            double[][] A = getRandomMatrix(rows, rows, 0, 1, sparsity, 10);
            MatrixCharacteristics mc = new MatrixCharacteristics(rows, rows, -1, -1, -1);
            writeInputMatrixWithMTD("A", A, false, mc);

            // Expected output: a numEigenValuesToEvaluate x 1 zero matrix
            // (the previous comment claimed "1x1", which was incorrect).
            double[][] D = new double[numEigenValuesToEvaluate][1];
            for(int i=0; i < numEigenValuesToEvaluate; i++) {
                D[i][0] = 0.0;
            }
            writeExpectedMatrix("D", D);

            boolean exceptionExpected = false;
            runTest(true, exceptionExpected, null, -1);

            // Tolerance accounts for floating-point error in the factorization.
            compareResults(1e-8);
        }
        finally {
            // Always restore the platform so later tests see the original setting.
            rtplatform = rtold;
        }
    }
}
apache-2.0
supersven/intellij-community
python/src/com/jetbrains/python/documentation/PythonDocumentationProvider.java
27891
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jetbrains.python.documentation; import com.intellij.codeInsight.TargetElementUtil; import com.intellij.ide.actions.ShowSettingsUtilImpl; import com.intellij.lang.documentation.AbstractDocumentationProvider; import com.intellij.lang.documentation.ExternalDocumentationProvider; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.application.ModalityState; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.extensions.Extensions; import com.intellij.openapi.module.Module; import com.intellij.openapi.module.ModuleUtilCore; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.Sdk; import com.intellij.openapi.roots.ProjectRootManager; import com.intellij.openapi.ui.Messages; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.psi.*; import com.intellij.psi.util.PsiTreeUtil; import com.intellij.psi.util.QualifiedName; import com.intellij.util.Function; import com.jetbrains.python.PyNames; import com.jetbrains.python.codeInsight.PyCodeInsightSettings; import com.jetbrains.python.console.PydevConsoleRunner; import com.jetbrains.python.console.PydevDocumentationProvider; import com.jetbrains.python.debugger.PySignature; import com.jetbrains.python.debugger.PySignatureCacheManager; import 
com.jetbrains.python.debugger.PySignatureUtil; import com.jetbrains.python.psi.*; import com.jetbrains.python.psi.impl.PyBuiltinCache; import com.jetbrains.python.psi.resolve.QualifiedNameFinder; import com.jetbrains.python.psi.types.PyClassType; import com.jetbrains.python.psi.types.PyType; import com.jetbrains.python.psi.types.PyTypeParser; import com.jetbrains.python.psi.types.TypeEvalContext; import com.jetbrains.python.toolbox.ChainIterable; import com.jetbrains.python.toolbox.FP; import org.apache.commons.httpclient.HttpClient; import org.apache.commons.httpclient.methods.HeadMethod; import org.apache.commons.httpclient.params.HttpConnectionManagerParams; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import java.io.File; import java.io.IOException; import java.util.Collections; import java.util.List; import static com.jetbrains.python.documentation.DocumentationBuilderKit.*; /** * Provides quick docs for classes, methods, and functions. 
* Generates documentation stub */ public class PythonDocumentationProvider extends AbstractDocumentationProvider implements ExternalDocumentationProvider { @NonNls static final String LINK_TYPE_CLASS = "#class#"; @NonNls static final String LINK_TYPE_PARENT = "#parent#"; @NonNls static final String LINK_TYPE_PARAM = "#param#"; @NonNls static final String LINK_TYPE_TYPENAME = "#typename#"; @NonNls private static final String RST_PREFIX = ":"; @NonNls private static final String EPYDOC_PREFIX = "@"; // provides ctrl+hover info @Override @Nullable public String getQuickNavigateInfo(final PsiElement element, final PsiElement originalElement) { for (final PythonDocumentationQuickInfoProvider point : PythonDocumentationQuickInfoProvider.EP_NAME.getExtensions()) { String info = point.getQuickInfo(originalElement); if (info != null) { return info; } } if (element instanceof PyFunction) { PyFunction func = (PyFunction)element; StringBuilder cat = new StringBuilder(); PyClass cls = func.getContainingClass(); if (cls != null) { String cls_name = cls.getName(); cat.append("class ").append(cls_name).append("\n"); // It would be nice to have class import info here, but we don't know the ctrl+hovered reference and context } String summary = ""; final PyStringLiteralExpression docStringExpression = func.getDocStringExpression(); if (docStringExpression != null) { final StructuredDocString docString = DocStringUtil.parse(docStringExpression.getStringValue()); if (docString != null) { summary = docString.getSummary(); } } return $(cat.toString()).add(describeDecorators(func, LSame2, ", ", LSame1)).add(describeFunction(func, LSame2, LSame1)) .toString() + "\n" + summary; } else if (element instanceof PyClass) { PyClass cls = (PyClass)element; String summary = ""; PyStringLiteralExpression docStringExpression = cls.getDocStringExpression(); if (docStringExpression == null) { final PyFunction initOrNew = cls.findInitOrNew(false, null); if (initOrNew != null) { docStringExpression = 
initOrNew.getDocStringExpression(); } } if (docStringExpression != null) { final StructuredDocString docString = DocStringUtil.parse(docStringExpression.getStringValue()); if (docString != null) { summary = docString.getSummary(); } } return describeDecorators(cls, LSame2, ", ", LSame1).add(describeClass(cls, LSame2, false, false)).toString() + "\n" + summary; } else if (element instanceof PyExpression) { return describeExpression((PyExpression)element, originalElement); } return null; } /** * Creates a HTML description of function definition. * * @param fun the function * @param func_name_wrapper puts a tag around the function name * @param escaper sanitizes values that come directly from doc string or code * @return chain of strings for further chaining */ static ChainIterable<String> describeFunction( PyFunction fun, FP.Lambda1<Iterable<String>, Iterable<String>> func_name_wrapper, FP.Lambda1<String, String> escaper ) { ChainIterable<String> cat = new ChainIterable<String>(); final String name = fun.getName(); cat.addItem("def ").addWith(func_name_wrapper, $(name)); final TypeEvalContext context = TypeEvalContext.userInitiated(fun.getProject(), fun.getContainingFile()); final List<PyParameter> parameters = PyUtil.getParameters(fun, context); final String paramStr = "(" + StringUtil.join(parameters, new Function<PyParameter, String>() { @Override public String fun(PyParameter parameter) { return PyUtil.getReadableRepr(parameter, false); } }, ", ") + ")"; cat.addItem(escaper.apply(paramStr)); if (!PyNames.INIT.equals(name)) { cat.addItem(escaper.apply("\nInferred type: ")); getTypeDescription(fun, cat); cat.addItem(BR); } return cat; } @Nullable private static String describeExpression(@NotNull PyExpression expr, @NotNull PsiElement originalElement) { final String name = expr.getName(); if (name != null) { StringBuilder result = new StringBuilder((expr instanceof PyNamedParameter) ? 
"parameter" : "variable"); result.append(String.format(" \"%s\"", name)); if (expr instanceof PyNamedParameter) { final PyFunction function = PsiTreeUtil.getParentOfType(expr, PyFunction.class); if (function != null) { result.append(" of ").append(function.getContainingClass() == null ? "function" : "method"); result.append(String.format(" \"%s\"", function.getName())); } } if (originalElement instanceof PyTypedElement) { result.append("\n").append(describeType((PyTypedElement)originalElement)); } return result.toString(); } return null; } static String describeType(@NotNull PyTypedElement element) { final TypeEvalContext context = TypeEvalContext.userInitiated(element.getProject(), element.getContainingFile()); return String.format("Inferred type: %s", getTypeName(context.getType(element), context)); } public static void getTypeDescription(@NotNull PyFunction fun, ChainIterable<String> body) { final TypeEvalContext context = TypeEvalContext.userInitiated(fun.getProject(), fun.getContainingFile()); PyTypeModelBuilder builder = new PyTypeModelBuilder(context); builder.build(context.getType(fun), true).toBodyWithLinks(body, fun); } public static String getTypeName(@Nullable PyType type, @NotNull final TypeEvalContext context) { PyTypeModelBuilder.TypeModel typeModel = buildTypeModel(type, context); return typeModel.asString(); } private static PyTypeModelBuilder.TypeModel buildTypeModel(PyType type, TypeEvalContext context) { PyTypeModelBuilder builder = new PyTypeModelBuilder(context); return builder.build(type, true); } public static void describeExpressionTypeWithLinks(ChainIterable<String> body, PyReferenceExpression expression, @NotNull TypeEvalContext context) { PyType type = context.getType(expression); describeTypeWithLinks(body, expression, type, context); } public static void describeTypeWithLinks(ChainIterable<String> body, PsiElement anchor, PyType type, TypeEvalContext context) { PyTypeModelBuilder builder = new PyTypeModelBuilder(context); 
builder.build(type, true).toBodyWithLinks(body, anchor); } static ChainIterable<String> describeDecorators(PyDecoratable what, FP.Lambda1<Iterable<String>, Iterable<String>> deco_name_wrapper, String deco_separator, FP.Lambda1<String, String> escaper) { ChainIterable<String> cat = new ChainIterable<String>(); PyDecoratorList deco_list = what.getDecoratorList(); if (deco_list != null) { for (PyDecorator deco : deco_list.getDecorators()) { cat.add(describeDeco(deco, deco_name_wrapper, escaper)).addItem(deco_separator); // can't easily pass describeDeco to map() %) } } return cat; } /** * Creates a HTML description of function definition. * * @param cls the class * @param name_wrapper wrapper to render the name with * @param allow_html * @param link_own_name if true, add link to class's own name @return cat for easy chaining */ static ChainIterable<String> describeClass(PyClass cls, FP.Lambda1<Iterable<String>, Iterable<String>> name_wrapper, boolean allow_html, boolean link_own_name) { ChainIterable<String> cat = new ChainIterable<String>(); final String name = cls.getName(); cat.addItem("class "); if (allow_html && link_own_name) { cat.addWith(LinkMyClass, $(name)); } else { cat.addWith(name_wrapper, $(name)); } final PyExpression[] ancestors = cls.getSuperClassExpressions(); if (ancestors.length > 0) { cat.addItem("("); boolean is_not_first = false; for (PyExpression parent : ancestors) { final String parentName = parent.getName(); if (parentName == null) { continue; } if (is_not_first) { cat.addItem(", "); } else { is_not_first = true; } if (allow_html) { cat.addWith(new LinkWrapper(LINK_TYPE_PARENT + parentName), $(parentName)); } else { cat.addItem(parentName); } } cat.addItem(")"); } return cat; } // private static Iterable<String> describeDeco(PyDecorator deco, final FP.Lambda1<Iterable<String>, Iterable<String>> name_wrapper, // addWith in tags, if need be final FP.Lambda1<String, String> arg_wrapper // add escaping, if need be ) { ChainIterable<String> cat = 
new ChainIterable<String>(); cat.addItem("@").addWith(name_wrapper, $(PyUtil.getReadableRepr(deco.getCallee(), true))); if (deco.hasArgumentList()) { PyArgumentList arglist = deco.getArgumentList(); if (arglist != null) { cat .addItem("(") .add(interleave(FP.map(FP.combine(LReadableRepr, arg_wrapper), arglist.getArguments()), ", ")) .addItem(")") ; } } return cat; } // provides ctrl+Q doc public String generateDoc(PsiElement element, @Nullable PsiElement originalElement) { if (element != null && PydevConsoleRunner.isInPydevConsole(element) || originalElement != null && PydevConsoleRunner.isInPydevConsole(originalElement)) { return PydevDocumentationProvider.createDoc(element, originalElement); } originalElement = findRealOriginalElement(originalElement); //original element can be whitespace or bracket, // but we need identifier that resolves to element return new PyDocumentationBuilder(element, originalElement).build(); } private static PsiElement findRealOriginalElement(@Nullable PsiElement element) { if (element == null) { return null; } PsiFile file = element.getContainingFile(); if (file == null) { return element; } Document document = PsiDocumentManager.getInstance(element.getProject()).getDocument(file); if (document == null) { return element; } int newOffset = TargetElementUtil.adjustOffset(file, document, element.getTextOffset()); PsiElement newElement = file.findElementAt(newOffset); return newElement != null ? 
newElement : element; } @Override public PsiElement getDocumentationElementForLink(PsiManager psiManager, String link, PsiElement context) { if (link.equals(LINK_TYPE_CLASS)) { return inferContainingClassOf(context); } else if (link.equals(LINK_TYPE_PARAM)) { return inferClassOfParameter(context); } else if (link.startsWith(LINK_TYPE_PARENT)) { PyClass cls = inferContainingClassOf(context); if (cls != null) { String desired_name = link.substring(LINK_TYPE_PARENT.length()); for (PyClass parent : cls.getAncestorClasses(null)) { final String parent_name = parent.getName(); if (parent_name != null && parent_name.equals(desired_name)) return parent; } } } else if (link.startsWith(LINK_TYPE_TYPENAME)) { String typeName = link.substring(LINK_TYPE_TYPENAME.length()); PyType type = PyTypeParser.getTypeByName(context, typeName); if (type instanceof PyClassType) { return ((PyClassType)type).getPyClass(); } } return null; } @Override public List<String> getUrlFor(final PsiElement element, PsiElement originalElement) { final String url = getUrlFor(element, originalElement, true); return url == null ? null : Collections.singletonList(url); } @Nullable private static String getUrlFor(PsiElement element, PsiElement originalElement, boolean checkExistence) { PsiFileSystemItem file = element instanceof PsiFileSystemItem ? (PsiFileSystemItem)element : element.getContainingFile(); if (file == null) return null; if (PyNames.INIT_DOT_PY.equals(file.getName())) { file = file.getParent(); assert file != null; } Sdk sdk = PyBuiltinCache.findSdkForFile(file); if (sdk == null) { return null; } QualifiedName qName = QualifiedNameFinder.findCanonicalImportPath(element, originalElement); if (qName == null) { return null; } PythonDocumentationMap map = PythonDocumentationMap.getInstance(); String pyVersion = pyVersion(sdk.getVersionString()); PsiNamedElement namedElement = (element instanceof PsiNamedElement && !(element instanceof PsiFileSystemItem)) ? 
(PsiNamedElement)element : null; if (namedElement instanceof PyFunction && PyNames.INIT.equals(namedElement.getName())) { final PyClass containingClass = ((PyFunction)namedElement).getContainingClass(); if (containingClass != null) { namedElement = containingClass; } } String url = map.urlFor(qName, namedElement, pyVersion); if (url != null) { if (checkExistence && !pageExists(url)) { return map.rootUrlFor(qName); } return url; } for (PythonDocumentationLinkProvider provider : Extensions.getExtensions(PythonDocumentationLinkProvider.EP_NAME)) { final String providerUrl = provider.getExternalDocumentationUrl(element, originalElement); if (providerUrl != null) { if (checkExistence && !pageExists(providerUrl)) { return provider.getExternalDocumentationRoot(sdk); } return providerUrl; } } return null; } private static boolean pageExists(String url) { if (new File(url).exists()) { return true; } HttpClient client = new HttpClient(); HttpConnectionManagerParams params = client.getHttpConnectionManager().getParams(); params.setSoTimeout(5 * 1000); params.setConnectionTimeout(5 * 1000); try { HeadMethod method = new HeadMethod(url); int rc = client.executeMethod(method); if (rc == 404) { return false; } } catch (IllegalArgumentException e) { return false; } catch (IOException ignored) { } return true; } @Nullable public static String pyVersion(@Nullable String versionString) { String prefix = "Python "; if (versionString != null && versionString.startsWith(prefix)) { String version = versionString.substring(prefix.length()); int dot = version.indexOf('.'); if (dot > 0) { dot = version.indexOf('.', dot + 1); if (dot > 0) { return version.substring(0, dot); } return version; } } return null; } @Override public String fetchExternalDocumentation(Project project, PsiElement element, List<String> docUrls) { return null; } @Override public boolean hasDocumentationFor(PsiElement element, PsiElement originalElement) { return getUrlFor(element, originalElement, false) != null; } 
@Override public boolean canPromptToConfigureDocumentation(PsiElement element) { final PsiFile containingFile = element.getContainingFile(); if (containingFile instanceof PyFile) { final Project project = element.getProject(); final VirtualFile vFile = containingFile.getVirtualFile(); if (vFile != null && ProjectRootManager.getInstance(project).getFileIndex().isInLibraryClasses(vFile)) { final QualifiedName qName = QualifiedNameFinder.findCanonicalImportPath(element, element); if (qName != null && qName.getComponentCount() > 0) { return true; } } } return false; } @Override public void promptToConfigureDocumentation(PsiElement element) { final Project project = element.getProject(); final QualifiedName qName = QualifiedNameFinder.findCanonicalImportPath(element, element); if (qName != null && qName.getComponentCount() > 0) { ApplicationManager.getApplication().invokeLater(new Runnable() { @Override public void run() { int rc = Messages.showOkCancelDialog(project, "No external documentation URL configured for module " + qName.getComponents().get(0) + ".\nWould you like to configure it now?", "Python External Documentation", Messages.getQuestionIcon()); if (rc == Messages.OK) { ShowSettingsUtilImpl.showSettingsDialog(project, PythonDocumentationConfigurable.ID, ""); } } }, ModalityState.NON_MODAL); } } @Nullable private static PyClass inferContainingClassOf(PsiElement context) { if (context instanceof PyClass) return (PyClass)context; if (context instanceof PyFunction) { return ((PyFunction)context).getContainingClass(); } else { return PsiTreeUtil.getParentOfType(context, PyClass.class); } } @Nullable private static PyClass inferClassOfParameter(PsiElement context) { if (context instanceof PyNamedParameter) { final PyType type = TypeEvalContext.userInitiated(context.getProject(), context.getContainingFile()).getType((PyNamedParameter)context); if (type instanceof PyClassType) { return ((PyClassType)type).getPyClass(); } } return null; } public static final 
LinkWrapper LinkMyClass = new LinkWrapper(LINK_TYPE_CLASS); // link item to containing class public static String generateDocumentationContentStub(PyFunction element, String offset, boolean checkReturn) { final Module module = ModuleUtilCore.findModuleForPsiElement(element); if (module == null) return ""; PyDocumentationSettings documentationSettings = PyDocumentationSettings.getInstance(module); String result = ""; if (documentationSettings.isEpydocFormat(element.getContainingFile())) { result += generateContent(element, offset, EPYDOC_PREFIX, checkReturn); } else if (documentationSettings.isReSTFormat(element.getContainingFile())) { result += generateContent(element, offset, RST_PREFIX, checkReturn); } else { result += offset; } return result; } public static void insertDocStub(PyFunction function, PyStatementList insertPlace, Project project, Editor editor) { PyElementGenerator elementGenerator = PyElementGenerator.getInstance(project); PsiWhiteSpace whitespace = PsiTreeUtil.getPrevSiblingOfType(insertPlace, PsiWhiteSpace.class); String ws = "\n"; if (whitespace != null) { String[] spaces = whitespace.getText().split("\n"); if (spaces.length > 1) { ws += spaces[spaces.length - 1]; } } String docContent = ws + generateDocumentationContentStub(function, ws, true); PyExpressionStatement string = elementGenerator.createDocstring("\"\"\"" + docContent + "\"\"\""); if (insertPlace != null) { final PyStatement[] statements = insertPlace.getStatements(); if (statements.length != 0) { insertPlace.addBefore(string, statements[0]); } } PyStringLiteralExpression docstring = function.getDocStringExpression(); if (editor != null && docstring != null) { int offset = docstring.getTextOffset(); editor.getCaretModel().moveToOffset(offset); editor.getCaretModel().moveCaretRelatively(0, 1, false, false, false); } } public String generateDocumentationContentStub(PyFunction element, boolean checkReturn) { PsiWhiteSpace whitespace = 
PsiTreeUtil.getPrevSiblingOfType(element.getStatementList(), PsiWhiteSpace.class); String ws = "\n"; if (whitespace != null) { String[] spaces = whitespace.getText().split("\n"); if (spaces.length > 1) { ws += whitespace.getText().split("\n")[1]; } } return generateDocumentationContentStub(element, ws, checkReturn); } private static String generateContent(PyFunction function, String offset, String prefix, boolean checkReturn) { //TODO: this code duplicates PyDocstringGenerator in some parts final StringBuilder builder = new StringBuilder(offset); final TypeEvalContext context = TypeEvalContext.userInitiated(function.getProject(), function.getContainingFile()); PySignature signature = PySignatureCacheManager.getInstance(function.getProject()).findSignature(function); final PyDecoratorList decoratorList = function.getDecoratorList(); final PyDecorator classMethod = decoratorList == null ? null : decoratorList.findDecorator(PyNames.CLASSMETHOD); for (PyParameter p : PyUtil.getParameters(function, context)) { final String parameterName = p.getName(); if (p.getText().equals(PyNames.CANONICAL_SELF) || parameterName == null) { continue; } if (classMethod != null && parameterName.equals(PyNames.CANONICAL_CLS)) continue; String argType = signature == null ? 
null : signature.getArgTypeQualifiedName(parameterName); if (argType == null) { builder.append(prefix); builder.append("param "); builder.append(parameterName); builder.append(": "); builder.append(offset); } if (PyCodeInsightSettings.getInstance().INSERT_TYPE_DOCSTUB || argType != null) { builder.append(prefix); builder.append("type "); builder.append(parameterName); builder.append(": "); if (signature != null && argType != null) { builder.append(PySignatureUtil.getShortestImportableName(function, argType)); } builder.append(offset); } } builder.append(generateRaiseOrReturn(function, offset, prefix, checkReturn)); return builder.toString(); } public static String generateRaiseOrReturn(PyFunction element, String offset, String prefix, boolean checkReturn) { StringBuilder builder = new StringBuilder(); if (checkReturn) { RaiseVisitor visitor = new RaiseVisitor(); PyStatementList statementList = element.getStatementList(); statementList.accept(visitor); if (visitor.myHasReturn) { builder.append(prefix).append("return:").append(offset); if (PyCodeInsightSettings.getInstance().INSERT_TYPE_DOCSTUB) { builder.append(prefix).append("rtype:").append(offset); } } if (visitor.myHasRaise) { builder.append(prefix).append("raise"); if (visitor.myRaiseTarget != null) { String raiseTarget = visitor.myRaiseTarget.getText(); if (visitor.myRaiseTarget instanceof PyCallExpression) { final PyExpression callee = ((PyCallExpression)visitor.myRaiseTarget).getCallee(); if (callee != null) { raiseTarget = callee.getText(); } } builder.append(" ").append(raiseTarget); } builder.append(":").append(offset); } } else { builder.append(prefix).append("return:").append(offset); if (PyCodeInsightSettings.getInstance().INSERT_TYPE_DOCSTUB) { builder.append(prefix).append("rtype:").append(offset); } } return builder.toString(); } private static class RaiseVisitor extends PyRecursiveElementVisitor { private boolean myHasRaise = false; private boolean myHasReturn = false; private PyExpression 
myRaiseTarget = null; @Override public void visitPyRaiseStatement(PyRaiseStatement node) { myHasRaise = true; final PyExpression[] expressions = node.getExpressions(); if (expressions.length > 0) myRaiseTarget = expressions[0]; } @Override public void visitPyReturnStatement(PyReturnStatement node) { myHasReturn = true; } } }
apache-2.0
shun634501730/java_source_cn
src_en/java/sql/SQLXML.java
18858
/* * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved. * ORACLE PROPRIETARY/CONFIDENTIAL. Use is subject to license terms. * * * * * * * * * * * * * * * * * * * * */ package java.sql; import java.io.InputStream; import java.io.OutputStream; import java.io.Reader; import java.io.Writer; import javax.xml.transform.Result; import javax.xml.transform.Source; /** * The mapping in the JavaTM programming language for the SQL XML type. * XML is a built-in type that stores an XML value * as a column value in a row of a database table. * By default drivers implement an SQLXML object as * a logical pointer to the XML data * rather than the data itself. * An SQLXML object is valid for the duration of the transaction in which it was created. * <p> * The SQLXML interface provides methods for accessing the XML value * as a String, a Reader or Writer, or as a Stream. The XML value * may also be accessed through a Source or set as a Result, which * are used with XML Parser APIs such as DOM, SAX, and StAX, as * well as with XSLT transforms and XPath evaluations. * <p> * Methods in the interfaces ResultSet, CallableStatement, and PreparedStatement, * such as getSQLXML allow a programmer to access an XML value. * In addition, this interface has methods for updating an XML value. 
* <p> * The XML value of the SQLXML instance may be obtained as a BinaryStream using * <pre> * SQLXML sqlxml = resultSet.getSQLXML(column); * InputStream binaryStream = sqlxml.getBinaryStream(); * </pre> * For example, to parse an XML value with a DOM parser: * <pre> * DocumentBuilder parser = DocumentBuilderFactory.newInstance().newDocumentBuilder(); * Document result = parser.parse(binaryStream); * </pre> * or to parse an XML value with a SAX parser to your handler: * <pre> * SAXParser parser = SAXParserFactory.newInstance().newSAXParser(); * parser.parse(binaryStream, myHandler); * </pre> * or to parse an XML value with a StAX parser: * <pre> * XMLInputFactory factory = XMLInputFactory.newInstance(); * XMLStreamReader streamReader = factory.createXMLStreamReader(binaryStream); * </pre> * <p> * Because databases may use an optimized representation for the XML, * accessing the value through getSource() and * setResult() can lead to improved processing performance * without serializing to a stream representation and parsing the XML. 
* <p> * For example, to obtain a DOM Document Node: * <pre> * DOMSource domSource = sqlxml.getSource(DOMSource.class); * Document document = (Document) domSource.getNode(); * </pre> * or to set the value to a DOM Document Node to myNode: * <pre> * DOMResult domResult = sqlxml.setResult(DOMResult.class); * domResult.setNode(myNode); * </pre> * or, to send SAX events to your handler: * <pre> * SAXSource saxSource = sqlxml.getSource(SAXSource.class); * XMLReader xmlReader = saxSource.getXMLReader(); * xmlReader.setContentHandler(myHandler); * xmlReader.parse(saxSource.getInputSource()); * </pre> * or, to set the result value from SAX events: * <pre> * SAXResult saxResult = sqlxml.setResult(SAXResult.class); * ContentHandler contentHandler = saxResult.getHandler(); * contentHandler.startDocument(); * // set the XML elements and attributes into the result * contentHandler.endDocument(); * </pre> * or, to obtain StAX events: * <pre> * StAXSource staxSource = sqlxml.getSource(StAXSource.class); * XMLStreamReader streamReader = staxSource.getXMLStreamReader(); * </pre> * or, to set the result value from StAX events: * <pre> * StAXResult staxResult = sqlxml.setResult(StAXResult.class); * XMLStreamWriter streamWriter = staxResult.getXMLStreamWriter(); * </pre> * or, to perform XSLT transformations on the XML value using the XSLT in xsltFile * output to file resultFile: * <pre> * File xsltFile = new File("a.xslt"); * File myFile = new File("result.xml"); * Transformer xslt = TransformerFactory.newInstance().newTransformer(new StreamSource(xsltFile)); * Source source = sqlxml.getSource(null); * Result result = new StreamResult(myFile); * xslt.transform(source, result); * </pre> * or, to evaluate an XPath expression on the XML value: * <pre> * XPath xpath = XPathFactory.newInstance().newXPath(); * DOMSource domSource = sqlxml.getSource(DOMSource.class); * Document document = (Document) domSource.getNode(); * String expression = "/foo/@bar"; * String barValue = 
xpath.evaluate(expression, document); * </pre> * To set the XML value to be the result of an XSLT transform: * <pre> * File sourceFile = new File("source.xml"); * Transformer xslt = TransformerFactory.newInstance().newTransformer(new StreamSource(xsltFile)); * Source streamSource = new StreamSource(sourceFile); * Result result = sqlxml.setResult(null); * xslt.transform(streamSource, result); * </pre> * Any Source can be transformed to a Result using the identity transform * specified by calling newTransformer(): * <pre> * Transformer identity = TransformerFactory.newInstance().newTransformer(); * Source source = sqlxml.getSource(null); * File myFile = new File("result.xml"); * Result result = new StreamResult(myFile); * identity.transform(source, result); * </pre> * To write the contents of a Source to standard output: * <pre> * Transformer identity = TransformerFactory.newInstance().newTransformer(); * Source source = sqlxml.getSource(null); * Result result = new StreamResult(System.out); * identity.transform(source, result); * </pre> * To create a DOMSource from a DOMResult: * <pre> * DOMSource domSource = new DOMSource(domResult.getNode()); * </pre> * <p> * Incomplete or invalid XML values may cause an SQLException when * set or the exception may occur when execute() occurs. All streams * must be closed before execute() occurs or an SQLException will be thrown. * <p> * Reading and writing XML values to or from an SQLXML object can happen at most once. * The conceptual states of readable and not readable determine if one * of the reading APIs will return a value or throw an exception. * The conceptual states of writable and not writable determine if one * of the writing APIs will set a value or throw an exception. * <p> * The state moves from readable to not readable once free() or any of the * reading APIs are called: getBinaryStream(), getCharacterStream(), getSource(), and getString(). 
* Implementations may also change the state to not writable when this occurs. * <p> * The state moves from writable to not writeable once free() or any of the * writing APIs are called: setBinaryStream(), setCharacterStream(), setResult(), and setString(). * Implementations may also change the state to not readable when this occurs. * * <p> * All methods on the <code>SQLXML</code> interface must be fully implemented if the * JDBC driver supports the data type. * * @see javax.xml.parsers * @see javax.xml.stream * @see javax.xml.transform * @see javax.xml.xpath * @since 1.6 */ public interface SQLXML { /** * This method closes this object and releases the resources that it held. * The SQL XML object becomes invalid and neither readable or writeable * when this method is called. * * After <code>free</code> has been called, any attempt to invoke a * method other than <code>free</code> will result in a <code>SQLException</code> * being thrown. If <code>free</code> is called multiple times, the subsequent * calls to <code>free</code> are treated as a no-op. * @throws SQLException if there is an error freeing the XML value. * @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ void free() throws SQLException; /** * Retrieves the XML value designated by this SQLXML instance as a stream. * The bytes of the input stream are interpreted according to appendix F of the XML 1.0 specification. * The behavior of this method is the same as ResultSet.getBinaryStream() * when the designated column of the ResultSet has a type java.sql.Types of SQLXML. * <p> * The SQL XML object becomes not readable when this method is called and * may also become not writable depending on implementation. * * @return a stream containing the XML data. * @throws SQLException if there is an error processing the XML value. * An exception is thrown if the state is not readable. 
* @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ InputStream getBinaryStream() throws SQLException; /** * Retrieves a stream that can be used to write the XML value that this SQLXML instance represents. * The stream begins at position 0. * The bytes of the stream are interpreted according to appendix F of the XML 1.0 specification * The behavior of this method is the same as ResultSet.updateBinaryStream() * when the designated column of the ResultSet has a type java.sql.Types of SQLXML. * <p> * The SQL XML object becomes not writeable when this method is called and * may also become not readable depending on implementation. * * @return a stream to which data can be written. * @throws SQLException if there is an error processing the XML value. * An exception is thrown if the state is not writable. * @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ OutputStream setBinaryStream() throws SQLException; /** * Retrieves the XML value designated by this SQLXML instance as a java.io.Reader object. * The format of this stream is defined by org.xml.sax.InputSource, * where the characters in the stream represent the unicode code points for * XML according to section 2 and appendix B of the XML 1.0 specification. * Although an encoding declaration other than unicode may be present, * the encoding of the stream is unicode. * The behavior of this method is the same as ResultSet.getCharacterStream() * when the designated column of the ResultSet has a type java.sql.Types of SQLXML. * <p> * The SQL XML object becomes not readable when this method is called and * may also become not writable depending on implementation. * * @return a stream containing the XML data. * @throws SQLException if there is an error processing the XML value. 
* The getCause() method of the exception may provide a more detailed exception, for example, * if the stream does not contain valid characters. * An exception is thrown if the state is not readable. * @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ Reader getCharacterStream() throws SQLException; /** * Retrieves a stream to be used to write the XML value that this SQLXML instance represents. * The format of this stream is defined by org.xml.sax.InputSource, * where the characters in the stream represent the unicode code points for * XML according to section 2 and appendix B of the XML 1.0 specification. * Although an encoding declaration other than unicode may be present, * the encoding of the stream is unicode. * The behavior of this method is the same as ResultSet.updateCharacterStream() * when the designated column of the ResultSet has a type java.sql.Types of SQLXML. * <p> * The SQL XML object becomes not writeable when this method is called and * may also become not readable depending on implementation. * * @return a stream to which data can be written. * @throws SQLException if there is an error processing the XML value. * The getCause() method of the exception may provide a more detailed exception, for example, * if the stream does not contain valid characters. * An exception is thrown if the state is not writable. * @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ Writer setCharacterStream() throws SQLException; /** * Returns a string representation of the XML value designated by this SQLXML instance. * The format of this String is defined by org.xml.sax.InputSource, * where the characters in the stream represent the unicode code points for * XML according to section 2 and appendix B of the XML 1.0 specification. * Although an encoding declaration other than unicode may be present, * the encoding of the String is unicode. 
* The behavior of this method is the same as ResultSet.getString() * when the designated column of the ResultSet has a type java.sql.Types of SQLXML. * <p> * The SQL XML object becomes not readable when this method is called and * may also become not writable depending on implementation. * * @return a string representation of the XML value designated by this SQLXML instance. * @throws SQLException if there is an error processing the XML value. * The getCause() method of the exception may provide a more detailed exception, for example, * if the stream does not contain valid characters. * An exception is thrown if the state is not readable. * @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ String getString() throws SQLException; /** * Sets the XML value designated by this SQLXML instance to the given String representation. * The format of this String is defined by org.xml.sax.InputSource, * where the characters in the stream represent the unicode code points for * XML according to section 2 and appendix B of the XML 1.0 specification. * Although an encoding declaration other than unicode may be present, * the encoding of the String is unicode. * The behavior of this method is the same as ResultSet.updateString() * when the designated column of the ResultSet has a type java.sql.Types of SQLXML. * <p> * The SQL XML object becomes not writeable when this method is called and * may also become not readable depending on implementation. * * @param value the XML value * @throws SQLException if there is an error processing the XML value. * The getCause() method of the exception may provide a more detailed exception, for example, * if the stream does not contain valid characters. * An exception is thrown if the state is not writable. 
* @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ void setString(String value) throws SQLException; /** * Returns a Source for reading the XML value designated by this SQLXML instance. * Sources are used as inputs to XML parsers and XSLT transformers. * <p> * Sources for XML parsers will have namespace processing on by default. * The systemID of the Source is implementation dependent. * <p> * The SQL XML object becomes not readable when this method is called and * may also become not writable depending on implementation. * <p> * Note that SAX is a callback architecture, so a returned * SAXSource should then be set with a content handler that will * receive the SAX events from parsing. The content handler * will receive callbacks based on the contents of the XML. * <pre> * SAXSource saxSource = sqlxml.getSource(SAXSource.class); * XMLReader xmlReader = saxSource.getXMLReader(); * xmlReader.setContentHandler(myHandler); * xmlReader.parse(saxSource.getInputSource()); * </pre> * * @param <T> the type of the class modeled by this Class object * @param sourceClass The class of the source, or null. * If the class is null, a vendor specific Source implementation will be returned. * The following classes are supported at a minimum: * <pre> * javax.xml.transform.dom.DOMSource - returns a DOMSource * javax.xml.transform.sax.SAXSource - returns a SAXSource * javax.xml.transform.stax.StAXSource - returns a StAXSource * javax.xml.transform.stream.StreamSource - returns a StreamSource * </pre> * @return a Source for reading the XML value. * @throws SQLException if there is an error processing the XML value * or if this feature is not supported. * The getCause() method of the exception may provide a more detailed exception, for example, * if an XML parser exception occurs. * An exception is thrown if the state is not readable. 
* @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ <T extends Source> T getSource(Class<T> sourceClass) throws SQLException; /** * Returns a Result for setting the XML value designated by this SQLXML instance. * <p> * The systemID of the Result is implementation dependent. * <p> * The SQL XML object becomes not writeable when this method is called and * may also become not readable depending on implementation. * <p> * Note that SAX is a callback architecture and the returned * SAXResult has a content handler assigned that will receive the * SAX events based on the contents of the XML. Call the content * handler with the contents of the XML document to assign the values. * <pre> * SAXResult saxResult = sqlxml.setResult(SAXResult.class); * ContentHandler contentHandler = saxResult.getXMLReader().getContentHandler(); * contentHandler.startDocument(); * // set the XML elements and attributes into the result * contentHandler.endDocument(); * </pre> * * @param <T> the type of the class modeled by this Class object * @param resultClass The class of the result, or null. * If resultClass is null, a vendor specific Result implementation will be returned. * The following classes are supported at a minimum: * <pre> * javax.xml.transform.dom.DOMResult - returns a DOMResult * javax.xml.transform.sax.SAXResult - returns a SAXResult * javax.xml.transform.stax.StAXResult - returns a StAXResult * javax.xml.transform.stream.StreamResult - returns a StreamResult * </pre> * @return Returns a Result for setting the XML value. * @throws SQLException if there is an error processing the XML value * or if this feature is not supported. * The getCause() method of the exception may provide a more detailed exception, for example, * if an XML parser exception occurs. * An exception is thrown if the state is not writable. 
* @exception SQLFeatureNotSupportedException if the JDBC driver does not support * this method * @since 1.6 */ <T extends Result> T setResult(Class<T> resultClass) throws SQLException; }
apache-2.0
WANdisco/amplab-hive
spark-client/src/main/java/org/apache/hive/spark/client/metrics/Metrics.java
4409
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hive.spark.client.metrics;

import java.io.Serializable;

import org.apache.spark.executor.TaskMetrics;

import org.apache.hadoop.hive.common.classification.InterfaceAudience;

/**
 * Metrics tracked during the execution of a job.
 *
 * Depending on how the metrics object is obtained (by calling methods in the `MetricsCollection`
 * class), metrics will refer to one or more tasks.
 */
@InterfaceAudience.Private
public class Metrics implements Serializable {

  /** Wall-clock time the executor spent deserializing tasks. */
  public final long executorDeserializeTime;
  /** Wall-clock time the executor spent actually running tasks (includes fetching shuffle data). */
  public final long executorRunTime;
  /** Number of bytes the tasks sent back to the driver as results. */
  public final long resultSize;
  /** Time the JVM spent in garbage collection while the tasks were executing. */
  public final long jvmGCTime;
  /** Time spent serializing the task results. */
  public final long resultSerializationTime;
  /** Number of in-memory bytes spilled by the tasks. */
  public final long memoryBytesSpilled;
  /** Number of on-disk bytes spilled by the tasks. */
  public final long diskBytesSpilled;
  /** Input metrics when tasks read from a HadoopRDD or from persisted data; null otherwise. */
  public final InputMetrics inputMetrics;
  /**
   * Shuffle read metrics when tasks read shuffle output, aggregated over all of the tasks'
   * shuffle dependencies; null otherwise.
   */
  public final ShuffleReadMetrics shuffleReadMetrics;
  /** Shuffle write metrics when tasks wrote shuffle output; null otherwise. */
  public final ShuffleWriteMetrics shuffleWriteMetrics;

  private Metrics() {
    // For Serialization only.
    this(0L, 0L, 0L, 0L, 0L, 0L, 0L, null, null, null);
  }

  public Metrics(
      long executorDeserializeTime,
      long executorRunTime,
      long resultSize,
      long jvmGCTime,
      long resultSerializationTime,
      long memoryBytesSpilled,
      long diskBytesSpilled,
      InputMetrics inputMetrics,
      ShuffleReadMetrics shuffleReadMetrics,
      ShuffleWriteMetrics shuffleWriteMetrics) {
    this.executorDeserializeTime = executorDeserializeTime;
    this.executorRunTime = executorRunTime;
    this.resultSize = resultSize;
    this.jvmGCTime = jvmGCTime;
    this.resultSerializationTime = resultSerializationTime;
    this.memoryBytesSpilled = memoryBytesSpilled;
    this.diskBytesSpilled = diskBytesSpilled;
    this.inputMetrics = inputMetrics;
    this.shuffleReadMetrics = shuffleReadMetrics;
    this.shuffleWriteMetrics = shuffleWriteMetrics;
  }

  /** Builds a Metrics snapshot by copying the counters of a single Spark {@link TaskMetrics}. */
  public Metrics(TaskMetrics metrics) {
    this(
      metrics.executorDeserializeTime(),
      metrics.executorRunTime(),
      metrics.resultSize(),
      metrics.jvmGCTime(),
      metrics.resultSerializationTime(),
      metrics.memoryBytesSpilled(),
      metrics.diskBytesSpilled(),
      toInputMetrics(metrics),
      toShuffleReadMetrics(metrics),
      toShuffleWriteMetrics(metrics));
  }

  /** Wraps the task's input metrics, or returns null when none were recorded. */
  private static InputMetrics toInputMetrics(TaskMetrics metrics) {
    if (metrics.inputMetrics().isDefined()) {
      return new InputMetrics(metrics);
    }
    return null;
  }

  /** Wraps the task's shuffle read metrics, or returns null when none were recorded. */
  private static ShuffleReadMetrics toShuffleReadMetrics(TaskMetrics metrics) {
    if (metrics.shuffleReadMetrics().isDefined()) {
      return new ShuffleReadMetrics(metrics);
    }
    return null;
  }

  /** Wraps the task's shuffle write metrics, or returns null when none were recorded. */
  private static ShuffleWriteMetrics toShuffleWriteMetrics(TaskMetrics metrics) {
    if (metrics.shuffleWriteMetrics().isDefined()) {
      return new ShuffleWriteMetrics(metrics);
    }
    return null;
  }

}
apache-2.0
WilliamRen/bbossgroups-3.5
bboss-util/src-asm/bboss/org/objectweb/asm/tree/AnnotationNode.java
8187
/***
 * ASM: a very small and fast Java bytecode manipulation framework
 * Copyright (c) 2000-2011 INRIA, France Telecom
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of its
 *    contributors may be used to endorse or promote products derived from
 *    this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
 * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
 * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
 * THE POSSIBILITY OF SUCH DAMAGE.
 */
package bboss.org.objectweb.asm.tree;

import java.util.ArrayList;
import java.util.List;

import bboss.org.objectweb.asm.AnnotationVisitor;
import bboss.org.objectweb.asm.Opcodes;

/**
 * A node that represents an annotation. It records the annotation's name/value
 * pairs as it visits them, and can later replay them to any
 * {@link AnnotationVisitor} via {@link #accept(AnnotationVisitor)}.
 *
 * @author Eric Bruneton
 */
public class AnnotationNode extends AnnotationVisitor {

    /**
     * The class descriptor of the annotation class. <tt>null</tt> when this
     * node is used internally to collect the elements of an array value (see
     * the package-private list constructor).
     */
    public String desc;

    /**
     * The name value pairs of this annotation. Each name value pair is stored
     * as two consecutive elements in the list. The name is a {@link String},
     * and the value may be a {@link Byte}, {@link Boolean}, {@link Character},
     * {@link Short}, {@link Integer}, {@link Long}, {@link Float},
     * {@link Double}, {@link String} or {@link bboss.org.objectweb.asm.Type}, or a
     * two-element String array (for enumeration values), an
     * {@link AnnotationNode}, or a {@link List} of values of one of the
     * preceding types. The list may be <tt>null</tt> if there is no name value
     * pair.
     */
    public List<Object> values;

    /**
     * Constructs a new {@link AnnotationNode}. <i>Subclasses must not use this
     * constructor</i>. Instead, they must use the
     * {@link #AnnotationNode(int, String)} version.
     *
     * @param desc
     *            the class descriptor of the annotation class.
     * @throws IllegalStateException
     *             If a subclass calls this constructor.
     */
    public AnnotationNode(final String desc) {
        this(Opcodes.ASM5, desc);
        // Guard: subclasses must pass an explicit API version so that this
        // node is never used with a newer visitor API than it implements.
        if (getClass() != AnnotationNode.class) {
            throw new IllegalStateException();
        }
    }

    /**
     * Constructs a new {@link AnnotationNode}.
     *
     * @param api
     *            the ASM API version implemented by this visitor. Must be one
     *            of {@link Opcodes#ASM4} or {@link Opcodes#ASM5}.
     * @param desc
     *            the class descriptor of the annotation class.
     */
    public AnnotationNode(final int api, final String desc) {
        super(api);
        this.desc = desc;
    }

    /**
     * Constructs a new {@link AnnotationNode} to visit an array value. Such a
     * node has a <tt>null</tt> {@link #desc}, which makes the visit methods
     * below store values without names.
     *
     * @param values
     *            where the visited values must be stored.
     */
    AnnotationNode(final List<Object> values) {
        super(Opcodes.ASM5);
        this.values = values;
    }

    // ------------------------------------------------------------------------
    // Implementation of the AnnotationVisitor abstract class
    // ------------------------------------------------------------------------

    @Override
    public void visit(final String name, final Object value) {
        // Lazily create the list: 2 slots per pair for a real annotation,
        // 1 slot per element when collecting an array (desc == null).
        if (values == null) {
            values = new ArrayList<Object>(this.desc != null ? 2 : 1);
        }
        if (this.desc != null) {
            values.add(name);
        }
        values.add(value);
    }

    @Override
    public void visitEnum(final String name, final String desc,
            final String value) {
        if (values == null) {
            values = new ArrayList<Object>(this.desc != null ? 2 : 1);
        }
        if (this.desc != null) {
            values.add(name);
        }
        // Enum values are stored as a {type descriptor, constant name} pair.
        values.add(new String[] { desc, value });
    }

    @Override
    public AnnotationVisitor visitAnnotation(final String name,
            final String desc) {
        if (values == null) {
            values = new ArrayList<Object>(this.desc != null ? 2 : 1);
        }
        if (this.desc != null) {
            values.add(name);
        }
        // Nested annotations are recorded as child AnnotationNodes; the
        // returned node will receive the nested annotation's own values.
        AnnotationNode annotation = new AnnotationNode(desc);
        values.add(annotation);
        return annotation;
    }

    @Override
    public AnnotationVisitor visitArray(final String name) {
        if (values == null) {
            values = new ArrayList<Object>(this.desc != null ? 2 : 1);
        }
        if (this.desc != null) {
            values.add(name);
        }
        // Array values are collected into a shared List through a nameless
        // AnnotationNode (see the list constructor above).
        List<Object> array = new ArrayList<Object>();
        values.add(array);
        return new AnnotationNode(array);
    }

    @Override
    public void visitEnd() {
        // Nothing to do: values are stored as they are visited.
    }

    // ------------------------------------------------------------------------
    // Accept methods
    // ------------------------------------------------------------------------

    /**
     * Checks that this annotation node is compatible with the given ASM API
     * version. This method checks that this node, and all its nodes
     * recursively, do not contain elements that were introduced in more recent
     * versions of the ASM API than the given version.
     *
     * @param api
     *            an ASM API version. Must be one of {@link Opcodes#ASM4} or
     *            {@link Opcodes#ASM5}.
     */
    public void check(final int api) {
        // nothing to do
    }

    /**
     * Makes the given visitor visit this annotation, replaying every recorded
     * name/value pair and then calling {@link AnnotationVisitor#visitEnd}.
     *
     * @param av
     *            an annotation visitor. May be <tt>null</tt>.
     */
    public void accept(final AnnotationVisitor av) {
        if (av != null) {
            if (values != null) {
                // Pairs are stored flat: even index = name, odd index = value.
                for (int i = 0; i < values.size(); i += 2) {
                    String name = (String) values.get(i);
                    Object value = values.get(i + 1);
                    accept(av, name, value);
                }
            }
            av.visitEnd();
        }
    }

    /**
     * Makes the given visitor visit a given annotation value, dispatching on
     * the runtime type used by the storage scheme documented on {@link #values}.
     *
     * @param av
     *            an annotation visitor. May be <tt>null</tt>.
     * @param name
     *            the value name.
     * @param value
     *            the actual value.
     */
    static void accept(final AnnotationVisitor av, final String name,
            final Object value) {
        if (av != null) {
            if (value instanceof String[]) {
                // {type descriptor, constant name} pair => enum value.
                String[] typeconst = (String[]) value;
                av.visitEnum(name, typeconst[0], typeconst[1]);
            } else if (value instanceof AnnotationNode) {
                AnnotationNode an = (AnnotationNode) value;
                an.accept(av.visitAnnotation(name, an.desc));
            } else if (value instanceof List) {
                // Array value: replay each element with a null name.
                AnnotationVisitor v = av.visitArray(name);
                List<?> array = (List<?>) value;
                for (int j = 0; j < array.size(); ++j) {
                    accept(v, null, array.get(j));
                }
                v.visitEnd();
            } else {
                // Primitive wrapper, String or Type: visited directly.
                av.visit(name, value);
            }
        }
    }
}
apache-2.0
mdecourci/assertj-core
src/test/java/org/assertj/core/api/BooleanArrayAssertBaseTest.java
1432
/** * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * * Copyright 2012-2015 the original author or authors. */ package org.assertj.core.api; import static org.assertj.core.test.BooleanArrays.emptyArray; import static org.mockito.Mockito.mock; import org.assertj.core.internal.BooleanArrays; /** * Base class for {@link BooleanArrayAssert} tests. * * @author Olivier Michallat */ public abstract class BooleanArrayAssertBaseTest extends BaseTestTemplate<BooleanArrayAssert, boolean[]> { protected BooleanArrays arrays; @Override protected BooleanArrayAssert create_assertions() { return new BooleanArrayAssert(emptyArray()); } @Override protected void inject_internal_objects() { super.inject_internal_objects(); arrays = mock(BooleanArrays.class); assertions.arrays = arrays; } protected BooleanArrays getArrays(BooleanArrayAssert someAssertions) { return someAssertions.arrays; } }
apache-2.0
jatin9896/incubator-carbondata
core/src/main/java/org/apache/carbondata/core/scan/expression/logical/OrExpression.java
2479
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.carbondata.core.scan.expression.logical; import org.apache.carbondata.core.metadata.datatype.DataTypes; import org.apache.carbondata.core.scan.expression.Expression; import org.apache.carbondata.core.scan.expression.ExpressionResult; import org.apache.carbondata.core.scan.expression.exception.FilterIllegalMemberException; import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException; import org.apache.carbondata.core.scan.filter.intf.ExpressionType; import org.apache.carbondata.core.scan.filter.intf.RowIntf; public class OrExpression extends BinaryLogicalExpression { private static final long serialVersionUID = 4220598043176438380L; public OrExpression(Expression left, Expression right) { super(left, right); } @Override public ExpressionResult evaluate(RowIntf value) throws FilterIllegalMemberException, FilterUnsupportedException { ExpressionResult resultLeft = left.evaluate(value); ExpressionResult resultRight = right.evaluate(value); if (resultLeft.getDataType() == DataTypes.BOOLEAN) { resultLeft.set(DataTypes.BOOLEAN, (resultLeft.getBoolean() || resultRight.getBoolean())); } else { throw new FilterUnsupportedException( "Incompatible datatype for 
applying OR Expression Filter"); } return resultLeft; } @Override public ExpressionType getFilterExpressionType() { return ExpressionType.OR; } @Override public String getString() { return "Or(" + left.getString() + ',' + right.getString() + ')'; } @Override public String getStatement() { return "(" + left.getString() + " or " + right.getString() + ")"; } }
apache-2.0
kidaa/incubator-ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/IgniteInternalCache.java
84847
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache; import org.apache.ignite.*; import org.apache.ignite.cache.*; import org.apache.ignite.cache.affinity.*; import org.apache.ignite.cache.store.*; import org.apache.ignite.configuration.*; import org.apache.ignite.internal.*; import org.apache.ignite.internal.processors.cache.dr.*; import org.apache.ignite.internal.processors.cache.transactions.*; import org.apache.ignite.internal.processors.cache.version.*; import org.apache.ignite.lang.*; import org.apache.ignite.mxbean.*; import org.apache.ignite.transactions.*; import org.jetbrains.annotations.*; import javax.cache.*; import javax.cache.expiry.*; import javax.cache.processor.*; import java.io.*; import java.sql.*; import java.util.*; import java.util.Date; /** * This interface provides a rich API for working with distributed caches. It includes the following * main functionality: * <ul> * <li> * Various {@code 'get(..)'} methods to synchronously or asynchronously get values from cache. * All {@code 'get(..)'} methods are transactional and will participate in an ongoing transaction * if there is one. 
* </li> * <li> * Various {@code 'put(..)'}, {@code 'putIfAbsent(..)'}, and {@code 'replace(..)'} methods to * synchronously or asynchronously put single or multiple entries into cache. * All these methods are transactional and will participate in an ongoing transaction * if there is one. * </li> * <li> * Various {@code 'remove(..)'} methods to synchronously or asynchronously remove single or multiple keys * from cache. All {@code 'remove(..)'} methods are transactional and will participate in an ongoing transaction * if there is one. * </li> * <li> * Various {@code 'contains(..)'} method to check if cache contains certain keys or values locally. * </li> * <li> * Various {@code 'forEach(..)'}, {@code 'forAny(..)'}, and {@code 'reduce(..)'} methods to visit * every local cache entry within this projection. * </li> * <li> * Various {@code flagsOn(..)'}, {@code 'flagsOff(..)'}, and {@code 'projection(..)'} methods to * set specific flags and filters on a cache projection. * </li> * <li> * Methods like {@code 'keySet(..)'}, {@code 'values(..)'}, and {@code 'entrySet(..)'} to provide * views on cache keys, values, and entries. * </li> * <li> * Various {@code 'peek(..)'} methods to peek at values in global or transactional memory, swap * storage, or persistent storage. * </li> * <li> * Various {@code 'reload(..)'} methods to reload latest values from persistent storage. * </li> * <li> * Various {@code 'promote(..)'} methods to load specified keys from swap storage into * global cache memory. * </li> * <li> * Various {@code 'lock(..)'}, {@code 'unlock(..)'}, and {@code 'isLocked(..)'} methods to acquire, release, * and check on distributed locks on a single or multiple keys in cache. All locking methods * are not transactional and will not enlist keys into ongoing transaction, if any. * </li> * <li> * Various {@code 'clear(..)'} methods to clear elements from cache, and optionally from * swap storage. 
All {@code 'clear(..)'} methods are not transactional and will not enlist cleared * keys into ongoing transaction, if any. * </li> * <li> * Various {@code 'evict(..)'} methods to evict elements from cache, and optionally store * them in underlying swap storage for later access. All {@code 'evict(..)'} methods are not * transactional and will not enlist evicted keys into ongoing transaction, if any. * </li> * <li> * Various {@code 'txStart(..)'} methods to perform various cache * operations within a transaction (see {@link Transaction} for more information). * </li> * <li> * Various {@code 'gridProjection(..)'} methods which provide {@link org.apache.ignite.cluster.ClusterGroup} only * for nodes on which given keys reside. All {@code 'gridProjection(..)'} methods are not * transactional and will not enlist keys into ongoing transaction. * </li> * <li> * </ul> * <h1 class="header">Extended Put And Remove Methods</h1> * All methods that end with {@code 'x'} provide the same functionality as their sibling * methods that don't end with {@code 'x'}, however instead of returning a previous value they * return a {@code boolean} flag indicating whether operation succeeded or not. Returning * a previous value may involve a network trip or a persistent store lookup and should be * avoided whenever not needed. * <h1 class="header">Predicate Filters</h1> * All filters passed into methods on this API are checked <b>atomically</b>. In other words the * value returned by the methods is guaranteed to be consistent with the filters passed in. Note * that filters are optional, and if not passed in, then methods will still work as is without * filter validation. * <h1 class="header">Transactions</h1> * Cache API supports distributed transactions. All {@code 'get(..)'}, {@code 'put(..)'}, {@code 'replace(..)'}, * and {@code 'remove(..)'} operations are transactional and will participate in an ongoing transaction, * if any. 
Other methods like {@code 'peek(..)'} or various {@code 'contains(..)'} methods may * be transaction-aware, i.e. check in-transaction entries first, but will not affect the current * state of transaction. See {@link Transaction} documentation for more information * about transactions. * <h1 class="header">Group Locking</h1> * <i>Group Locking</i> is a feature where instead of acquiring individual locks, Ignite will lock * multiple keys with one lock to save on locking overhead. There are 2 types of <i>Group Locking</i>: * <i>affinity-based</i>, and <i>partitioned-based</i>. * <p> * With {@code affinity-based-group-locking} the keys are grouped by <i>affinity-key</i>. This means that * only keys with identical affinity-key (see {@link AffinityKeyMapped}) can participate in the * transaction, and only one lock on the <i>affinity-key</i> will be acquired for the whole transaction. * {@code Affinity-group-locked} transactions are started via * <code>txStartAffinity(Object, TransactionConcurrency, TransactionIsolation, long, int)</code> method. * <p> * With {@code partition-based-group-locking} the keys are grouped by partition ID. This means that * only keys belonging to identical partition (see {@link Affinity#partition(Object)}) can participate in the * transaction, and only one lock on the whole partition will be acquired for the whole transaction. * {@code Partition-group-locked} transactions are started via * <code>txStartPartition(int, TransactionConcurrency, TransactionIsolation, long, int)</code> method. * <p> * <i>Group locking</i> should always be used for transactions whenever possible. If your requirements fit either * <i>affinity-based</i> or <i>partition-based</i> scenarios outlined above then <i>group-locking</i> * can significantly improve performance of your application, often by an order of magnitude. * <h1 class="header">Null Keys or Values</h1> * Neither {@code null} keys or values are allowed to be stored in cache. 
If a {@code null} value * happens to be in cache (e.g. after invalidation or remove), then cache will treat this case * as there is no value at all. * <h1 class="header">Peer Class Loading</h1> * If peer-class-loading is enabled, all classes passed into cache API will be automatically deployed * to any participating grid nodes. However, in case of redeployment, caches will be cleared and * all entries will be removed. This behavior is useful during development, but should not be * used in production. * <h1 class="header">Portable Objects</h1> * If an object is defined as portable Ignite cache will automatically store it in portable (i.e. binary) * format. User can choose to work either with the portable format or with the deserialized form (assuming * that class definitions are present in the classpath). By default, cache works with deserialized form * (example shows the case when {@link Integer} is used as a key for a portable object): * <pre> * IgniteInternalCache<Integer, Value> prj = Ignition.grid().cache(null); * * // Value will be serialized and stored in cache in portable format. * prj.put(1, new Value()); * * // Value will be deserialized since it's stored in portable format. * Value val = prj.get(1); * </pre> * You won't be able to work with deserialized form if class definition for the {@code Value} is not on * classpath. Even if you have the class definition, you should always avoid full deserialization if it's not * needed for performance reasons. To work with portable format directly you should create special projection * using {@link #keepPortable()} method: * <pre> * IgniteInternalCache<Integer, GridPortableObject> prj = Ignition.grid().cache(null).keepPortable(); * * // Value is not deserialized and returned in portable format. * GridPortableObject po = prj.get(1); * </pre> * See {@link #keepPortable()} method JavaDoc for more details. 
*/ public interface IgniteInternalCache<K, V> extends Iterable<Cache.Entry<K, V>> { /** * Gets name of this cache ({@code null} for default cache). * * @return Cache name. */ public String name(); /** * Gets base cache for this projection. * * @param <K1> Cache key type. * @param <V1> Cache value type. * @return Base cache for this projection. */ @SuppressWarnings({"ClassReferencesSubclass"}) public <K1, V1> IgniteInternalCache<K1, V1> cache(); /** * @return Skip store. */ public boolean skipStore(); /** * @param skipStore Skip store flag. * @return New internal cache instance based on this one, but with skip store flag enabled. */ public IgniteInternalCache<K, V> setSkipStore(boolean skipStore); /** * Creates projection that will operate with portable objects. * <p> * Projection returned by this method will force cache not to deserialize portable objects, * so keys and values will be returned from cache API methods without changes. Therefore, * signature of the projection can contain only following types: * <ul> * <li><code>org.gridgain.grid.portables.PortableObject</code> for portable classes</li> * <li>All primitives (byte, int, ...) 
and there boxed versions (Byte, Integer, ...)</li> * <li>Arrays of primitives (byte[], int[], ...)</li> * <li>{@link String} and array of {@link String}s</li> * <li>{@link UUID} and array of {@link UUID}s</li> * <li>{@link Date} and array of {@link Date}s</li> * <li>{@link Timestamp} and array of {@link Timestamp}s</li> * <li>Enums and array of enums</li> * <li> * Maps, collections and array of objects (but objects inside * them will still be converted if they are portable) * </li> * </ul> * <p> * For example, if you use {@link Integer} as a key and {@code Value} class as a value * (which will be stored in portable format), you should acquire following projection * to avoid deserialization: * <pre> * IgniteInternalCache<Integer, GridPortableObject> prj = cache.keepPortable(); * * // Value is not deserialized and returned in portable format. * GridPortableObject po = prj.get(1); * </pre> * <p> * Note that this method makes sense only if cache is working in portable mode * (<code>org.apache.ignite.configuration.CacheConfiguration#isPortableEnabled()</code> returns {@code true}. If not, * this method is no-op and will return current projection. * * @return New internal cache instance for portable objects. */ public <K1, V1> IgniteInternalCache<K1, V1> keepPortable(); /** * Returns {@code true} if this map contains no key-value mappings. * * @return {@code true} if this map contains no key-value mappings. */ public boolean isEmpty(); /** * @param key Key. * @return {@code True} if cache contains mapping for a given key. */ public boolean containsKey(K key); /** * @param key Key. * @return Future. */ public IgniteInternalFuture<Boolean> containsKeyAsync(K key); /** * @param keys Keys, * @return {@code True} if cache contains all keys. */ public boolean containsKeys(Collection<? extends K> keys); /** * @param keys Keys to check. * @return Future. */ public IgniteInternalFuture<Boolean> containsKeysAsync(Collection<? extends K> keys); /** * @param key Key. 
* @param peekModes Peek modes. * @param plc Expiry policy if TTL should be updated. * @return Value. * @throws IgniteCheckedException If failed. */ @Nullable public V localPeek(K key, CachePeekMode[] peekModes, @Nullable IgniteCacheExpiryPolicy plc) throws IgniteCheckedException; /** * @param peekModes Peek modes. * @return Entries iterable. * @throws IgniteCheckedException If failed. */ public Iterable<Cache.Entry<K, V>> localEntries(CachePeekMode[] peekModes) throws IgniteCheckedException; /** * Retrieves value mapped to the specified key from cache. Value will only be returned if * its entry passed the optional filter provided. Filter check is atomic, and therefore the * returned value is guaranteed to be consistent with the filter. The return value of {@code null} * means entry did not pass the provided filter or cache has no mapping for the * key. * <p> * If the value is not present in cache, then it will be looked up from swap storage. If * it's not present in swap, or if swap is disable, and if read-through is allowed, value * will be loaded from {@link CacheStore} persistent storage via * <code>CacheStore#load(Transaction, Object)</code> method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param key Key to retrieve the value for. * @return Value for the given key. * @throws IgniteCheckedException If get operation failed. * @throws NullPointerException if the key is {@code null}. */ @Nullable public V get(K key) throws IgniteCheckedException; /** * Asynchronously retrieves value mapped to the specified key from cache. Value will only be returned if * its entry passed the optional filter provided. Filter check is atomic, and therefore the * returned value is guaranteed to be consistent with the filter. The return value of {@code null} * means entry did not pass the provided filter or cache has no mapping for the * key. 
* <p> * If the value is not present in cache, then it will be looked up from swap storage. If * it's not present in swap, or if swap is disabled, and if read-through is allowed, value * will be loaded from {@link CacheStore} persistent storage via * <code>CacheStore#load(Transaction, Object)</code> method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param key Key for the value to get. * @return Future for the get operation. * @throws NullPointerException if the key is {@code null}. */ public IgniteInternalFuture<V> getAsync(K key); /** * Retrieves values mapped to the specified keys from cache. Value will only be returned if * its entry passed the optional filter provided. Filter check is atomic, and therefore the * returned value is guaranteed to be consistent with the filter. If requested key-value pair * is not present in the returned map, then it means that its entry did not pass the provided * filter or cache has no mapping for the key. * <p> * If some value is not present in cache, then it will be looked up from swap storage. If * it's not present in swap, or if swap is disabled, and if read-through is allowed, value * will be loaded from {@link CacheStore} persistent storage via * <code>CacheStore#loadAll(Transaction, Collection, org.apache.ignite.lang.IgniteBiInClosure)</code> method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param keys Keys to get. * @return Map of key-value pairs. * @throws IgniteCheckedException If get operation failed. */ public Map<K, V> getAll(@Nullable Collection<? extends K> keys) throws IgniteCheckedException; /** * Asynchronously retrieves values mapped to the specified keys from cache. Value will only be returned if * its entry passed the optional filter provided. 
     * Filter check is atomic, and therefore
     * the returned value is guaranteed to be consistent with the filter. If requested key-value pair
     * is not present in the returned map, then it means that its entry did not pass the provided
     * filter or cache has no mapping for the key.
     * <p>
     * If some value is not present in cache, then it will be looked up from swap storage. If
     * it's not present in swap, or if swap is disabled, and if read-through is allowed, value
     * will be loaded from {@link CacheStore} persistent storage via
     * <code>CacheStore#loadAll(Transaction, Collection, org.apache.ignite.lang.IgniteBiInClosure)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param keys Keys for the values to get.
     * @return Future for the get operation.
     */
    public IgniteInternalFuture<Map<K, V>> getAllAsync(@Nullable Collection<? extends K> keys);

    /**
     * Stores given key-value pair in cache. If filters are provided, then entries will
     * be stored in cache only if they pass the filter. Note that filter check is atomic,
     * so value stored in cache is guaranteed to be consistent with the filters. If cache
     * previously contained value for the given key, then this value is returned.
     * In case of {@link CacheMode#PARTITIONED} or {@link CacheMode#REPLICATED} caches,
     * the value will be loaded from the primary node, which in its turn may load the value
     * from the swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method <code>#putx(Object, Object, org.apache.ignite.lang.IgnitePredicate[])</code> should
     * always be used instead of this one to avoid the overhead associated with returning of the previous value.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Previous value associated with specified key, or {@code null}
     * if entry did not pass the filter, or if there was no mapping for the key in swap
     * or in persistent storage.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If put operation failed.
     */
    @Nullable public V getAndPut(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache. If filters are provided, then entries will
     * be stored in cache only if they pass the filter. Note that filter check is atomic,
     * so value stored in cache is guaranteed to be consistent with the filters. If cache
     * previously contained value for the given key, then this value is returned. Otherwise,
     * in case of {@link CacheMode#REPLICATED} caches, the value will be loaded from swap
     * and, if it's not there, and read-through is allowed, from the underlying
     * {@link CacheStore} storage. In case of {@link CacheMode#PARTITIONED} caches,
     * the value will be loaded from the primary node, which in its turn may load the value
     * from the swap storage, and consecutively, if it's not in swap and read-through is allowed,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method <code>#putx(Object, Object, org.apache.ignite.lang.IgnitePredicate[])</code> should
     * always be used instead of this one to avoid the overhead associated with returning of the previous value.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Future for the put operation.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<V> getAndPutAsync(K key, V val);

    /**
     * Stores given key-value pair in cache. If filters are provided, then entries will
     * be stored in cache only if they pass the filter. Note that filter check is atomic,
     * so value stored in cache is guaranteed to be consistent with the filters.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * Unlike <code>#put(Object, Object, org.apache.ignite.lang.IgnitePredicate[])</code> method, it does not return previous
     * value and, therefore, does not have any overhead associated with returning a value. It
     * should be used whenever return value is not required.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return {@code True} if optional filter passed and value was stored in cache,
     * {@code false} otherwise. Note that this method will return {@code true} if filter is not
     * specified.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If put operation failed.
     */
    public boolean put(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache. If filters are provided, then entries will
     * be stored in cache only if they pass the filter. Note that filter check is atomic,
     * so value stored in cache is guaranteed to be consistent with the filters.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * Unlike <code>#put(Object, Object, org.apache.ignite.lang.IgnitePredicate[])</code> method, it does not return previous
     * value and, therefore, does not have any overhead associated with returning of a value. It
     * should always be used whenever return value is not required.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Future for the put operation. Future will return {@code true} if optional filter
     * passed and value was stored in cache, {@code false} otherwise. Note that future will
     * return {@code true} if filter is not specified.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<Boolean> putAsync(K key, V val);

    /**
     * Stores given key-value pair in cache only if cache had no previous mapping for it. If cache
     * previously contained value for the given key, then this value is returned.
     * In case of {@link CacheMode#PARTITIONED} or {@link CacheMode#REPLICATED} caches,
     * the value will be loaded from the primary node, which in its turn may load the value
     * from the swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage.
     * If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method {@link #putIfAbsent(Object, Object)} should
     * always be used instead of this one to avoid the overhead associated with returning of the
     * previous value.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Previously contained value regardless of whether put happened or not.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If put operation failed.
     */
    @Nullable public V getAndPutIfAbsent(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache only if cache had no previous mapping for it. If cache
     * previously contained value for the given key, then this value is returned. In case of
     * {@link CacheMode#PARTITIONED} or {@link CacheMode#REPLICATED} caches,
     * the value will be loaded from the primary node, which in its turn may load the value
     * from the swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method {@link #putIfAbsentAsync(Object, Object)} should
     * always be used instead of this one to avoid the overhead associated with returning of the
     * previous value.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Future of put operation which will provide previously contained value
     * regardless of whether put happened or not.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<V> getAndPutIfAbsentAsync(K key, V val);

    /**
     * Stores given key-value pair in cache only if cache had no previous mapping for it.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * Unlike {@link #getAndPutIfAbsent(Object, Object)} method, it does not return previous
     * value and, therefore, does not have any overhead associated with returning of a value. It
     * should always be used whenever return value is not required.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return {@code true} if value is stored in cache and {@code false} otherwise.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If put operation failed.
     */
    public boolean putIfAbsent(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache only if cache had no previous mapping for it.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * Unlike {@link #getAndPutIfAbsentAsync(Object, Object)} method, it does not return previous
     * value and, therefore, does not have any overhead associated with returning of a value. It
     * should always be used whenever return value is not required.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Future for this put operation.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<Boolean> putIfAbsentAsync(K key, V val);

    /**
     * Stores given key-value pair in cache only if there is a previous mapping for it.
     * In case of {@link CacheMode#PARTITIONED} or {@link CacheMode#REPLICATED} caches,
     * the value will be loaded from the primary node, which in its turn may load the value
     * from the swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method {@link #replace(Object, Object)} should
     * always be used instead of this one to avoid the overhead associated with returning of the
     * previous value.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Previously contained value regardless of whether replace happened or not.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If replace operation failed.
     */
    @Nullable public V getAndReplace(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache only if there is a previous mapping for it. If cache
     * previously contained value for the given key, then this value is returned. In case of
     * {@link CacheMode#PARTITIONED} caches, the value will be loaded from the primary node,
     * which in its turn may load the value from the swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method {@link #replaceAsync(Object, Object)} should
     * always be used instead of this one to avoid the overhead associated with returning of the
     * previous value.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Future for replace operation.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<V> getAndReplaceAsync(K key, V val);

    /**
     * Stores given key-value pair in cache only if there is a previous mapping for it.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * Unlike {@link #getAndReplace(Object, Object)} method, it does not return previous
     * value and, therefore, does not have any overhead associated with returning of a value. It
     * should always be used whenever return value is not required.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return {@code True} if replace happened, {@code false} otherwise.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If replace operation failed.
     */
    public boolean replace(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache only if there is a previous mapping for it.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * Unlike {@link #getAndReplaceAsync(Object, Object)} method, it does not return previous
     * value and, therefore, does not have any overhead associated with returning of a value. It
     * should always be used whenever return value is not required.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param val Value to be associated with the given key.
     * @return Future for the replace operation.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<Boolean> replaceAsync(K key, V val);

    /**
     * Stores given key-value pair in cache only if the previous value is equal to the
     * {@code 'oldVal'} passed in.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param oldVal Old value to match.
     * @param newVal Value to be associated with the given key.
     * @return {@code True} if replace happened, {@code false} otherwise.
     * @throws NullPointerException If either key or value are {@code null}.
     * @throws IgniteCheckedException If replace operation failed.
     */
    public boolean replace(K key, V oldVal, V newVal) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pair in cache only if the previous value is equal to the
     * {@code 'oldVal'} passed in.
     * <p>
     * This method will return {@code true} if value is stored in cache and {@code false} otherwise.
     * <p>
     * If write-through is enabled, the stored value will be persisted to {@link CacheStore}
     * via <code>CacheStore#put(Transaction, Object, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key to store in cache.
     * @param oldVal Old value to match.
     * @param newVal Value to be associated with the given key.
     * @return Future for the replace operation.
     * @throws NullPointerException If either key or value are {@code null}.
     */
    public IgniteInternalFuture<Boolean> replaceAsync(K key, V oldVal, V newVal);

    /**
     * Stores given key-value pairs in cache. If filters are provided, then entries will
     * be stored in cache only if they pass the filter. Note that filter check is atomic,
     * so value stored in cache is guaranteed to be consistent with the filters.
     * <p>
     * If write-through is enabled, the stored values will be persisted to {@link CacheStore}
     * via <code>CacheStore#putAll(Transaction, Map)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param m Key-value pairs to store in cache.
     * @throws IgniteCheckedException If put operation failed.
     */
    public void putAll(@Nullable Map<? extends K, ? extends V> m) throws IgniteCheckedException;

    /**
     * Asynchronously stores given key-value pairs in cache. If filters are provided, then entries will
     * be stored in cache only if they pass the filter. Note that filter check is atomic,
     * so value stored in cache is guaranteed to be consistent with the filters.
     * <p>
     * If write-through is enabled, the stored values will be persisted to {@link CacheStore}
     * via <code>CacheStore#putAll(Transaction, Map)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param m Key-value pairs to store in cache.
     * @return Future for putAll operation.
     */
    public IgniteInternalFuture<?> putAllAsync(@Nullable Map<? extends K, ? extends V> m);

    /**
     * Set of keys cached on this node. You can remove elements from this set, but you cannot add elements
     * to this set. All removal operation will be reflected on the cache itself.
     * <p>
     * Iterator over this set will not fail if set was concurrently updated
     * by another thread. This means that iterator may or may not return latest
     * keys depending on whether they were added before or after current
     * iterator position.
     * <p>
     * NOTE: this operation is not distributed and returns only the keys cached on this node.
     *
     * @return Key set for this cache projection.
     */
    public Set<K> keySet();

    /**
     * @return Set of keys including internal keys.
     */
    public Set<K> keySetx();

    /**
     * Set of keys for which this node is primary.
     * This set is dynamic and may change with grid topology changes.
     * Note that this set will contain mappings for all keys, even if their values are
     * {@code null} because they were invalidated. You can remove elements from
     * this set, but you cannot add elements to this set. All removal operation will be
     * reflected on the cache itself.
     * <p>
     * Iterator over this set will not fail if set was concurrently updated
     * by another thread. This means that iterator may or may not return latest
     * keys depending on whether they were added before or after current
     * iterator position.
     * <p>
     * NOTE: this operation is not distributed and returns only the keys cached on this node.
     *
     * @return Primary key set for the current node.
     */
    public Set<K> primaryKeySet();

    /**
     * Collection of values cached on this node. You can remove
     * elements from this collection, but you cannot add elements to this collection.
     * All removal operation will be reflected on the cache itself.
     * <p>
     * Iterator over this collection will not fail if collection was
     * concurrently updated by another thread. This means that iterator may or
     * may not return latest values depending on whether they were added before
     * or after current iterator position.
     * <p>
     * NOTE: this operation is not distributed and returns only the values cached on this node.
     *
     * @return Collection of cached values.
     */
    public Collection<V> values();

    /**
     * Gets set of all entries cached on this node. You can remove
     * elements from this set, but you cannot add elements to this set.
     * All removal operation will be reflected on the cache itself.
     * <p>
     * NOTE: this operation is not distributed and returns only the entries cached on this node.
     *
     * @return Entries that pass through key filter.
     */
    public Set<Cache.Entry<K, V>> entrySet();

    /**
     * Gets set containing cache entries that belong to provided partition or {@code null}
     * if partition is not found locally.
     * <p>
     * NOTE: this operation is not distributed and returns only the entries cached on this node.
     *
     * @param part Partition.
     * @return Set containing partition's entries or {@code null} if partition is
     * not found locally.
     */
    @Nullable public Set<Cache.Entry<K, V>> entrySet(int part);

    /**
     * Starts new transaction with the specified concurrency and isolation.
     *
     * @param concurrency Concurrency.
     * @param isolation Isolation.
     * @return New transaction.
     * @throws IllegalStateException If transaction is already started by this thread.
     * @throws UnsupportedOperationException If cache is {@link CacheAtomicityMode#ATOMIC}.
     */
    public Transaction txStart(TransactionConcurrency concurrency, TransactionIsolation isolation);

    /**
     * @param concurrency Concurrency.
     * @param isolation Isolation.
     * @return New transaction.
     */
    public IgniteInternalTx txStartEx(TransactionConcurrency concurrency, TransactionIsolation isolation);

    /**
     * Starts transaction with specified isolation, concurrency, timeout, invalidation flag,
     * and number of participating entries.
     *
     * @param concurrency Concurrency.
     * @param isolation Isolation.
     * @param timeout Timeout.
     * @param txSize Number of entries participating in transaction (may be approximate).
     * @return New transaction.
     * @throws IllegalStateException If transaction is already started by this thread.
     * @throws UnsupportedOperationException If cache is {@link CacheAtomicityMode#ATOMIC}.
     */
    public Transaction txStart(TransactionConcurrency concurrency, TransactionIsolation isolation, long timeout, int txSize);

    /**
     * Gets transaction started by this thread or {@code null} if this thread does
     * not have a transaction.
     *
     * @return Transaction started by this thread or {@code null} if this thread
     * does not have a transaction.
     */
    @Nullable public Transaction tx();

    /**
     * Evicts entry associated with given key from cache. Note, that entry will be evicted
     * only if it's not used (not participating in any locks or transactions).
     * <p>
     * If {@link org.apache.ignite.configuration.CacheConfiguration#isSwapEnabled()} is set to {@code true}, the evicted entry will
     * be swapped to offheap, and then to disk.
     *
     * @param key Key to evict from cache.
     * @return {@code True} if entry could be evicted, {@code false} otherwise.
     */
    public boolean evict(K key);

    /**
     * Attempts to evict all entries associated with keys. Note,
     * that entry will be evicted only if it's not used (not
     * participating in any locks or transactions).
     * <p>
     * If {@link org.apache.ignite.configuration.CacheConfiguration#isSwapEnabled()} is set to {@code true}, the evicted entry will
     * be swapped to offheap, and then to disk.
     *
     * @param keys Keys to evict.
     */
    public void evictAll(@Nullable Collection<? extends K> keys);

    /**
     * Clears all entries from this cache only if the entry is not
     * currently locked or participating in a transaction.
     * <p>
     * If {@link org.apache.ignite.configuration.CacheConfiguration#isSwapEnabled()} is set to {@code true}, the evicted entries will
     * also be cleared from swap.
     * <p>
     * Note that this operation is local as it merely clears
     * entries from local cache. It does not remove entries from
     * remote caches or from underlying persistent storage.
     */
    public void clearLocally();

    /**
     * Clears an entry from this cache and swap storage only if the entry
     * is not currently locked, and is not participating in a transaction.
     * <p>
     * If {@link org.apache.ignite.configuration.CacheConfiguration#isSwapEnabled()} is set to {@code true}, the evicted entries will
     * also be cleared from swap.
     * <p>
     * Note that this operation is local as it merely clears
     * an entry from local cache. It does not remove entries from
     * remote caches or from underlying persistent storage.
     *
     * @param key Key to clearLocally.
     * @return {@code True} if entry was successfully cleared from cache, {@code false}
     * if entry was in use at the time of this method invocation and could not be
     * cleared.
     */
    public boolean clearLocally(K key);

    /**
     * Clears entries from this cache and swap storage only if the entry
     * is not currently locked, and is not participating in a transaction.
     * <p>
     * If {@link org.apache.ignite.configuration.CacheConfiguration#isSwapEnabled()} is set to {@code true}, the evicted entries will
     * also be cleared from swap.
     * <p>
     * Note that this operation is local as it merely clears
     * an entry from local cache. It does not remove entries from
     * remote caches or from underlying persistent storage.
     *
     * @param keys Keys to clearLocally.
     */
    public void clearLocallyAll(Set<? extends K> keys);

    /**
     * Clears key on all nodes that store its data. That is, caches are cleared on remote
     * nodes and local node, as opposed to {@link IgniteInternalCache#clearLocally(Object)} method which only
     * clears local node's cache.
     * <p>
     * Ignite will make the best attempt to clear caches on all nodes. If some caches
     * could not be cleared, then exception will be thrown.
     *
     * @param key Key to clear.
     * @throws IgniteCheckedException If cache could not be cleared on any of the nodes.
     */
    public void clear(K key) throws IgniteCheckedException;

    /**
     * Clears keys on all nodes that store its data. That is, caches are cleared on remote
     * nodes and local node, as opposed to {@link IgniteInternalCache#clearLocallyAll(Set)} method which only
     * clears local node's cache.
     * <p>
     * Ignite will make the best attempt to clear caches on all nodes. If some caches
     * could not be cleared, then exception will be thrown.
     *
     * @param keys Keys to clear.
     * @throws IgniteCheckedException If cache could not be cleared on any of the nodes.
     */
    public void clearAll(Set<? extends K> keys) throws IgniteCheckedException;

    /**
     * Clears cache on all nodes that store its data. That is, caches are cleared on remote
     * nodes and local node, as opposed to {@link IgniteInternalCache#clearLocally()} method which only
     * clears local node's cache.
     * <p>
     * Ignite will make the best attempt to clear caches on all nodes. If some caches
     * could not be cleared, then exception will be thrown.
     *
     * @throws IgniteCheckedException If cache could not be cleared on any of the nodes.
     */
    public void clear() throws IgniteCheckedException;

    /**
     * @return Clear future.
     */
    public IgniteInternalFuture<?> clearAsync();

    /**
     * @param key Key to clear.
     * @return Clear future.
     */
    public IgniteInternalFuture<?> clearAsync(K key);

    /**
     * @param keys Keys to clear.
     * @return Clear future.
     */
    public IgniteInternalFuture<?> clearAsync(Set<? extends K> keys);

    /**
     * Removes given key mapping from cache. If cache previously contained value for the given key,
     * then this value is returned. In case of {@link CacheMode#PARTITIONED} or {@link CacheMode#REPLICATED}
     * caches, the value will be loaded from the primary node, which in its turn may load the value
     * from the disk-based swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method <code>#removex(Object, org.apache.ignite.lang.IgnitePredicate[])</code> should
     * always be used instead of this one to avoid the overhead associated with returning of the
     * previous value.
     * <p>
     * If write-through is enabled, the value will be removed from {@link CacheStore}
     * via <code>CacheStore#remove(Transaction, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key whose mapping is to be removed from cache.
     * @return Previous value associated with specified key, or {@code null}
     * if there was no value for this key.
     * @throws NullPointerException If key is {@code null}.
     * @throws IgniteCheckedException If remove operation failed.
     */
    @Nullable public V getAndRemove(K key) throws IgniteCheckedException;

    /**
     * Asynchronously removes given key mapping from cache. If cache previously contained value for the given key,
     * then this value is returned. In case of {@link CacheMode#PARTITIONED} or {@link CacheMode#REPLICATED}
     * caches, the value will be loaded from the primary node, which in its turn may load the value
     * from the swap storage, and consecutively, if it's not in swap,
     * from the underlying persistent storage. If value has to be loaded from persistent
     * storage, <code>CacheStore#load(Transaction, Object)</code> method will be used.
     * <p>
     * If the returned value is not needed, method <code>#removex(Object, org.apache.ignite.lang.IgnitePredicate[])</code> should
     * always be used instead of this one to avoid the overhead associated with returning of the
     * previous value.
     * <p>
     * If write-through is enabled, the value will be removed from {@link CacheStore}
     * via <code>CacheStore#remove(Transaction, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key whose mapping is to be removed from cache.
     * @return Future for the remove operation.
     * @throws NullPointerException if the key is {@code null}.
     */
    public IgniteInternalFuture<V> getAndRemoveAsync(K key);

    /**
     * Removes given key mapping from cache.
     * <p>
     * This method will return {@code true} if remove did occur, which means that all optionally
     * provided filters have passed and there was something to remove, {@code false} otherwise.
     * <p>
     * If write-through is enabled, the value will be removed from {@link CacheStore}
     * via <code>CacheStore#remove(Transaction, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key whose mapping is to be removed from cache.
     * @return {@code True} if filter passed validation and entry was removed, {@code false} otherwise.
     * Note that if filter is not specified, this method will return {@code true}.
     * @throws NullPointerException if the key is {@code null}.
     * @throws IgniteCheckedException If remove failed.
     */
    public boolean remove(K key) throws IgniteCheckedException;

    /**
     * Asynchronously removes given key mapping from cache.
     * <p>
     * This method will return {@code true} if remove did occur, which means that all optionally
     * provided filters have passed and there was something to remove, {@code false} otherwise.
     * <p>
     * If write-through is enabled, the value will be removed from {@link CacheStore}
     * via <code>CacheStore#remove(Transaction, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key whose mapping is to be removed from cache.
     * @return Future for the remove operation. The future will return {@code true}
     * if optional filters passed validation and remove did occur, {@code false} otherwise.
     * Note that if filter is not specified, this method will return {@code true}.
     * @throws NullPointerException if the key is {@code null}.
     */
    public IgniteInternalFuture<Boolean> removeAsync(K key);

    /**
     * Removes given key mapping from cache if one exists and value is equal to the passed in value.
     * <p>
     * If write-through is enabled, the value will be removed from {@link CacheStore}
     * via <code>CacheStore#remove(Transaction, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key whose mapping is to be removed from cache.
     * @param val Value to match against currently cached value.
     * @return {@code True} if entry was removed and passed in value matched the cached one,
     * {@code false} otherwise.
     * @throws NullPointerException if the key or value is {@code null}.
     * @throws IgniteCheckedException If remove failed.
     */
    public boolean remove(K key, V val) throws IgniteCheckedException;

    /**
     * Asynchronously removes given key mapping from cache if one exists and value is equal to the passed in value.
     * <p>
     * This method will return {@code true} if remove did occur, which means that all optionally
     * provided filters have passed and there was something to remove, {@code false} otherwise.
     * <p>
     * If write-through is enabled, the value will be removed from {@link CacheStore}
     * via <code>CacheStore#remove(Transaction, Object)</code> method.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param key Key whose mapping is to be removed from cache.
     * @param val Value to match against currently cached value.
     * @return Future for the remove operation. The future will return {@code true}
     * if currently cached value will match the passed in one.
     * @throws NullPointerException if the key or value is {@code null}.
     */
    public IgniteInternalFuture<Boolean> removeAsync(K key, V val);

    /**
     * Removes given key mappings from cache.
     * <p>
     * If write-through is enabled, the values will be removed from {@link CacheStore} via {@link IgniteDataStreamer}.
     * <h2 class="header">Transactions</h2>
     * This method is transactional and will enlist the entry into ongoing transaction
     * if there is one.
     *
     * @param keys Keys whose mappings are to be removed from cache.
     * @throws IgniteCheckedException If remove failed.
     */
    public void removeAll(@Nullable Collection<? extends K> keys) throws IgniteCheckedException;

    /**
     * Asynchronously removes given key mappings from cache.
     * <p>
     * If write-through is enabled, the values will be removed from {@link CacheStore} via {@link IgniteDataStreamer}.
* <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param keys Keys whose mappings are to be removed from cache. * @return Future for the remove operation. The future will complete whenever * remove operation completes. */ public IgniteInternalFuture<?> removeAllAsync(@Nullable Collection<? extends K> keys); /** * Removes mappings from cache. * <p> * <b>USE WITH CARE</b> - if your cache has many entries then transaction will quickly become very heavy and slow. * <p> * If write-through is enabled, the values will be removed from {@link CacheStore} via {@link IgniteDataStreamer}. * <h2 class="header">Transactions</h2> * This method is not transactional. * * @throws IgniteCheckedException If remove failed. */ public void removeAll() throws IgniteCheckedException; /** * @return Remove future. */ public IgniteInternalFuture<?> removeAllAsync(); /** * Synchronously acquires lock on a cached object with given * key only if the passed in filter (if any) passes. This method * together with filter check will be executed as one atomic operation. * <h2 class="header">Transactions</h2> * Locks are not transactional and should not be used from within transactions. If you do * need explicit locking within transaction, then you should use * {@link TransactionConcurrency#PESSIMISTIC} concurrency control for transaction * which will acquire explicit locks for relevant cache operations. * * @param key Key to lock. * @param timeout Timeout in milliseconds to wait for lock to be acquired * ({@code '0'} for no expiration), {@code -1} for immediate failure if * lock cannot be acquired immediately). * @return {@code True} if all filters passed and lock was acquired, * {@code false} otherwise. * @throws IgniteCheckedException If lock acquisition resulted in error. 
*/ public boolean lock(K key, long timeout) throws IgniteCheckedException; /** * Asynchronously acquires lock on a cached object with given * key only if the passed in filter (if any) passes. This method * together with filter check will be executed as one atomic operation. * <h2 class="header">Transactions</h2> * Locks are not transactional and should not be used from within transactions. If you do * need explicit locking within transaction, then you should use * {@link TransactionConcurrency#PESSIMISTIC} concurrency control for transaction * which will acquire explicit locks for relevant cache operations. * * @param key Key to lock. * @param timeout Timeout in milliseconds to wait for lock to be acquired * ({@code '0'} for no expiration, {@code -1} for immediate failure if * lock cannot be acquired immediately). * @return Future for the lock operation. The future will return {@code true} * whenever all filters pass and locks are acquired before timeout is expired, * {@code false} otherwise. */ public IgniteInternalFuture<Boolean> lockAsync(K key, long timeout); /** * All or nothing synchronous lock for passed in keys. This method * together with filter check will be executed as one atomic operation. * If at least one filter validation failed, no locks will be acquired. * <h2 class="header">Transactions</h2> * Locks are not transactional and should not be used from within transactions. If you do * need explicit locking within transaction, then you should use * {@link TransactionConcurrency#PESSIMISTIC} concurrency control for transaction * which will acquire explicit locks for relevant cache operations. * * @param keys Keys to lock. * @param timeout Timeout in milliseconds to wait for lock to be acquired * ({@code '0'} for no expiration). * @return {@code True} if all filters passed and locks were acquired before * timeout has expired, {@code false} otherwise. * @throws IgniteCheckedException If lock acquisition resulted in error. 
*/ public boolean lockAll(@Nullable Collection<? extends K> keys, long timeout) throws IgniteCheckedException; /** * All or nothing synchronous lock for passed in keys. This method * together with filter check will be executed as one atomic operation. * If at least one filter validation failed, no locks will be acquired. * <h2 class="header">Transactions</h2> * Locks are not transactional and should not be used from within transactions. If you do * need explicit locking within transaction, then you should use * {@link TransactionConcurrency#PESSIMISTIC} concurrency control for transaction * which will acquire explicit locks for relevant cache operations. * * @param keys Keys to lock. * @param timeout Timeout in milliseconds to wait for lock to be acquired * ({@code '0'} for no expiration). * @return Future for the collection of locks. The future will return * {@code true} if all filters passed and locks were acquired before * timeout has expired, {@code false} otherwise. */ public IgniteInternalFuture<Boolean> lockAllAsync(@Nullable Collection<? extends K> keys, long timeout); /** * Unlocks given key only if current thread owns the lock. If optional filter * will not pass, then unlock will not happen. If the key being unlocked was * never locked by current thread, then this method will do nothing. * <h2 class="header">Transactions</h2> * Locks are not transactional and should not be used from within transactions. If you do * need explicit locking within transaction, then you should use * {@link TransactionConcurrency#PESSIMISTIC} concurrency control for transaction * which will acquire explicit locks for relevant cache operations. * * @param key Key to unlock. * @throws IgniteCheckedException If unlock execution resulted in error. */ public void unlock(K key) throws IgniteCheckedException; /** * Unlocks given keys only if current thread owns the locks. Only the keys * that have been locked by calling thread and pass through the filter (if any) * will be unlocked. 
If none of the key locks is owned by current thread, then * this method will do nothing. * <h2 class="header">Transactions</h2> * Locks are not transactional and should not be used from within transactions. If you do * need explicit locking within transaction, then you should use * {@link TransactionConcurrency#PESSIMISTIC} concurrency control for transaction * which will acquire explicit locks for relevant cache operations. * * @param keys Keys to unlock. * @throws IgniteCheckedException If unlock execution resulted in error. */ public void unlockAll(@Nullable Collection<? extends K> keys) throws IgniteCheckedException; /** * Checks if any node owns a lock for this key. * <p> * This is a local in-VM operation and does not involve any network trips * or access to persistent storage in any way. * * @param key Key to check. * @return {@code True} if lock is owned by some node. */ public boolean isLocked(K key); /** * Checks if current thread owns a lock on this key. * <p> * This is a local in-VM operation and does not involve any network trips * or access to persistent storage in any way. * * @param key Key to check. * @return {@code True} if key is locked by current thread. */ public boolean isLockedByThread(K key); /** * Gets the number of all entries cached on this node. This method will return the count of * all cache entries and has O(1) complexity on base {@link IgniteInternalCache}. It is essentially the * size of cache key set and is semantically identical to {{@code Cache.keySet().size()}. * <p> * NOTE: this operation is not distributed and returns only the number of entries cached on this node. * * @return Size of cache on this node. */ public int size(); /** * @param peekModes Peek modes. * @return Local cache size. * @throws IgniteCheckedException If failed. */ public int localSize(CachePeekMode[] peekModes) throws IgniteCheckedException; /** * @param peekModes Peek modes. * @return Global cache size. * @throws IgniteCheckedException If failed. 
*/ public int size(CachePeekMode[] peekModes) throws IgniteCheckedException; /** * @param peekModes Peek modes. * @return Future. */ public IgniteInternalFuture<Integer> sizeAsync(CachePeekMode[] peekModes); /** * Gets size of near cache key set. This method will return count of all entries in near * cache and has O(1) complexity on base cache projection. * <p> * Note that for {@code LOCAL} non-distributed caches this method will always return {@code 0} * * @return Size of near cache key set or {@code 0} if cache is not {@link CacheMode#PARTITIONED}. */ public int nearSize(); /** * Gets the number of all primary entries cached on this node. For {@link CacheMode#LOCAL} non-distributed * cache mode, this method is identical to {@link #size()}. * <p> * For {@link CacheMode#PARTITIONED} and {@link CacheMode#REPLICATED} modes, this method will * return number of primary entries cached on this node (excluding any backups). The complexity of * this method is O(P), where P is the total number of partitions. * <p> * NOTE: this operation is not distributed and returns only the number of primary entries cached on this node. * * @return Number of primary entries in cache. */ public int primarySize(); /** * This method unswaps cache entries by given keys, if any, from swap storage * into memory. * <h2 class="header">Transactions</h2> * This method is not transactional. * * @param keys Keys to promote entries for. * @throws IgniteCheckedException If promote failed. */ public void promoteAll(@Nullable Collection<? extends K> keys) throws IgniteCheckedException; /** * Gets configuration bean for this cache. * * @return Configuration bean for this cache. */ public CacheConfiguration configuration(); /** * Gets affinity service to provide information about data partitioning * and distribution. * * @return Cache data affinity service. */ public Affinity<K> affinity(); /** * Gets metrics (statistics) for this cache. * * @return Cache metrics. 
*/ public CacheMetrics metrics(); /** * Gets metrics (statistics) for this cache. * * @return Cache metrics. */ public CacheMetricsMXBean mxBean(); /** * Gets size (in bytes) of all entries swapped to disk. * * @return Size (in bytes) of all entries swapped to disk. * @throws IgniteCheckedException In case of error. */ public long overflowSize() throws IgniteCheckedException; /** * Gets number of cache entries stored in off-heap memory. * * @return Number of cache entries stored in off-heap memory. */ public long offHeapEntriesCount(); /** * Gets memory size allocated in off-heap. * * @return Allocated memory size. */ public long offHeapAllocatedSize(); /** * Gets size in bytes for swap space. * * @return Size in bytes. * @throws IgniteCheckedException If failed. */ public long swapSize() throws IgniteCheckedException; /** * Gets number of swap entries (keys). * * @return Number of entries stored in swap. * @throws IgniteCheckedException If failed. */ public long swapKeys() throws IgniteCheckedException; /** * Forces this cache node to re-balance its partitions. This method is usually used when * {@link CacheConfiguration#getRebalanceDelay()} configuration parameter has non-zero value. * When many nodes are started or stopped almost concurrently, it is more efficient to delay * rebalancing until the node topology is stable to make sure that no redundant re-partitioning * happens. * <p> * In case of{@link CacheMode#PARTITIONED} caches, for better efficiency user should * usually make sure that new nodes get placed on the same place of consistent hash ring as * the left nodes, and that nodes are restarted before * {@link CacheConfiguration#getRebalanceDelay() rebalanceDelay} expires. To place nodes * on the same place in consistent hash ring, use * {@link IgniteConfiguration#setConsistentId(Serializable)} to make sure that * a node maps to the same hash ID if re-started. 
* <p> * See {@link org.apache.ignite.configuration.CacheConfiguration#getRebalanceDelay()} for more information on how to configure * rebalance re-partition delay. * <p> * @return Future that will be completed when rebalancing is finished. */ public IgniteInternalFuture<?> rebalance(); /** * Creates projection for specified subject ID. * * @param subjId Client ID. * @return Internal projection. */ IgniteInternalCache<K, V> forSubjectId(UUID subjId); /** * Store DR data. * * @param drMap DR map. * @throws IgniteCheckedException If put operation failed. */ public void putAllConflict(Map<KeyCacheObject, GridCacheDrInfo> drMap) throws IgniteCheckedException; /** * Store DR data asynchronously. * * @param drMap DR map. * @return Future. * @throws IgniteCheckedException If put operation failed. */ public IgniteInternalFuture<?> putAllConflictAsync(Map<KeyCacheObject, GridCacheDrInfo> drMap) throws IgniteCheckedException; /** * Removes DR data. * * @param drMap DR map. * @throws IgniteCheckedException If remove failed. */ public void removeAllConflict(Map<KeyCacheObject, GridCacheVersion> drMap) throws IgniteCheckedException; /** * Removes DR data asynchronously. * * @param drMap DR map. * @return Future. * @throws IgniteCheckedException If remove failed. */ public IgniteInternalFuture<?> removeAllConflictAsync(Map<KeyCacheObject, GridCacheVersion> drMap) throws IgniteCheckedException; /** * Asynchronously stores given key-value pair in cache only if only if the previous value is equal to the * {@code 'oldVal'} passed in. * <p> * This method will return {@code true} if value is stored in cache and {@code false} otherwise. * <p> * If write-through is enabled, the stored value will be persisted to {@link CacheStore} * via {@link CacheStore#write(javax.cache.Cache.Entry)} method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param key Key to store in cache. 
* @param oldVal Old value to match. * @param newVal Value to be associated with the given key. * @return Future for the replace operation. The future will return object containing actual old value and success * flag. * @throws NullPointerException If either key or value are {@code null}. */ public IgniteInternalFuture<GridCacheReturn> replacexAsync(K key, V oldVal, V newVal); /** * Stores given key-value pair in cache only if only if the previous value is equal to the * {@code 'oldVal'} passed in. * <p> * This method will return {@code true} if value is stored in cache and {@code false} otherwise. * <p> * If write-through is enabled, the stored value will be persisted to {@link CacheStore} * via {@link CacheStore#write(javax.cache.Cache.Entry)} method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param key Key to store in cache. * @param oldVal Old value to match. * @param newVal Value to be associated with the given key. * @return Object containing actual old value and success flag. * @throws NullPointerException If either key or value are {@code null}. * @throws IgniteCheckedException If replace operation failed. */ public GridCacheReturn replacex(K key, V oldVal, V newVal) throws IgniteCheckedException; /** * Removes given key mapping from cache if one exists and value is equal to the passed in value. * <p> * If write-through is enabled, the value will be removed from {@link CacheStore} * via {@link CacheStore#delete(Object)} method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param key Key whose mapping is to be removed from cache. * @param val Value to match against currently cached value. * @return Object containing actual old value and success flag. * @throws NullPointerException if the key or value is {@code null}. 
* @throws IgniteCheckedException If remove failed. */ public GridCacheReturn removex(K key, V val) throws IgniteCheckedException; /** * Asynchronously removes given key mapping from cache if one exists and value is equal to the passed in value. * <p> * This method will return {@code true} if remove did occur, which means that all optionally * provided filters have passed and there was something to remove, {@code false} otherwise. * <p> * If write-through is enabled, the value will be removed from {@link CacheStore} * via {@link CacheStore#delete(Object)} method. * <h2 class="header">Transactions</h2> * This method is transactional and will enlist the entry into ongoing transaction * if there is one. * * @param key Key whose mapping is to be removed from cache. * @param val Value to match against currently cached value. * @return Future for the remove operation. The future will return object containing actual old value and success * flag. * @throws NullPointerException if the key or value is {@code null}. */ public IgniteInternalFuture<GridCacheReturn> removexAsync(K key, V val); /** * Gets value from cache. Will go to primary node even if this is a backup. * * @param key Key to get value for. * @return Value. * @throws IgniteCheckedException If failed. */ @Nullable public V getForcePrimary(K key) throws IgniteCheckedException; /** * Asynchronously gets value from cache. Will go to primary node even if this is a backup. * * @param key Key to get value for. * @return Future with result. */ public IgniteInternalFuture<V> getForcePrimaryAsync(K key); /** * Gets values from cache. Will bypass started transaction, if any, i.e. will not enlist entries * and will not lock any keys if pessimistic transaction is started by thread. * * @param keys Keys to get values for. * @return Value. * @throws IgniteCheckedException If failed. */ public Map<K, V> getAllOutTx(Set<? extends K> keys) throws IgniteCheckedException; /** * Gets values from cache. 
Will bypass started transaction, if any, i.e. will not enlist entries * and will not lock any keys if pessimistic transaction is started by thread. * * @param keys Keys to get values for. * @return Future for getAllOutTx operation. */ public IgniteInternalFuture<Map<K, V>> getAllOutTxAsync(Set<? extends K> keys); /** * Checks whether this cache is IGFS data cache. * * @return {@code True} in case this cache is IGFS data cache. */ public boolean isIgfsDataCache(); /** * Get current amount of used IGFS space in bytes. * * @return Amount of used IGFS space in bytes. */ public long igfsDataSpaceUsed(); /** * Get maximum space available for IGFS. * * @return Amount of space available for IGFS in bytes. */ public long igfsDataSpaceMax(); /** * Checks whether this cache is Mongo data cache. * * @return {@code True} if this cache is mongo data cache. */ public boolean isMongoDataCache(); /** * Checks whether this cache is Mongo meta cache. * * @return {@code True} if this cache is mongo meta cache. */ public boolean isMongoMetaCache(); /** * Gets entry set containing internal entries. * * @param filter Filter. * @return Entry set. */ public Set<Cache.Entry<K, V>> entrySetx(CacheEntryPredicate... filter); /** * @return {@link javax.cache.expiry.ExpiryPolicy} associated with this projection. */ @Nullable public ExpiryPolicy expiry(); /** * @param plc {@link ExpiryPolicy} to associate with this projection. * @return New projection based on this one, but with the specified expiry policy. */ public IgniteInternalCache<K, V> withExpiryPolicy(ExpiryPolicy plc); /** * @param key Key. * @param entryProcessor Entry processor. * @param args Arguments. * @return Invoke result. * @throws IgniteCheckedException If failed. */ @Nullable public <T> EntryProcessorResult<T> invoke(K key, EntryProcessor<K, V, T> entryProcessor, Object... args) throws IgniteCheckedException; /** * @param key Key. * @param entryProcessor Entry processor. * @param args Arguments. * @return Future. 
*/ public <T> IgniteInternalFuture<EntryProcessorResult<T>> invokeAsync(K key, EntryProcessor<K, V, T> entryProcessor, Object... args); /** * @param keys Keys. * @param entryProcessor Entry processor. * @param args Arguments. * @return Invoke results. * @throws IgniteCheckedException If failed. */ public <T> Map<K, EntryProcessorResult<T>> invokeAll(Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... args) throws IgniteCheckedException; /** * @param keys Keys. * @param entryProcessor Entry processor. * @param args Arguments. * @return Future. */ public <T> IgniteInternalFuture<Map<K, EntryProcessorResult<T>>> invokeAllAsync(Set<? extends K> keys, EntryProcessor<K, V, T> entryProcessor, Object... args); /** * @param map Map containing keys and entry processors to be applied to values. * @param args Arguments. * @return Invoke results. * @throws IgniteCheckedException If failed. */ public <T> Map<K, EntryProcessorResult<T>> invokeAll( Map<? extends K, ? extends EntryProcessor<K, V, T>> map, Object... args) throws IgniteCheckedException; /** * @param map Map containing keys and entry processors to be applied to values. * @param args Arguments. * @return Future. */ public <T> IgniteInternalFuture<Map<K, EntryProcessorResult<T>>> invokeAllAsync( Map<? extends K, ? extends EntryProcessor<K, V, T>> map, Object... args); /** * @return Context. */ public GridCacheContext<K, V> context(); /** * Delegates to {@link CacheStore#loadCache(org.apache.ignite.lang.IgniteBiInClosure,Object...)} method * to load state from the underlying persistent storage. The loaded values * will then be given to the optionally passed in predicate, and, if the predicate returns * {@code true}, will be stored in cache. If predicate is {@code null}, then * all loaded values will be stored in cache. * <p> * Note that this method does not receive keys as a parameter, so it is up to * {@link CacheStore} implementation to provide all the data to be loaded. 
* <p> * This method is not transactional and may end up loading a stale value into * cache if another thread has updated the value immediately after it has been * loaded. It is mostly useful when pre-loading the cache from underlying * data store before start, or for read-only caches. * * @param p Optional predicate (may be {@code null}). If provided, will be used to * filter values to be put into cache. * @param args Optional user arguments to be passed into * {@link CacheStore#loadCache(org.apache.ignite.lang.IgniteBiInClosure, Object...)} method. * @throws IgniteCheckedException If loading failed. */ public void localLoadCache(@Nullable IgniteBiPredicate<K, V> p, @Nullable Object... args) throws IgniteCheckedException; /** * Asynchronously delegates to {@link CacheStore#loadCache(org.apache.ignite.lang.IgniteBiInClosure, Object...)} method * to reload state from the underlying persistent storage. The reloaded values * will then be given to the optionally passed in predicate, and if the predicate returns * {@code true}, will be stored in cache. If predicate is {@code null}, then * all reloaded values will be stored in cache. * <p> * Note that this method does not receive keys as a parameter, so it is up to * {@link CacheStore} implementation to provide all the data to be loaded. * <p> * This method is not transactional and may end up loading a stale value into * cache if another thread has updated the value immediately after it has been * loaded. It is mostly useful when pre-loading the cache from underlying * data store before start, or for read-only caches. * * @param p Optional predicate (may be {@code null}). If provided, will be used to * filter values to be put into cache. * @param args Optional user arguments to be passed into * {@link CacheStore#loadCache(org.apache.ignite.lang.IgniteBiInClosure,Object...)} method. * @return Future to be completed whenever loading completes. 
*/ public IgniteInternalFuture<?> localLoadCacheAsync(@Nullable IgniteBiPredicate<K, V> p, @Nullable Object... args); }
apache-2.0
raviagarwal7/buck
src/com/facebook/buck/rules/CachingBuildEngineDelegate.java
1264
/*
 * Copyright 2016-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.rules;

import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.util.cache.FileHashCache;
import com.google.common.cache.LoadingCache;

/**
 * Functionality used in the {@link CachingBuildEngine} when running a distributed build.
 */
public interface CachingBuildEngineDelegate {

  /**
   * Creates the cache used by the build engine to look up the {@link FileHashCache} for a given
   * {@link ProjectFilesystem}.
   *
   * @return loading cache keyed by filesystem, producing that filesystem's file hash cache.
   */
  LoadingCache<ProjectFilesystem, FileHashCache> createFileHashCacheLoader();

  /**
   * Called right before the rule is going to be built. This is when direct inputs to the rule would
   * get materialized on disk.
   *
   * @param buildRule rule that is about to be built.
   */
  void onRuleAboutToBeBuilt(BuildRule buildRule);
}
apache-2.0
akuznetsov-gridgain/ignite
modules/core/src/main/java/org/apache/ignite/spi/discovery/tcp/messages/TcpDiscoveryDuplicateIdMessage.java
1924
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.spi.discovery.tcp.messages;

import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.spi.discovery.tcp.internal.*;

import java.util.*;

/**
 * Message telling a joining node that the new topology already contains a
 * different node with the same ID.
 */
public class TcpDiscoveryDuplicateIdMessage extends TcpDiscoveryAbstractMessage {
    /** Serial version UID. */
    private static final long serialVersionUID = 0L;

    /** Node with duplicate ID (immutable once constructed). */
    private final TcpDiscoveryNode node;

    /**
     * Constructor.
     *
     * @param creatorNodeId Creator node ID.
     * @param node Node with same ID. Must not be {@code null} (enforced by assertion only,
     *      so callers are expected to validate).
     */
    public TcpDiscoveryDuplicateIdMessage(UUID creatorNodeId, TcpDiscoveryNode node) {
        super(creatorNodeId);

        assert node != null;

        this.node = node;
    }

    /**
     * @return Node with duplicate ID.
     */
    public TcpDiscoveryNode node() {
        return node;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        // Delegates to Ignite's reflective toString helper, appending the parent's string form.
        return S.toString(TcpDiscoveryDuplicateIdMessage.class, this, "super", super.toString());
    }
}
apache-2.0
akuznetsov-gridgain/ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/IgniteCacheInvokeAbstractTest.java
17910
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache;

import org.apache.ignite.*;
import org.apache.ignite.cache.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.util.typedef.internal.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.transactions.*;
import org.jetbrains.annotations.*;

import javax.cache.processor.*;
import java.util.*;
import java.util.concurrent.*;

import static org.apache.ignite.cache.CacheAtomicityMode.*;
import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.transactions.TransactionConcurrency.*;
import static org.apache.ignite.transactions.TransactionIsolation.*;

/**
 * Abstract test for {@code IgniteCache.invoke(...)} / {@code invokeAll(...)} operations,
 * exercised both without an explicit transaction and, for transactional caches, under
 * pessimistic and optimistic concurrency.
 */
public abstract class IgniteCacheInvokeAbstractTest extends IgniteCacheAbstractTest {
    /** Last key generated so far. NOTE(review): appears to be a key-generation counter; confirm against the methods using it later in this file. */
    private Integer lastKey = 0;

    /**
     * Runs the single-key invoke scenario in all applicable transaction modes.
     *
     * @throws Exception If failed.
     */
    public void testInvoke() throws Exception {
        IgniteCache<Integer, Integer> cache = jcache();

        // Implicit (no explicit transaction) mode is always exercised.
        invoke(cache, null);

        // Explicit transactions are only applicable to transactional caches.
        if (atomicityMode() == TRANSACTIONAL) {
            invoke(cache, PESSIMISTIC);

            invoke(cache, OPTIMISTIC);
        }
    }

    /**
     * @param cache Cache.
     * @param txMode Not null transaction concurrency mode if explicit transaction should be started.
* @throws Exception If failed. */ private void invoke(final IgniteCache<Integer, Integer> cache, @Nullable TransactionConcurrency txMode) throws Exception { IncrementProcessor incProcessor = new IncrementProcessor(); for (final Integer key : keys()) { log.info("Test invoke [key=" + key + ", txMode=" + txMode + ']'); cache.remove(key); Transaction tx = startTx(txMode); Integer res = cache.invoke(key, incProcessor); if (tx != null) tx.commit(); assertEquals(-1, (int)res); checkValue(key, 1); tx = startTx(txMode); res = cache.invoke(key, incProcessor); if (tx != null) tx.commit(); assertEquals(1, (int)res); checkValue(key, 2); tx = startTx(txMode); res = cache.invoke(key, incProcessor); if (tx != null) tx.commit(); assertEquals(2, (int)res); checkValue(key, 3); tx = startTx(txMode); res = cache.invoke(key, new ArgumentsSumProcessor(), 10, 20, 30); if (tx != null) tx.commit(); assertEquals(3, (int)res); checkValue(key, 63); tx = startTx(txMode); String strRes = cache.invoke(key, new ToStringProcessor()); if (tx != null) tx.commit(); assertEquals("63", strRes); checkValue(key, 63); tx = startTx(txMode); GridTestUtils.assertThrows(log, new Callable<Void>() { @Override public Void call() throws Exception { cache.invoke(key, new ExceptionProcessor(63)); return null; } }, EntryProcessorException.class, "Test processor exception."); if (tx != null) tx.commit(); checkValue(key, 63); IgniteCache<Integer, Integer> asyncCache = cache.withAsync(); assertTrue(asyncCache.isAsync()); assertNull(asyncCache.invoke(key, incProcessor)); IgniteFuture<Integer> fut = asyncCache.future(); assertNotNull(fut); assertEquals(63, (int)fut.get()); checkValue(key, 64); tx = startTx(txMode); assertNull(cache.invoke(key, new RemoveProcessor(64))); if (tx != null) tx.commit(); checkValue(key, null); } } /** * @throws Exception If failed. 
*/ public void testInvokeAll() throws Exception { IgniteCache<Integer, Integer> cache = jcache(); invokeAll(cache, null); if (atomicityMode() == TRANSACTIONAL) { invokeAll(cache, PESSIMISTIC); invokeAll(cache, OPTIMISTIC); } } /** * @param cache Cache. * @param txMode Not null transaction concurrency mode if explicit transaction should be started. * @throws Exception If failed. */ private void invokeAll(IgniteCache<Integer, Integer> cache, @Nullable TransactionConcurrency txMode) throws Exception { invokeAll(cache, new HashSet<>(primaryKeys(cache, 3, 0)), txMode); if (gridCount() > 1) { invokeAll(cache, new HashSet<>(backupKeys(cache, 3, 0)), txMode); invokeAll(cache, new HashSet<>(nearKeys(cache, 3, 0)), txMode); Set<Integer> keys = new HashSet<>(); keys.addAll(primaryKeys(jcache(0), 3, 0)); keys.addAll(primaryKeys(jcache(1), 3, 0)); keys.addAll(primaryKeys(jcache(2), 3, 0)); invokeAll(cache, keys, txMode); } Set<Integer> keys = new HashSet<>(); for (int i = 0; i < 1000; i++) keys.add(i); invokeAll(cache, keys, txMode); } /** * @param cache Cache. * @param keys Keys. * @param txMode Not null transaction concurrency mode if explicit transaction should be started. * @throws Exception If failed. 
*/ private void invokeAll(IgniteCache<Integer, Integer> cache, Set<Integer> keys, @Nullable TransactionConcurrency txMode) throws Exception { cache.removeAll(keys); log.info("Test invokeAll [keys=" + keys + ", txMode=" + txMode + ']'); IncrementProcessor incProcessor = new IncrementProcessor(); Transaction tx = startTx(txMode); Map<Integer, EntryProcessorResult<Integer>> resMap = cache.invokeAll(keys, incProcessor); if (tx != null) tx.commit(); Map<Object, Object> exp = new HashMap<>(); for (Integer key : keys) exp.put(key, -1); checkResult(resMap, exp); for (Integer key : keys) checkValue(key, 1); tx = startTx(txMode); resMap = cache.invokeAll(keys, incProcessor); if (tx != null) tx.commit(); exp = new HashMap<>(); for (Integer key : keys) exp.put(key, 1); checkResult(resMap, exp); for (Integer key : keys) checkValue(key, 2); tx = startTx(txMode); resMap = cache.invokeAll(keys, new ArgumentsSumProcessor(), 10, 20, 30); if (tx != null) tx.commit(); for (Integer key : keys) exp.put(key, 3); checkResult(resMap, exp); for (Integer key : keys) checkValue(key, 62); tx = startTx(txMode); resMap = cache.invokeAll(keys, new ExceptionProcessor(null)); if (tx != null) tx.commit(); for (Integer key : keys) { final EntryProcessorResult<Integer> res = resMap.get(key); assertNotNull("No result for " + key); GridTestUtils.assertThrows(log, new Callable<Void>() { @Override public Void call() throws Exception { res.get(); return null; } }, EntryProcessorException.class, "Test processor exception."); } for (Integer key : keys) checkValue(key, 62); tx = startTx(txMode); Map<Integer, EntryProcessor<Integer, Integer, Integer>> invokeMap = new HashMap<>(); for (Integer key : keys) { switch (key % 4) { case 0: invokeMap.put(key, new IncrementProcessor()); break; case 1: invokeMap.put(key, new RemoveProcessor(62)); break; case 2: invokeMap.put(key, new ArgumentsSumProcessor()); break; case 3: invokeMap.put(key, new ExceptionProcessor(62)); break; default: fail(); } } resMap = 
cache.invokeAll(invokeMap, 10, 20, 30); if (tx != null) tx.commit(); for (Integer key : keys) { final EntryProcessorResult<Integer> res = resMap.get(key); switch (key % 4) { case 0: { assertNotNull("No result for " + key, res); assertEquals(62, (int)res.get()); checkValue(key, 63); break; } case 1: { assertNull(res); checkValue(key, null); break; } case 2: { assertNotNull("No result for " + key, res); assertEquals(3, (int)res.get()); checkValue(key, 122); break; } case 3: { assertNotNull("No result for " + key, res); GridTestUtils.assertThrows(log, new Callable<Void>() { @Override public Void call() throws Exception { res.get(); return null; } }, EntryProcessorException.class, "Test processor exception."); checkValue(key, 62); break; } } } cache.invokeAll(keys, new IncrementProcessor()); tx = startTx(txMode); resMap = cache.invokeAll(keys, new RemoveProcessor(null)); if (tx != null) tx.commit(); assertEquals("Unexpected results: " + resMap, 0, resMap.size()); for (Integer key : keys) checkValue(key, null); IgniteCache<Integer, Integer> asyncCache = cache.withAsync(); assertTrue(asyncCache.isAsync()); assertNull(asyncCache.invokeAll(keys, new IncrementProcessor())); IgniteFuture<Map<Integer, EntryProcessorResult<Integer>>> fut = asyncCache.future(); resMap = fut.get(); exp = new HashMap<>(); for (Integer key : keys) exp.put(key, -1); checkResult(resMap, exp); for (Integer key : keys) checkValue(key, 1); invokeMap = new HashMap<>(); for (Integer key : keys) invokeMap.put(key, incProcessor); assertNull(asyncCache.invokeAll(invokeMap)); fut = asyncCache.future(); resMap = fut.get(); for (Integer key : keys) exp.put(key, 1); checkResult(resMap, exp); for (Integer key : keys) checkValue(key, 2); } /** * @param resMap Result map. * @param exp Expected results. 
*/ private void checkResult(Map<Integer, EntryProcessorResult<Integer>> resMap, Map<Object, Object> exp) { assertNotNull(resMap); assertEquals(exp.size(), resMap.size()); for (Map.Entry<Object, Object> expVal : exp.entrySet()) { EntryProcessorResult<Integer> res = resMap.get(expVal.getKey()); assertNotNull("No result for " + expVal.getKey()); assertEquals("Unexpected result for " + expVal.getKey(), res.get(), expVal.getValue()); } } /** * @param key Key. * @param expVal Expected value. */ protected void checkValue(Object key, @Nullable Object expVal) { if (expVal != null) { for (int i = 0; i < gridCount(); i++) { IgniteCache<Object, Object> cache = jcache(i); Object val = cache.localPeek(key, CachePeekMode.ONHEAP); if (val == null) assertFalse(ignite(0).affinity(null).isPrimaryOrBackup(ignite(i).cluster().localNode(), key)); else assertEquals("Unexpected value for grid " + i, expVal, val); } } else { for (int i = 0; i < gridCount(); i++) { IgniteCache<Object, Object> cache = jcache(i); assertNull("Unexpected non null value for grid " + i, cache.localPeek(key, CachePeekMode.ONHEAP)); } } } /** * @return Test keys. * @throws Exception If failed. */ protected Collection<Integer> keys() throws Exception { IgniteCache<Integer, Object> cache = jcache(0); ArrayList<Integer> keys = new ArrayList<>(); keys.add(primaryKeys(cache, 1, lastKey).get(0)); if (gridCount() > 1) { keys.add(backupKeys(cache, 1, lastKey).get(0)); if (cache.getConfiguration(CacheConfiguration.class).getCacheMode() != REPLICATED) keys.add(nearKeys(cache, 1, lastKey).get(0)); } lastKey = Collections.max(keys) + 1; return keys; } /** * @param txMode Transaction concurrency mode. * @return Transaction. */ @Nullable private Transaction startTx(@Nullable TransactionConcurrency txMode) { return txMode == null ? 
null : ignite(0).transactions().txStart(txMode, REPEATABLE_READ); } /** * */ private static class ArgumentsSumProcessor implements EntryProcessor<Integer, Integer, Integer> { /** {@inheritDoc} */ @Override public Integer process(MutableEntry<Integer, Integer> e, Object... args) throws EntryProcessorException { assertEquals(3, args.length); assertEquals(10, args[0]); assertEquals(20, args[1]); assertEquals(30, args[2]); assertTrue(e.exists()); Integer res = e.getValue(); for (Object arg : args) res += (Integer)arg; e.setValue(res); return args.length; } } /** * */ protected static class ToStringProcessor implements EntryProcessor<Integer, Integer, String> { /** {@inheritDoc} */ @Override public String process(MutableEntry<Integer, Integer> e, Object... arguments) throws EntryProcessorException { return String.valueOf(e.getValue()); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(ToStringProcessor.class, this); } } /** * */ protected static class IncrementProcessor implements EntryProcessor<Integer, Integer, Integer> { /** {@inheritDoc} */ @Override public Integer process(MutableEntry<Integer, Integer> e, Object... arguments) throws EntryProcessorException { Ignite ignite = e.unwrap(Ignite.class); assertNotNull(ignite); if (e.exists()) { Integer val = e.getValue(); assertNotNull(val); e.setValue(val + 1); assertTrue(e.exists()); assertEquals(val + 1, (int) e.getValue()); return val; } else { e.setValue(1); return -1; } } /** {@inheritDoc} */ @Override public String toString() { return S.toString(IncrementProcessor.class, this); } } /** * */ private static class RemoveProcessor implements EntryProcessor<Integer, Integer, Integer> { /** */ private Integer expVal; /** * @param expVal Expected value. */ RemoveProcessor(@Nullable Integer expVal) { this.expVal = expVal; } /** {@inheritDoc} */ @Override public Integer process(MutableEntry<Integer, Integer> e, Object... 
arguments) throws EntryProcessorException { assertTrue(e.exists()); if (expVal != null) assertEquals(expVal, e.getValue()); e.remove(); assertFalse(e.exists()); return null; } /** {@inheritDoc} */ @Override public String toString() { return S.toString(RemoveProcessor.class, this); } } /** * */ private static class ExceptionProcessor implements EntryProcessor<Integer, Integer, Integer> { /** */ private Integer expVal; /** * @param expVal Expected value. */ ExceptionProcessor(@Nullable Integer expVal) { this.expVal = expVal; } /** {@inheritDoc} */ @Override public Integer process(MutableEntry<Integer, Integer> e, Object... arguments) throws EntryProcessorException { assertTrue(e.exists()); if (expVal != null) assertEquals(expVal, e.getValue()); throw new EntryProcessorException("Test processor exception."); } /** {@inheritDoc} */ @Override public String toString() { return S.toString(ExceptionProcessor.class, this); } } }
apache-2.0
ern/elasticsearch
x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/MlConfigMigratorIT.java
24602
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ package org.elasticsearch.xpack.ml.integration; import com.carrotsearch.hppc.cursors.ObjectCursor; import org.elasticsearch.Version; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateUpdateTask; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.Index; import 
org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.TestIndexNameExpressionResolver; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.xpack.core.ml.MlConfigIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.ml.MlConfigMigrationEligibilityCheck; import org.elasticsearch.xpack.ml.MlConfigMigrator; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; import org.elasticsearch.xpack.ml.datafeed.persistence.DatafeedConfigProvider; import org.elasticsearch.xpack.ml.job.persistence.JobConfigProvider; import org.junit.Before; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Set; import java.util.SortedMap; import java.util.TreeMap; import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class MlConfigMigratorIT extends MlSingleNodeTestCase { private final IndexNameExpressionResolver expressionResolver = TestIndexNameExpressionResolver.newInstance(); private ClusterService clusterService; @Before public void setUpTests() { clusterService = mock(ClusterService.class); ClusterSettings clusterSettings = new 
ClusterSettings(nodeSettings(), new HashSet<>(Collections.singletonList( MlConfigMigrationEligibilityCheck.ENABLE_CONFIG_MIGRATION))); Metadata metadata = mock(Metadata.class); SortedMap<String, IndexAbstraction> indicesMap = new TreeMap<>(); when(metadata.getIndicesLookup()).thenReturn(indicesMap); ClusterState clusterState = mock(ClusterState.class); when(clusterState.getMetadata()).thenReturn(metadata); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); when(clusterService.state()).thenReturn(clusterState); } public void testWriteConfigToIndex() throws InterruptedException { final String indexJobId = "job-already-migrated"; // Add a job to the index JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); Job indexJob = buildJobBuilder(indexJobId).build(); // Same as index job but has extra fields in its custom settings // which will be used to check the config was overwritten Job migratedJob = MlConfigMigrator.updateJobForMigration(indexJob); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<IndexResponse> indexResponseHolder = new AtomicReference<>(); // put a job representing a previously migrated job blockingCall(actionListener -> jobConfigProvider.putJob(migratedJob, actionListener), indexResponseHolder, exceptionHolder); MlConfigMigrator mlConfigMigrator = new MlConfigMigrator(nodeSettings(), client(), clusterService, expressionResolver); AtomicReference<Set<String>> failedIdsHolder = new AtomicReference<>(); Job foo = buildJobBuilder("foo").build(); // try to write foo and 'job-already-migrated' which does not have the custom setting field assertNull(indexJob.getCustomSettings()); blockingCall(actionListener -> mlConfigMigrator.writeConfigToIndex(Collections.emptyList(), Arrays.asList(indexJob, foo), actionListener), failedIdsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(failedIdsHolder.get(), empty()); // Check job foo has been indexed and 
job-already-migrated has been overwritten AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>(); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, false, actionListener), jobsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(jobsHolder.get(), hasSize(2)); Job fooJob = jobsHolder.get().get(0).build(); assertEquals("foo", fooJob.getId()); // this job won't have been marked as migrated as calling // MlConfigMigrator.writeConfigToIndex directly does not do that assertNull(fooJob.getCustomSettings()); Job alreadyMigratedJob = jobsHolder.get().get(1).build(); assertEquals("job-already-migrated", alreadyMigratedJob.getId()); assertNull(alreadyMigratedJob.getCustomSettings()); } public void testMigrateConfigs() throws InterruptedException, IOException { MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("job-foo").build(), false); mlMetadata.putJob(buildJobBuilder("job-bar").build(), false); DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-1", "job-foo"); builder.setIndices(Collections.singletonList("beats*")); mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry()); Metadata.Builder metadata = Metadata.builder(); RoutingTable.Builder routingTable = RoutingTable.builder(); addMlConfigIndex(metadata, routingTable); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metadata(metadata.putCustom(MlMetadata.TYPE, mlMetadata.build())) .routingTable(routingTable.build()) .build(); when(clusterService.state()).thenReturn(clusterState); List<Metadata.Custom> customs = new ArrayList<>(); doAnswer(invocation -> { ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; ClusterState result = listener.execute(clusterState); for (ObjectCursor<Metadata.Custom> value : result.metadata().customs().values()){ customs.add(value.value); } listener.clusterStateProcessed("source", 
mock(ClusterState.class), mock(ClusterState.class)); return null; }).when(clusterService).submitStateUpdateTask(eq("remove-migrated-ml-configs"), any()); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<Boolean> responseHolder = new AtomicReference<>(); // do the migration MlConfigMigrator mlConfigMigrator = new MlConfigMigrator(nodeSettings(), client(), clusterService, expressionResolver); // the first time this is called mlmetadata will be snap-shotted blockingCall(actionListener -> mlConfigMigrator.migrateConfigs(clusterState, actionListener), responseHolder, exceptionHolder); // Verify that we have custom values in the new cluster state and that none of them is null assertThat(customs.size(), greaterThan(0)); assertThat(customs.stream().anyMatch(Objects::isNull), is(false)); assertNull(exceptionHolder.get()); assertTrue(responseHolder.get()); assertSnapshot(mlMetadata.build()); // check the jobs have been migrated AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>(); JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(jobsHolder.get(), hasSize(2)); assertTrue(jobsHolder.get().get(0).build().getCustomSettings().containsKey(MlConfigMigrator.MIGRATED_FROM_VERSION)); assertEquals("job-bar", jobsHolder.get().get(0).build().getId()); assertTrue(jobsHolder.get().get(1).build().getCustomSettings().containsKey(MlConfigMigrator.MIGRATED_FROM_VERSION)); assertEquals("job-foo", jobsHolder.get().get(1).build().getId()); // check datafeeds are migrated DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); AtomicReference<List<DatafeedConfig.Builder>> datafeedsHolder = new AtomicReference<>(); blockingCall(actionListener -> 
datafeedConfigProvider.expandDatafeedConfigs("*", true, actionListener), datafeedsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(datafeedsHolder.get(), hasSize(1)); assertEquals("df-1", datafeedsHolder.get().get(0).getId()); } public void testExistingSnapshotDoesNotBlockMigration() throws InterruptedException { // define the configs MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("job-foo").build(), false); Metadata.Builder metadata = Metadata.builder(); RoutingTable.Builder routingTable = RoutingTable.builder(); addMlConfigIndex(metadata, routingTable); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metadata(metadata.putCustom(MlMetadata.TYPE, mlMetadata.build())) .routingTable(routingTable.build()) .build(); when(clusterService.state()).thenReturn(clusterState); // index a doc with the same Id as the config snapshot PlainActionFuture<Boolean> future = PlainActionFuture.newFuture(); AnomalyDetectorsIndex.createStateIndexAndAliasIfNecessary(client(), clusterService.state(), expressionResolver, MasterNodeRequest.DEFAULT_MASTER_NODE_TIMEOUT, future); future.actionGet(); IndexRequest indexRequest = new IndexRequest(AnomalyDetectorsIndex.jobStateIndexWriteAlias()).id("ml-config") .source(Collections.singletonMap("a_field", "a_value")) .opType(DocWriteRequest.OpType.CREATE) .setRequireAlias(true) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); client().index(indexRequest).actionGet(); doAnswer(invocation -> { ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; listener.clusterStateProcessed("source", mock(ClusterState.class), mock(ClusterState.class)); return null; }).when(clusterService).submitStateUpdateTask(eq("remove-migrated-ml-configs"), any()); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<Boolean> responseHolder = new AtomicReference<>(); // do the migration MlConfigMigrator 
mlConfigMigrator = new MlConfigMigrator(nodeSettings(), client(), clusterService, expressionResolver); // writing the snapshot should fail because the doc already exists // in which case the migration should continue blockingCall(actionListener -> mlConfigMigrator.migrateConfigs(clusterState, actionListener), responseHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertTrue(responseHolder.get()); // check the jobs have been migrated AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>(); JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(jobsHolder.get(), hasSize(1)); assertTrue(jobsHolder.get().get(0).build().getCustomSettings().containsKey(MlConfigMigrator.MIGRATED_FROM_VERSION)); assertEquals("job-foo", jobsHolder.get().get(0).build().getId()); } public void testMigrateConfigs_GivenLargeNumberOfJobsAndDatafeeds() throws InterruptedException { int jobCount = randomIntBetween(150, 201); int datafeedCount = randomIntBetween(150, jobCount); // and jobs and datafeeds clusterstate MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); for (int i = 0; i < jobCount; i++) { mlMetadata.putJob(buildJobBuilder("job-" + i).build(), false); } for (int i = 0; i < datafeedCount; i++) { DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-" + i, "job-" + i); builder.setIndices(Collections.singletonList("beats*")); mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry()); } Metadata.Builder metadata = Metadata.builder(); RoutingTable.Builder routingTable = RoutingTable.builder(); addMlConfigIndex(metadata, routingTable); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metadata(metadata.putCustom(MlMetadata.TYPE, mlMetadata.build())) .routingTable(routingTable.build()) 
.build(); when(clusterService.state()).thenReturn(clusterState); doAnswer(invocation -> { ClusterStateUpdateTask listener = (ClusterStateUpdateTask) invocation.getArguments()[1]; listener.clusterStateProcessed("source", mock(ClusterState.class), mock(ClusterState.class)); return null; }).when(clusterService).submitStateUpdateTask(eq("remove-migrated-ml-configs"), any()); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<Boolean> responseHolder = new AtomicReference<>(); // do the migration MlConfigMigrator mlConfigMigrator = new MlConfigMigrator(nodeSettings(), client(), clusterService, expressionResolver); blockingCall(actionListener -> mlConfigMigrator.migrateConfigs(clusterState, actionListener), responseHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertTrue(responseHolder.get()); // check the jobs have been migrated AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>(); JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(jobsHolder.get(), hasSize(jobCount)); // check datafeeds are migrated DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); AtomicReference<List<DatafeedConfig.Builder>> datafeedsHolder = new AtomicReference<>(); blockingCall(actionListener -> datafeedConfigProvider.expandDatafeedConfigs("*", true, actionListener), datafeedsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(datafeedsHolder.get(), hasSize(datafeedCount)); } public void testMigrateConfigs_GivenNoJobsOrDatafeeds() throws InterruptedException { // Add empty ML metadata MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metadata(Metadata.builder() 
.putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<Boolean> responseHolder = new AtomicReference<>(); // do the migration MlConfigMigrator mlConfigMigrator = new MlConfigMigrator(nodeSettings(), client(), clusterService, expressionResolver); blockingCall(actionListener -> mlConfigMigrator.migrateConfigs(clusterState, actionListener), responseHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertFalse(responseHolder.get()); } public void testMigrateConfigsWithoutTasks_GivenMigrationIsDisabled() throws InterruptedException { Settings settings = Settings.builder().put(nodeSettings()) .put(MlConfigMigrationEligibilityCheck.ENABLE_CONFIG_MIGRATION.getKey(), false) .build(); ClusterSettings clusterSettings = new ClusterSettings(settings, new HashSet<>(Collections.singletonList( MlConfigMigrationEligibilityCheck.ENABLE_CONFIG_MIGRATION))); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); // and jobs and datafeeds clusterstate MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("job-foo").build(), false); mlMetadata.putJob(buildJobBuilder("job-bar").build(), false); DatafeedConfig.Builder builder = new DatafeedConfig.Builder("df-1", "job-foo"); builder.setIndices(Collections.singletonList("beats*")); mlMetadata.putDatafeed(builder.build(), Collections.emptyMap(), xContentRegistry()); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metadata(Metadata.builder() .putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<Boolean> responseHolder = new AtomicReference<>(); // do the migration MlConfigMigrator mlConfigMigrator = new MlConfigMigrator(settings, client(), clusterService, expressionResolver); blockingCall(actionListener -> mlConfigMigrator.migrateConfigs(clusterState, actionListener), 
responseHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertFalse(responseHolder.get()); // check the jobs have not been migrated AtomicReference<List<Job.Builder>> jobsHolder = new AtomicReference<>(); JobConfigProvider jobConfigProvider = new JobConfigProvider(client(), xContentRegistry()); blockingCall(actionListener -> jobConfigProvider.expandJobs("*", true, true, actionListener), jobsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(jobsHolder.get().isEmpty(), is(true)); // check datafeeds have not been migrated DatafeedConfigProvider datafeedConfigProvider = new DatafeedConfigProvider(client(), xContentRegistry()); AtomicReference<List<DatafeedConfig.Builder>> datafeedsHolder = new AtomicReference<>(); blockingCall(actionListener -> datafeedConfigProvider.expandDatafeedConfigs("*", true, actionListener), datafeedsHolder, exceptionHolder); assertNull(exceptionHolder.get()); assertThat(datafeedsHolder.get().isEmpty(), is(true)); } public void assertSnapshot(MlMetadata expectedMlMetadata) throws IOException { client().admin().indices().prepareRefresh(AnomalyDetectorsIndex.jobStateIndexPattern()).get(); SearchResponse searchResponse = client() .prepareSearch(AnomalyDetectorsIndex.jobStateIndexPattern()) .setSize(1) .setQuery(QueryBuilders.idsQuery().addIds("ml-config")) .get(); assertThat(searchResponse.getHits().getHits().length, greaterThan(0)); try (InputStream stream = searchResponse.getHits().getAt(0).getSourceRef().streamInput(); XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, stream)) { MlMetadata recoveredMeta = MlMetadata.LENIENT_PARSER.apply(parser, null).build(); assertEquals(expectedMlMetadata, recoveredMeta); } } private void addMlConfigIndex(Metadata.Builder metadata, RoutingTable.Builder routingTable) { IndexMetadata.Builder indexMetadata = IndexMetadata.builder(MlConfigIndex.indexName()); 
indexMetadata.settings(Settings.builder() .put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT) .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) ); metadata.put(indexMetadata); Index index = new Index(MlConfigIndex.indexName(), "_uuid"); ShardId shardId = new ShardId(index, 0); ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, true, RecoverySource.EmptyStoreRecoverySource.INSTANCE, new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")); shardRouting = shardRouting.initialize("node_id", null, 0L); shardRouting = shardRouting.moveToStarted(); routingTable.add(IndexRoutingTable.builder(index) .addIndexShard(new IndexShardRoutingTable.Builder(shardId).addShard(shardRouting).build())); } public void testConfigIndexIsCreated() throws Exception { // and jobs and datafeeds clusterstate MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); mlMetadata.putJob(buildJobBuilder("job-foo").build(), false); ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) .metadata(Metadata.builder().putCustom(MlMetadata.TYPE, mlMetadata.build())) .build(); AtomicReference<Exception> exceptionHolder = new AtomicReference<>(); AtomicReference<Boolean> responseHolder = new AtomicReference<>(); MlConfigMigrator mlConfigMigrator = new MlConfigMigrator(nodeSettings(), client(), clusterService, expressionResolver); // if the cluster state has a job config and the index does not // exist it should be created blockingCall(actionListener -> mlConfigMigrator.migrateConfigs(clusterState, actionListener), responseHolder, exceptionHolder); assertBusy(() -> assertTrue(configIndexExists())); } private boolean configIndexExists() { return ESIntegTestCase.indexExists(MlConfigIndex.indexName(), client()); } }
apache-2.0
Laliska/perun
perun-web-gui/src/main/java/cz/metacentrum/perun/webgui/tabs/vostabs/VoResourcesPropagationsTabItem.java
11937
package cz.metacentrum.perun.webgui.tabs.vostabs;

import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.ColumnSortEvent;
import com.google.gwt.user.cellview.client.RowStyles;
import com.google.gwt.user.cellview.client.TextColumn;
import com.google.gwt.user.client.ui.*;
import com.google.gwt.view.client.ListDataProvider;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.client.mainmenu.MainMenu;
import cz.metacentrum.perun.webgui.client.resources.*;
import cz.metacentrum.perun.webgui.json.GetEntityById;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.propagationStatsReader.GetAllResourcesState;
import cz.metacentrum.perun.webgui.model.*;
import cz.metacentrum.perun.webgui.tabs.TabItem;
import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl;
import cz.metacentrum.perun.webgui.tabs.UrlMapper;
import cz.metacentrum.perun.webgui.tabs.VosTabs;
import cz.metacentrum.perun.webgui.widgets.AjaxLoaderImage;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.PerunTable;

import java.util.ArrayList;
import java.util.Map;

/**
 * Tab with propagation status of all facilities related to VO.
 *
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class VoResourcesPropagationsTabItem implements TabItem, TabItemWithUrl {

    /** Perun web session */
    private PerunWebSession session = PerunWebSession.getInstance();

    /** Content widget - should be simple panel */
    private SimplePanel contentWidget = new SimplePanel();

    /** Title widget */
    private Label titleWidget = new Label("All VO's resources state");

    // VO this tab displays propagation state for; loaded asynchronously
    // when only the id is known (see the int constructor).
    private VirtualOrganization vo;
    private int voId;

    // Row index into the result table; advanced by two per resource
    // (one header row + one task-table row).
    private int mainrow = 0;

    // Per-draw() counters shown in the legend ("Ok (n)" / "Error (n)" / ...).
    private int okCounter = 0;
    private int errorCounter = 0;
    private int notDeterminedCounter = 0;

    /**
     * Creates a tab instance. The VO entity is fetched asynchronously by id;
     * isPrepared() reports when it has arrived.
     *
     * @param voId id of the VO to show resource propagation state for
     */
    public VoResourcesPropagationsTabItem(int voId) {
        this.voId = voId;
        JsonCallbackEvents events = new JsonCallbackEvents() {
            public void onFinished(JavaScriptObject jso) {
                vo = jso.cast();
            }
        };
        new GetEntityById(PerunEntity.VIRTUAL_ORGANIZATION, voId, events).retrieveData();
    }

    /**
     * Creates a tab instance.
     *
     * @param vo VO to show resource propagation state for
     */
    public VoResourcesPropagationsTabItem(VirtualOrganization vo) {
        this.voId = vo.getId();
        this.vo = vo;
    }

    public boolean isPrepared() {
        // ready once the VO entity has been loaded
        return (vo != null);
    }

    /**
     * Builds the tab UI: a legend with counters and refresh button, plus one
     * header row and one per-service task table for each resource of the VO.
     */
    public Widget draw() {

        // reset per-draw state so a refresh starts from scratch
        mainrow = 0;
        okCounter = 0;
        errorCounter = 0;
        notDeterminedCounter = 0;

        titleWidget.setText(Utils.getStrippedStringWithEllipsis(vo.getName()) + ": resources state");

        VerticalPanel mainTab = new VerticalPanel();
        mainTab.setWidth("100%");

        // MAIN PANEL
        final ScrollPanel firstTabPanel = new ScrollPanel();
        firstTabPanel.setSize("100%", "100%");
        firstTabPanel.setStyleName("perun-tableScrollPanel");

        // legend row: refresh button + color notation; counters are filled in
        // by the callback below once data arrives
        final FlexTable help = new FlexTable();
        help.setCellPadding(4);
        help.setWidth("100%");

        final CustomButton cb = UiElements.getRefreshButton(this);
        help.setWidget(0, 0, cb);
        help.getFlexCellFormatter().setWidth(0, 0, "80px");

        help.setHTML(0, 1, "<strong>Color&nbsp;notation:</strong>");
        help.getFlexCellFormatter().setWidth(0, 1, "100px");

        help.setHTML(0, 2, "<strong>OK</strong>");
        help.getFlexCellFormatter().setHorizontalAlignment(0, 2, HasHorizontalAlignment.ALIGN_CENTER);
        help.getFlexCellFormatter().setWidth(0, 2, "50px");
        help.getFlexCellFormatter().setStyleName(0, 2, "green");

        help.setHTML(0, 3, "<strong>Error</strong>");
        help.getFlexCellFormatter().setWidth(0, 3, "50px");
        help.getFlexCellFormatter().setStyleName(0, 3, "red");
        help.getFlexCellFormatter().setHorizontalAlignment(0, 3, HasHorizontalAlignment.ALIGN_CENTER);

        help.setHTML(0, 4, "<strong>Not&nbsp;determined</strong>");
        help.getFlexCellFormatter().setWidth(0, 4, "50px");
        help.getFlexCellFormatter().setHorizontalAlignment(0, 4, HasHorizontalAlignment.ALIGN_CENTER);
        help.getFlexCellFormatter().setStyleName(0, 4, "notdetermined");

        // NOTE: the "Processing" legend entry is intentionally not shown.
        help.setHTML(0, 5, "&nbsp;");
        help.getFlexCellFormatter().setWidth(0, 6, "50%");

        mainTab.add(help);
        mainTab.add(new HTML("<hr size=\"2\" />"));
        mainTab.add(firstTabPanel);

        final FlexTable content = new FlexTable();
        content.setWidth("100%");
        content.setBorderWidth(0);
        firstTabPanel.add(content);
        content.setStyleName("propagationTable", true);

        final AjaxLoaderImage im = new AjaxLoaderImage();
        content.setWidget(0, 0, im);
        content.getFlexCellFormatter().setHorizontalAlignment(0, 0, HasHorizontalAlignment.ALIGN_CENTER);

        final GetAllResourcesState callback = new GetAllResourcesState(voId, new JsonCallbackEvents() {
            public void onLoadingStart() {
                im.loadingStart();
                cb.setProcessing(true);
            }
            public void onError(PerunError error) {
                im.loadingError(error);
                cb.setProcessing(false);
            }
            public void onFinished(JavaScriptObject jso) {
                im.loadingFinished();
                cb.setProcessing(false);
                content.clear();
                content.getFlexCellFormatter().setHorizontalAlignment(0, 0, HasHorizontalAlignment.ALIGN_LEFT);

                ArrayList<ResourceState> list = JsonUtils.jsoAsList(jso);
                if (list != null && !list.isEmpty()) {

                    list = new TableSorter<ResourceState>().sortByResourceName(list);

                    for (final ResourceState state : list) {

                        // resource header row
                        content.setHTML(mainrow, 0, new Image(LargeIcons.INSTANCE.serverGroupIcon()) + "<span class=\"now-managing\" style=\"display: inline-block; position: relative; top: -8px;\">" + state.getResource().getName() + "</span>");

                        ArrayList<Task> tasks = new TableSorter<Task>().sortByService(JsonUtils.<Task>jsoAsList(state.getTasks()));
                        if (tasks == null) {
                            // defensive: a missing task list is treated like an empty
                            // one (previous code would have thrown NPE when iterating)
                            tasks = new ArrayList<Task>();
                        }
                        if (tasks.isEmpty()) notDeterminedCounter++;

                        // a resource counts as OK only if it has tasks and none is in ERROR
                        boolean allOk = true;
                        for (Task tsk : tasks) {
                            if (tsk.getStatus().equalsIgnoreCase("ERROR")) {
                                errorCounter++;
                                allOk = false;
                                break;
                            }
                        }
                        if (allOk && !tasks.isEmpty()) okCounter++;

                        // per-service propagation results table
                        ListDataProvider<Task> dataProvider = new ListDataProvider<Task>(tasks);
                        PerunTable<Task> table = new PerunTable<Task>(tasks);
                        table.removeRowCountChangeHandler();

                        // connect the table to the data provider
                        dataProvider.addDataDisplay(table);

                        // sorting support
                        ColumnSortEvent.ListHandler<Task> columnSortHandler = new ColumnSortEvent.ListHandler<Task>(dataProvider.getList());
                        table.addColumnSortHandler(columnSortHandler);

                        // set empty content & loader
                        AjaxLoaderImage loaderImage = new AjaxLoaderImage();
                        loaderImage.setEmptyResultMessage("No service configuration was propagated to this resource.");
                        table.setEmptyTableWidget(loaderImage);
                        loaderImage.loadingFinished();

                        table.addIdColumn("Task Id");

                        // service name column
                        Column<Task, String> serviceColumn = JsonUtils.addColumn(
                                new JsonUtils.GetValue<Task, String>() {
                                    public String getValue(Task task) {
                                        return String.valueOf(task.getExecService().getService().getName());
                                    }
                                }, null);

                        // task status column
                        Column<Task, String> statusColumn = JsonUtils.addColumn(
                                new JsonUtils.GetValue<Task, String>() {
                                    public String getValue(Task task) {
                                        return String.valueOf(task.getStatus());
                                    }
                                }, null);

                        TextColumn<Task> startTimeColumn = new TextColumn<Task>() {
                            public String getValue(Task result) {
                                return result.getStartTime();
                            }
                        };

                        TextColumn<Task> endTimeColumn = new TextColumn<Task>() {
                            public String getValue(Task result) {
                                return result.getEndTime();
                            }
                        };

                        TextColumn<Task> scheduleColumn = new TextColumn<Task>() {
                            public String getValue(Task result) {
                                return result.getSchedule();
                            }
                        };

                        table.addColumn(serviceColumn, "Service");
                        table.addColumn(statusColumn, "Status");
                        table.addColumn(scheduleColumn, "Scheduled");
                        table.addColumn(startTimeColumn, "Started");
                        table.addColumn(endTimeColumn, "Ended");

                        // color rows based on task state
                        table.setRowStyles(new RowStyles<Task>() {
                            public String getStyleNames(Task row, int rowIndex) {
                                if (row.getStatus().equalsIgnoreCase("NONE")) {
                                    return "rowdarkgreen";
                                } else if (row.getStatus().equalsIgnoreCase("DONE")) {
                                    return "rowgreen";
                                } else if (row.getStatus().equalsIgnoreCase("PROCESSING")) {
                                    return "rowyellow";
                                } else if (row.getStatus().equalsIgnoreCase("ERROR")) {
                                    return "rowred";
                                }
                                return "";
                            }
                        });

                        table.setWidth("100%");
                        content.setWidget(mainrow + 1, 0, table);
                        content.getFlexCellFormatter().setStyleName(mainrow + 1, 0, "propagationTablePadding");

                        // advance past the header row and the task-table row
                        mainrow += 2;
                    }
                }

                // update the legend counters
                help.setHTML(0, 2, "<strong>Ok&nbsp;(" + okCounter + ")</strong>");
                help.setHTML(0, 3, "<strong>Error&nbsp;(" + errorCounter + ")</strong>");
                help.setHTML(0, 4, "<strong>Not&nbsp;determined&nbsp;(" + notDeterminedCounter + ")</strong>");
            }
        });
        // load state of all facilities related to this VO
        callback.retrieveData();

        // resize perun table to correct size on screen
        session.getUiElements().resizePerunTable(firstTabPanel, 400, this);

        this.contentWidget.setWidget(mainTab);

        return getWidget();
    }

    public Widget getWidget() {
        return this.contentWidget;
    }

    public Widget getTitle() {
        return this.titleWidget;
    }

    public ImageResource getIcon() {
        return SmallIcons.INSTANCE.arrowRightIcon();
    }

    @Override
    public int hashCode() {
        final int prime = 1327;
        int result = 1;
        result = prime * result + voId;
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        // tabs are identified by the VO they display
        return voId == ((VoResourcesPropagationsTabItem) obj).voId;
    }

    public boolean multipleInstancesEnabled() {
        return false;
    }

    public void open() {
        session.getUiElements().getMenu().openMenu(MainMenu.VO_ADMIN);
        session.getUiElements().getBreadcrumbs().setLocation(vo, "Resources state", getUrlWithParameters());
        if (vo != null) {
            session.setActiveVo(vo);
            return;
        }
        session.setActiveVoId(voId);
    }

    public boolean isAuthorized() {
        // VO managers and observers may see propagation state
        return session.isVoAdmin(voId) || session.isVoObserver(voId);
    }

    public final static String URL = "propags";

    public String getUrl() {
        return URL;
    }

    public String getUrlWithParameters() {
        return VosTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?vo=" + voId;
    }

    static public VoResourcesPropagationsTabItem load(Map<String, String> parameters) {
        int voId = Integer.parseInt(parameters.get("vo"));
        return new VoResourcesPropagationsTabItem(voId);
    }
}
bsd-2-clause
nwjs/chromium.src
components/browser_ui/contacts_picker/android/java/src/org/chromium/components/browser_ui/contacts_picker/ContactDetails.java
10708
// Copyright 2018 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.components.browser_ui.contacts_picker;

import android.content.res.Resources;
import android.graphics.drawable.Drawable;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import org.chromium.blink.mojom.ContactIconBlob;
import org.chromium.payments.mojom.PaymentAddress;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

/**
 * A class to keep track of the metadata associated with a contact.
 */
public class ContactDetails implements Comparable<ContactDetails> {
    // The identifier for the information from the signed in user. Must not be a valid id in the
    // context of the Android Contacts list.
    public static final String SELF_CONTACT_ID = "-1";

    /**
     * A container class for delivering contact details in abbreviated form
     * (where only the first email and phone numbers are returned and the rest
     * is indicated with "+n more" strings).
     */
    public static class AbbreviatedContactDetails {
        public String primaryEmail;
        public String overflowEmailCount;
        public String primaryTelephoneNumber;
        public String overflowTelephoneNumberCount;
        public String primaryAddress;
        public String overflowAddressCount;
    }

    // The unique id for the contact. Identity (equals/hashCode) is based solely on this field.
    private final String mId;

    // The display name for this contact (never null; "" when missing).
    private final String mDisplayName;

    // The list of emails registered for this contact.
    private final List<String> mEmails;

    // The list of phone numbers registered for this contact.
    private final List<String> mPhoneNumbers;

    // The list of addresses registered for this contact.
    private final List<PaymentAddress> mAddresses;

    // The list of icons registered for this contact (at most one; see setIcon()).
    private final List<ContactIconBlob> mIcons;

    // Keeps track of whether this is the contact detail for the owner of the device.
    private boolean mIsSelf;

    // The avatar icon for the owner of the device. Non-null only if the ContactDetails representing
    // the owner were synthesized (not when a pre-existing contact tile was moved to the top).
    @Nullable
    private Drawable mSelfIcon;

    /**
     * The ContactDetails constructor. Null list arguments are normalized to empty lists and a
     * null display name to "".
     *
     * @param id The unique identifier of this contact.
     * @param displayName The display name of this contact.
     * @param emails The emails registered for this contact.
     * @param phoneNumbers The phone numbers registered for this contact.
     * @param addresses The addresses registered for this contact.
     */
    public ContactDetails(String id, String displayName, List<String> emails,
            List<String> phoneNumbers, List<PaymentAddress> addresses) {
        mDisplayName = displayName != null ? displayName : "";
        mEmails = emails != null ? emails : new ArrayList<String>();
        mPhoneNumbers = phoneNumbers != null ? phoneNumbers : new ArrayList<String>();
        mAddresses = addresses != null ? addresses : new ArrayList<PaymentAddress>();
        mIcons = new ArrayList<>();
        mId = id;
    }

    public List<String> getDisplayNames() {
        return Arrays.asList(mDisplayName);
    }

    public List<String> getEmails() {
        return mEmails;
    }

    public List<String> getPhoneNumbers() {
        return mPhoneNumbers;
    }

    public List<PaymentAddress> getAddresses() {
        return mAddresses;
    }

    public List<ContactIconBlob> getIcons() {
        return mIcons;
    }

    public String getDisplayName() {
        return mDisplayName;
    }

    public String getId() {
        return mId;
    }

    /** Sets the (single) icon for this contact. May only be called once. */
    public void setIcon(ContactIconBlob icon) {
        assert mIcons.isEmpty();
        mIcons.add(icon);
    }

    /**
     * Marks whether object is representing the owner of the device.
     * @param value True if this is the contact details for the owner. False otherwise.
     */
    public void setIsSelf(boolean value) {
        mIsSelf = value;
    }

    /**
     * Returns true if this contact detail is representing the owner of the device.
     */
    public boolean isSelf() {
        return mIsSelf;
    }

    /**
     * Sets the icon representing the owner of the device.
     */
    public void setSelfIcon(Drawable icon) {
        mSelfIcon = icon;
    }

    /**
     * Fetch the cached icon for this contact. Returns null if this is not the 'self' contact, all
     * other contact avatars should be retrieved through the {@link FetchIconWorkerTask}.
     */
    @Nullable
    public Drawable getSelfIcon() {
        return mSelfIcon;
    }

    /**
     * Accessor for the abbreviated display name (first letter of first name and first letter of
     * last name).
     * @return The display name, abbreviated to two characters ("" when the name is empty).
     */
    public String getDisplayNameAbbreviation() {
        // Display the two letter abbreviation of the display name.
        String displayChars = "";
        if (mDisplayName.length() > 0) {
            displayChars += mDisplayName.charAt(0);
            String[] parts = mDisplayName.split(" ");
            if (parts.length > 1) {
                displayChars += parts[parts.length - 1].charAt(0);
            }
        }
        return displayChars;
    }

    /**
     * Collapses a (possibly multi-line) address into one line: runs of consecutive newlines are
     * reduced to a single newline, then each remaining newline becomes ", ".
     */
    private String ensureSingleLine(String address) {
        String returnValue = address.replaceAll("\n\n", "\n");
        // The string might have multiple consecutive new-lines, which means \n\n\n -> \n\n, so
        // we'll perform the conversion until we've caught them all.
        while (returnValue.length() < address.length()) {
            address = returnValue;
            returnValue = address.replaceAll("\n\n", "\n");
        }
        return returnValue.replaceAll("\n", ", ");
    }

    /**
     * Accessor for the list of contact details (emails and phone numbers). Returned as strings
     * separated by newline).
     * @param includeAddresses Whether to include addresses in the returned results.
     * @param includeEmails Whether to include emails in the returned results.
     * @param includeTels Whether to include telephones in the returned results.
     * @return A string containing all the contact details registered for this contact.
     */
    public String getContactDetailsAsString(
            boolean includeAddresses, boolean includeEmails, boolean includeTels) {
        int count = 0;
        StringBuilder builder = new StringBuilder();
        if (includeAddresses) {
            for (PaymentAddress address : mAddresses) {
                if (count++ > 0) {
                    builder.append("\n");
                }
                builder.append(ensureSingleLine(address.addressLine[0]));
            }
        }
        if (includeEmails) {
            for (String email : mEmails) {
                if (count++ > 0) {
                    builder.append("\n");
                }
                builder.append(email);
            }
        }
        if (includeTels) {
            for (String phoneNumber : mPhoneNumbers) {
                if (count++ > 0) {
                    builder.append("\n");
                }
                builder.append(phoneNumber);
            }
        }

        return builder.toString();
    }

    /**
     * Accessor for the list of contact details (emails and phone numbers), with only the first
     * entry of each category shown and the rest summarized as "+n more".
     * @param includeAddresses Whether to include addresses in the returned results.
     * @param includeEmails Whether to include emails in the returned results.
     * @param includeTels Whether to include telephones in the returned results.
     * @param resources The resources to use for fetching the string. Must be provided.
     * @return The contact details registered for this contact.
     */
    public AbbreviatedContactDetails getAbbreviatedContactDetails(boolean includeAddresses,
            boolean includeEmails, boolean includeTels, @NonNull Resources resources) {
        AbbreviatedContactDetails results = new AbbreviatedContactDetails();

        results.overflowAddressCount = "";
        if (!includeAddresses || mAddresses.isEmpty()) {
            results.primaryAddress = "";
        } else {
            results.primaryAddress = ensureSingleLine(mAddresses.get(0).addressLine[0]);
            int totalAddresses = mAddresses.size();
            if (totalAddresses > 1) {
                int hiddenAddresses = totalAddresses - 1;
                results.overflowAddressCount = resources.getQuantityString(
                        R.plurals.contacts_picker_more_details, hiddenAddresses, hiddenAddresses);
            }
        }

        results.overflowEmailCount = "";
        if (!includeEmails || mEmails.isEmpty()) {
            results.primaryEmail = "";
        } else {
            results.primaryEmail = mEmails.get(0);
            int totalAddresses = mEmails.size();
            if (totalAddresses > 1) {
                int hiddenAddresses = totalAddresses - 1;
                results.overflowEmailCount = resources.getQuantityString(
                        R.plurals.contacts_picker_more_details, hiddenAddresses, hiddenAddresses);
            }
        }

        results.overflowTelephoneNumberCount = "";
        if (!includeTels || mPhoneNumbers.isEmpty()) {
            results.primaryTelephoneNumber = "";
        } else {
            results.primaryTelephoneNumber = mPhoneNumbers.get(0);
            int totalNumbers = mPhoneNumbers.size();
            if (totalNumbers > 1) {
                int hiddenNumbers = totalNumbers - 1;
                results.overflowTelephoneNumberCount = resources.getQuantityString(
                        R.plurals.contacts_picker_more_details, hiddenNumbers, hiddenNumbers);
            }
        }

        return results;
    }

    /**
     * A comparison function. Note that the operands are intentionally swapped, so this sorts by
     * display name in DESCENDING order (the previous comment claiming "ascending" was wrong).
     * NOTE(review): this ordering is not consistent with equals(), which compares ids only —
     * confirm no caller relies on consistency (e.g. sorted-set membership).
     * @param other The other ContactDetails object to compare it with.
     * @return 0, 1, or -1, depending on which is bigger.
     */
    @Override
    public int compareTo(ContactDetails other) {
        return other.mDisplayName.compareTo(mDisplayName);
    }

    @Override
    public int hashCode() {
        // Must stay consistent with equals(), which compares only mId: equal objects must have
        // equal hash codes. (Previously the display name was hashed in as well, which broke the
        // contract for two objects with the same id but different names.)
        return mId.hashCode();
    }

    @Override
    public boolean equals(@Nullable Object object) {
        if (object == null) return false;
        if (object == this) return true;
        if (!(object instanceof ContactDetails)) return false;

        ContactDetails otherInfo = (ContactDetails) object;
        return mId.equals(otherInfo.mId);
    }
}
bsd-3-clause
scheib/chromium
components/infobars/android/java/src/org/chromium/components/infobars/InfoBarUiItem.java
2179
// Copyright 2020 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. package org.chromium.components.infobars; import android.view.View; import androidx.annotation.IntDef; import org.chromium.chrome.browser.infobar.InfoBarIdentifier; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; /** * An interface for items that can be added to an InfoBarContainerLayout. */ public interface InfoBarUiItem { // The infobar priority. @IntDef({InfoBarPriority.CRITICAL, InfoBarPriority.USER_TRIGGERED, InfoBarPriority.PAGE_TRIGGERED, InfoBarPriority.BACKGROUND}) @Retention(RetentionPolicy.SOURCE) public @interface InfoBarPriority { int CRITICAL = 0; int USER_TRIGGERED = 1; int PAGE_TRIGGERED = 2; int BACKGROUND = 3; } /** * Returns the View that represents this infobar. This should have no background or borders; * a background and shadow will be added by a wrapper view. */ View getView(); /** * Returns whether controls for this View should be clickable. If false, all input events on * this item will be ignored. */ boolean areControlsEnabled(); /** * Sets whether or not controls for this View should be clickable. This does not affect the * visual state of the infobar. * @param state If false, all input events on this Item will be ignored. */ void setControlsEnabled(boolean state); /** * Returns the accessibility text to announce when this infobar is first shown. */ CharSequence getAccessibilityText(); /** * Returns the priority of an infobar. High priority infobar is shown in front of low * priority infobar. If infobars have the same priorities, the most recently added one * is shown behind previous ones. * */ int getPriority(); /** * Returns the type of infobar, as best as can be determined at this time. See * components/infobars/core/infobar_delegate.h. */ @InfoBarIdentifier int getInfoBarIdentifier(); }
bsd-3-clause
ric2b/Vivaldi-browser
chromium/chrome/android/javatests/src/org/chromium/chrome/browser/partnercustomizations/PartnerDisableIncognitoModeIntegrationTest.java
6744
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chrome.browser.partnercustomizations;

import android.content.Context;
import android.net.Uri;
import android.os.Bundle;
import android.support.test.InstrumentationRegistry;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.PopupMenu;

import androidx.test.filters.MediumTest;

import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Criteria;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.Feature;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.incognito.IncognitoUtils;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.chrome.test.partnercustomizations.TestPartnerBrowserCustomizationsProvider;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.net.test.EmbeddedTestServer;

import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;

/**
 * Integration tests for the partner disabling incognito mode feature.
 */
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
public class PartnerDisableIncognitoModeIntegrationTest {
    @Rule
    public BasePartnerBrowserCustomizationIntegrationTestRule mActivityTestRule =
            new BasePartnerBrowserCustomizationIntegrationTestRule();

    // Toggles the "incognito disabled" flag via the test partner-customizations content
    // provider; the browser picks the change up on the next foreground transition.
    private void setParentalControlsEnabled(boolean enabled) {
        Uri uri = PartnerBrowserCustomizations.buildQueryUri(
                PartnerBrowserCustomizations.PARTNER_DISABLE_INCOGNITO_MODE_PATH);
        Bundle bundle = new Bundle();
        bundle.putBoolean(
                TestPartnerBrowserCustomizationsProvider.INCOGNITO_MODE_DISABLED_KEY, enabled);
        Context context = InstrumentationRegistry.getTargetContext();
        context.getContentResolver().call(uri, "setIncognitoModeDisabled", null, bundle);
    }

    // Inflates the main menu off-screen and asserts the enabled state of the visible
    // "New incognito tab" item matches the expectation.
    private void assertIncognitoMenuItemEnabled(boolean enabled) throws ExecutionException {
        Menu menu = TestThreadUtils.runOnUiThreadBlocking(new Callable<Menu>() {
            @Override
            public Menu call() {
                // PopupMenu is a convenient way of building a temp menu.
                PopupMenu tempMenu = new PopupMenu(mActivityTestRule.getActivity(),
                        mActivityTestRule.getActivity().findViewById(R.id.menu_anchor_stub));
                tempMenu.inflate(R.menu.main_menu);
                Menu menu = tempMenu.getMenu();
                return menu;
            }
        });
        for (int i = 0; i < menu.size(); ++i) {
            MenuItem item = menu.getItem(i);
            if (item.getItemId() == R.id.new_incognito_tab_menu_id && item.isVisible()) {
                Assert.assertEquals(
                        "Menu item enabled state is not correct.", enabled, item.isEnabled());
            }
        }
    }

    // Polls until both the partner-customizations flag and the derived incognito-enabled
    // state reflect the requested parental-controls value.
    private void waitForParentalControlsEnabledState(final boolean parentalControlsEnabled) {
        CriteriaHelper.pollUiThread(() -> {
            // areParentalControlsEnabled is updated on a background thread, so we
            // also wait on the isIncognitoModeEnabled to ensure the updates on the
            // UI thread have also triggered.
            Criteria.checkThat(PartnerBrowserCustomizations.isIncognitoDisabled(),
                    Matchers.is(parentalControlsEnabled));
            Criteria.checkThat(
                    IncognitoUtils.isIncognitoModeEnabled(), Matchers.not(parentalControlsEnabled));
        });
    }

    // Simulates a background/foreground cycle (pause/stop then start/resume) so the
    // activity re-reads partner customizations.
    private void toggleActivityForegroundState() {
        TestThreadUtils.runOnUiThreadBlocking(() -> mActivityTestRule.getActivity().onPause());
        TestThreadUtils.runOnUiThreadBlocking(() -> mActivityTestRule.getActivity().onStop());
        TestThreadUtils.runOnUiThreadBlocking(() -> mActivityTestRule.getActivity().onStart());
        TestThreadUtils.runOnUiThreadBlocking(() -> mActivityTestRule.getActivity().onResume());
    }

    // With no parental controls, opening an incognito tab from the menu must succeed.
    @Test
    @MediumTest
    @Feature({"DisableIncognitoMode"})
    public void testIncognitoEnabledIfNoParentalControls() throws InterruptedException {
        setParentalControlsEnabled(false);
        mActivityTestRule.startMainActivityOnBlankPage();
        waitForParentalControlsEnabledState(false);
        mActivityTestRule.newIncognitoTabFromMenu();
    }

    // The incognito menu item must track the parental-controls state across a
    // background/foreground cycle.
    @Test
    @MediumTest
    @Feature({"DisableIncognitoMode"})
    public void testIncognitoMenuItemEnabledBasedOnParentalControls()
            throws InterruptedException, ExecutionException {
        setParentalControlsEnabled(true);
        mActivityTestRule.startMainActivityOnBlankPage();
        waitForParentalControlsEnabledState(true);
        assertIncognitoMenuItemEnabled(false);

        setParentalControlsEnabled(false);
        toggleActivityForegroundState();
        waitForParentalControlsEnabledState(false);
        assertIncognitoMenuItemEnabled(true);
    }

    // Enabling parental controls while incognito tabs are open must close all of them.
    @Test
    @MediumTest
    @Feature({"DisableIncognitoMode"})
    public void testEnabledParentalControlsClosesIncognitoTabs() throws InterruptedException {
        EmbeddedTestServer testServer =
                EmbeddedTestServer.createAndStartServer(InstrumentationRegistry.getContext());
        try {
            String[] testUrls = {
                    testServer.getURL("/chrome/test/data/android/about.html"),
                    testServer.getURL("/chrome/test/data/android/ok.txt"),
                    testServer.getURL("/chrome/test/data/android/test.html")
            };
            setParentalControlsEnabled(false);
            mActivityTestRule.startMainActivityOnBlankPage();
            waitForParentalControlsEnabledState(false);

            // Three incognito tabs plus one regular tab; only the incognito ones should close.
            mActivityTestRule.loadUrlInNewTab(testUrls[0], true);
            mActivityTestRule.loadUrlInNewTab(testUrls[1], true);
            mActivityTestRule.loadUrlInNewTab(testUrls[2], true);
            mActivityTestRule.loadUrlInNewTab(testUrls[0], false);

            setParentalControlsEnabled(true);
            toggleActivityForegroundState();
            waitForParentalControlsEnabledState(true);
            CriteriaHelper.pollInstrumentationThread(() -> {
                Criteria.checkThat(
                        mActivityTestRule.tabsCount(true /* incognito */), Matchers.is(0));
            });
        } finally {
            testServer.stopAndDestroyServer();
        }
    }
}
bsd-3-clause
mdaniel/svn-caucho-com-resin
modules/hessian/src/com/caucho/burlap/client/BurlapMetaInfoAPI.java
2725
/*
 * The Apache Software License, Version 1.1
 *
 * Copyright (c) 2001-2004 Caucho Technology, Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. The end-user documentation included with the redistribution, if
 *    any, must include the following acknowlegement:
 *       "This product includes software developed by the
 *        Caucho Technology (http://www.caucho.com/)."
 *    Alternately, this acknowlegement may appear in the software itself,
 *    if and wherever such third-party acknowlegements normally appear.
 *
 * 4. The names "Burlap", "Resin", and "Caucho" must not be used to
 *    endorse or promote products derived from this software without prior
 *    written permission. For written permission, please contact
 *    info@caucho.com.
 *
 * 5. Products derived from this software may not be called "Resin"
 *    nor may "Resin" appear in their names without prior written
 *    permission of Caucho Technology.
 *
 * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL CAUCHO TECHNOLOGY OR ITS CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
 * OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
 * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * @author Scott Ferguson
 */

package com.caucho.burlap.client;

/**
 * API retrieving burlap meta information.
 */
public interface BurlapMetaInfoAPI {
  /**
   * Returns a service attribute.
   *
   * <ul>
   * <li>java.api.class - the Java interface for the object interface.
   * <li>java.ejb.home.class - the EJB home interface
   * <li>java.ejb.remote.class - the EJB remote interface
   * <li>java.primary.key.class - the EJB primary key class
   * </ul>
   *
   * @param name the attribute name, e.g. "java.api.class"
   * @return the attribute value (return semantics for an unknown attribute are
   *         implementation-defined — presumably null; confirm in implementations)
   */
  public Object _burlap_getAttribute(String name);
}
gpl-2.0
dwango/quercus
src/main/java/com/caucho/config/el/CandiValueExpression.java
3154
/*
 * Copyright (c) 1998-2012 Caucho Technology -- all rights reserved
 *
 * This file is part of Resin(R) Open Source
 *
 * Each copy or derived work must preserve the copyright notice and this
 * notice unmodified.
 *
 * Resin Open Source is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * Resin Open Source is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE, or any warranty
 * of NON-INFRINGEMENT.  See the GNU General Public License for more
 * details.
 *
 * You should have received a copy of the GNU General Public License
 * along with Resin Open Source; if not, write to the
 *
 *   Free Software Foundation, Inc.
 *   59 Temple Place, Suite 330
 *   Boston, MA 02111-1307  USA
 *
 * @author Scott Ferguson
 */

package com.caucho.config.el;

import javax.el.ELContext;
import javax.el.ELException;
import javax.el.PropertyNotFoundException;
import javax.el.PropertyNotWritableException;
import javax.el.ValueExpression;

// NOTE(review): these two imports appear unused in this class — confirm before removing.
import com.caucho.config.inject.CreationalContextImpl;
import com.caucho.config.inject.OwnerCreationalContext;

/**
 * Variable resolution for webbeans variables.
 *
 * Decorator around another {@link ValueExpression}: all calls are delegated to
 * the wrapped expression; getValue() additionally brackets the delegation with
 * CandiConfigResolver.startContext()/finishContext().
 */
@SuppressWarnings("serial")
public class CandiValueExpression extends ValueExpression {
  // The wrapped expression every call is forwarded to.
  private final ValueExpression _expr;

  /**
   * Wraps the given expression.
   */
  public CandiValueExpression(ValueExpression expr)
  {
    _expr = expr;
  }

  @Override
  public Class<?> getExpectedType()
  {
    return _expr.getExpectedType();
  }

  @Override
  public Class<?> getType(ELContext context)
    throws PropertyNotFoundException, ELException
  {
    return _expr.getType(context);
  }

  /**
   * Evaluates the wrapped expression inside a CandiConfigResolver context;
   * the context is always torn down, even if evaluation throws.
   */
  @Override
  public Object getValue(ELContext context)
    throws PropertyNotFoundException, ELException
  {
    CandiConfigResolver.startContext();

    try {
      return _expr.getValue(context);
    } finally {
      CandiConfigResolver.finishContext();
    }
  }

  @Override
  public boolean isReadOnly(ELContext context)
    throws PropertyNotFoundException, ELException
  {
    return _expr.isReadOnly(context);
  }

  // NOTE(review): unlike getValue(), this is NOT wrapped in
  // CandiConfigResolver.startContext()/finishContext() — confirm the
  // asymmetry is intentional before changing it.
  @Override
  public void setValue(ELContext context, Object value)
    throws PropertyNotFoundException, PropertyNotWritableException, ELException
  {
    _expr.setValue(context, value);
  }

  // Equality unwraps another CandiValueExpression so two wrappers around
  // equal expressions compare equal.
  @Override
  public boolean equals(Object obj)
  {
    if (obj instanceof CandiValueExpression) {
      CandiValueExpression expr = (CandiValueExpression) obj;

      return _expr.equals(expr._expr);
    }
    else
      return _expr.equals(obj);
  }

  @Override
  public String getExpressionString()
  {
    return _expr.getExpressionString();
  }

  @Override
  public int hashCode()
  {
    return _expr.hashCode();
  }

  @Override
  public boolean isLiteralText()
  {
    return _expr.isLiteralText();
  }

  @Override
  public String toString()
  {
    return getClass().getSimpleName() + "[" + getExpressionString() + "]";
  }
}
gpl-2.0
jtux270/translate
ovirt/backend/manager/modules/bll/src/main/java/org/ovirt/engine/core/bll/GetNumberOfVmsInVdsGroupByVdsGroupIdQuery.java
547
package org.ovirt.engine.core.bll; import org.ovirt.engine.core.common.queries.IdQueryParameters; public class GetNumberOfVmsInVdsGroupByVdsGroupIdQuery<P extends IdQueryParameters> extends QueriesCommandBase<P> { public GetNumberOfVmsInVdsGroupByVdsGroupIdQuery(P parameters) { super(parameters); } @Override protected void executeQueryCommand() { int numOfVms = getDbFacade().getVdsGroupDao().getVmsCountByClusterId(getParameters().getId()); getQueryReturnValue().setReturnValue(numOfVms); } }
gpl-3.0
ypsy/Signal-Android
src/org/thoughtcrime/securesms/GroupCreateActivity.java
23980
/** * Copyright (C) 2014 Open Whisper Systems * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.thoughtcrime.securesms; import android.app.Activity; import android.content.Context; import android.content.Intent; import android.graphics.Bitmap; import android.net.Uri; import android.os.AsyncTask; import android.os.Bundle; import android.support.annotation.NonNull; import android.support.annotation.Nullable; import android.text.TextUtils; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.EditText; import android.widget.ImageView; import android.widget.ListView; import android.widget.TextView; import android.widget.Toast; import com.bumptech.glide.Glide; import com.bumptech.glide.load.engine.DiskCacheStrategy; import com.bumptech.glide.request.animation.GlideAnimation; import com.bumptech.glide.request.target.SimpleTarget; import com.soundcloud.android.crop.Crop; import org.thoughtcrime.securesms.components.PushRecipientsPanel; import org.thoughtcrime.securesms.components.PushRecipientsPanel.RecipientsPanelChangedListener; import org.thoughtcrime.securesms.contacts.RecipientsEditor; import org.thoughtcrime.securesms.contacts.avatars.ContactColors; import org.thoughtcrime.securesms.contacts.avatars.ContactPhotoFactory; import 
org.thoughtcrime.securesms.crypto.MasterSecret; import org.thoughtcrime.securesms.database.DatabaseFactory; import org.thoughtcrime.securesms.database.GroupDatabase; import org.thoughtcrime.securesms.database.GroupDatabase.GroupRecord; import org.thoughtcrime.securesms.database.NotInDirectoryException; import org.thoughtcrime.securesms.database.TextSecureDirectory; import org.thoughtcrime.securesms.database.ThreadDatabase; import org.thoughtcrime.securesms.groups.GroupManager; import org.thoughtcrime.securesms.groups.GroupManager.GroupActionResult; import org.thoughtcrime.securesms.mms.RoundedCorners; import org.thoughtcrime.securesms.recipients.Recipient; import org.thoughtcrime.securesms.recipients.RecipientFactory; import org.thoughtcrime.securesms.recipients.Recipients; import org.thoughtcrime.securesms.util.BitmapUtil; import org.thoughtcrime.securesms.util.DynamicLanguage; import org.thoughtcrime.securesms.util.DynamicTheme; import org.thoughtcrime.securesms.util.GroupUtil; import org.thoughtcrime.securesms.util.task.ProgressDialogAsyncTask; import org.thoughtcrime.securesms.util.SelectedRecipientsAdapter; import org.thoughtcrime.securesms.util.SelectedRecipientsAdapter.OnRecipientDeletedListener; import org.thoughtcrime.securesms.util.TextSecurePreferences; import org.thoughtcrime.securesms.util.Util; import org.thoughtcrime.securesms.util.ViewUtil; import org.whispersystems.libsignal.util.guava.Optional; import org.whispersystems.signalservice.api.util.InvalidNumberException; import java.io.File; import java.io.IOException; import java.util.Collection; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Set; /** * Activity to create and update groups * * @author Jake McGinty */ public class GroupCreateActivity extends PassphraseRequiredActionBarActivity implements OnRecipientDeletedListener, RecipientsPanelChangedListener { private final static String TAG = GroupCreateActivity.class.getSimpleName(); public static 
final String GROUP_RECIPIENT_EXTRA = "group_recipient"; public static final String GROUP_THREAD_EXTRA = "group_thread"; private final DynamicTheme dynamicTheme = new DynamicTheme(); private final DynamicLanguage dynamicLanguage = new DynamicLanguage(); private static final int PICK_CONTACT = 1; public static final int AVATAR_SIZE = 210; private EditText groupName; private ListView lv; private ImageView avatar; private TextView creatingText; private MasterSecret masterSecret; private Bitmap avatarBmp; @NonNull private Optional<GroupData> groupToUpdate = Optional.absent(); @Override protected void onPreCreate() { dynamicTheme.onCreate(this); dynamicLanguage.onCreate(this); } @Override protected void onCreate(Bundle state, @NonNull MasterSecret masterSecret) { this.masterSecret = masterSecret; setContentView(R.layout.group_create_activity); //noinspection ConstantConditions getSupportActionBar().setDisplayHomeAsUpEnabled(true); initializeResources(); initializeExistingGroup(); } @Override public void onResume() { super.onResume(); dynamicTheme.onResume(this); dynamicLanguage.onResume(this); updateViewState(); } private boolean isSignalGroup() { return TextSecurePreferences.isPushRegistered(this) && !getAdapter().hasNonPushMembers(); } private void disableSignalGroupViews(int reasonResId) { View pushDisabled = findViewById(R.id.push_disabled); pushDisabled.setVisibility(View.VISIBLE); ((TextView) findViewById(R.id.push_disabled_reason)).setText(reasonResId); avatar.setEnabled(false); groupName.setEnabled(false); } private void enableSignalGroupViews() { findViewById(R.id.push_disabled).setVisibility(View.GONE); avatar.setEnabled(true); groupName.setEnabled(true); } @SuppressWarnings("ConstantConditions") private void updateViewState() { if (!TextSecurePreferences.isPushRegistered(this)) { disableSignalGroupViews(R.string.GroupCreateActivity_youre_not_registered_for_signal); getSupportActionBar().setTitle(R.string.GroupCreateActivity_actionbar_mms_title); } else if 
(getAdapter().hasNonPushMembers()) { disableSignalGroupViews(R.string.GroupCreateActivity_contacts_dont_support_push); getSupportActionBar().setTitle(R.string.GroupCreateActivity_actionbar_mms_title); } else { enableSignalGroupViews(); getSupportActionBar().setTitle(groupToUpdate.isPresent() ? R.string.GroupCreateActivity_actionbar_edit_title : R.string.GroupCreateActivity_actionbar_title); } } private static boolean isActiveInDirectory(Context context, Recipient recipient) { try { return TextSecureDirectory.getInstance(context) .isSecureTextSupported(Util.canonicalizeNumber(context, recipient.getNumber())); } catch (NotInDirectoryException | InvalidNumberException e) { return false; } } private void addSelectedContacts(@NonNull Recipient... recipients) { new AddMembersTask(this).execute(recipients); } private void addSelectedContacts(@NonNull Collection<Recipient> recipients) { addSelectedContacts(recipients.toArray(new Recipient[recipients.size()])); } private void initializeResources() { RecipientsEditor recipientsEditor = ViewUtil.findById(this, R.id.recipients_text); PushRecipientsPanel recipientsPanel = ViewUtil.findById(this, R.id.recipients); lv = ViewUtil.findById(this, R.id.selected_contacts_list); avatar = ViewUtil.findById(this, R.id.avatar); groupName = ViewUtil.findById(this, R.id.group_name); creatingText = ViewUtil.findById(this, R.id.creating_group_text); SelectedRecipientsAdapter adapter = new SelectedRecipientsAdapter(this); adapter.setOnRecipientDeletedListener(this); lv.setAdapter(adapter); recipientsEditor.setHint(R.string.recipients_panel__add_members); recipientsPanel.setPanelChangeListener(this); findViewById(R.id.contacts_button).setOnClickListener(new AddRecipientButtonListener()); avatar.setImageDrawable(ContactPhotoFactory.getDefaultGroupPhoto() .asDrawable(this, ContactColors.UNKNOWN_COLOR.toConversationColor(this))); avatar.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View view) { 
Crop.pickImage(GroupCreateActivity.this); } }); } private void initializeExistingGroup() { final String encodedGroupId = RecipientFactory.getRecipientForId(this, getIntent().getLongExtra(GROUP_RECIPIENT_EXTRA, -1), true) .getNumber(); byte[] groupId; try { groupId = GroupUtil.getDecodedId(encodedGroupId); } catch (IOException ioe) { Log.w(TAG, "Couldn't decode the encoded groupId passed in via intent", ioe); groupId = null; } if (groupId != null) { new FillExistingGroupInfoAsyncTask(this).execute(groupId); } } @Override public boolean onPrepareOptionsMenu(Menu menu) { MenuInflater inflater = this.getMenuInflater(); menu.clear(); inflater.inflate(R.menu.group_create, menu); super.onPrepareOptionsMenu(menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { super.onOptionsItemSelected(item); switch (item.getItemId()) { case android.R.id.home: finish(); return true; case R.id.menu_create_group: if (groupToUpdate.isPresent()) handleGroupUpdate(); else handleGroupCreate(); return true; } return false; } @Override public void onRecipientDeleted(Recipient recipient) { getAdapter().remove(recipient); updateViewState(); } @Override public void onRecipientsPanelUpdate(Recipients recipients) { if (recipients != null) addSelectedContacts(recipients.getRecipientsList()); } private void handleGroupCreate() { if (getAdapter().getCount() < 1) { Log.i(TAG, getString(R.string.GroupCreateActivity_contacts_no_members)); Toast.makeText(getApplicationContext(), R.string.GroupCreateActivity_contacts_no_members, Toast.LENGTH_SHORT).show(); return; } if (isSignalGroup()) { new CreateSignalGroupTask(this, masterSecret, avatarBmp, getGroupName(), getAdapter().getRecipients()).execute(); } else { new CreateMmsGroupTask(this, getAdapter().getRecipients()).execute(); } } private void handleGroupUpdate() { new UpdateSignalGroupTask(this, masterSecret, groupToUpdate.get().id, avatarBmp, getGroupName(), getAdapter().getRecipients()).execute(); } private void 
handleOpenConversation(long threadId, Recipients recipients) { Intent intent = new Intent(this, ConversationActivity.class); intent.putExtra(ConversationActivity.THREAD_ID_EXTRA, threadId); intent.putExtra(ConversationActivity.DISTRIBUTION_TYPE_EXTRA, ThreadDatabase.DistributionTypes.DEFAULT); intent.putExtra(ConversationActivity.RECIPIENTS_EXTRA, recipients.getIds()); startActivity(intent); finish(); } private SelectedRecipientsAdapter getAdapter() { return (SelectedRecipientsAdapter)lv.getAdapter(); } private @Nullable String getGroupName() { return groupName.getText() != null ? groupName.getText().toString() : null; } @Override public void onActivityResult(int reqCode, int resultCode, final Intent data) { super.onActivityResult(reqCode, resultCode, data); Uri outputFile = Uri.fromFile(new File(getCacheDir(), "cropped")); if (data == null || resultCode != Activity.RESULT_OK) return; switch (reqCode) { case PICK_CONTACT: List<String> selected = data.getStringArrayListExtra("contacts"); for (String contact : selected) { final Recipient recipient = RecipientFactory.getRecipientsFromString(this, contact, false).getPrimaryRecipient(); if (recipient != null) addSelectedContacts(recipient); } break; case Crop.REQUEST_PICK: new Crop(data.getData()).output(outputFile).asSquare().start(this); break; case Crop.REQUEST_CROP: Glide.with(this).load(Crop.getOutput(data)).asBitmap() .skipMemoryCache(true) .diskCacheStrategy(DiskCacheStrategy.NONE) .centerCrop().override(AVATAR_SIZE, AVATAR_SIZE) .into(new SimpleTarget<Bitmap>() { @Override public void onResourceReady(Bitmap resource, GlideAnimation<? 
super Bitmap> glideAnimation) { setAvatar(Crop.getOutput(data), resource); } }); } } private class AddRecipientButtonListener implements View.OnClickListener { @Override public void onClick(View v) { Intent intent = new Intent(GroupCreateActivity.this, PushContactSelectionActivity.class); if (groupToUpdate.isPresent()) intent.putExtra(ContactSelectionListFragment.DISPLAY_MODE, ContactSelectionListFragment.DISPLAY_MODE_PUSH_ONLY); startActivityForResult(intent, PICK_CONTACT); } } private static class CreateMmsGroupTask extends AsyncTask<Void,Void,Long> { private GroupCreateActivity activity; private Set<Recipient> members; public CreateMmsGroupTask(GroupCreateActivity activity, Set<Recipient> members) { this.activity = activity; this.members = members; } @Override protected Long doInBackground(Void... avoid) { Recipients recipients = RecipientFactory.getRecipientsFor(activity, members, false); return DatabaseFactory.getThreadDatabase(activity) .getThreadIdFor(recipients, ThreadDatabase.DistributionTypes.CONVERSATION); } @Override protected void onPostExecute(Long resultThread) { if (resultThread > -1) { activity.handleOpenConversation(resultThread, RecipientFactory.getRecipientsFor(activity, members, true)); } else { Toast.makeText(activity, R.string.GroupCreateActivity_contacts_mms_exception, Toast.LENGTH_LONG).show(); activity.finish(); } } @Override protected void onProgressUpdate(Void... 
values) { super.onProgressUpdate(values); } } private abstract static class SignalGroupTask extends AsyncTask<Void,Void,Optional<GroupActionResult>> { protected GroupCreateActivity activity; protected MasterSecret masterSecret; protected Bitmap avatar; protected Set<Recipient> members; protected String name; public SignalGroupTask(GroupCreateActivity activity, MasterSecret masterSecret, Bitmap avatar, String name, Set<Recipient> members) { this.activity = activity; this.masterSecret = masterSecret; this.avatar = avatar; this.name = name; this.members = members; } @Override protected void onPreExecute() { activity.findViewById(R.id.group_details_layout).setVisibility(View.GONE); activity.findViewById(R.id.creating_group_layout).setVisibility(View.VISIBLE); activity.findViewById(R.id.menu_create_group).setVisibility(View.GONE); final int titleResId = activity.groupToUpdate.isPresent() ? R.string.GroupCreateActivity_updating_group : R.string.GroupCreateActivity_creating_group; activity.creatingText.setText(activity.getString(titleResId, activity.getGroupName())); } @Override protected void onPostExecute(Optional<GroupActionResult> groupActionResultOptional) { if (activity.isFinishing()) return; activity.findViewById(R.id.group_details_layout).setVisibility(View.VISIBLE); activity.findViewById(R.id.creating_group_layout).setVisibility(View.GONE); activity.findViewById(R.id.menu_create_group).setVisibility(View.VISIBLE); } } private static class CreateSignalGroupTask extends SignalGroupTask { public CreateSignalGroupTask(GroupCreateActivity activity, MasterSecret masterSecret, Bitmap avatar, String name, Set<Recipient> members) { super(activity, masterSecret, avatar, name, members); } @Override protected Optional<GroupActionResult> doInBackground(Void... 
aVoid) { try { return Optional.of(GroupManager.createGroup(activity, masterSecret, members, avatar, name)); } catch (InvalidNumberException e) { return Optional.absent(); } } @Override protected void onPostExecute(Optional<GroupActionResult> result) { if (result.isPresent() && result.get().getThreadId() > -1) { if (!activity.isFinishing()) { activity.handleOpenConversation(result.get().getThreadId(), result.get().getGroupRecipient()); } } else { super.onPostExecute(result); Toast.makeText(activity.getApplicationContext(), R.string.GroupCreateActivity_contacts_invalid_number, Toast.LENGTH_LONG).show(); } } } private static class UpdateSignalGroupTask extends SignalGroupTask { private byte[] groupId; public UpdateSignalGroupTask(GroupCreateActivity activity, MasterSecret masterSecret, byte[] groupId, Bitmap avatar, String name, Set<Recipient> members) { super(activity, masterSecret, avatar, name, members); this.groupId = groupId; } @Override protected Optional<GroupActionResult> doInBackground(Void... 
aVoid) { try { return Optional.of(GroupManager.updateGroup(activity, masterSecret, groupId, members, avatar, name)); } catch (InvalidNumberException e) { return Optional.absent(); } } @Override protected void onPostExecute(Optional<GroupActionResult> result) { if (result.isPresent() && result.get().getThreadId() > -1) { if (!activity.isFinishing()) { Intent intent = activity.getIntent(); intent.putExtra(GROUP_THREAD_EXTRA, result.get().getThreadId()); intent.putExtra(GROUP_RECIPIENT_EXTRA, result.get().getGroupRecipient().getIds()); activity.setResult(RESULT_OK, intent); activity.finish(); } } else { super.onPostExecute(result); Toast.makeText(activity.getApplicationContext(), R.string.GroupCreateActivity_contacts_invalid_number, Toast.LENGTH_LONG).show(); } } } private static class AddMembersTask extends AsyncTask<Recipient,Void,List<AddMembersTask.Result>> { static class Result { Optional<Recipient> recipient; boolean isPush; String reason; public Result(@Nullable Recipient recipient, boolean isPush, @Nullable String reason) { this.recipient = Optional.fromNullable(recipient); this.isPush = isPush; this.reason = reason; } } private GroupCreateActivity activity; private boolean failIfNotPush; public AddMembersTask(@NonNull GroupCreateActivity activity) { this.activity = activity; this.failIfNotPush = activity.groupToUpdate.isPresent(); } @Override protected List<Result> doInBackground(Recipient... 
recipients) { final List<Result> results = new LinkedList<>(); for (Recipient recipient : recipients) { boolean isPush = isActiveInDirectory(activity, recipient); String recipientE164 = null; try { recipientE164 = Util.canonicalizeNumber(activity, recipient.getNumber()); } catch (InvalidNumberException ine) { /* do nothing */ } if (failIfNotPush && !isPush) { results.add(new Result(null, false, activity.getString(R.string.GroupCreateActivity_cannot_add_non_push_to_existing_group, recipient.getNumber()))); } else if (TextUtils.equals(TextSecurePreferences.getLocalNumber(activity), recipientE164)) { results.add(new Result(null, false, activity.getString(R.string.GroupCreateActivity_youre_already_in_the_group))); } else { results.add(new Result(recipient, isPush, null)); } } return results; } @Override protected void onPostExecute(List<Result> results) { if (activity.isFinishing()) return; for (Result result : results) { if (result.recipient.isPresent()) { activity.getAdapter().add(result.recipient.get(), result.isPush); } else { Toast.makeText(activity, result.reason, Toast.LENGTH_SHORT).show(); } } activity.updateViewState(); } } private static class FillExistingGroupInfoAsyncTask extends ProgressDialogAsyncTask<byte[],Void,Optional<GroupData>> { private GroupCreateActivity activity; public FillExistingGroupInfoAsyncTask(GroupCreateActivity activity) { super(activity, R.string.GroupCreateActivity_loading_group_details, R.string.please_wait); this.activity = activity; } @Override protected Optional<GroupData> doInBackground(byte[]... 
groupIds) { final GroupDatabase db = DatabaseFactory.getGroupDatabase(activity); final Recipients recipients = db.getGroupMembers(groupIds[0], false); final GroupRecord group = db.getGroup(groupIds[0]); final Set<Recipient> existingContacts = new HashSet<>(recipients.getRecipientsList().size()); existingContacts.addAll(recipients.getRecipientsList()); if (group != null) { return Optional.of(new GroupData(groupIds[0], existingContacts, BitmapUtil.fromByteArray(group.getAvatar()), group.getAvatar(), group.getTitle())); } else { return Optional.absent(); } } @Override protected void onPostExecute(Optional<GroupData> group) { super.onPostExecute(group); if (group.isPresent() && !activity.isFinishing()) { activity.groupToUpdate = group; activity.groupName.setText(group.get().name); if (group.get().avatarBmp != null) { activity.setAvatar(group.get().avatarBytes, group.get().avatarBmp); } SelectedRecipientsAdapter adapter = new SelectedRecipientsAdapter(activity, group.get().recipients); adapter.setOnRecipientDeletedListener(activity); activity.lv.setAdapter(adapter); activity.updateViewState(); } } } private <T> void setAvatar(T model, Bitmap bitmap) { avatarBmp = bitmap; Glide.with(this) .load(model) .skipMemoryCache(true) .diskCacheStrategy(DiskCacheStrategy.NONE) .transform(new RoundedCorners(this, avatar.getWidth() / 2)) .into(avatar); } private static class GroupData { byte[] id; Set<Recipient> recipients; Bitmap avatarBmp; byte[] avatarBytes; String name; public GroupData(byte[] id, Set<Recipient> recipients, Bitmap avatarBmp, byte[] avatarBytes, String name) { this.id = id; this.recipients = recipients; this.avatarBmp = avatarBmp; this.avatarBytes = avatarBytes; this.name = name; } } }
gpl-3.0
mkrajcov/testsuite
src/main/java/org/jboss/hal/testsuite/fragment/NavigationFragment.java
828
package org.jboss.hal.testsuite.fragment; import org.jboss.arquillian.graphene.Graphene; import org.openqa.selenium.By; import org.openqa.selenium.WebElement; /** * Created by jcechace on 15/02/14. */ public class NavigationFragment extends BaseFragment { public <T extends NavigationSectionFragment> T getSectionBy(By selector, Class<T> clazz) { WebElement sectionRoot = root.findElement(selector); T section = Graphene.createPageFragment(clazz, sectionRoot); return section; } public <T extends NavigationSectionFragment> T getSection(String id, Class<T> clazz) { By selector = By.id(id); return getSectionBy(selector, clazz); } public NavigationSectionFragment getSection(String id) { return getSection(id, NavigationSectionFragment.class); } }
lgpl-2.1
beobal/cassandra
src/java/org/apache/cassandra/db/streaming/CassandraCompressedStreamReader.java
5795
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.cassandra.db.streaming; import java.io.IOException; import com.google.common.base.Throwables; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.cassandra.db.ColumnFamilyStore; import org.apache.cassandra.io.compress.CompressionMetadata; import org.apache.cassandra.io.sstable.SSTableMultiWriter; import org.apache.cassandra.io.sstable.format.SSTableReader; import org.apache.cassandra.io.util.DataInputPlus; import org.apache.cassandra.io.util.TrackedDataInputPlus; import org.apache.cassandra.streaming.ProgressInfo; import org.apache.cassandra.streaming.StreamSession; import org.apache.cassandra.streaming.messages.StreamMessageHeader; import org.apache.cassandra.utils.ChecksumType; import org.apache.cassandra.utils.FBUtilities; import static org.apache.cassandra.utils.Throwables.extractIOExceptionCause; /** * CassandraStreamReader that reads from streamed compressed SSTable */ public class CassandraCompressedStreamReader extends CassandraStreamReader { private static final Logger logger = LoggerFactory.getLogger(CassandraCompressedStreamReader.class); protected final CompressionInfo compressionInfo; public CassandraCompressedStreamReader(StreamMessageHeader 
header, CassandraStreamHeader streamHeader, StreamSession session) { super(header, streamHeader, session); this.compressionInfo = streamHeader.compressionInfo; } /** * @return SSTable transferred * @throws java.io.IOException if reading the remote sstable fails. Will throw an RTE if local write fails. */ @Override @SuppressWarnings("resource") // input needs to remain open, streams on top of it can't be closed public SSTableMultiWriter read(DataInputPlus inputPlus) throws IOException { long totalSize = totalSize(); ColumnFamilyStore cfs = ColumnFamilyStore.getIfExists(tableId); if (cfs == null) { // schema was dropped during streaming throw new IOException("CF " + tableId + " was dropped during streaming"); } logger.debug("[Stream #{}] Start receiving file #{} from {}, repairedAt = {}, size = {}, ks = '{}', pendingRepair = '{}', table = '{}'.", session.planId(), fileSeqNum, session.peer, repairedAt, totalSize, cfs.keyspace.getName(), pendingRepair, cfs.getTableName()); StreamDeserializer deserializer = null; SSTableMultiWriter writer = null; try (CompressedInputStream cis = new CompressedInputStream(inputPlus, compressionInfo, ChecksumType.CRC32, cfs::getCrcCheckChance)) { TrackedDataInputPlus in = new TrackedDataInputPlus(cis); deserializer = new StreamDeserializer(cfs.metadata(), in, inputVersion, getHeader(cfs.metadata())); writer = createWriter(cfs, totalSize, repairedAt, pendingRepair, format); String filename = writer.getFilename(); int sectionIdx = 0; for (SSTableReader.PartitionPositionBounds section : sections) { assert cis.chunkBytesRead() <= totalSize; long sectionLength = section.upperPosition - section.lowerPosition; logger.trace("[Stream #{}] Reading section {} with length {} from stream.", session.planId(), sectionIdx++, sectionLength); // skip to beginning of section inside chunk cis.position(section.lowerPosition); in.reset(0); while (in.getBytesRead() < sectionLength) { writePartition(deserializer, writer); // when compressed, report total bytes 
of compressed chunks read since remoteFile.size is the sum of chunks transferred session.progress(filename + '-' + fileSeqNum, ProgressInfo.Direction.IN, cis.chunkBytesRead(), totalSize); } assert in.getBytesRead() == sectionLength; } logger.trace("[Stream #{}] Finished receiving file #{} from {} readBytes = {}, totalSize = {}", session.planId(), fileSeqNum, session.peer, FBUtilities.prettyPrintMemory(cis.chunkBytesRead()), FBUtilities.prettyPrintMemory(totalSize)); return writer; } catch (Throwable e) { Object partitionKey = deserializer != null ? deserializer.partitionKey() : ""; logger.warn("[Stream {}] Error while reading partition {} from stream on ks='{}' and table='{}'.", session.planId(), partitionKey, cfs.keyspace.getName(), cfs.getTableName()); if (writer != null) { writer.abort(e); } if (extractIOExceptionCause(e).isPresent()) throw e; throw Throwables.propagate(e); } } @Override protected long totalSize() { return compressionInfo.getTotalSize(); } }
apache-2.0
wisgood/hive
ql/src/java/org/apache/hadoop/hive/ql/exec/spark/session/SparkSessionManager.java
2479
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.spark.session; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.ql.metadata.HiveException; /** * Defines interface for managing multiple SparkSessions in Hive when multiple users * are executing queries simultaneously on Spark execution engine. */ public interface SparkSessionManager { /** * Initialize based on given configuration. * * @param hiveConf */ void setup(HiveConf hiveConf) throws HiveException; /** * Get a valid SparkSession. First try to check if existing session is reusable * based on the given <i>conf</i>. If not release <i>existingSession</i> and return * a new session based on session manager criteria and <i>conf</i>. * * @param existingSession Existing session (can be null) * @param conf * @param doOpen Should the session be opened before returning? * @return SparkSession */ SparkSession getSession(SparkSession existingSession, HiveConf conf, boolean doOpen) throws HiveException; /** * Return the given <i>sparkSession</i> to pool. This is used when the client * still holds references to session and may want to reuse it in future. 
* When client wants to reuse the session, it should pass the it <i>getSession</i> method. */ void returnSession(SparkSession sparkSession) throws HiveException; /** * Close the given session and return it to pool. This is used when the client * no longer needs a SparkSession. */ void closeSession(SparkSession sparkSession) throws HiveException; /** * Shutdown the session manager. Also closing up SparkSessions in pool. */ void shutdown(); }
apache-2.0
jomarko/kie-wb-common
kie-wb-common-services/kie-wb-common-verifier/kie-wb-common-verifier-service/src/test/java/org/kie/workbench/common/services/verifier/service/VerifierWebWorkerRegistryTest.java
2692
/* * Copyright 2018 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.workbench.common.services.verifier.service; import java.util.ArrayList; import javax.enterprise.inject.Instance; import org.junit.Test; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; public class VerifierWebWorkerRegistryTest { @Test public void noWebWorkerProviders() throws Exception { final Instance verifierWebWorkerProviders = mock(Instance.class); doReturn(new ArrayList<>().iterator()).when(verifierWebWorkerProviders).iterator(); assertFalse(new VerifierWebWorkerRegistry(verifierWebWorkerProviders).get("something").isPresent()); } @Test public void wrongId() throws Exception { final Instance verifierWebWorkerProviders = mock(Instance.class); final ArrayList<Object> webWorkerProviders = new ArrayList<>(); webWorkerProviders.add(getWebWorkerProvider("somethingElse")); doReturn(webWorkerProviders.iterator()).when(verifierWebWorkerProviders).iterator(); assertFalse(new VerifierWebWorkerRegistry(verifierWebWorkerProviders).get("something").isPresent()); } @Test public void correctId() throws Exception { final Instance verifierWebWorkerProviders = mock(Instance.class); final ArrayList<Object> webWorkerProviders = new ArrayList<>(); webWorkerProviders.add(getWebWorkerProvider("something")); 
doReturn(webWorkerProviders.iterator()).when(verifierWebWorkerProviders).iterator(); assertTrue(new VerifierWebWorkerRegistry(verifierWebWorkerProviders).get("something").isPresent()); } private VerifierWebWorkerProvider getWebWorkerProvider(final String id) { return new VerifierWebWorkerProvider() { @Override public String getId() { return id; } @Override public String getWebWorker(String fileName) throws Exception { return ""; } }; } }
apache-2.0
vineetgarg02/hive
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/model/MWMPool.java
2459
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hive.metastore.model;

import java.util.Set;

/**
 * Metastore model object for a workload-management (WM) pool belonging to a
 * {@link MWMResourcePlan}. Plain bean with a no-arg constructor plus
 * getters/setters for every field — presumably persisted via DataNucleus/JDO
 * like the other {@code M*} metastore model classes; verify against the JDO
 * mapping before renaming any field.
 */
public class MWMPool {
  // Owning resource plan.
  private MWMResourcePlan resourcePlan;
  // Pool path within the plan (path semantics defined by the metastore schema).
  private String path;
  // Fraction value for this pool's allocation — assumes range is enforced by callers; TODO confirm.
  private Double allocFraction;
  // Query parallelism setting for the pool.
  private Integer queryParallelism;
  // Scheduling policy name; opaque to this class.
  private String schedulingPolicy;
  // Triggers attached to this pool. NOTE: not set by either constructor —
  // must be populated via setTriggers (presumably by the persistence layer).
  private Set<MWMTrigger> triggers;

  /** No-arg constructor, as required by reflection-based persistence. */
  public MWMPool() {}

  /**
   * Creates a pool with everything except {@link #triggers}, which is left
   * null and must be attached via {@link #setTriggers(Set)}.
   */
  public MWMPool(MWMResourcePlan resourcePlan, String path, Double allocFraction,
      Integer queryParallelism, String schedulingPolicy) {
    this.resourcePlan = resourcePlan;
    this.path = path;
    this.allocFraction = allocFraction;
    this.queryParallelism = queryParallelism;
    this.schedulingPolicy = schedulingPolicy;
  }

  public MWMResourcePlan getResourcePlan() {
    return resourcePlan;
  }

  public void setResourcePlan(MWMResourcePlan resourcePlan) {
    this.resourcePlan = resourcePlan;
  }

  public String getPath() {
    return path;
  }

  public void setPath(String path) {
    this.path = path;
  }

  public Double getAllocFraction() {
    return allocFraction;
  }

  public void setAllocFraction(Double allocFraction) {
    this.allocFraction = allocFraction;
  }

  public Integer getQueryParallelism() {
    return queryParallelism;
  }

  public void setQueryParallelism(Integer queryParallelism) {
    this.queryParallelism = queryParallelism;
  }

  public String getSchedulingPolicy() {
    return schedulingPolicy;
  }

  public void setSchedulingPolicy(String schedulingPolicy) {
    this.schedulingPolicy = schedulingPolicy;
  }

  public Set<MWMTrigger> getTriggers() {
    return triggers;
  }

  public void setTriggers(Set<MWMTrigger> triggers) {
    this.triggers = triggers;
  }
}
apache-2.0
goodwinnk/intellij-community
plugins/javaFX/src/org/jetbrains/plugins/javaFX/packaging/JavaFxApplicationIconsDialog.java
4187
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.plugins.javaFX.packaging;

import com.intellij.openapi.fileChooser.FileChooserDescriptorFactory;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.ui.DialogWrapper;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TextFieldWithBrowseButton;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VfsUtil;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import java.io.File;

/**
 * Modal dialog for choosing per-OS icon files (Linux *.png, Mac *.icns,
 * Windows *.ico) for a packaged JavaFX application. OK validates that every
 * non-blank path points to an existing file inside the project content.
 */
public class JavaFxApplicationIconsDialog extends DialogWrapper {
  // Form-backed panel; assigned in createCenterPanel() (called from init()).
  private Panel myPanel;
  private final Project myProject;

  /**
   * @param parent  component the dialog is shown over
   * @param icons   previously configured icon paths, or null for none
   * @param project project used for path validation and file choosers
   */
  public JavaFxApplicationIconsDialog(JComponent parent, JavaFxApplicationIcons icons, Project project) {
    super(parent, true);
    myProject = project;
    setTitle("Choose Application Icons");
    // init() invokes createCenterPanel(), which assigns myPanel — it must run
    // before the field accesses below.
    init();
    if (icons != null) {
      JavaFxArtifactPropertiesEditor.setSystemDependentPath(myPanel.myLinuxIconPath, icons.getLinuxIcon());
      JavaFxArtifactPropertiesEditor.setSystemDependentPath(myPanel.myMacIconPath, icons.getMacIcon());
      JavaFxArtifactPropertiesEditor.setSystemDependentPath(myPanel.myWindowsIconPath, icons.getWindowsIcon());
    }
    addBrowseListener(myPanel.myLinuxIconPath, "png", project);
    addBrowseListener(myPanel.myMacIconPath, "icns", project);
    addBrowseListener(myPanel.myWindowsIconPath, "ico", project);
  }

  @Nullable
  @Override
  protected JComponent createCenterPanel() {
    myPanel = new Panel();
    return myPanel.myWholePanel;
  }

  /** Validates each configured icon path; keeps the dialog open on the first failure. */
  @Override
  protected void doOKAction() {
    final ProjectFileIndex index = ProjectRootManager.getInstance(myProject).getFileIndex();
    if (!isValidPath(myPanel.myLinuxIconPath, index, "Linux")) return;
    if (!isValidPath(myPanel.myMacIconPath, index, "Mac")) return;
    if (!isValidPath(myPanel.myWindowsIconPath, index, "Windows")) return;
    super.doOKAction();
  }

  /** Collects the dialog's current state as system-independent paths. */
  @NotNull
  public JavaFxApplicationIcons getIcons() {
    JavaFxApplicationIcons icons = new JavaFxApplicationIcons();
    icons.setLinuxIcon(JavaFxArtifactPropertiesEditor.getSystemIndependentPath(myPanel.myLinuxIconPath));
    icons.setMacIcon(JavaFxArtifactPropertiesEditor.getSystemIndependentPath(myPanel.myMacIconPath));
    icons.setWindowsIcon(JavaFxArtifactPropertiesEditor.getSystemIndependentPath(myPanel.myWindowsIconPath));
    return icons;
  }

  /**
   * Returns true for a blank path (icon is optional) or for an existing,
   * non-directory file inside the project content; otherwise shows an error
   * dialog naming the OS and returns false.
   */
  private boolean isValidPath(TextFieldWithBrowseButton withBrowseButton, ProjectFileIndex index, String osName) {
    final String text = withBrowseButton.getText();
    if (StringUtil.isEmptyOrSpaces(text)) return true;
    final VirtualFile virtualFile = VfsUtil.findFileByIoFile(new File(text.trim()), false);
    if (virtualFile == null || !virtualFile.exists() || virtualFile.isDirectory()) {
      Messages.showErrorDialog(myPanel.myWholePanel, osName + " icon file should exist");
      return false;
    }
    if (!index.isInContent(virtualFile)) {
      Messages.showErrorDialog(myPanel.myWholePanel, osName + " icon file should be inside the project content");
      return false;
    }
    return true;
  }

  /** Attaches a file chooser that only accepts files with the given extension. */
  private static void addBrowseListener(TextFieldWithBrowseButton withBrowseButton, String extension, Project project) {
    withBrowseButton.addBrowseFolderListener("Choose Icon File",
                                             "Select icon file (*." + extension + ") for the resulting application",
                                             project,
                                             FileChooserDescriptorFactory.createSingleFileNoJarsDescriptor()
                                               .withFileFilter(file -> extension.equalsIgnoreCase(file.getExtension())));
  }

  // Fields are presumably injected from an associated GUI-designer .form file
  // — do not rename without updating the form (TODO confirm).
  protected static class Panel {
    JPanel myWholePanel;
    private TextFieldWithBrowseButton myLinuxIconPath;
    private TextFieldWithBrowseButton myMacIconPath;
    private TextFieldWithBrowseButton myWindowsIconPath;
  }
}
apache-2.0
dabaitu/presto
presto-hive/src/main/java/com/facebook/presto/hive/ColumnarBinaryHiveRecordCursorProvider.java
2913
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.hive; import com.facebook.presto.spi.ConnectorSession; import com.facebook.presto.spi.RecordCursor; import com.facebook.presto.spi.predicate.TupleDomain; import com.facebook.presto.spi.type.TypeManager; import com.google.inject.Inject; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.serde2.columnar.BytesRefArrayWritable; import org.apache.hadoop.hive.serde2.columnar.LazyBinaryColumnarSerDe; import org.apache.hadoop.mapred.RecordReader; import org.joda.time.DateTimeZone; import java.util.List; import java.util.Optional; import java.util.Properties; import static com.facebook.presto.hive.HiveUtil.isDeserializerClass; import static java.util.Objects.requireNonNull; public class ColumnarBinaryHiveRecordCursorProvider implements HiveRecordCursorProvider { private final HdfsEnvironment hdfsEnvironment; @Inject public ColumnarBinaryHiveRecordCursorProvider(HdfsEnvironment hdfsEnvironment) { this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null"); } @Override public Optional<RecordCursor> createRecordCursor( String clientId, Configuration configuration, ConnectorSession session, Path path, long start, long length, Properties schema, List<HiveColumnHandle> columns, TupleDomain<HiveColumnHandle> effectivePredicate, DateTimeZone hiveStorageTimeZone, TypeManager typeManager) { if (!isDeserializerClass(schema, 
LazyBinaryColumnarSerDe.class)) { return Optional.empty(); } RecordReader<?, ?> recordReader = hdfsEnvironment.doAs(session.getUser(), () -> HiveUtil.createRecordReader(configuration, path, start, length, schema, columns)); return Optional.of(new ColumnarBinaryHiveRecordCursor<>( bytesRecordReader(recordReader), length, schema, columns, typeManager)); } @SuppressWarnings("unchecked") private static RecordReader<?, BytesRefArrayWritable> bytesRecordReader(RecordReader<?, ?> recordReader) { return (RecordReader<?, BytesRefArrayWritable>) recordReader; } }
apache-2.0
HJ-StevenSun/Weex-TestDemo
weex-dev/android/playground/app/src/androidTest/java/com/alibaba/weex/uitest/TC_AG/AG_Border_Input_Border_Left_Color.java
986
package com.alibaba.weex.uitest.TC_AG;

import com.alibaba.weex.WXPageActivity;
import com.alibaba.weex.util.TestFlow;

import java.util.HashMap;

import org.junit.Before;
import org.junit.Test;

/**
 * UI test flow for the AG_Border component's "input border-left-color" case:
 * clicks two color values and takes a screenshot after each.
 */
public class AG_Border_Input_Border_Left_Color extends TestFlow {

  public AG_Border_Input_Border_Left_Color() {
    super(WXPageActivity.class);
  }

  @Before
  public void setUp() throws InterruptedException {
    super.setUp();
    // FIX: was `new <String, Object> HashMap()` — explicit type arguments on a
    // non-generic constructor plus a raw type; now a properly parameterized map.
    HashMap<String, Object> testMap = new HashMap<>();
    // Key spelling ("testComponet") kept as-is — it is what TestFlow reads.
    testMap.put("testComponet", "AG_Border");
    testMap.put("testChildCaseInit", "AG_Border_Input_Border_Left_Color");
    // Steps were previously built with double-brace initialization (anonymous
    // HashMap subclasses that capture the enclosing test instance); a plain
    // helper builds equivalent maps without that cost.
    testMap.put("step1", step("#FF0000", "AG_Border_Input_Border_Left_Color_01_#FF0000"));
    testMap.put("step2", step("#00FFFF", "AG_Border_Input_Border_Left_Color_02_#00FFFF"));
    super.setTestMap(testMap);
  }

  /** Builds one test step: click the given value, then take the named screenshot. */
  private static HashMap<String, Object> step(String click, String screenshot) {
    HashMap<String, Object> step = new HashMap<>();
    step.put("click", click);
    step.put("screenshot", screenshot);
    return step;
  }

  @Test
  public void doTest() {
    super.testByTestMap();
  }
}
apache-2.0
JasonHZXie/dubbo
dubbo-compatible/src/main/java/com/alibaba/dubbo/common/serialize/ObjectInput.java
947
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.alibaba.dubbo.common.serialize;

/**
 * Backward-compatibility alias for the legacy {@code com.alibaba.dubbo}
 * namespace: it declares no members of its own and simply extends the
 * relocated interface so code compiled against the old API keeps working.
 *
 * @deprecated use {@link org.apache.dubbo.common.serialize.ObjectInput} directly.
 */
@Deprecated
public interface ObjectInput extends org.apache.dubbo.common.serialize.ObjectInput {
}
apache-2.0
vineetgarg02/hive
ql/src/java/org/apache/hadoop/hive/ql/optimizer/calcite/rules/HiveExceptRewriteRule.java
16536
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.optimizer.calcite.rules; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashSet; import java.util.List; import java.util.Set; import org.apache.calcite.plan.RelOptCluster; import org.apache.calcite.plan.RelOptRule; import org.apache.calcite.plan.RelOptRuleCall; import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.core.AggregateCall; import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rel.type.RelDataTypeField; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexInputRef; import org.apache.calcite.rex.RexLiteral; import org.apache.calcite.rex.RexNode; import org.apache.calcite.util.ImmutableBitSet; import org.apache.hadoop.hive.ql.optimizer.calcite.CalciteSemanticException; import org.apache.hadoop.hive.ql.optimizer.calcite.HiveCalciteUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.TraitsUtil; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveAggregate; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExcept; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveFilter; import 
org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveProject; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveRelNode; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveTableFunctionScan; import org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveUnion; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.SqlFunctionConverter; import org.apache.hadoop.hive.ql.optimizer.calcite.translator.TypeConverter; import org.apache.hadoop.hive.ql.parse.SemanticException; import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Function; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList.Builder; import com.google.common.collect.Lists; /** * Planner rule that rewrite * {@link org.apache.hadoop.hive.ql.optimizer.calcite.reloperators.HiveExcept} * Note, we only have 2 branches because of except's semantic. * R1 Except(all) R2 * R1 introduce VCol ‘2’, R2 introduce VCol ‘1’ * R3 = GB(R1 on all keys + VCol + count(VCol) as c) union all GB(R2 on all keys + VCol + count(VCol) as c) * R4 = GB(R3 on all keys + sum(c) as a + sum(VCol*c) as b) we * have m+n=a, 2m+n=b where m is the #row in R1 and n is the #row in R2 then * m=b-a, n=2a-b, m-n=2b-3a * if it is except (distinct) * then R5 = Fil (b-a&gt;0 &amp;&amp; 2a-b=0) R6 = select only keys from R5 * else R5 = Fil (2b-3a&gt; 0) R6 = UDTF (R5) which will explode the tuples based on 2b-3a. * Note that NULLs are handled the same as other values. Please refer to the test cases. 
*/ public class HiveExceptRewriteRule extends RelOptRule { public static final HiveExceptRewriteRule INSTANCE = new HiveExceptRewriteRule(); protected static final Logger LOG = LoggerFactory.getLogger(HiveIntersectRewriteRule.class); // ~ Constructors ----------------------------------------------------------- private HiveExceptRewriteRule() { super(operand(HiveExcept.class, any())); } // ~ Methods ---------------------------------------------------------------- public void onMatch(RelOptRuleCall call) { final HiveExcept hiveExcept = call.rel(0); final RelOptCluster cluster = hiveExcept.getCluster(); final RexBuilder rexBuilder = cluster.getRexBuilder(); Builder<RelNode> bldr = new ImmutableList.Builder<RelNode>(); // 1st level GB: create a GB(R1 on all keys + VCol + count() as c) for each // branch try { bldr.add(createFirstGB(hiveExcept.getInputs().get(0), true, cluster, rexBuilder)); bldr.add(createFirstGB(hiveExcept.getInputs().get(1), false, cluster, rexBuilder)); } catch (CalciteSemanticException e) { LOG.debug(e.toString()); throw new RuntimeException(e); } // create a union above all the branches // the schema of union looks like this // all keys + VCol + c HiveRelNode union = new HiveUnion(cluster, TraitsUtil.getDefaultTraitSet(cluster), bldr.build()); // 2nd level GB: create a GB (all keys + sum(c) as a + sum(VCol*c) as b) for // each branch final List<RexNode> gbChildProjLst = Lists.newArrayList(); final List<Integer> groupSetPositions = Lists.newArrayList(); int unionColumnSize = union.getRowType().getFieldList().size(); for (int cInd = 0; cInd < unionColumnSize; cInd++) { gbChildProjLst.add(rexBuilder.makeInputRef(union, cInd)); // the last 2 columns are VCol and c if (cInd < unionColumnSize - 2) { groupSetPositions.add(cInd); } } try { gbChildProjLst.add(multiply(rexBuilder.makeInputRef(union, unionColumnSize - 2), rexBuilder.makeInputRef(union, unionColumnSize - 1), cluster, rexBuilder)); } catch (CalciteSemanticException e) { 
LOG.debug(e.toString()); throw new RuntimeException(e); } RelNode gbInputRel = null; try { // Here we create a project for the following reasons: // (1) GBy only accepts arg as a position of the input, however, we need to sum on VCol*c // (2) This can better reuse the function createSingleArgAggCall. gbInputRel = HiveProject.create(union, gbChildProjLst, null); } catch (CalciteSemanticException e) { LOG.debug(e.toString()); throw new RuntimeException(e); } // gbInputRel's schema is like this // all keys + VCol + c + VCol*c List<AggregateCall> aggregateCalls = Lists.newArrayList(); RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()); // sum(c) AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster, TypeInfoFactory.longTypeInfo, unionColumnSize - 1, aggFnRetType); aggregateCalls.add(aggregateCall); // sum(VCol*c) aggregateCall = HiveCalciteUtil.createSingleArgAggCall("sum", cluster, TypeInfoFactory.longTypeInfo, unionColumnSize, aggFnRetType); aggregateCalls.add(aggregateCall); final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions); HiveRelNode aggregateRel = new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel, groupSet, null, aggregateCalls); // the schema after GB is like this // all keys + sum(c) as a + sum(VCol*c) as b // the column size is the same as unionColumnSize; // (1) for except distinct add a filter (b-a>0 && 2a-b=0) // i.e., a > 0 && 2a = b // then add the project // (2) for except all add a project to change it to // (2b-3a) + all keys // then add the UDTF if (!hiveExcept.all) { RelNode filterRel = null; try { filterRel = new HiveFilter(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), aggregateRel, makeFilterExprForExceptDistinct(aggregateRel, unionColumnSize, cluster, rexBuilder)); } catch (CalciteSemanticException e) { LOG.debug(e.toString()); throw new RuntimeException(e); } // finally add a project to project out the 
last 2 columns Set<Integer> projectOutColumnPositions = new HashSet<>(); projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 2); projectOutColumnPositions.add(filterRel.getRowType().getFieldList().size() - 1); try { call.transformTo(HiveCalciteUtil.createProjectWithoutColumn(filterRel, projectOutColumnPositions)); } catch (CalciteSemanticException e) { LOG.debug(e.toString()); throw new RuntimeException(e); } } else { List<RexNode> originalInputRefs = Lists.transform(aggregateRel.getRowType().getFieldList(), new Function<RelDataTypeField, RexNode>() { @Override public RexNode apply(RelDataTypeField input) { return new RexInputRef(input.getIndex(), input.getType()); } }); List<RexNode> copyInputRefs = new ArrayList<>(); try { copyInputRefs.add(makeExprForExceptAll(aggregateRel, unionColumnSize, cluster, rexBuilder)); } catch (CalciteSemanticException e) { LOG.debug(e.toString()); throw new RuntimeException(e); } for (int i = 0; i < originalInputRefs.size() - 2; i++) { copyInputRefs.add(originalInputRefs.get(i)); } RelNode srcRel = null; try { srcRel = HiveProject.create(aggregateRel, copyInputRefs, null); HiveTableFunctionScan udtf = HiveCalciteUtil.createUDTFForSetOp(cluster, srcRel); // finally add a project to project out the 1st columns Set<Integer> projectOutColumnPositions = new HashSet<>(); projectOutColumnPositions.add(0); call.transformTo(HiveCalciteUtil .createProjectWithoutColumn(udtf, projectOutColumnPositions)); } catch (SemanticException e) { LOG.debug(e.toString()); throw new RuntimeException(e); } } } private RelNode createFirstGB(RelNode input, boolean left, RelOptCluster cluster, RexBuilder rexBuilder) throws CalciteSemanticException { final List<RexNode> gbChildProjLst = Lists.newArrayList(); final List<Integer> groupSetPositions = Lists.newArrayList(); for (int cInd = 0; cInd < input.getRowType().getFieldList().size(); cInd++) { gbChildProjLst.add(rexBuilder.makeInputRef(input, cInd)); groupSetPositions.add(cInd); } if 
(left) { gbChildProjLst.add(rexBuilder.makeBigintLiteral(new BigDecimal(2))); } else { gbChildProjLst.add(rexBuilder.makeBigintLiteral(new BigDecimal(1))); } // also add the last VCol groupSetPositions.add(input.getRowType().getFieldList().size()); // create the project before GB RelNode gbInputRel = HiveProject.create(input, gbChildProjLst, null); // groupSetPosition includes all the positions final ImmutableBitSet groupSet = ImmutableBitSet.of(groupSetPositions); List<AggregateCall> aggregateCalls = Lists.newArrayList(); RelDataType aggFnRetType = TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()); AggregateCall aggregateCall = HiveCalciteUtil.createSingleArgAggCall("count", cluster, TypeInfoFactory.longTypeInfo, input.getRowType().getFieldList().size(), aggFnRetType); aggregateCalls.add(aggregateCall); return new HiveAggregate(cluster, cluster.traitSetOf(HiveRelNode.CONVENTION), gbInputRel, groupSet, null, aggregateCalls); } private RexNode multiply(RexNode r1, RexNode r2, RelOptCluster cluster, RexBuilder rexBuilder) throws CalciteSemanticException { List<RexNode> childRexNodeLst = new ArrayList<RexNode>(); childRexNodeLst.add(r1); childRexNodeLst.add(r2); ImmutableList.Builder<RelDataType> calciteArgTypesBldr = new ImmutableList.Builder<RelDataType>(); calciteArgTypesBldr.add(TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory())); calciteArgTypesBldr.add(TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory())); return rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("*", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), true, false), childRexNodeLst); } private RexNode makeFilterExprForExceptDistinct(HiveRelNode input, int columnSize, RelOptCluster cluster, RexBuilder rexBuilder) throws CalciteSemanticException { List<RexNode> childRexNodeLst = new ArrayList<RexNode>(); RexInputRef a = rexBuilder.makeInputRef(input, 
columnSize - 2); RexLiteral zero = rexBuilder.makeBigintLiteral(new BigDecimal(0)); childRexNodeLst.add(a); childRexNodeLst.add(zero); ImmutableList.Builder<RelDataType> calciteArgTypesBldr = new ImmutableList.Builder<RelDataType>(); calciteArgTypesBldr.add(TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory())); calciteArgTypesBldr.add(TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory())); // a>0 RexNode aMorethanZero = rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn(">", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); childRexNodeLst = new ArrayList<RexNode>(); RexLiteral two = rexBuilder.makeBigintLiteral(new BigDecimal(2)); childRexNodeLst.add(a); childRexNodeLst.add(two); // 2*a RexNode twoa = rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("*", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); childRexNodeLst = new ArrayList<RexNode>(); RexInputRef b = rexBuilder.makeInputRef(input, columnSize - 1); childRexNodeLst.add(twoa); childRexNodeLst.add(b); // 2a=b RexNode twoaEqualTob = rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("=", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); childRexNodeLst = new ArrayList<RexNode>(); childRexNodeLst.add(aMorethanZero); childRexNodeLst.add(twoaEqualTob); // a>0 && 2a=b return rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("and", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); } private RexNode makeExprForExceptAll(HiveRelNode input, int columnSize, RelOptCluster cluster, RexBuilder rexBuilder) throws CalciteSemanticException { List<RexNode> childRexNodeLst = new ArrayList<RexNode>(); 
ImmutableList.Builder<RelDataType> calciteArgTypesBldr = new ImmutableList.Builder<RelDataType>(); calciteArgTypesBldr.add(TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory())); calciteArgTypesBldr.add(TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory())); RexInputRef a = rexBuilder.makeInputRef(input, columnSize - 2); RexLiteral three = rexBuilder.makeBigintLiteral(new BigDecimal(3)); childRexNodeLst.add(three); childRexNodeLst.add(a); RexNode threea = rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("*", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); RexLiteral two = rexBuilder.makeBigintLiteral(new BigDecimal(2)); RexInputRef b = rexBuilder.makeInputRef(input, columnSize - 1); // 2*b childRexNodeLst = new ArrayList<RexNode>(); childRexNodeLst.add(two); childRexNodeLst.add(b); RexNode twob = rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("*", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); // 2b-3a childRexNodeLst = new ArrayList<RexNode>(); childRexNodeLst.add(twob); childRexNodeLst.add(threea); return rexBuilder.makeCall( SqlFunctionConverter.getCalciteFn("-", calciteArgTypesBldr.build(), TypeConverter.convert(TypeInfoFactory.longTypeInfo, cluster.getTypeFactory()), false, false), childRexNodeLst); } }
apache-2.0
smgoller/geode
geode-core/src/main/java/org/apache/geode/internal/cache/backup/BackupWriterFactory.java
2459
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.internal.cache.backup; import static org.apache.geode.internal.cache.backup.AbstractBackupWriterConfig.TIMESTAMP; import java.io.File; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Properties; enum BackupWriterFactory { FILE_SYSTEM("FileSystem") { @Override BackupWriter createWriter(Properties properties, String memberId) { // Remove chars that are illegal in Windows paths memberId = memberId.replaceAll("[:()]", "-"); FileSystemBackupWriterConfig config = new FileSystemBackupWriterConfig(properties); Path targetDir = Paths.get(config.getTargetDirectory()) .resolve(properties.getProperty(TIMESTAMP)).resolve(memberId); String baselineDir = config.getBaselineDirectory(); FileSystemIncrementalBackupLocation incrementalBackupLocation = null; if (baselineDir != null) { File baseline = new File(baselineDir).getAbsoluteFile(); incrementalBackupLocation = new FileSystemIncrementalBackupLocation(baseline, memberId); } return new FileSystemBackupWriter(targetDir, incrementalBackupLocation); } }; private final String type; BackupWriterFactory(String type) { this.type = type; } String getType() { return type; } static BackupWriterFactory getFactoryForType(String 
type) { for (BackupWriterFactory factory : BackupWriterFactory.values()) { if (factory.type.equals(type)) { return factory; } } throw new IllegalArgumentException("No factory exists for type '" + type + "'"); } abstract BackupWriter createWriter(Properties properties, String memberId); }
apache-2.0
shaotuanchen/sunflower_exp
tools/source/gcc-4.2.4/libjava/classpath/javax/print/attribute/standard/PrinterResolution.java
3768
/* PrinterResolution.java -- Copyright (C) 2004, 2005 Free Software Foundation, Inc. This file is part of GNU Classpath. GNU Classpath is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2, or (at your option) any later version. GNU Classpath is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with GNU Classpath; see the file COPYING. If not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Linking this library statically or dynamically with other modules is making a combined work based on this library. Thus, the terms and conditions of the GNU General Public License cover the whole combination. As a special exception, the copyright holders of this library give you permission to link this library with independent modules to produce an executable, regardless of the license terms of these independent modules, and to copy and distribute the resulting executable under terms of your choice, provided that you also meet, for each linked independent module, the terms and conditions of the license of that module. An independent module is a module which is not derived from or based on this library. If you modify this library, you may extend this exception to your version of the library, but you are not obligated to do so. If you do not wish to do so, delete this exception statement from your version. 
*/ package javax.print.attribute.standard; import javax.print.attribute.DocAttribute; import javax.print.attribute.PrintJobAttribute; import javax.print.attribute.PrintRequestAttribute; import javax.print.attribute.ResolutionSyntax; /** * The <code>PrinterResolution</code> printing attribute specifies a * resolution supported by a print service or to be used by a print job. * <p> * <b>IPP Compatibility:</b> PrinterResolution is an IPP 1.1 attribute. * </p> * * @author Michael Koch (konqueror@gmx.de) * @author Wolfgang Baer (WBaer@gmx.de) */ public final class PrinterResolution extends ResolutionSyntax implements DocAttribute, PrintJobAttribute, PrintRequestAttribute { private static final long serialVersionUID = 13090306561090558L; /** * Creates a <code>PrinterResolution</code> object with the given cross * feed and feed resolutions. * * @param crossFeedResolution the cross feed resolution * @param feedResolution the feed resolution * @param units the unit to use (e.g. {@link #DPCM} or {@link #DPI}) * * @exception IllegalArgumentException if either parameter is &lt; 1 */ public PrinterResolution(int crossFeedResolution, int feedResolution, int units) { super(crossFeedResolution, feedResolution, units); } /** * Tests if the given object is equal to this object. * * @param obj the object to test * * @return <code>true</code> if both objects are equal, * <code>false</code> otherwise. */ public boolean equals(Object obj) { if(! (obj instanceof PrinterResolution)) return false; return super.equals(obj); } /** * Returns category of this class. * * @return The class <code>PrinterResolution</code> itself. */ public Class getCategory() { return PrinterResolution.class; } /** * Returns the name of this attribute. * * @return The name "printer-resolution". */ public String getName() { return "printer-resolution"; } }
bsd-3-clause
ngraczewski/modules
sms/src/main/java/org/motechproject/sms/templates/Templates.java
1789
package org.motechproject.sms.templates;

import org.motechproject.server.config.SettingsFacade;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * A lookup collection of SMS templates, keyed by template name.
 */
public class Templates {

    /** Lookup table mapping a template's name to the template itself. */
    private Map<String, Template> templates = new HashMap<>();

    /**
     * Builds the lookup table from the given templates. Each template first
     * reads its default values from the MOTECH configuration system through
     * the provided settings facade, then is indexed under its name.
     * @param settingFacade the settings facade from which default values will be read
     * @param templates the collection of templates from which this object will be built
     */
    public Templates(SettingsFacade settingFacade, List<Template> templates) {
        for (Template candidate : templates) {
            candidate.readDefaults(settingFacade);
            this.templates.put(candidate.getName(), candidate);
        }
    }

    /**
     * Looks up a template by its name.
     * @param name the name of the template
     * @return the matching template, or null if no such template exists
     */
    public Template getTemplate(String name) {
        return templates.get(name);
    }

    /**
     * Converts this collection into a form suitable for the UI.
     * @return a map from template name to the simplified form of that template
     * @see TemplateForWeb
     */
    public Map<String, TemplateForWeb> templatesForWeb() {
        Map<String, TemplateForWeb> webViews = new HashMap<>();
        for (Map.Entry<String, Template> mapping : templates.entrySet()) {
            String templateName = mapping.getKey();
            Template template = mapping.getValue();
            webViews.put(templateName, new TemplateForWeb(template));
        }
        return webViews;
    }
}
bsd-3-clause
scheib/chromium
remoting/android/java/src/org/chromium/chromoting/SimulatedTouchInputStrategy.java
7754
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.chromoting;

import android.content.Context;
import android.graphics.PointF;
import android.os.SystemClock;
import android.view.MotionEvent;
import android.view.ViewConfiguration;

/**
 * This class receives local touch events and translates them into the appropriate mouse based
 * events for the remote host.  The net result is that the local input method feels like a touch
 * interface but the remote host will be given mouse events to inject.
 */
public class SimulatedTouchInputStrategy implements InputStrategyInterface {
    /** Used to adjust the size of the region used for double tap detection. */
    private static final float DOUBLE_TAP_SLOP_SCALE_FACTOR = 0.25f;

    private final RenderData mRenderData;
    private final InputEventSender mInjector;

    /**
     * Stores the time of the most recent left button single tap processed.
     */
    private long mLastTapTimeInMs;

    /**
     * Stores the position of the last left button single tap processed.
     */
    private PointF mLastTapPoint;

    /**
     * The maximum distance, in pixels, between two points in order for them to be considered a
     * double tap gesture.
     */
    private final int mDoubleTapSlopSquareInPx;

    /**
     * The interval, measured in milliseconds, in which two consecutive left button taps must
     * occur in order to be considered a double tap gesture.
     */
    private final long mDoubleTapDurationInMs;

    /** Mouse-button currently held down, or BUTTON_UNDEFINED otherwise. */
    private int mHeldButton = InputStub.BUTTON_UNDEFINED;

    public SimulatedTouchInputStrategy(
            RenderData renderData, InputEventSender injector, Context context) {
        Preconditions.notNull(injector);
        mRenderData = renderData;
        mInjector = injector;

        mDoubleTapDurationInMs = ViewConfiguration.getDoubleTapTimeout();

        // In order to detect whether the user is attempting to double tap a target, we define a
        // region around the first point within which the second tap must occur.  The standard way
        // to do this in an Android UI (meaning a UI comprised of UI elements which conform to the
        // visual guidelines for the platform which are 'Touch Friendly') is to use the
        // getScaledDoubleTapSlop() value for checking this distance (or use a GestureDetector).
        // Our scenario is a bit different as our UI consists of an image of a remote machine where
        // the UI elements were probably designed for mouse and keyboard (meaning smaller targets)
        // and the image itself which can be zoomed to change the size of the targets.  This adds up
        // to the target to be invoked often being either larger or much smaller than a standard
        // Android UI element.  Our approach to this problem is to make double-tap detection
        // consistent regardless of the zoom level or remote target size so that the user can rely
        // on their muscle memory when interacting with our UI.  With respect to the original
        // problem, getScaledDoubleTapSlop() gives a value which is optimized for an Android based
        // UI however this value is too large for interacting with remote elements in our app.
        // Our solution is to use the original value from getScaledDoubleTapSlop() (which includes
        // scaling to account for display differences between devices) and apply a fudge/scale
        // factor to make the interaction more intuitive and useful for our scenario.
        ViewConfiguration config = ViewConfiguration.get(context);
        int scaledDoubleTapSlopInPx = config.getScaledDoubleTapSlop();
        scaledDoubleTapSlopInPx = (int) (scaledDoubleTapSlopInPx * DOUBLE_TAP_SLOP_SCALE_FACTOR);
        // Squared so isDoubleTap() can compare against a squared distance and avoid a sqrt.
        mDoubleTapSlopSquareInPx = scaledDoubleTapSlopInPx * scaledDoubleTapSlopInPx;

        mRenderData.drawCursor = false;
    }

    /**
     * Translates a local tap into a remote mouse click.  Left-button taps additionally track
     * tap time/position so a quick second tap can be snapped to the first tap's location.
     */
    @Override
    public boolean onTap(int button) {
        PointF currentTapPoint = getCursorPosition();
        if (button == InputStub.BUTTON_LEFT) {
            // Left clicks are handled a little differently than the events for other buttons.
            // This is needed because translating touch events to mouse events has a problem with
            // location consistency for double clicks.  If you take the center location of each tap
            // and inject them as mouse clicks, the distance between those two points will often
            // cause the remote OS to recognize the gesture as two distinct clicks instead of a
            // double click.  In order to increase the success rate of double taps/clicks, we
            // squirrel away the time and coordinates of each single tap and if we detect the user
            // attempting a double tap, we use the original event's location for that second tap.
            long tapInterval = SystemClock.uptimeMillis() - mLastTapTimeInMs;
            if (isDoubleTap(currentTapPoint.x, currentTapPoint.y, tapInterval)) {
                // Second tap of a double tap: reuse the first tap's location and clear state so a
                // third tap is not treated as another double tap.
                currentTapPoint = new PointF(mLastTapPoint.x, mLastTapPoint.y);
                mLastTapPoint = null;
                mLastTapTimeInMs = 0;
            } else {
                // First tap: remember where and when it happened for double-tap detection.
                mLastTapPoint = currentTapPoint;
                mLastTapTimeInMs = SystemClock.uptimeMillis();
            }
        } else {
            // Non-left buttons reset double-tap tracking.
            mLastTapPoint = null;
            mLastTapTimeInMs = 0;
        }

        mInjector.sendMouseClick(currentTapPoint, button);
        return true;
    }

    /** Starts a press-and-hold by sending a mouse-down and remembering the held button. */
    @Override
    public boolean onPressAndHold(int button) {
        mInjector.sendMouseDown(getCursorPosition(), button);
        mHeldButton = button;
        return true;
    }

    @Override
    public void onScroll(float distanceX, float distanceY) {
        mInjector.sendReverseMouseWheelEvent(distanceX, distanceY);
    }

    /** Releases a held button (if any) when the touch sequence ends. */
    @Override
    public void onMotionEvent(MotionEvent event) {
        if (event.getActionMasked() == MotionEvent.ACTION_UP
                && mHeldButton != InputStub.BUTTON_UNDEFINED) {
            mInjector.sendMouseUp(getCursorPosition(), mHeldButton);
            mHeldButton = InputStub.BUTTON_UNDEFINED;
        }
    }

    @Override
    public void injectCursorMoveEvent(int x, int y) {
        mInjector.sendCursorMove(x, y);
    }

    @Override
    public @RenderStub.InputFeedbackType int getShortPressFeedbackType() {
        return RenderStub.InputFeedbackType.SHORT_TOUCH_ANIMATION;
    }

    @Override
    public @RenderStub.InputFeedbackType int getLongPressFeedbackType() {
        return RenderStub.InputFeedbackType.LONG_TOUCH_ANIMATION;
    }

    @Override
    public boolean isIndirectInputMode() {
        return false;
    }

    private PointF getCursorPosition() {
        return mRenderData.getCursorPosition();
    }

    /**
     * Returns true when a tap at (currentX, currentY), occurring {@code tapInterval} ms after the
     * previous left tap, should be treated as the second tap of a double tap.
     */
    private boolean isDoubleTap(float currentX, float currentY, long tapInterval) {
        if (tapInterval > mDoubleTapDurationInMs || mLastTapPoint == null) {
            return false;
        }

        // Convert the image based coordinates back to screen coordinates so the user experiences
        // consistent double tap behavior regardless of zoom level.
        float[] currentValues = {currentX, currentY};
        float[] previousValues = {mLastTapPoint.x, mLastTapPoint.y};

        mRenderData.transform.mapPoints(currentValues);
        mRenderData.transform.mapPoints(previousValues);

        int deltaX = (int) (currentValues[0] - previousValues[0]);
        int deltaY = (int) (currentValues[1] - previousValues[1]);
        // Squared-distance comparison against the precomputed squared slop.
        return ((deltaX * deltaX + deltaY * deltaY) <= mDoubleTapSlopSquareInPx);
    }
}
bsd-3-clause
futuristixa/aima-java
aimax-osm/src/main/java/aimax/osm/data/impl/DefaultWayRef.java
653
package aimax.osm.data.impl;

import aimax.osm.data.entities.MapWay;
import aimax.osm.data.entities.WayRef;

/**
 * Represents a reference to a way. The node index indicates,
 * where the node maintaining the reference occurs in the way definition.
 * Instances are immutable: both fields are assigned once at construction.
 * @author Ruediger Lunde
 */
public class DefaultWayRef implements WayRef {
	/** The referenced way; never reassigned after construction. */
	private final MapWay way;
	/** Index of the referencing node within the way's node sequence. */
	private final short nodeIdx;

	/**
	 * Creates a reference to <code>way</code> for the node at position
	 * <code>nodeIdx</code> in the way's node sequence.
	 */
	public DefaultWayRef(MapWay way, short nodeIdx) {
		this.way = way;
		this.nodeIdx = nodeIdx;
	}

	/** {@inheritDoc} */
	@Override
	public MapWay getWay() {
		return way;
	}

	/** {@inheritDoc} */
	@Override
	public short getNodeIdx() {
		return nodeIdx;
	}
}
mit
clinique/openhab2
bundles/org.openhab.binding.enocean/src/main/java/org/openhab/binding/enocean/internal/eep/F6_10/F6_10_01.java
2602
/** * Copyright (c) 2010-2019 Contributors to the openHAB project * * See the NOTICE file(s) distributed with this work for additional * information. * * This program and the accompanying materials are made available under the * terms of the Eclipse Public License 2.0 which is available at * http://www.eclipse.org/legal/epl-2.0 * * SPDX-License-Identifier: EPL-2.0 */ package org.openhab.binding.enocean.internal.eep.F6_10; import static org.openhab.binding.enocean.internal.EnOceanBindingConstants.*; import org.eclipse.smarthome.config.core.Configuration; import org.eclipse.smarthome.core.library.types.OpenClosedType; import org.eclipse.smarthome.core.library.types.StringType; import org.eclipse.smarthome.core.types.State; import org.eclipse.smarthome.core.types.UnDefType; import org.openhab.binding.enocean.internal.eep.Base._RPSMessage; import org.openhab.binding.enocean.internal.messages.ERP1Message; /** * * @author Daniel Weber - Initial contribution */ public class F6_10_01 extends _RPSMessage { public final byte Closed = 0x0F; // xxxx1111 public final byte Open1 = 0x0E; // xxxx1110 public final byte Open2 = 0x0C; // xxxx1100 public final byte Tilted = 0x0D; // xxxx1101 public F6_10_01() { super(); } public F6_10_01(ERP1Message packet) { super(packet); } @Override protected State convertToStateImpl(String channelId, String channelTypeId, State currentState, Configuration config) { if (!isValid()) { return UnDefType.UNDEF; } byte data = (byte) (bytes[0] & 0x0F); // todo localization switch (channelId) { case CHANNEL_WINDOWHANDLESTATE: if (data == Closed) { return new StringType("CLOSED"); } else if (data == Tilted) { return new StringType("TILTED"); } else if (data == Open1 || data == Open2) { return new StringType("OPEN"); } case CHANNEL_CONTACT: if (data == Closed) { return OpenClosedType.CLOSED; } else if (data == Tilted) { return OpenClosedType.OPEN; } else if (data == Open1 || data == Open2) { return OpenClosedType.OPEN; } } return UnDefType.UNDEF; } 
@Override protected boolean validateData(byte[] bytes) { return super.validateData(bytes) && getBit(bytes[0], 6) && getBit(bytes[0], 3) && getBit(bytes[0], 2); } }
epl-1.0
lizhekang/TCJDK
sources/openjdk8/jdk/src/share/classes/sun/security/krb5/internal/ccache/CCacheInputStream.java
14295
/*
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.  Oracle designates this
 * particular file as subject to the "Classpath" exception as provided
 * by Oracle in the LICENSE file that accompanied this code.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

/*
 *
 *  (C) Copyright IBM Corp. 1999 All Rights Reserved.
 *  Copyright 1997 The Open Group Research Institute.  All rights reserved.
 */

package sun.security.krb5.internal.ccache;

import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import sun.misc.IOUtils;
import sun.security.krb5.*;
import sun.security.krb5.internal.*;
import sun.security.krb5.internal.util.KrbDataInputStream;

/**
 * This class extends KrbDataInputStream. It is used for parsing FCC-format
 * data from file to memory.
 *
 * @author Yanni Zhang
 *
 */
public class CCacheInputStream extends KrbDataInputStream implements FileCCacheConstants {

    /*
     * FCC version 2 contains type information for principals.  FCC
     * version 1 does not.
     *
     * FCC version 3 contains keyblock encryption type information, and is
     * architecture independent.  Previous versions are not.
     *
     * The code will accept version 1, 2, and 3 ccaches, and depending
     * what KRB5_FCC_DEFAULT_FVNO is set to, it will create version 1, 2,
     * or 3 FCC caches.
     *
     * The default credentials cache should be type 3 for now (see
     * init_ctx.c).
     */
    /* V4 of the credentials cache format allows for header tags */

    private static boolean DEBUG = Krb5.DEBUG;

    public CCacheInputStream(InputStream is){
        super(is);
    }

    /* Read tag field introduced in KRB5_FCC_FVNO_4 */
    // this needs to be public for Kinit.
    public Tag readTag() throws IOException {
        char[] buf = new char[1024];
        int len;
        int tag = -1;
        int taglen;
        Integer time_offset = null;
        Integer usec_offset = null;

        // Total length of the tagged-field area; each entry is a 2-byte tag,
        // a 2-byte length, then that many bytes of payload.
        len = read(2);
        if (len < 0) {
            throw new IOException("stop.");
        }
        if (len > buf.length) {
            throw new IOException("Invalid tag length.");
        }
        while (len > 0) {
            tag    = read(2);
            taglen = read(2);
            switch (tag) {
            case FCC_TAG_DELTATIME:
                // Offset between client and KDC clocks, seconds + microseconds.
                time_offset = new Integer(read(4));
                usec_offset = new Integer(read(4));
                break;
            default:
            }
            len = len - (4 + taglen);
        }
        return new Tag(len, tag, time_offset, usec_offset);
    }

    /*
     * In file-based credential cache, the realm name is stored as part of
     * principal name at the first place.
     */
    // made public for KinitOptions to call directly
    public PrincipalName readPrincipal(int version) throws IOException, RealmException {
        int type, length, namelength, kret;
        String[] pname = null;
        String realm;
        /* Read principal type */
        if (version == KRB5_FCC_FVNO_1) {
            // Version 1 records carry no type field.
            type = KRB5_NT_UNKNOWN;
        } else {
            type = read(4);
        }
        length = readLength4();
        List<String> result = new ArrayList<String>();
        /*
         * DCE includes the principal's realm in the count; the new format
         * does not.
         */
        if (version == KRB5_FCC_FVNO_1)
            length--;
        // Note: reads length+1 components — the realm plus the name parts.
        for (int i = 0; i <= length; i++) {
            namelength = readLength4();
            byte[] bytes = IOUtils.readFully(this, namelength, true);
            result.add(new String(bytes));
        }
        if (result.isEmpty()) {
            throw new IOException("No realm or principal");
        }
        if (isRealm(result.get(0))) {
            realm = result.remove(0);
            if (result.isEmpty()) {
                throw new IOException("No principal name components");
            }
            return new PrincipalName(
                    type,
                    result.toArray(new String[result.size()]),
                    new Realm(realm));
        }
        // First component does not look like a realm: fall back to the
        // default realm, or give up (null) when none is configured.
        try {
            return new PrincipalName(
                    type,
                    result.toArray(new String[result.size()]),
                    Realm.getDefault());
        } catch (RealmException re) {
            return null;
        }
    }

    /*
     * In practice, a realm is named by uppercasing the DNS domain name. we currently
     * rely on this to determine if the string within the principal identifier is realm
     * name.
     *
     */
    boolean isRealm(String str) {
        try {
            Realm r = new Realm(str);
        } catch (Exception e) {
            return false;
        }
        StringTokenizer st = new StringTokenizer(str, ".");
        String s;
        while (st.hasMoreTokens()) {
            s = st.nextToken();
            for (int i = 0; i < s.length(); i++) {
                // Reject components containing characters >= 141 —
                // presumably a heuristic against non-ASCII hostname bytes.
                if (s.charAt(i) >= 141) {
                    return false;
                }
            }
        }
        return true;
    }

    // Reads an encryption key: 2-byte type (repeated in fvno 3), 4-byte
    // length, then the raw key bytes.
    EncryptionKey readKey(int version) throws IOException {
        int keyType, keyLen;
        keyType = read(2);
        if (version == KRB5_FCC_FVNO_3)
            read(2); /* keytype recorded twice in fvno 3 */
        keyLen = readLength4();
        byte[] bytes = IOUtils.readFully(this, keyLen, true);
        return new EncryptionKey(bytes, keyType, new Integer(version));
    }

    // Reads auth/start/end/renew-till times; stored as seconds, returned
    // in milliseconds.
    long[] readTimes() throws IOException {
        long[] times = new long[4];
        times[0] = (long)read(4) * 1000;
        times[1] = (long)read(4) * 1000;
        times[2] = (long)read(4) * 1000;
        times[3] = (long)read(4) * 1000;
        return times;
    }

    // Reads the single-byte "is skey" flag.
    boolean readskey() throws IOException {
        if (read() == 0) {
            return false;
        }
        else return true;
    }

    // Reads the client address list; null when absent or when an address of
    // unexpected length (not IPv4/IPv6) is encountered.
    HostAddress[] readAddr() throws IOException, KrbApErrException {
        int numAddrs, addrType, addrLength;
        numAddrs = readLength4();
        if (numAddrs > 0) {
            List<HostAddress> addrs = new ArrayList<>();
            for (int i = 0; i < numAddrs; i++) {
                addrType = read(2);
                addrLength = readLength4();
                if (!(addrLength == 4 || addrLength == 16)) {
                    if (DEBUG) {
                        System.out.println("Incorrect address format.");
                    }
                    return null;
                }
                byte[] result = new byte[addrLength];
                for (int j = 0; j < addrLength; j++)
                    result[j] = (byte)read(1);
                addrs.add(new HostAddress(addrType, result));
            }
            return addrs.toArray(new HostAddress[addrs.size()]);
        }
        return null;
    }

    // Reads the authorization-data entries; null when the list is empty.
    AuthorizationDataEntry[] readAuth() throws IOException {
        int num, adtype, adlength;
        num = readLength4();
        if (num > 0) {
            List<AuthorizationDataEntry> auData = new ArrayList<>();
            byte[] data = null;
            for (int i = 0; i < num; i++) {
                adtype = read(2);
                adlength = readLength4();
                data = IOUtils.readFully(this, adlength, true);
                auData.add(new AuthorizationDataEntry(adtype, data));
            }
            return auData.toArray(new AuthorizationDataEntry[auData.size()]);
        }
        else return null;
    }

    // Reads a length-prefixed byte blob; null when the length is zero.
    byte[] readData() throws IOException {
        int length;
        length = readLength4();
        if (length == 0) {
            return null;
        } else {
            return IOUtils.readFully(this, length, true);
        }
    }

    // Decodes the 32-bit ticket-flags word into the boolean array layout
    // expected by TicketFlags (index positions 1..11).
    boolean[] readFlags() throws IOException {
        boolean[] flags = new boolean[Krb5.TKT_OPTS_MAX+1];
        int ticketFlags;
        ticketFlags = read(4);
        if ((ticketFlags & 0x40000000) == TKT_FLG_FORWARDABLE)
            flags[1] = true;
        if ((ticketFlags & 0x20000000) == TKT_FLG_FORWARDED)
            flags[2] = true;
        if ((ticketFlags & 0x10000000) == TKT_FLG_PROXIABLE)
            flags[3] = true;
        if ((ticketFlags & 0x08000000) == TKT_FLG_PROXY)
            flags[4] = true;
        if ((ticketFlags & 0x04000000) == TKT_FLG_MAY_POSTDATE)
            flags[5] = true;
        if ((ticketFlags & 0x02000000) == TKT_FLG_POSTDATED)
            flags[6] = true;
        if ((ticketFlags & 0x01000000) == TKT_FLG_INVALID)
            flags[7] = true;
        if ((ticketFlags & 0x00800000) == TKT_FLG_RENEWABLE)
            flags[8] = true;
        if ((ticketFlags & 0x00400000) == TKT_FLG_INITIAL)
            flags[9] = true;
        if ((ticketFlags & 0x00200000) == TKT_FLG_PRE_AUTH)
            flags[10] = true;
        if ((ticketFlags & 0x00100000) == TKT_FLG_HW_AUTH)
            flags[11] = true;
        if (DEBUG) {
            String msg = ">>> CCacheInputStream: readFlags() ";
            if (flags[1] == true) {
                msg += " FORWARDABLE;";
            }
            if (flags[2] == true) {
                msg += " FORWARDED;";
            }
            if (flags[3] == true) {
                msg += " PROXIABLE;";
            }
            if (flags[4] == true) {
                msg += " PROXY;";
            }
            if (flags[5] == true) {
                msg += " MAY_POSTDATE;";
            }
            if (flags[6] == true) {
                msg += " POSTDATED;";
            }
            if (flags[7] == true) {
                msg += " INVALID;";
            }
            if (flags[8] == true) {
                msg += " RENEWABLE;";
            }
            if (flags[9] == true) {
                msg += " INITIAL;";
            }
            if (flags[10] == true) {
                msg += " PRE_AUTH;";
            }
            if (flags[11] == true) {
                msg += " HW_AUTH;";
            }
            System.out.println(msg);
        }
        return flags;
    }

    /**
     * Reads the next cred in stream.
     * @return the next cred, null if ticket or second_ticket unparseable.
     *
     * Note: MIT krb5 1.8.1 might generate a config entry with server principal
     * X-CACHECONF:/krb5_ccache_conf_data/fast_avail/krbtgt/REALM@REALM. The
     * entry is used by KDC to inform the client that it support certain
     * features. Its ticket is not a valid krb5 ticket and thus this method
     * returns null.
     */
    Credentials readCred(int version) throws IOException,RealmException, KrbApErrException, Asn1Exception {
        PrincipalName cpname = null;
        try {
            cpname = readPrincipal(version);
        } catch (Exception e) {
            // Do not return here. All data for this cred should be fully
            // consumed so that we can read the next one.
        }
        if (DEBUG) {
            System.out.println(">>>DEBUG <CCacheInputStream>  client principal is " + cpname);
        }
        PrincipalName spname = null;
        try {
            spname = readPrincipal(version);
        } catch (Exception e) {
            // same as above
        }
        if (DEBUG) {
            System.out.println(">>>DEBUG <CCacheInputStream> server principal is " + spname);
        }
        EncryptionKey key = readKey(version);
        if (DEBUG) {
            System.out.println(">>>DEBUG <CCacheInputStream> key type: " + key.getEType());
        }
        long times[] = readTimes();
        KerberosTime authtime = new KerberosTime(times[0]);
        // Zero on disk means "not set" for start and renew-till times.
        KerberosTime starttime =
                (times[1]==0) ? null : new KerberosTime(times[1]);
        KerberosTime endtime = new KerberosTime(times[2]);
        KerberosTime renewTill =
                (times[3]==0) ? null : new KerberosTime(times[3]);
        if (DEBUG) {
            System.out.println(">>>DEBUG <CCacheInputStream> auth time: " + authtime.toDate().toString());
            System.out.println(">>>DEBUG <CCacheInputStream> start time: " +
                    ((starttime==null)?"null":starttime.toDate().toString()));
            System.out.println(">>>DEBUG <CCacheInputStream> end time: " + endtime.toDate().toString());
            System.out.println(">>>DEBUG <CCacheInputStream> renew_till time: " +
                    ((renewTill==null)?"null":renewTill.toDate().toString()));
        }
        boolean skey = readskey();
        boolean flags[] = readFlags();
        TicketFlags tFlags = new TicketFlags(flags);
        HostAddress addr[] = readAddr();
        HostAddresses addrs = null;
        if (addr != null) {
            addrs = new HostAddresses(addr);
        }
        AuthorizationDataEntry[] auDataEntry = readAuth();
        AuthorizationData auData = null;
        if (auDataEntry != null) {
            auData = new AuthorizationData(auDataEntry);
        }
        byte[] ticketData = readData();
        byte[] ticketData2 = readData();

        // Skip this cred if either cpname or spname isn't created.
        if (cpname == null || spname == null) {
            return null;
        }

        try {
            return new Credentials(cpname, spname, key, authtime, starttime,
                    endtime, renewTill, skey, tFlags, addrs, auData,
                    ticketData != null ? new Ticket(ticketData) : null,
                    ticketData2 != null ? new Ticket(ticketData2) : null);
        } catch (Exception e) {     // If any of new Ticket(*) fails.
            return null;
        }
    }
}
gpl-2.0
erpcya/adempierePOS
base/src/org/compiere/model/ModelValidationEngine.java
28589
/******************************************************************************
 * Product: Adempiere ERP & CRM Smart Business Solution                       *
 * Copyright (C) 1999-2006 ComPiere, Inc. All Rights Reserved.                *
 * This program is free software; you can redistribute it and/or modify it    *
 * under the terms version 2 of the GNU General Public License as published   *
 * by the Free Software Foundation. This program is distributed in the hope   *
 * that it will be useful, but WITHOUT ANY WARRANTY; without even the implied *
 * warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.           *
 * See the GNU General Public License for more details.                       *
 * You should have received a copy of the GNU General Public License along    *
 * with this program; if not, write to the Free Software Foundation, Inc.,    *
 * 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.                     *
 * For the text or an alternative of this public license, you may reach us    *
 * ComPiere, Inc., 2620 Augustine Dr. #245, Santa Clara, CA 95054, USA        *
 * or via info@compiere.org or http://www.compiere.org/license.html           *
 *****************************************************************************/
package org.compiere.model;

import java.util.ArrayList;
import java.util.Hashtable;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.logging.Level;

import javax.script.ScriptEngine;

import org.adempiere.model.ImportValidator;
import org.adempiere.process.ImportProcess;
import org.compiere.acct.Fact;
import org.compiere.util.CLogger;
import org.compiere.util.Env;
import org.compiere.util.KeyNamePair;

/**
 * Model Validation Engine.
 * <p>
 * Singleton dispatcher that loads {@link ModelValidator} implementations (global ones from
 * AD_ModelValidator and per-client ones from AD_Client.ModelValidationClasses) and fires
 * model-change, document, accounting-fact and data-import events to the registered listeners,
 * including JSR-223 script rules.
 *
 * @author Jorg Janke
 * @version $Id: ModelValidationEngine.java,v 1.2 2006/07/30 00:58:38 jjanke Exp $
 *
 * @author Teo Sarca, SC ARHIPAC SERVICE SRL
 *         <li>FR [ 1670025 ] ModelValidator.afterLoadPreferences will be useful
 *         <li>BF [ 1679692 ] fireDocValidate doesn't treat exceptions as errors
 *         <li>FR [ 1724662 ] Support Email should contain model validators info
 *         <li>FR [ 2788276 ] Data Import Validator
 *             https://sourceforge.net/tracker/?func=detail&aid=2788276&group_id=176962&atid=879335
 *         <li>BF [ 2804135 ] Global FactsValidator are not invoked
 *             https://sourceforge.net/tracker/?func=detail&aid=2804135&group_id=176962&atid=879332
 *         <li>BF [ 2819617 ] NPE if script validator rule returns null
 *             https://sourceforge.net/tracker/?func=detail&aid=2819617&group_id=176962&atid=879332
 * @author victor.perez@e-evolution.com, www.e-evolution.com
 *         <li>BF [ 2947607 ] Model Validator Engine duplicate listeners
 */
public class ModelValidationEngine
{

	/**
	 * Get Singleton
	 * @return engine
	 */
	public synchronized static ModelValidationEngine get()
	{
		if (s_engine == null)
			s_engine = new ModelValidationEngine();
		return s_engine;
	}	//	get

	/** Engine Singleton				*/
	private static ModelValidationEngine s_engine = null;
	/** Accumulated description of validator classes that failed to load; null if none failed. */
	private static String missingModelValidationMessage = null;

	/**
	 * Record a validator class that could not be loaded.
	 * Fixes the former "null" prefix that resulted from concatenating onto a null String.
	 * @param detail description of the failure (exception text plus scope)
	 */
	private static void recordMissingValidator (String detail)
	{
		if (missingModelValidationMessage == null)
			missingModelValidationMessage = "";
		missingModelValidationMessage += detail + '\n';
	}	//	recordMissingValidator

	/**************************************************************************
	 * 	Constructor.
	 * 	Loads global validators (AD_ModelValidator, ordered by SeqNo) and then the
	 * 	per-client validator classes of every client.
	 */
	private ModelValidationEngine ()
	{
		super ();
		//	Load global validators
		MTable table = MTable.get(Env.getCtx(), X_AD_ModelValidator.Table_ID);
		Query query = table.createQuery("IsActive='Y'", null);
		query.setOrderBy("SeqNo");
		try
		{
			List<X_AD_ModelValidator> entityTypes = query.list();
			for (X_AD_ModelValidator entityType : entityTypes)
			{
				String className = entityType.getModelValidationClass();
				if (className == null || className.length() == 0)
					continue;
				loadValidatorClass(null, className);
			}
		}
		catch (Exception e)
		{
			//	logging to db will try to init ModelValidationEngine again!
			recordMissingValidator(e.toString() + " global");
		}

		//	Go through all Clients and start Validators
		MClient[] clients = MClient.getAll(Env.getCtx());
		for (int i = 0; i < clients.length; i++)
		{
			String classNames = clients[i].getModelValidationClasses();
			if (classNames == null || classNames.length() == 0)
				continue;
			loadValidatorClasses(clients[i], classNames);
		}
		//	logging to db will try to init ModelValidationEngine again!
	}	//	ModelValidatorEngine

	/**
	 * 	Load a semicolon-separated list of validator classes for a client.
	 * 	@param client owning client (not null here)
	 * 	@param classNames semicolon-separated fully qualified class names
	 */
	private void loadValidatorClasses (MClient client, String classNames)
	{
		StringTokenizer st = new StringTokenizer(classNames, ";");
		while (st.hasMoreTokens())
		{
			String className = null;
			try
			{
				className = st.nextToken();
				if (className == null)
					continue;
				className = className.trim();
				if (className.length() == 0)
					continue;
				//
				loadValidatorClass(client, className);
			}
			catch (Exception e)
			{
				//	logging to db will try to init ModelValidationEngine again!
				recordMissingValidator(e.toString() + " on client " + client.getName());
			}
		}
	}	//	loadValidatorClasses

	/**
	 * 	Instantiate a single validator class and initialize it.
	 * 	@param client owning client, or null for a global validator
	 * 	@param className fully qualified class name
	 */
	private void loadValidatorClass (MClient client, String className)
	{
		try
		{
			//
			Class<?> clazz = Class.forName(className);
			ModelValidator validator = (ModelValidator)clazz.newInstance();
			initialize(validator, client);
		}
		catch (Exception e)
		{
			//	logging to db will try to init ModelValidationEngine again!
			recordMissingValidator(e.toString()
				+ (client != null ? (" on client " + client.getName()) : " global"));
		}
	}	//	loadValidatorClass

	/**	Logger					*/
	private static CLogger log = CLogger.getCLogger(ModelValidationEngine.class);

	/** Validators						*/
	private ArrayList<ModelValidator> m_validators = new ArrayList<ModelValidator>();
	/** Model Change Listeners; key = TableName + ("*" for global | AD_Client_ID)	*/
	private Hashtable<String, ArrayList<ModelValidator>> m_modelChangeListeners =
		new Hashtable<String, ArrayList<ModelValidator>>();
	/** Document Validation Listeners; same key scheme as model-change listeners	*/
	private Hashtable<String, ArrayList<ModelValidator>> m_docValidateListeners =
		new Hashtable<String, ArrayList<ModelValidator>>();
	/** Accounting Facts Validation Listeners		*/
	private Hashtable<String, ArrayList<FactsValidator>> m_factsValidateListeners =
		new Hashtable<String, ArrayList<FactsValidator>>();
	/** Data Import Validation Listeners			*/
	private Hashtable<String, ArrayList<ImportValidator>> m_impValidateListeners =
		new Hashtable<String, ArrayList<ImportValidator>>();
	/** Validators registered without a client (system-wide) */
	private ArrayList<ModelValidator> m_globalValidators = new ArrayList<ModelValidator>();

	/**
	 * Initialize and add validator
	 * @param validator
	 * @param client owning client, or null for a global validator
	 */
	private void initialize (ModelValidator validator, MClient client)
	{
		if (client == null)
			m_globalValidators.add(validator);
		m_validators.add(validator);
		validator.initialize(this, client);
	}	//	initialize

	/**
	 * 	Called when login is complete
	 * 	@param AD_Client_ID client
	 *	@param AD_Org_ID org
	 *	@param AD_Role_ID role
	 *	@param AD_User_ID user
	 *	@return error message or null
	 */
	public String loginComplete (int AD_Client_ID, int AD_Org_ID, int AD_Role_ID, int AD_User_ID)
	{
		for (int i = 0; i < m_validators.size(); i++)
		{
			ModelValidator validator = m_validators.get(i);
			if (AD_Client_ID == validator.getAD_Client_ID()
				|| m_globalValidators.contains(validator))
			{
				String error = validator.login(AD_Org_ID, AD_Role_ID, AD_User_ID);
				if (error != null && error.length() > 0)
					return error;
			}
		}

		//	now process the script model validator login rules
		List<MRule> loginRules = MRule.getModelValidatorLoginRules(Env.getCtx());
		if (loginRules != null)
		{
			for (MRule loginRule : loginRules)
			{
				//	currently just JSR 223 supported
				if (loginRule.getRuleType().equals(MRule.RULETYPE_JSR223ScriptingAPIs)
					&& loginRule.getEventType().equals(MRule.EVENTTYPE_ModelValidatorLoginEvent))
				{
					String error;
					try
					{
						ScriptEngine engine = loginRule.getScriptEngine();
						MRule.setContext(engine, Env.getCtx(), 0);	//	no window
						//	now add the method arguments to the engine
						engine.put(MRule.ARGUMENTS_PREFIX + "Ctx", Env.getCtx());
						engine.put(MRule.ARGUMENTS_PREFIX + "AD_Client_ID", AD_Client_ID);
						engine.put(MRule.ARGUMENTS_PREFIX + "AD_Org_ID", AD_Org_ID);
						engine.put(MRule.ARGUMENTS_PREFIX + "AD_Role_ID", AD_Role_ID);
						engine.put(MRule.ARGUMENTS_PREFIX + "AD_User_ID", AD_User_ID);
						Object retval = engine.eval(loginRule.getScript());
						//	BF [ 2819617 ] guard against a rule returning null
						error = (retval == null ? "" : retval.toString());
					}
					catch (Exception e)
					{
						e.printStackTrace();
						error = e.toString();
					}
					if (error != null && error.length() > 0)
						return error;
				}
			}
		}

		//	don't report missing validators for user System on role System
		if (!(AD_User_ID == 0 && AD_Role_ID == 0) && missingModelValidationMessage != null)
		{
			MSystem system = MSystem.get(Env.getCtx());
			if (system.isFailOnMissingModelValidator())
				return missingModelValidationMessage;
		}
		return null;
	}	//	loginComplete

	/**************************************************************************
	 * 	Add Model Change Listener
	 *	@param tableName table name
	 *	@param listener listener; registered once only (BF [ 2947607 ])
	 */
	public void addModelChange (String tableName, ModelValidator listener)
	{
		if (tableName == null || listener == null)
			return;
		//
		String propertyName = m_globalValidators.contains(listener)
			? tableName + "*"
			: tableName + listener.getAD_Client_ID();
		ArrayList<ModelValidator> list = m_modelChangeListeners.get(propertyName);
		if (list == null)
		{
			list = new ArrayList<ModelValidator>();
			list.add(listener);
			m_modelChangeListeners.put(propertyName, list);
		}
		else if (!list.contains(listener))	//	avoid duplicate listeners
		{
			list.add(listener);
		}
	}	//	addModelChange

	/**
	 * 	Remove Model Change Listener
	 *	@param tableName table name
	 *	@param listener listener
	 */
	public void removeModelChange (String tableName, ModelValidator listener)
	{
		if (tableName == null || listener == null)
			return;
		String propertyName = m_globalValidators.contains(listener)
			? tableName + "*"
			: tableName + listener.getAD_Client_ID();
		ArrayList<ModelValidator> list = m_modelChangeListeners.get(propertyName);
		if (list == null)
			return;
		list.remove(listener);
		if (list.size() == 0)
			m_modelChangeListeners.remove(propertyName);
	}	//	removeModelChange

	/**
	 * 	Fire Model Change.
	 * 	Calls modelChange of global listeners, client listeners, then JSR-223 table-event rules.
	 *	@param po persistent object
	 *	@param changeType ModelValidator.TYPE_*
	 *	@return error message or NULL for no veto
	 */
	public String fireModelChange (PO po, int changeType)
	{
		if (po == null || m_modelChangeListeners.size() == 0)
			return null;

		String propertyName = po.get_TableName() + "*";
		ArrayList<ModelValidator> list = m_modelChangeListeners.get(propertyName);
		if (list != null)
		{
			//	ad_entitytype.modelvalidationclasses
			String error = fireModelChange(po, changeType, list);
			if (error != null && error.length() > 0)
				return error;
		}

		propertyName = po.get_TableName() + po.getAD_Client_ID();
		list = m_modelChangeListeners.get(propertyName);
		if (list != null)
		{
			//	ad_client.modelvalidationclasses
			String error = fireModelChange(po, changeType, list);
			if (error != null && error.length() > 0)
				return error;
		}

		//	now process the script model validator for this event
		List<MTableScriptValidator> scriptValidators =
			MTableScriptValidator.getModelValidatorRules(
					po.getCtx(),
					po.get_Table_ID(),
					ModelValidator.tableEventValidators[changeType]);
		if (scriptValidators != null)
		{
			for (MTableScriptValidator scriptValidator : scriptValidators)
			{
				MRule rule = MRule.get(po.getCtx(), scriptValidator.getAD_Rule_ID());
				//	currently just JSR 223 supported
				if (rule != null && rule.isActive()
					&& rule.getRuleType().equals(MRule.RULETYPE_JSR223ScriptingAPIs)
					&& rule.getEventType().equals(MRule.EVENTTYPE_ModelValidatorTableEvent))
				{
					String error;
					try
					{
						ScriptEngine engine = rule.getScriptEngine();
						MRule.setContext(engine, po.getCtx(), 0);	//	no window
						//	now add the method arguments to the engine
						engine.put(MRule.ARGUMENTS_PREFIX + "Ctx", po.getCtx());
						engine.put(MRule.ARGUMENTS_PREFIX + "PO", po);
						engine.put(MRule.ARGUMENTS_PREFIX + "Type", changeType);
						engine.put(MRule.ARGUMENTS_PREFIX + "Event",
							ModelValidator.tableEventValidators[changeType]);
						Object retval = engine.eval(rule.getScript());
						error = (retval == null ? "" : retval.toString());
					}
					catch (Exception e)
					{
						e.printStackTrace();
						error = e.toString();
					}
					if (error != null && error.length() > 0)
						return error;
				}
			}
		}
		//
		return null;
	}	//	fireModelChange

	/**
	 * 	Call modelChange on each applicable listener of the given list.
	 * 	Exceptions are converted into error messages (veto).
	 *	@param po persistent object
	 *	@param changeType ModelValidator.TYPE_*
	 *	@param list listeners to notify
	 *	@return error message or null
	 */
	private String fireModelChange (PO po, int changeType, ArrayList<ModelValidator> list)
	{
		for (int i = 0; i < list.size(); i++)
		{
			try
			{
				ModelValidator validator = list.get(i);
				if (validator.getAD_Client_ID() == po.getAD_Client_ID()
					|| m_globalValidators.contains(validator))
				{
					String error = validator.modelChange(po, changeType);
					if (error != null && error.length() > 0)
					{
						if (log.isLoggable(Level.FINE))
						{
							log.log(Level.FINE, "po=" + po + " validator=" + validator
								+ " changeType=" + changeType);
						}
						return error;
					}
				}
			}
			catch (Exception e)
			{
				//	log the exception
				log.log(Level.SEVERE, e.getLocalizedMessage(), e);
				String error = e.getLocalizedMessage();
				if (error == null)
					error = e.toString();
				return error;
			}
		}
		return null;
	}	//	fireModelChange

	/**************************************************************************
	 * 	Add Document Validation Listener
	 *	@param tableName table name
	 *	@param listener listener; registered once only (BF [ 2947607 ])
	 */
	public void addDocValidate (String tableName, ModelValidator listener)
	{
		if (tableName == null || listener == null)
			return;
		//
		String propertyName = m_globalValidators.contains(listener)
			? tableName + "*"
			: tableName + listener.getAD_Client_ID();
		ArrayList<ModelValidator> list = m_docValidateListeners.get(propertyName);
		if (list == null)
		{
			list = new ArrayList<ModelValidator>();
			list.add(listener);
			m_docValidateListeners.put(propertyName, list);
		}
		else if (!list.contains(listener))
		{
			list.add(listener);
		}
	}	//	addDocValidate

	/**
	 * 	Remove Document Validation Listener
	 *	@param tableName table name
	 *	@param listener listener
	 */
	public void removeDocValidate (String tableName, ModelValidator listener)
	{
		if (tableName == null || listener == null)
			return;
		String propertyName = m_globalValidators.contains(listener)
			? tableName + "*"
			: tableName + listener.getAD_Client_ID();
		ArrayList<ModelValidator> list = m_docValidateListeners.get(propertyName);
		if (list == null)
			return;
		list.remove(listener);
		if (list.size() == 0)
			m_docValidateListeners.remove(propertyName);
	}	//	removeDocValidate

	/**
	 * 	Fire Document Validation.
	 * 	Calls docValidate of global listeners, client listeners, then JSR-223 document-event rules.
	 *	@param po persistent object
	 *	@param docTiming see ModelValidator.TIMING_ constants
	 *	@return error message or null
	 */
	public String fireDocValidate (PO po, int docTiming)
	{
		if (po == null || m_docValidateListeners.size() == 0)
			return null;

		String propertyName = po.get_TableName() + "*";
		ArrayList<ModelValidator> list = m_docValidateListeners.get(propertyName);
		if (list != null)
		{
			//	ad_entitytype.modelvalidationclasses
			String error = fireDocValidate(po, docTiming, list);
			if (error != null && error.length() > 0)
				return error;
		}

		propertyName = po.get_TableName() + po.getAD_Client_ID();
		list = m_docValidateListeners.get(propertyName);
		if (list != null)
		{
			//	ad_client.modelvalidationclasses
			String error = fireDocValidate(po, docTiming, list);
			if (error != null && error.length() > 0)
				return error;
		}

		//	now process the script model validator for this event
		List<MTableScriptValidator> scriptValidators =
			MTableScriptValidator.getModelValidatorRules(
					po.getCtx(),
					po.get_Table_ID(),
					ModelValidator.documentEventValidators[docTiming]);
		if (scriptValidators != null)
		{
			for (MTableScriptValidator scriptValidator : scriptValidators)
			{
				MRule rule = MRule.get(po.getCtx(), scriptValidator.getAD_Rule_ID());
				//	currently just JSR 223 supported
				if (rule != null && rule.isActive()
					&& rule.getRuleType().equals(MRule.RULETYPE_JSR223ScriptingAPIs)
					&& rule.getEventType().equals(MRule.EVENTTYPE_ModelValidatorDocumentEvent))
				{
					String error;
					try
					{
						ScriptEngine engine = rule.getScriptEngine();
						MRule.setContext(engine, po.getCtx(), 0);	//	no window
						//	now add the method arguments to the engine
						engine.put(MRule.ARGUMENTS_PREFIX + "Ctx", po.getCtx());
						engine.put(MRule.ARGUMENTS_PREFIX + "PO", po);
						engine.put(MRule.ARGUMENTS_PREFIX + "Type", docTiming);
						engine.put(MRule.ARGUMENTS_PREFIX + "Event",
							ModelValidator.documentEventValidators[docTiming]);
						Object retval = engine.eval(rule.getScript());
						error = (retval == null ? "" : retval.toString());
					}
					catch (Exception e)
					{
						e.printStackTrace();
						error = e.toString();
					}
					if (error != null && error.length() > 0)
						return error;
				}
			}
		}
		//
		return null;
	}	//	fireDocValidate

	/**
	 * 	Call docValidate on each applicable listener of the given list.
	 * 	Exceptions are errors and stop the document processing - teo_sarca [ 1679692 ]
	 *	@param po persistent object
	 *	@param docTiming see ModelValidator.TIMING_ constants
	 *	@param list listeners to notify
	 *	@return error message or null
	 */
	private String fireDocValidate (PO po, int docTiming, ArrayList<ModelValidator> list)
	{
		for (int i = 0; i < list.size(); i++)
		{
			ModelValidator validator = null;
			try
			{
				validator = list.get(i);
				if (validator.getAD_Client_ID() == po.getAD_Client_ID()
					|| m_globalValidators.contains(validator))
				{
					String error = validator.docValidate(po, docTiming);
					if (error != null && error.length() > 0)
					{
						if (log.isLoggable(Level.FINE))
						{
							log.log(Level.FINE, "po=" + po + " validator=" + validator
								+ " timing=" + docTiming);
						}
						return error;
					}
				}
			}
			catch (Exception e)
			{
				//	log the stack trace
				log.log(Level.SEVERE, e.getLocalizedMessage(), e);
				String error = e.getLocalizedMessage();
				if (error == null)
					error = e.toString();
				return error;
			}
		}
		return null;
	}	//	fireDocValidate

	/**************************************************************************
	 * 	Add Accounting Facts Validation Listener
	 *	@param tableName table name
	 *	@param listener listener; registered once only (BF [ 2947607 ])
	 */
	public void addFactsValidate (String tableName, FactsValidator listener)
	{
		if (tableName == null || listener == null)
			return;
		//
		String propertyName = m_globalValidators.contains(listener)
			? tableName + "*"
			: tableName + listener.getAD_Client_ID();
		ArrayList<FactsValidator> list = m_factsValidateListeners.get(propertyName);
		if (list == null)
		{
			list = new ArrayList<FactsValidator>();
			list.add(listener);
			m_factsValidateListeners.put(propertyName, list);
		}
		else if (!list.contains(listener))	//	avoid duplicate listeners
		{
			list.add(listener);
		}
	}	//	addFactsValidate

	/**************************************************************************
	 * 	Add Data Import Validation Listener
	 *	@param importTableName import table name (e.g. I_BPartner)
	 *	@param listener listener
	 */
	public void addImportValidate (String importTableName, ImportValidator listener)
	{
		if (importTableName == null || listener == null)	//	guard, consistent with siblings
			return;
		String propertyName = importTableName + "*";
		ArrayList<ImportValidator> list = m_impValidateListeners.get(propertyName);
		if (list == null)
		{
			list = new ArrayList<ImportValidator>();
			list.add(listener);
			m_impValidateListeners.put(propertyName, list);
		}
		else if (!list.contains(listener))	//	avoid duplicate listeners
		{
			list.add(listener);
		}
	}	//	addImportValidate

	/**
	 * 	Remove Accounting Facts Validation Listener
	 *	@param tableName table name
	 *	@param listener listener
	 */
	public void removeFactsValidate (String tableName, FactsValidator listener)
	{
		if (tableName == null || listener == null)
			return;
		String propertyName = m_globalValidators.contains(listener)
			? tableName + "*"
			: tableName + listener.getAD_Client_ID();
		ArrayList<FactsValidator> list = m_factsValidateListeners.get(propertyName);
		if (list == null)
			return;
		list.remove(listener);
		if (list.size() == 0)
			m_factsValidateListeners.remove(propertyName);
	}	//	removeFactsValidate

	/**
	 * 	Fire Accounting Facts Validation.
	 * 	Calls factsValidate of global and client listeners.
	 *	@param schema accounting schema
	 *	@param facts facts
	 *	@param po document persistent object
	 *	@return error message or null
	 */
	public String fireFactsValidate (MAcctSchema schema, List<Fact> facts, PO po)
	{
		if (schema == null || facts == null || po == null
			|| m_factsValidateListeners.size() == 0)
			return null;

		String propertyName = po.get_TableName() + "*";
		ArrayList<FactsValidator> list = m_factsValidateListeners.get(propertyName);
		if (list != null)
		{
			//	ad_entitytype.modelvalidationclasses
			String error = fireFactsValidate(schema, facts, po, list);
			if (error != null && error.length() > 0)
				return error;
		}

		propertyName = po.get_TableName() + po.getAD_Client_ID();
		list = m_factsValidateListeners.get(propertyName);
		if (list != null)
		{
			//	ad_client.modelvalidationclasses
			String error = fireFactsValidate(schema, facts, po, list);
			if (error != null && error.length() > 0)
				return error;
		}
		return null;
	}	//	fireFactsValidate

	/**
	 * 	Call factsValidate on each applicable listener of the given list.
	 * 	Exceptions are errors and stop the document processing - teo_sarca [ 1679692 ]
	 *	@param schema accounting schema
	 *	@param facts facts
	 *	@param po document persistent object
	 *	@param list listeners to notify
	 *	@return error message or null
	 */
	private String fireFactsValidate (MAcctSchema schema, List<Fact> facts, PO po,
		ArrayList<FactsValidator> list)
	{
		for (int i = 0; i < list.size(); i++)
		{
			FactsValidator validator = null;
			try
			{
				validator = list.get(i);
				if (validator.getAD_Client_ID() == po.getAD_Client_ID()
					|| m_globalValidators.contains(validator))
				{
					String error = validator.factsValidate(schema, facts, po);
					if (error != null && error.length() > 0)
					{
						if (log.isLoggable(Level.FINE))
						{
							log.log(Level.FINE, "po=" + po + " schema=" + schema
								+ " validator=" + validator);
						}
						return error;
					}
				}
			}
			catch (Exception e)
			{
				//	log the stack trace
				log.log(Level.SEVERE, e.getLocalizedMessage(), e);
				String error = e.getLocalizedMessage();
				if (error == null)
					error = e.toString();
				return error;
			}
		}
		return null;
	}	//	fireFactsValidate

	/**
	 * 	Fire Import Validation.
	 * 	Call {@link ImportValidator#validate(ImportProcess, Object, Object, int)} of registered validators.
	 *	@param process import process
	 *	@param importModel import record (e.g. X_I_BPartner)
	 *	@param targetModel target model (e.g. MBPartner, MBPartnerLocation, MUser)
	 *	@param timing see ImportValidator.TIMING_* constants
	 */
	public void fireImportValidate (ImportProcess process, PO importModel, PO targetModel, int timing)
	{
		if (m_impValidateListeners.size() == 0)
			return;

		String propertyName = process.getImportTableName() + "*";
		ArrayList<ImportValidator> list = m_impValidateListeners.get(propertyName);
		if (list != null)
		{
			for (ImportValidator validator : list)
			{
				validator.validate(process, importModel, targetModel, timing);
			}
		}
	}	//	fireImportValidate

	/**
	 * 	String Representation
	 *	@return info
	 */
	public String toString ()
	{
		StringBuffer sb = new StringBuffer("ModelValidationEngine[");
		sb.append("Validators=#").append(m_validators.size())
			.append(", ModelChange=#").append(m_modelChangeListeners.size())
			.append(", DocValidate=#").append(m_docValidateListeners.size())
			.append("]");
		return sb.toString();
	}	//	toString

	/**
	 * 	Create Model Validators Info
	 *	@param sb optional string buffer
	 *	@param ctx context
	 *	@return Model Validators Info
	 *
	 *	@author Teo Sarca, FR [ 1724662 ]
	 */
	public StringBuffer getInfoDetail (StringBuffer sb, Properties ctx)
	{
		if (sb == null)
			sb = new StringBuffer();
		sb.append("=== ModelValidationEngine ===").append(Env.NL);
		sb.append("Validators #").append(m_validators.size()).append(Env.NL);
		for (ModelValidator mv : m_validators)
		{
			sb.append(mv.toString()).append(Env.NL);
		}
		sb.append(Env.NL).append(Env.NL);
		//
		sb.append("ModelChange #").append(m_modelChangeListeners.size()).append(Env.NL);
		Iterator<String> it = m_modelChangeListeners.keySet().iterator();
		while (it.hasNext())
		{
			String key = it.next();
			ArrayList<ModelValidator> list = m_modelChangeListeners.get(key);
			for (ModelValidator mv : list)
			{
				sb.append(key).append(": ").append(mv.toString()).append(Env.NL);
			}
		}
		sb.append(Env.NL).append(Env.NL);
		//
		sb.append("DocValidate #").append(m_docValidateListeners.size()).append(Env.NL);
		it = m_docValidateListeners.keySet().iterator();
		while (it.hasNext())
		{
			String key = it.next();
			ArrayList<ModelValidator> list = m_docValidateListeners.get(key);
			for (ModelValidator mv : list)
			{
				sb.append(key).append(": ").append(mv.toString()).append(Env.NL);
			}
		}
		sb.append(Env.NL).append(Env.NL);
		//
		return sb;
	}	//	getInfoDetail

	/**
	 * 	After Load Preferences into Context for selected client.
	 * 	Invokes the optional afterLoadPreferences(Properties) method via reflection,
	 * 	so validators compiled against older interfaces still work.
	 *	@param ctx context
	 *	@see org.compiere.util.Login#loadPreferences(KeyNamePair, KeyNamePair, java.sql.Timestamp, String)
	 *	@author Teo Sarca - FR [ 1670025 ] - https://sourceforge.net/tracker/index.php?func=detail&aid=1670025&group_id=176962&atid=879335
	 */
	public void afterLoadPreferences (Properties ctx)
	{
		int AD_Client_ID = Env.getAD_Client_ID(ctx);
		for (int i = 0; i < m_validators.size(); i++)
		{
			ModelValidator validator = m_validators.get(i);
			if (AD_Client_ID == validator.getAD_Client_ID()
				|| m_globalValidators.contains(validator))
			{
				java.lang.reflect.Method m = null;
				try
				{
					m = validator.getClass().getMethod("afterLoadPreferences",
						new Class[]{Properties.class});
				}
				catch (NoSuchMethodException e)
				{
					//	ignore - the hook is optional
				}
				try
				{
					if (m != null)
						m.invoke(validator, ctx);
				}
				catch (Exception e)
				{
					log.warning("" + validator + ": " + e.getLocalizedMessage());
				}
			}
		}
	}	//	afterLoadPreferences

	/**
	 * 	Before Save Properties for selected client.
	 * 	Invokes the optional beforeSaveProperties() method via reflection.
	 */
	public void beforeSaveProperties ()
	{
		int AD_Client_ID = Env.getAD_Client_ID(Env.getCtx());
		for (int i = 0; i < m_validators.size(); i++)
		{
			ModelValidator validator = m_validators.get(i);
			if (AD_Client_ID == validator.getAD_Client_ID()
				|| m_globalValidators.contains(validator))
			{
				java.lang.reflect.Method m = null;
				try
				{
					m = validator.getClass().getMethod("beforeSaveProperties");
				}
				catch (NoSuchMethodException e)
				{
					//	ignore - the hook is optional
				}
				try
				{
					if (m != null)
						m.invoke(validator);
				}
				catch (Exception e)
				{
					log.warning("" + validator + ": " + e.getLocalizedMessage());
				}
			}
		}
	}	//	beforeSaveProperties

}	//	ModelValidationEngine
gpl-2.0
kkoop64/oStorybook
src/storybook/ui/dialog/rename/RenameCityDialog.java
2031
/*
Storybook: Open Source software for novelists and authors.
Copyright (C) 2008 - 2012 Martin Mustun

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/
package storybook.ui.dialog.rename;

import java.util.List;

import org.hibernate.Session;

import storybook.controller.BookController;
import storybook.model.BookModel;
import storybook.model.hbn.dao.LocationDAOImpl;
import storybook.model.hbn.entity.Location;
import storybook.toolkit.I18N;
import storybook.ui.MainFrame;

/**
 * Rename dialog for the "city" attribute of locations: offers the distinct
 * city names currently in use and rewrites every location that carries the
 * selected old value.
 */
@SuppressWarnings("serial")
public class RenameCityDialog extends AbstractRenameDialog {

	public RenameCityDialog(MainFrame mainFrame) {
		super(mainFrame);
	}

	/**
	 * Loads the distinct city names of all locations.
	 *
	 * @return list of city names to offer in the dialog
	 */
	@Override
	protected List<String> getList() {
		BookModel bookModel = mainFrame.getBookModel();
		Session session = bookModel.beginTransaction();
		List<String> cities = new LocationDAOImpl(session).findCities();
		bookModel.commit();
		return cities;
	}

	/**
	 * Renames the city on every location that currently uses the old value
	 * and pushes each change through the book controller.
	 *
	 * @param oldValue city name to replace
	 * @param newValue replacement city name
	 */
	@Override
	protected void rename(String oldValue, String newValue) {
		BookModel bookModel = mainFrame.getBookModel();
		BookController controller = mainFrame.getBookController();
		Session session = bookModel.beginTransaction();
		List<Location> affected = new LocationDAOImpl(session).findByCity(oldValue);
		bookModel.commit();
		for (Location location : affected) {
			location.setCity(newValue);
			controller.updateLocation(location);
		}
	}

	/**
	 * @return localized dialog title
	 */
	@Override
	protected String getDlgTitle() {
		return I18N.getMsg("msg.location.rename.city");
	}
}
gpl-3.0
pbouillet/inspectIT
CommonsCS/src/info/novatec/inspectit/ci/assignment/ISensorAssignment.java
478
package info.novatec.inspectit.ci.assignment;

import info.novatec.inspectit.ci.sensor.ISensorConfig;

/**
 * Base interface for all sensor assignments.
 *
 * @author Ivan Senic
 *
 * @param <T> Type of the sensor config that relates to the assignment.
 */
public interface ISensorAssignment<T extends ISensorConfig> {

	/**
	 * Returns the class of the sensor config.
	 *
	 * @return Returns the class of the sensor config.
	 */
	Class<? extends T> getSensorConfigClass();

}
agpl-3.0
jamezp/wildfly-core
host-controller/src/main/java/org/jboss/as/host/controller/model/jvm/JVMOptionAddHandler.java
3532
/* * JBoss, Home of Professional Open Source. * Copyright 2011, Red Hat Middleware LLC, and individual contributors * as indicated by the @author tags. See the copyright.txt file in the * distribution for a full listing of individual contributors. * * This is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as * published by the Free Software Foundation; either version 2.1 of * the License, or (at your option) any later version. * * This software is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this software; if not, write to the Free * Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA * 02110-1301 USA, or see the FSF site: http://www.fsf.org. 
*/ package org.jboss.as.host.controller.model.jvm; import org.jboss.as.controller.OperationContext; import org.jboss.as.controller.OperationDefinition; import org.jboss.as.controller.OperationFailedException; import org.jboss.as.controller.OperationStepHandler; import org.jboss.as.controller.PathAddress; import org.jboss.as.controller.SimpleAttributeDefinition; import org.jboss.as.controller.SimpleAttributeDefinitionBuilder; import org.jboss.as.controller.SimpleOperationDefinitionBuilder; import org.jboss.as.controller.access.management.SensitiveTargetAccessConstraintDefinition; import org.jboss.as.controller.operations.validation.StringLengthValidator; import org.jboss.as.controller.registry.Resource; import org.jboss.as.host.controller.logging.HostControllerLogger; import org.jboss.as.host.controller.descriptions.HostResolver; import org.jboss.dmr.ModelNode; import org.jboss.dmr.ModelType; final class JVMOptionAddHandler implements OperationStepHandler { static final String OPERATION_NAME = "add-jvm-option"; static final JVMOptionAddHandler INSTANCE = new JVMOptionAddHandler(); // the attribute allows expressions that are resolved in JVMAddHandler upon server restart static final SimpleAttributeDefinition JVM_OPTION = SimpleAttributeDefinitionBuilder.create(JvmAttributes.JVM_OPTION, ModelType.STRING, false) .setValidator(new StringLengthValidator(1, false, true)) .setAllowExpression(true) .build(); public static final OperationDefinition DEFINITION = new SimpleOperationDefinitionBuilder(OPERATION_NAME, HostResolver.getResolver("jvm")) .addParameter(JVM_OPTION) .addAccessConstraint(SensitiveTargetAccessConstraintDefinition.JVM) .build(); @Override public void execute(OperationContext context, ModelNode operation) throws OperationFailedException { final Resource resource = context.readResourceForUpdate(PathAddress.EMPTY_ADDRESS); final ModelNode model = resource.getModel(); final ModelNode option = JVM_OPTION.validateOperation(operation); ModelNode jvmOptions = 
model.get(JvmAttributes.JVM_OPTIONS); if (jvmOptions.isDefined()) { for (ModelNode optionNode : jvmOptions.asList()) { if (optionNode.equals(option)) { throw HostControllerLogger.ROOT_LOGGER.jvmOptionAlreadyExists(option.asString()); } } } model.get(JvmAttributes.JVM_OPTIONS).add(option); } }
lgpl-2.1
MatthiasMann/EnderIO
src/main/java/crazypants/enderio/block/BlockItemDarkSteelPressurePlate.java
359
package crazypants.enderio.block;

import net.minecraft.block.Block;
import net.minecraft.item.ItemBlockWithMetadata;

/**
 * Item form of the dark steel pressure plate block. Whatever item damage value
 * is supplied, the block metadata used for placement is always 0.
 */
public class BlockItemDarkSteelPressurePlate extends ItemBlockWithMetadata {

  public BlockItemDarkSteelPressurePlate(Block block) {
    // Pass the block both as the placed block and as the metadata provider.
    super(block, block);
  }

  /**
   * Maps every item damage value to block metadata 0.
   */
  @Override
  public int getMetadata(int damage) {
    return 0;
  }
}
unlicense
mhd911/openfire
src/java/org/jivesoftware/openfire/session/LocalClientSession.java
36702
/** * $RCSfile$ * $Revision: 3187 $ * $Date: 2005-12-11 13:34:34 -0300 (Sun, 11 Dec 2005) $ * * Copyright (C) 2005-2008 Jive Software. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jivesoftware.openfire.session; import java.net.UnknownHostException; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import org.jivesoftware.openfire.Connection; import org.jivesoftware.openfire.SessionManager; import org.jivesoftware.openfire.StreamID; import org.jivesoftware.openfire.XMPPServer; import org.jivesoftware.openfire.auth.AuthToken; import org.jivesoftware.openfire.auth.UnauthorizedException; import org.jivesoftware.openfire.cluster.ClusterManager; import org.jivesoftware.openfire.net.SASLAuthentication; import org.jivesoftware.openfire.net.SSLConfig; import org.jivesoftware.openfire.net.SocketConnection; import org.jivesoftware.openfire.privacy.PrivacyList; import org.jivesoftware.openfire.privacy.PrivacyListManager; import org.jivesoftware.openfire.user.PresenceEventDispatcher; import org.jivesoftware.openfire.user.UserNotFoundException; import org.jivesoftware.util.JiveGlobals; import org.jivesoftware.util.LocaleUtils; import org.jivesoftware.util.cache.Cache; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.xmlpull.v1.XmlPullParser; import org.xmlpull.v1.XmlPullParserException; import org.xmpp.packet.JID; import org.xmpp.packet.Packet; import org.xmpp.packet.Presence; 
import org.xmpp.packet.StreamError; /** * Represents a session between the server and a client. * * @author Gaston Dombiak */ public class LocalClientSession extends LocalSession implements ClientSession { private static final Logger Log = LoggerFactory.getLogger(LocalClientSession.class); private static final String ETHERX_NAMESPACE = "http://etherx.jabber.org/streams"; private static final String FLASH_NAMESPACE = "http://www.jabber.com/streams/flash"; /** * Keep the list of IP address that are allowed to connect to the server. If the list is * empty then anyone is allowed to connect to the server.<p> * * Note: Key = IP address or IP range; Value = empty string. A hash map is being used for * performance reasons. */ private static Map<String,String> allowedIPs = new HashMap<String,String>(); private static Map<String,String> allowedAnonymIPs = new HashMap<String,String>(); private boolean messageCarbonsEnabled; /** * The authentication token for this session. */ protected AuthToken authToken; /** * Flag indicating if this session has been initialized yet (upon first available transition). */ private boolean initialized; /** * Flag that indicates if the session was available ever. */ private boolean wasAvailable = false; /** * Flag indicating if the user requested to not receive offline messages when sending * an available presence. The user may send a disco request with node * "http://jabber.org/protocol/offline" so that no offline messages are sent to the * user when he becomes online. If the user is connected from many resources then * if one of the sessions stopped the flooding then no session should flood the user. */ private boolean offlineFloodStopped = false; private Presence presence = null; private int conflictCount = 0; /** * Privacy list that overrides the default privacy list. This list affects only this * session and only for the duration of the session. */ private String activeList; /** * Default privacy list used for the session's user. 
This list is processed if there * is no active list set for the session. */ private String defaultList; static { // Fill out the allowedIPs with the system property String allowed = JiveGlobals.getProperty("xmpp.client.login.allowed", ""); StringTokenizer tokens = new StringTokenizer(allowed, ", "); while (tokens.hasMoreTokens()) { String address = tokens.nextToken().trim(); allowedIPs.put(address, ""); } String allowedAnonym = JiveGlobals.getProperty("xmpp.client.login.allowedAnonym", ""); tokens = new StringTokenizer(allowedAnonym, ", "); while (tokens.hasMoreTokens()) { String address = tokens.nextToken().trim(); allowedAnonymIPs.put(address, ""); } } /** * Returns the list of IP address that are allowed to connect to the server. If the list is * empty then anyone is allowed to connect to the server except for anonymous users that are * subject to {@link #getAllowedAnonymIPs()}. This list is used for both anonymous and * non-anonymous users. * * @return the list of IP address that are allowed to connect to the server. */ public static Map<String, String> getAllowedIPs() { return allowedIPs; } /** * Returns the list of IP address that are allowed to connect to the server for anonymous * users. If the list is empty then anonymous will be only restricted by {@link #getAllowedIPs()}. * * @return the list of IP address that are allowed to connect to the server. */ public static Map<String, String> getAllowedAnonymIPs() { return allowedAnonymIPs; } /** * Returns a newly created session between the server and a client. The session will * be created and returned only if correct name/prefix (i.e. 'stream' or 'flash') * and namespace were provided by the client. * * @param serverName the name of the server where the session is connecting to. * @param xpp the parser that is reading the provided XML through the connection. * @param connection the connection with the client. * @return a newly created session between the server and a client. 
* @throws org.xmlpull.v1.XmlPullParserException if an error occurs while parsing incoming data. */ public static LocalClientSession createSession(String serverName, XmlPullParser xpp, Connection connection) throws XmlPullParserException { boolean isFlashClient = xpp.getPrefix().equals("flash"); connection.setFlashClient(isFlashClient); // Conduct error checking, the opening tag should be 'stream' // in the 'etherx' namespace if (!xpp.getName().equals("stream") && !isFlashClient) { throw new XmlPullParserException( LocaleUtils.getLocalizedString("admin.error.bad-stream")); } if (!xpp.getNamespace(xpp.getPrefix()).equals(ETHERX_NAMESPACE) && !(isFlashClient && xpp.getNamespace(xpp.getPrefix()).equals(FLASH_NAMESPACE))) { throw new XmlPullParserException(LocaleUtils.getLocalizedString( "admin.error.bad-namespace")); } if (!allowedIPs.isEmpty()) { String hostAddress = "Unknown"; // The server is using a whitelist so check that the IP address of the client // is authorized to connect to the server try { hostAddress = connection.getHostAddress(); } catch (UnknownHostException e) { // Do nothing } if (!isAllowed(connection)) { // Client cannot connect from this IP address so end the stream and // TCP connection Log.debug("LocalClientSession: Closed connection to client attempting to connect from: " + hostAddress); // Include the not-authorized error in the response StreamError error = new StreamError(StreamError.Condition.not_authorized); connection.deliverRawText(error.toXML()); // Close the underlying connection connection.close(); return null; } } // Default language is English ("en"). String language = "en"; // Default to a version of "0.0". Clients written before the XMPP 1.0 spec may // not report a version in which case "0.0" should be assumed (per rfc3920 // section 4.4.1). 
int majorVersion = 0; int minorVersion = 0; for (int i = 0; i < xpp.getAttributeCount(); i++) { if ("lang".equals(xpp.getAttributeName(i))) { language = xpp.getAttributeValue(i); } if ("version".equals(xpp.getAttributeName(i))) { try { int[] version = decodeVersion(xpp.getAttributeValue(i)); majorVersion = version[0]; minorVersion = version[1]; } catch (Exception e) { Log.error(e.getMessage(), e); } } } // If the client supports a greater major version than the server, // set the version to the highest one the server supports. if (majorVersion > MAJOR_VERSION) { majorVersion = MAJOR_VERSION; minorVersion = MINOR_VERSION; } else if (majorVersion == MAJOR_VERSION) { // If the client supports a greater minor version than the // server, set the version to the highest one that the server // supports. if (minorVersion > MINOR_VERSION) { minorVersion = MINOR_VERSION; } } // Store language and version information in the connection. connection.setLanaguage(language); connection.setXMPPVersion(majorVersion, minorVersion); // Indicate the TLS policy to use for this connection if (!connection.isSecure()) { boolean hasCertificates = false; try { hasCertificates = SSLConfig.getKeyStore().size() > 0; } catch (Exception e) { Log.error(e.getMessage(), e); } Connection.TLSPolicy tlsPolicy = getTLSPolicy(); if (Connection.TLSPolicy.required == tlsPolicy && !hasCertificates) { Log.error("Client session rejected. TLS is required but no certificates " + "were created."); return null; } // Set default TLS policy connection.setTlsPolicy(hasCertificates ? tlsPolicy : Connection.TLSPolicy.disabled); } else { // Set default TLS policy connection.setTlsPolicy(Connection.TLSPolicy.disabled); } // Indicate the compression policy to use for this connection connection.setCompressionPolicy(getCompressionPolicy()); // Create a ClientSession for this user. 
LocalClientSession session = SessionManager.getInstance().createClientSession(connection); // Build the start packet response StringBuilder sb = new StringBuilder(200); sb.append("<?xml version='1.0' encoding='"); sb.append(CHARSET); sb.append("'?>"); if (isFlashClient) { sb.append("<flash:stream xmlns:flash=\"http://www.jabber.com/streams/flash\" "); } else { sb.append("<stream:stream "); } sb.append("xmlns:stream=\"http://etherx.jabber.org/streams\" xmlns=\"jabber:client\" from=\""); sb.append(serverName); sb.append("\" id=\""); sb.append(session.getStreamID().toString()); sb.append("\" xml:lang=\""); sb.append(language); // Don't include version info if the version is 0.0. if (majorVersion != 0) { sb.append("\" version=\""); sb.append(majorVersion).append(".").append(minorVersion); } sb.append("\">"); connection.deliverRawText(sb.toString()); // If this is a "Jabber" connection, the session is now initialized and we can // return to allow normal packet parsing. if (majorVersion == 0) { return session; } // Otherwise, this is at least XMPP 1.0 so we need to announce stream features. 
sb = new StringBuilder(490); sb.append("<stream:features>"); if (connection.getTlsPolicy() != Connection.TLSPolicy.disabled) { sb.append("<starttls xmlns=\"urn:ietf:params:xml:ns:xmpp-tls\">"); if (connection.getTlsPolicy() == Connection.TLSPolicy.required) { sb.append("<required/>"); } sb.append("</starttls>"); } // Include available SASL Mechanisms sb.append(SASLAuthentication.getSASLMechanisms(session)); // Include Stream features String specificFeatures = session.getAvailableStreamFeatures(); if (specificFeatures != null) { sb.append(specificFeatures); } sb.append("</stream:features>"); connection.deliverRawText(sb.toString()); return session; } public static boolean isAllowed(Connection connection) { if (!allowedIPs.isEmpty()) { // The server is using a whitelist so check that the IP address of the client // is authorized to connect to the server boolean forbidAccess = false; try { if (!allowedIPs.containsKey(connection.getHostAddress())) { byte[] address = connection.getAddress(); String range1 = (address[0] & 0xff) + "." + (address[1] & 0xff) + "." + (address[2] & 0xff) + ".*"; String range2 = (address[0] & 0xff) + "." + (address[1] & 0xff) + ".*.*"; String range3 = (address[0] & 0xff) + ".*.*.*"; if (!allowedIPs.containsKey(range1) && !allowedIPs.containsKey(range2) && !allowedIPs.containsKey(range3)) { forbidAccess = true; } } } catch (UnknownHostException e) { forbidAccess = true; } return !forbidAccess; } return true; } /** * Sets the list of IP address that are allowed to connect to the server. If the list is * empty then anyone is allowed to connect to the server except for anonymous users that are * subject to {@link #getAllowedAnonymIPs()}. This list is used for both anonymous and * non-anonymous users. * * @param allowed the list of IP address that are allowed to connect to the server. 
*/ public static void setAllowedIPs(Map<String, String> allowed) { allowedIPs = allowed; if (allowedIPs.isEmpty()) { JiveGlobals.deleteProperty("xmpp.client.login.allowed"); } else { // Iterate through the elements in the map. StringBuilder buf = new StringBuilder(); Iterator<String> iter = allowedIPs.keySet().iterator(); if (iter.hasNext()) { buf.append(iter.next()); } while (iter.hasNext()) { buf.append(", ").append(iter.next()); } JiveGlobals.setProperty("xmpp.client.login.allowed", buf.toString()); } } /** * Sets the list of IP address that are allowed to connect to the server for anonymous * users. If the list is empty then anonymous will be only restricted by {@link #getAllowedIPs()}. * * @param allowed the list of IP address that are allowed to connect to the server. */ public static void setAllowedAnonymIPs(Map<String, String> allowed) { allowedAnonymIPs = allowed; if (allowedAnonymIPs.isEmpty()) { JiveGlobals.deleteProperty("xmpp.client.login.allowedAnonym"); } else { // Iterate through the elements in the map. StringBuilder buf = new StringBuilder(); Iterator<String> iter = allowedAnonymIPs.keySet().iterator(); if (iter.hasNext()) { buf.append(iter.next()); } while (iter.hasNext()) { buf.append(", ").append(iter.next()); } JiveGlobals.setProperty("xmpp.client.login.allowedAnonym", buf.toString()); } } /** * Returns whether TLS is mandatory, optional or is disabled for clients. When TLS is * mandatory clients are required to secure their connections or otherwise their connections * will be closed. On the other hand, when TLS is disabled clients are not allowed to secure * their connections using TLS. Their connections will be closed if they try to secure the * connection. in this last case. * * @return whether TLS is mandatory, optional or is disabled. 
*/ public static SocketConnection.TLSPolicy getTLSPolicy() { // Set the TLS policy stored as a system property String policyName = JiveGlobals.getProperty("xmpp.client.tls.policy", Connection.TLSPolicy.optional.toString()); SocketConnection.TLSPolicy tlsPolicy; try { tlsPolicy = Connection.TLSPolicy.valueOf(policyName); } catch (IllegalArgumentException e) { Log.error("Error parsing xmpp.client.tls.policy: " + policyName, e); tlsPolicy = Connection.TLSPolicy.optional; } return tlsPolicy; } /** * Sets whether TLS is mandatory, optional or is disabled for clients. When TLS is * mandatory clients are required to secure their connections or otherwise their connections * will be closed. On the other hand, when TLS is disabled clients are not allowed to secure * their connections using TLS. Their connections will be closed if they try to secure the * connection. in this last case. * * @param policy whether TLS is mandatory, optional or is disabled. */ public static void setTLSPolicy(SocketConnection.TLSPolicy policy) { JiveGlobals.setProperty("xmpp.client.tls.policy", policy.toString()); } /** * Returns whether compression is optional or is disabled for clients. * * @return whether compression is optional or is disabled. */ public static SocketConnection.CompressionPolicy getCompressionPolicy() { // Set the Compression policy stored as a system property String policyName = JiveGlobals .getProperty("xmpp.client.compression.policy", Connection.CompressionPolicy.optional.toString()); SocketConnection.CompressionPolicy compressionPolicy; try { compressionPolicy = Connection.CompressionPolicy.valueOf(policyName); } catch (IllegalArgumentException e) { Log.error("Error parsing xmpp.client.compression.policy: " + policyName, e); compressionPolicy = Connection.CompressionPolicy.optional; } return compressionPolicy; } /** * Sets whether compression is optional or is disabled for clients. * * @param policy whether compression is optional or is disabled. 
*/ public static void setCompressionPolicy(SocketConnection.CompressionPolicy policy) { JiveGlobals.setProperty("xmpp.client.compression.policy", policy.toString()); } /** * Returns the Privacy list that overrides the default privacy list. This list affects * only this session and only for the duration of the session. * * @return the Privacy list that overrides the default privacy list. */ public PrivacyList getActiveList() { if (activeList != null) { try { return PrivacyListManager.getInstance().getPrivacyList(getUsername(), activeList); } catch (UserNotFoundException e) { Log.error(e.getMessage(), e); } } return null; } /** * Sets the Privacy list that overrides the default privacy list. This list affects * only this session and only for the duration of the session. * * @param activeList the Privacy list that overrides the default privacy list. */ public void setActiveList(PrivacyList activeList) { this.activeList = activeList != null ? activeList.getName() : null; if (ClusterManager.isClusteringStarted()) { // Track information about the session and share it with other cluster nodes Cache<String,ClientSessionInfo> cache = SessionManager.getInstance().getSessionInfoCache(); cache.put(getAddress().toString(), new ClientSessionInfo(this)); } } /** * Returns the default Privacy list used for the session's user. This list is * processed if there is no active list set for the session. * * @return the default Privacy list used for the session's user. */ public PrivacyList getDefaultList() { if (defaultList != null) { try { return PrivacyListManager.getInstance().getPrivacyList(getUsername(), defaultList); } catch (UserNotFoundException e) { Log.error(e.getMessage(), e); } } return null; } /** * Sets the default Privacy list used for the session's user. This list is * processed if there is no active list set for the session. * * @param defaultList the default Privacy list used for the session's user. 
*/ public void setDefaultList(PrivacyList defaultList) { // Do nothing if nothing has changed if ((this.defaultList == null && defaultList == null) || (defaultList != null && defaultList.getName().equals(this.defaultList))) { return; } this.defaultList = defaultList != null ? defaultList.getName() : null; if (ClusterManager.isClusteringStarted()) { // Track information about the session and share it with other cluster nodes Cache<String,ClientSessionInfo> cache = SessionManager.getInstance().getSessionInfoCache(); cache.put(getAddress().toString(), new ClientSessionInfo(this)); } } /** * Creates a session with an underlying connection and permission protection. * * @param serverName name of the server. * @param connection The connection we are proxying. * @param streamID unique identifier of this session. */ public LocalClientSession(String serverName, Connection connection, StreamID streamID) { super(serverName, connection, streamID); // Set an unavailable initial presence presence = new Presence(); presence.setType(Presence.Type.unavailable); } /** * Returns the username associated with this session. Use this information * with the user manager to obtain the user based on username. * * @return the username associated with this session * @throws org.jivesoftware.openfire.user.UserNotFoundException if a user is not associated with a session * (the session has not authenticated yet) */ public String getUsername() throws UserNotFoundException { if (authToken == null) { throw new UserNotFoundException(); } return getAddress().getNode(); } /** * Sets the new Authorization Token for this session. The session is not yet considered fully * authenticated (i.e. active) since a resource has not been binded at this point. This * message will be sent after SASL authentication was successful but yet resource binding * is required. * * @param auth the authentication token obtained from SASL authentication. 
*/ public void setAuthToken(AuthToken auth) { authToken = auth; } /** * Initialize the session with a valid authentication token and * resource name. This automatically upgrades the session's * status to authenticated and enables many features that are not * available until authenticated (obtaining managers for example). * * @param auth the authentication token obtained from the AuthFactory. * @param resource the resource this session authenticated under. */ public void setAuthToken(AuthToken auth, String resource) { setAddress(new JID(auth.getUsername(), getServerName(), resource)); authToken = auth; setStatus(Session.STATUS_AUTHENTICATED); // Set default privacy list for this session setDefaultList(PrivacyListManager.getInstance().getDefaultPrivacyList(auth.getUsername())); // Add session to the session manager. The session will be added to the routing table as well sessionManager.addSession(this); } /** * Initialize the session as an anonymous login. This automatically upgrades the session's * status to authenticated and enables many features that are not available until * authenticated (obtaining managers for example).<p> */ public void setAnonymousAuth() { // Anonymous users have a full JID. Use the random resource as the JID's node String resource = getAddress().getResource(); setAddress(new JID(resource, getServerName(), resource, true)); setStatus(Session.STATUS_AUTHENTICATED); if (authToken == null) { authToken = new AuthToken(resource, true); } // Add session to the session manager. The session will be added to the routing table as well sessionManager.addSession(this); } /** * Returns the authentication token associated with this session. * * @return the authentication token associated with this session (can be null). */ public AuthToken getAuthToken() { return authToken; } public boolean isAnonymousUser() { return authToken == null || authToken.isAnonymous(); } /** * Flag indicating if this session has been initialized once coming * online. 
Session initialization occurs after the session receives * the first "available" presence update from the client. Initialization * actions include pushing offline messages, presence subscription requests, * and presence statuses to the client. Initialization occurs only once * following the first available presence transition. * * @return True if the session has already been initializsed */ public boolean isInitialized() { return initialized; } /** * Sets the initialization state of the session. * * @param isInit True if the session has been initialized * @see #isInitialized */ public void setInitialized(boolean isInit) { initialized = isInit; } /** * Returns true if the session was available ever. * * @return true if the session was available ever. */ public boolean wasAvailable() { return wasAvailable; } /** * Returns true if the offline messages of the user should be sent to the user when * the user becomes online. If the user sent a disco request with node * "http://jabber.org/protocol/offline" before the available presence then do not * flood the user with the offline messages. If the user is connected from many resources * then if one of the sessions stopped the flooding then no session should flood the user. * * @return true if the offline messages of the user should be sent to the user when the user * becomes online. */ public boolean canFloodOfflineMessages() { if(offlineFloodStopped) { return false; } String username = getAddress().getNode(); for (ClientSession session : sessionManager.getSessions(username)) { if (session.isOfflineFloodStopped()) { return false; } } return true; } /** * Returns true if the user requested to not receive offline messages when sending * an available presence. The user may send a disco request with node * "http://jabber.org/protocol/offline" so that no offline messages are sent to the * user when he becomes online. 
If the user is connected from many resources then * if one of the sessions stopped the flooding then no session should flood the user. * * @return true if the user requested to not receive offline messages when sending * an available presence. */ public boolean isOfflineFloodStopped() { return offlineFloodStopped; } /** * Sets if the user requested to not receive offline messages when sending * an available presence. The user may send a disco request with node * "http://jabber.org/protocol/offline" so that no offline messages are sent to the * user when he becomes online. If the user is connected from many resources then * if one of the sessions stopped the flooding then no session should flood the user. * * @param offlineFloodStopped if the user requested to not receive offline messages when * sending an available presence. */ public void setOfflineFloodStopped(boolean offlineFloodStopped) { this.offlineFloodStopped = offlineFloodStopped; if (ClusterManager.isClusteringStarted()) { // Track information about the session and share it with other cluster nodes Cache<String,ClientSessionInfo> cache = SessionManager.getInstance().getSessionInfoCache(); cache.put(getAddress().toString(), new ClientSessionInfo(this)); } } /** * Obtain the presence of this session. * * @return The presence of this session or null if not authenticated */ public Presence getPresence() { return presence; } /** * Set the presence of this session * * @param presence The presence for the session */ public void setPresence(Presence presence) { Presence oldPresence = this.presence; this.presence = presence; if (oldPresence.isAvailable() && !this.presence.isAvailable()) { // The client is no longer available sessionManager.sessionUnavailable(this); // Mark that the session is no longer initialized. 
This means that if the user sends // an available presence again the session will be initialized again thus receiving // offline messages and offline presence subscription requests setInitialized(false); // Notify listeners that the session is no longer available PresenceEventDispatcher.unavailableSession(this, presence); } else if (!oldPresence.isAvailable() && this.presence.isAvailable()) { // The client is available sessionManager.sessionAvailable(this, presence); wasAvailable = true; // Notify listeners that the session is now available PresenceEventDispatcher.availableSession(this, presence); } else if (this.presence.isAvailable() && oldPresence.getPriority() != this.presence.getPriority()) { // The client has changed the priority of his presence sessionManager.changePriority(this, oldPresence.getPriority()); // Notify listeners that the priority of the session/resource has changed PresenceEventDispatcher.presenceChanged(this, presence); } else if (this.presence.isAvailable()) { // Notify listeners that the show or status value of the presence has changed PresenceEventDispatcher.presenceChanged(this, presence); } if (ClusterManager.isClusteringStarted()) { // Track information about the session and share it with other cluster nodes Cache<String,ClientSessionInfo> cache = SessionManager.getInstance().getSessionInfoCache(); cache.put(getAddress().toString(), new ClientSessionInfo(this)); } } @Override public String getAvailableStreamFeatures() { // Offer authenticate and registration only if TLS was not required or if required // then the connection is already secured if (conn.getTlsPolicy() == Connection.TLSPolicy.required && !conn.isSecure()) { return null; } StringBuilder sb = new StringBuilder(200); // Include Stream Compression Mechanism if (conn.getCompressionPolicy() != Connection.CompressionPolicy.disabled && !conn.isCompressed()) { sb.append( "<compression xmlns=\"http://jabber.org/features/compress\"><method>zlib</method></compression>"); } if 
(getAuthToken() == null) { // Advertise that the server supports Non-SASL Authentication sb.append("<auth xmlns=\"http://jabber.org/features/iq-auth\"/>"); // Advertise that the server supports In-Band Registration if (XMPPServer.getInstance().getIQRegisterHandler().isInbandRegEnabled()) { sb.append("<register xmlns=\"http://jabber.org/features/iq-register\"/>"); } } else { // If the session has been authenticated then offer resource binding // and session establishment sb.append("<bind xmlns=\"urn:ietf:params:xml:ns:xmpp-bind\"/>"); sb.append("<session xmlns=\"urn:ietf:params:xml:ns:xmpp-session\"/>"); } return sb.toString(); } /** * Increments the conflict by one. */ public int incrementConflictCount() { conflictCount++; return conflictCount; } @Override public boolean isMessageCarbonsEnabled() { return messageCarbonsEnabled; } @Override public void setMessageCarbonsEnabled(boolean enabled) { messageCarbonsEnabled = true; } /** * Returns true if the specified packet must not be blocked based on the active or default * privacy list rules. The active list will be tried first. If none was found then the * default list is going to be used. If no default list was defined for this user then * allow the packet to flow. * * @param packet the packet to analyze if it must be blocked. * @return true if the specified packet must be blocked. 
*/ @Override public boolean canProcess(Packet packet) { PrivacyList list = getActiveList(); if (list != null) { // If a privacy list is active then make sure that the packet is not blocked return !list.shouldBlockPacket(packet); } else { list = getDefaultList(); // There is no active list so check if there exists a default list and make // sure that the packet is not blocked return list == null || !list.shouldBlockPacket(packet); } } @Override public void deliver(Packet packet) throws UnauthorizedException { if (conn != null && !conn.isClosed()) { conn.deliver(packet); } } @Override public String toString() { return super.toString() + " presence: " + presence; } }
apache-2.0
brreitme/camel
components/camel-ignite/src/test/java/org/apache/camel/component/ignite/IgniteCacheTest.java
8916
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.ignite;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;

import javax.cache.Cache.Entry;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterators;
import com.google.common.collect.Lists;

import org.apache.camel.CamelException;
import org.apache.camel.component.ignite.cache.IgniteCacheOperation;
import org.apache.camel.util.ObjectHelper;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CachePeekMode;
import org.apache.ignite.cache.query.Query;
import org.apache.ignite.cache.query.ScanQuery;
import org.apache.ignite.lang.IgniteBiPredicate;
import org.junit.After;
import org.junit.Test;

import static com.google.common.truth.Truth.assert_;

/**
 * Integration tests for the camel-ignite cache endpoint ("ignite:cache:...").
 * Each test sends messages through the Camel ProducerTemplate and then verifies
 * the state of the underlying Ignite cache (or the returned body) directly.
 */
public class IgniteCacheTest extends AbstractIgniteTest {

    /** PUT with the key supplied in the IGNITE_CACHE_KEY header stores one entry. */
    @Test
    public void testAddEntry() {
        template.requestBodyAndHeader("ignite:cache:testcache1?operation=PUT", "1234", IgniteConstants.IGNITE_CACHE_KEY, "abcd");

        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(1);
        assert_().that(ignite().cache("testcache1").get("abcd")).isEqualTo("1234");
    }

    /** PUT with a Map body (and no key header) stores every map entry. */
    @Test
    public void testAddEntrySet() {
        template.requestBody("ignite:cache:testcache1?operation=PUT", ImmutableMap.of("abcd", "1234", "efgh", "5678"));

        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(2);
        assert_().that(ignite().cache("testcache1").get("abcd")).isEqualTo("1234");
        assert_().that(ignite().cache("testcache1").get("efgh")).isEqualTo("5678");
    }

    /** GET resolves the key from the body, or from the key header when present (header wins). */
    @Test
    public void testGetOne() {
        // Seed the cache with entry abcd -> 1234.
        testAddEntry();

        String result = template.requestBody("ignite:cache:testcache1?operation=GET", "abcd", String.class);
        assert_().that(result).isEqualTo("1234");

        result = template.requestBodyAndHeader("ignite:cache:testcache1?operation=GET", "this value won't be used", IgniteConstants.IGNITE_CACHE_KEY, "abcd", String.class);
        assert_().that(result).isEqualTo("1234");
    }

    /** GET with a Set body performs a getAll and returns a Map of key -> value. */
    @Test
    @SuppressWarnings("unchecked")
    public void testGetMany() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");

        // Seed 100 entries k0..k99 -> v0..v99.
        Set<String> keys = new HashSet<>();

        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }

        Map<String, String> result = template.requestBody("ignite:cache:testcache1?operation=GET", keys, Map.class);

        for (String k : keys) {
            assert_().that(result.get(k)).isEqualTo(k.replace("k", "v"));
        }
    }

    /** SIZE returns the number of entries in the cache, regardless of the body. */
    @Test
    public void testGetSize() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");

        Set<String> keys = new HashSet<>();

        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }

        Integer result = template.requestBody("ignite:cache:testcache1?operation=SIZE", keys, Integer.class);
        assert_().that(result).isEqualTo(100);
    }

    /** QUERY with a ScanQuery in the IGNITE_CACHE_QUERY header returns an Iterator over the matches. */
    @Test
    @SuppressWarnings("unchecked")
    public void testQuery() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");

        Set<String> keys = new HashSet<>();

        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }

        // Predicate matches k50..k99, i.e. exactly half of the entries.
        Query<Entry<String, String>> query = new ScanQuery<String, String>(new IgniteBiPredicate<String, String>() {
            private static final long serialVersionUID = 1L;

            @Override
            public boolean apply(String key, String value) {
                return Integer.parseInt(key.replace("k", "")) >= 50;
            }
        });

        Iterator<String> iter = template.requestBodyAndHeader("ignite:cache:testcache1?operation=QUERY", keys, IgniteConstants.IGNITE_CACHE_QUERY, query, Iterator.class);
        ArrayList<Object> results = Lists.newArrayList(Iterators.toArray(iter, Object.class));

        assert_().that(results.size()).isEqualTo(50);
    }

    /** With treatCollectionsAsCacheObjects=true, a Set body is used as a single cache key. */
    @Test
    public void testGetManyTreatCollectionsAsCacheObjects() {
        IgniteCache<Object, String> cache = ignite().getOrCreateCache("testcache1");

        Set<String> keys = new HashSet<>();

        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
            keys.add("k" + i);
        }

        // Also add a cache entry with the entire Set as a key.
        cache.put(keys, "---");

        String result = template.requestBody("ignite:cache:testcache1?operation=GET&treatCollectionsAsCacheObjects=true", keys, String.class);

        assert_().that(result).isEqualTo("---");
    }

    /** REMOVE resolves the key from the body, or from the key header when present. */
    @Test
    public void testRemoveEntry() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");

        cache.put("abcd", "1234");
        cache.put("efgh", "5678");

        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(2);

        template.requestBody("ignite:cache:testcache1?operation=REMOVE", "abcd");

        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(1);
        assert_().that(cache.get("abcd")).isNull();

        template.requestBodyAndHeader("ignite:cache:testcache1?operation=REMOVE", "this value won't be used", IgniteConstants.IGNITE_CACHE_KEY, "efgh");

        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(0);
        assert_().that(cache.get("efgh")).isNull();
    }

    /** CLEAR empties the cache regardless of the message body. */
    @Test
    public void testClearCache() {
        IgniteCache<String, String> cache = ignite().getOrCreateCache("testcache1");
        for (int i = 0; i < 100; i++) {
            cache.put("k" + i, "v" + i);
        }

        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(100);

        template.requestBody("ignite:cache:testcache1?operation=CLEAR", "this value won't be used");

        assert_().that(cache.size(CachePeekMode.ALL)).isEqualTo(0);
    }

    /** The IGNITE_CACHE_OPERATION header overrides the operation configured on the endpoint URI. */
    @Test
    public void testHeaderSetRemoveEntry() {
        // Seed the cache with entry abcd -> 1234.
        testAddEntry();

        String result = template.requestBody("ignite:cache:testcache1?operation=GET", "abcd", String.class);
        assert_().that(result).isEqualTo("1234");

        result = template.requestBodyAndHeader("ignite:cache:testcache1?operation=GET", "abcd", IgniteConstants.IGNITE_CACHE_OPERATION, IgniteCacheOperation.REMOVE, String.class);

        // The body has not changed, but the cache entry is gone.
        assert_().that(result).isEqualTo("abcd");
        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(0);
    }

    /** With failIfInexistentCache=true, producing to a missing cache raises a CamelException. */
    @Test
    public void testAddEntryNoCacheCreation() {
        try {
            template.requestBodyAndHeader("ignite:cache:testcache2?operation=PUT&failIfInexistentCache=true", "1234", IgniteConstants.IGNITE_CACHE_KEY, "abcd");
        } catch (Exception e) {
            assert_().that(ObjectHelper.getException(CamelException.class, e).getMessage()).startsWith("Ignite cache testcache2 doesn't exist");
            return;
        }

        fail("Should have thrown an exception");
    }

    /** With propagateIncomingBodyIfNoReturnValue=false, a void operation yields a null body. */
    @Test
    public void testAddEntryDoNotPropagateIncomingBody() {
        Object result = template.requestBodyAndHeader("ignite:cache:testcache1?operation=PUT&propagateIncomingBodyIfNoReturnValue=false", "1234", IgniteConstants.IGNITE_CACHE_KEY, "abcd", Object.class);

        assert_().that(ignite().cache("testcache1").size(CachePeekMode.ALL)).isEqualTo(1);
        assert_().that(ignite().cache("testcache1").get("abcd")).isEqualTo("1234");
        assert_().that(result).isNull();
    }

    @Override
    public boolean isCreateCamelContextPerClass() {
        // Reuse a single CamelContext (and Ignite node) across all tests in this class.
        return true;
    }

    /** Clears both test caches after each test so tests stay independent. */
    @After
    public void deleteCaches() {
        for (String cacheName : ImmutableSet.<String> of("testcache1", "testcache2")) {
            IgniteCache<?, ?> cache = ignite().cache(cacheName);
            if (cache == null) {
                continue;
            }
            cache.clear();
        }
    }
}
apache-2.0
Drifftr/devstudio-tooling-bps
plugins/org.eclipse.bpel.ui/src/org/eclipse/bpel/ui/commands/SetFaultNamespaceCommand.java
1265
/******************************************************************************* * Copyright (c) 2005, 2012 IBM Corporation and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * IBM Corporation - initial API and implementation *******************************************************************************/ package org.eclipse.bpel.ui.commands; import org.eclipse.bpel.ui.IBPELUIConstants; import org.eclipse.bpel.ui.util.ModelHelper; import org.eclipse.emf.ecore.EObject; /** * Sets the namespace half of the faultName property of a Catch activity. */ public class SetFaultNamespaceCommand extends SetCommand { public String getDefaultLabel() { return IBPELUIConstants.CMD_EDIT_FAULTNAMESPACE; } public SetFaultNamespaceCommand(EObject target, String newFaultNS) { super(target, newFaultNS); } @Override public Object get() { return ModelHelper.getFaultNamespace(fTarget); } @Override public void set(Object o) { ModelHelper.setFaultNamespace(fTarget, (String)o); } }
apache-2.0
stevem999/gocd
server/src/main/java/com/thoughtworks/go/server/messaging/plugin/PluginNotificationListenerFactory.java
1891
/*************************GO-LICENSE-START********************************* * Copyright 2014 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. *************************GO-LICENSE-END***********************************/ package com.thoughtworks.go.server.messaging.plugin; import com.thoughtworks.go.util.SystemEnvironment; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Component; @Component public class PluginNotificationListenerFactory { private PluginNotificationQueue pluginNotificationQueue; private SystemEnvironment systemEnvironment; private PluginNotificationService pluginNotificationService; @Autowired public PluginNotificationListenerFactory(PluginNotificationQueue pluginNotificationQueue, SystemEnvironment systemEnvironment, PluginNotificationService pluginNotificationService) { this.pluginNotificationQueue = pluginNotificationQueue; this.systemEnvironment = systemEnvironment; this.pluginNotificationService = pluginNotificationService; } public void init() { int numberOfListeners = systemEnvironment.getNumberOfPluginNotificationListener(); for (int i = 0; i < numberOfListeners; i++) { pluginNotificationQueue.addListener(new PluginNotificationListener(pluginNotificationService)); } } }
apache-2.0
jmluy/elasticsearch
x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/query/ShapeQueryBuilder.java
6284
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0; you may not use this file except in compliance with the Elastic License
 * 2.0.
 */
package org.elasticsearch.xpack.spatial.index.query;

import org.apache.lucene.search.ConstantScoreQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.geo.GeometryParser;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.geometry.Geometry;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.query.AbstractGeometryQueryBuilder;
import org.elasticsearch.index.query.QueryRewriteContext;
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xpack.spatial.index.mapper.ShapeQueryable;

import java.io.IOException;
import java.text.ParseException;
import java.util.Objects;
import java.util.function.Supplier;

/**
 * Derived {@link AbstractGeometryQueryBuilder} that builds a {@code x, y} Shape Query. It
 * can be applied to any {@link MappedFieldType} that implements {@link ShapeQueryable}.
 *
 * GeoJson and WKT shape definitions are supported.
 */
public class ShapeQueryBuilder extends AbstractGeometryQueryBuilder<ShapeQueryBuilder> {
    // Query name as it appears in the search DSL, e.g. {"query": {"shape": {...}}}.
    public static final String NAME = "shape";

    /**
     * Creates a new ShapeQueryBuilder whose Query will be against the given
     * field name using the given Shape
     *
     * @param fieldName
     *            Name of the field that will be queried
     * @param shape
     *            Shape used in the Query
     */
    public ShapeQueryBuilder(String fieldName, Geometry shape) {
        super(fieldName, shape);
    }

    /**
     * Variant used internally when the shape is resolved lazily (e.g. while an
     * indexed shape is being fetched); the supplier yields the geometry once
     * available.
     */
    protected ShapeQueryBuilder(String fieldName, Supplier<Geometry> shapeSupplier, String indexedShapeId) {
        super(fieldName, shapeSupplier, indexedShapeId);
    }

    /**
     * Creates a new ShapeQueryBuilder whose Query will be against the given
     * field name and will use the Shape found with the given ID
     *
     * @param fieldName
     *            Name of the field that will be filtered
     * @param indexedShapeId
     *            ID of the indexed Shape that will be used in the Query
     */
    public ShapeQueryBuilder(String fieldName, String indexedShapeId) {
        super(fieldName, indexedShapeId);
    }

    /** Deserialization constructor: reads all state via the superclass. */
    public ShapeQueryBuilder(StreamInput in) throws IOException {
        super(in);
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // All serialized state lives in the superclass; nothing extra to write.
        super.doWriteTo(out);
    }

    // Factory hooks used by the superclass to create copies of this builder.
    @Override
    protected ShapeQueryBuilder newShapeQueryBuilder(String fieldName, Geometry shape) {
        return new ShapeQueryBuilder(fieldName, shape);
    }

    @Override
    protected ShapeQueryBuilder newShapeQueryBuilder(String fieldName, Supplier<Geometry> shapeSupplier, String indexedShapeId) {
        return new ShapeQueryBuilder(fieldName, shapeSupplier, indexedShapeId);
    }

    /**
     * Builds the Lucene query by delegating to the field type's
     * {@link ShapeQueryable#shapeQuery} implementation. Rejects field types
     * that are not shape-queryable with a {@link QueryShardException}.
     * The result is wrapped in a {@link ConstantScoreQuery}: shape matching is
     * a filter, scoring is not meaningful here.
     */
    @Override
    @SuppressWarnings({ "rawtypes" })
    public Query buildShapeQuery(SearchExecutionContext context, MappedFieldType fieldType) {
        if ((fieldType instanceof ShapeQueryable) == false) {
            throw new QueryShardException(
                context,
                "Field [" + fieldName + "] is of unsupported type [" + fieldType.typeName() + "] for [" + NAME + "] query"
            );
        }
        final ShapeQueryable ft = (ShapeQueryable) fieldType;
        return new ConstantScoreQuery(ft.shapeQuery(shape, fieldType.name(), relation, context));
    }

    @Override
    public void doShapeQueryXContent(XContentBuilder builder, Params params) throws IOException {
        // noop — no subclass-specific fields to render; superclass emits everything.
    }

    @Override
    protected ShapeQueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws IOException {
        return (ShapeQueryBuilder) super.doRewrite(queryRewriteContext);
    }

    @Override
    protected boolean doEquals(ShapeQueryBuilder other) {
        // No subclass state: equality is entirely determined by the superclass.
        return super.doEquals((AbstractGeometryQueryBuilder) other);
    }

    @Override
    protected int doHashCode() {
        return Objects.hash(super.doHashCode());
    }

    @Override
    public String getWriteableName() {
        return NAME;
    }

    /**
     * Parse-time parameter holder; recognizes the "shape" field and parses it
     * into a {@link Geometry} (GeoJSON or WKT, per the lenient GeometryParser
     * flags below).
     */
    private static class ParsedShapeQueryParams extends ParsedGeometryQueryParams {
        private final GeometryParser geometryParser = new GeometryParser(true, true, true);

        @Override
        protected boolean parseXContentField(XContentParser parser) throws IOException {
            if (SHAPE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) {
                try {
                    this.shape = geometryParser.parse(parser);
                } catch (ParseException e) {
                    // Surface geometry syntax errors as IOExceptions, matching
                    // the method's declared contract.
                    throw new IOException(e);
                }
                return true;
            }
            return false;
        }
    }

    /**
     * Parses a "shape" query from the search DSL. Chooses the inline-shape or
     * indexed-shape constructor depending on which was supplied, then copies
     * all optional parameters onto the builder.
     */
    public static ShapeQueryBuilder fromXContent(XContentParser parser) throws IOException {
        ParsedShapeQueryParams pgsqb = (ParsedShapeQueryParams) AbstractGeometryQueryBuilder.parsedParamsFromXContent(
            parser,
            new ParsedShapeQueryParams()
        );
        ShapeQueryBuilder builder;
        if (pgsqb.shape != null) {
            builder = new ShapeQueryBuilder(pgsqb.fieldName, pgsqb.shape);
        } else {
            builder = new ShapeQueryBuilder(pgsqb.fieldName, pgsqb.id);
        }
        if (pgsqb.index != null) {
            builder.indexedShapeIndex(pgsqb.index);
        }
        if (pgsqb.shapePath != null) {
            builder.indexedShapePath(pgsqb.shapePath);
        }
        if (pgsqb.shapeRouting != null) {
            builder.indexedShapeRouting(pgsqb.shapeRouting);
        }
        if (pgsqb.relation != null) {
            builder.relation(pgsqb.relation);
        }
        if (pgsqb.queryName != null) {
            builder.queryName(pgsqb.queryName);
        }
        builder.boost(pgsqb.boost);
        builder.ignoreUnmapped(pgsqb.ignoreUnmapped);
        return builder;
    }
}
apache-2.0
pushyamig/sakai
gradebookng/tool/src/java/org/sakaiproject/gradebookng/business/helpers/ImportGradesHelper.java
13522
package org.sakaiproject.gradebookng.business.helpers;

import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.apache.poi.ss.usermodel.WorkbookFactory;
import org.sakaiproject.entity.api.ResourceProperties;
import org.sakaiproject.gradebookng.business.model.GbGradeInfo;
import org.sakaiproject.gradebookng.business.model.GbStudentGradeInfo;
import org.sakaiproject.gradebookng.business.model.ImportColumn;
import org.sakaiproject.gradebookng.business.model.ImportedGrade;
import org.sakaiproject.gradebookng.business.model.ImportedGradeItem;
import org.sakaiproject.gradebookng.business.model.ImportedGradeWrapper;
import org.sakaiproject.gradebookng.business.model.ProcessedGradeItem;
import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemDetail;
import org.sakaiproject.gradebookng.business.model.ProcessedGradeItemStatus;
import org.sakaiproject.gradebookng.tool.model.AssignmentStudentGradeInfo;
import org.sakaiproject.service.gradebook.shared.Assignment;
import org.sakaiproject.util.BaseResourcePropertiesEdit;

import au.com.bytecode.opencsv.CSVReader;
import lombok.extern.apachecommons.CommonsLog;

/**
 * Parses spreadsheet (CSV/XLS) grade imports and reconciles the imported
 * columns against the gradebook's current assignments and grades.
 *
 * Created by chmaurer on 1/21/15.
 */
@CommonsLog
public class ImportGradesHelper extends BaseImportHelper {

    // Header titles that identify the two fixed student-identity columns.
    private static final String IMPORT_USER_ID = "Student ID";
    private static final String IMPORT_USER_NAME = "Student Name";

    protected static final String ASSIGNMENT_HEADER_PATTERN = "{0} [{1}]";
    protected static final String ASSIGNMENT_HEADER_COMMENT_PATTERN = "*/ {0} Comments */";
    protected static final String HEADER_STANDARD_PATTERN = "{0}";

    /**
     * Parse a CSV into a list of ImportedGrade objects.
     *
     * @param is InputStream of the data to parse
     * @param userMap map of student eid to student uuid, used to resolve users
     * @return the parsed wrapper, or null on any parse error (errors are logged)
     */
    public static ImportedGradeWrapper parseCsv(final InputStream is, final Map<String, String> userMap) {

        // manually parse method so we can support arbitrary columns
        final CSVReader reader = new CSVReader(new InputStreamReader(is));
        String[] nextLine;
        int lineCount = 0;
        final List<ImportedGrade> list = new ArrayList<>();
        Map<Integer, ImportColumn> mapping = null;

        try {
            while ((nextLine = reader.readNext()) != null) {
                if (lineCount == 0) {
                    // header row, capture it
                    mapping = mapHeaderRow(nextLine);
                } else {
                    // map the fields into the object
                    list.add(mapLine(nextLine, mapping, userMap));
                }
                lineCount++;
            }
        } catch (final Exception e) {
            log.error("Error reading imported file: " + e.getClass() + " : " + e.getMessage());
            return null;
        } finally {
            try {
                reader.close();
            } catch (final IOException e) {
                // Parsing already completed; a close failure is not fatal, so
                // log it instead of dumping a stack trace to stderr.
                log.warn("Could not close CSV reader: " + e.getMessage(), e);
            }
        }

        // An empty file has no header row; bail out like any other parse failure
        // instead of throwing an NPE on mapping.values() below.
        if (mapping == null) {
            log.error("Error reading imported file: no header row found");
            return null;
        }

        final ImportedGradeWrapper importedGradeWrapper = new ImportedGradeWrapper();
        importedGradeWrapper.setColumns(mapping.values());
        importedGradeWrapper.setImportedGrades(list);
        return importedGradeWrapper;
    }

    /**
     * Parse an XLS into a list of ImportedGrade objects. Note that only the
     * first sheet of the Excel file is supported.
     *
     * @param is InputStream of the data to parse
     * @param userMap map of student eid to student uuid, used to resolve users
     * @return the parsed wrapper, or null on any parse error (errors are logged)
     */
    public static ImportedGradeWrapper parseXls(final InputStream is, final Map<String, String> userMap) {

        int lineCount = 0;
        final List<ImportedGrade> list = new ArrayList<>();
        Map<Integer, ImportColumn> mapping = null;

        try {
            final Workbook wb = WorkbookFactory.create(is);
            final Sheet sheet = wb.getSheetAt(0);
            for (final Row row : sheet) {

                final String[] r = convertRow(row);

                if (lineCount == 0) {
                    // header row, capture it
                    mapping = mapHeaderRow(r);
                } else {
                    // map the fields into the object
                    list.add(mapLine(r, mapping, userMap));
                }
                lineCount++;
            }
        } catch (final Exception e) {
            log.error("Error reading imported file: " + e.getClass() + " : " + e.getMessage());
            return null;
        }

        // An empty sheet has no header row; treat as a parse failure (see parseCsv).
        if (mapping == null) {
            log.error("Error reading imported file: no header row found");
            return null;
        }

        final ImportedGradeWrapper importedGradeWrapper = new ImportedGradeWrapper();
        importedGradeWrapper.setColumns(mapping.values());
        importedGradeWrapper.setImportedGrades(list);
        return importedGradeWrapper;
    }

    /**
     * Takes a row of data and maps it into the appropriate ImportedGrade
     * properties. We have a fixed list of properties (student id/name and the
     * recognized grade/comment columns); anything else goes into
     * ResourceProperties.
     *
     * @param line raw cell values for one row
     * @param mapping column index to ImportColumn, built from the header row
     * @param userMap map of student eid to student uuid
     * @return the populated ImportedGrade
     */
    private static ImportedGrade mapLine(final String[] line, final Map<Integer, ImportColumn> mapping, final Map<String, String> userMap) {

        final ImportedGrade grade = new ImportedGrade();
        final ResourceProperties p = new BaseResourcePropertiesEdit();

        for (final Map.Entry<Integer, ImportColumn> entry : mapping.entrySet()) {
            final int i = entry.getKey();
            final ImportColumn importColumn = entry.getValue();

            // In case there aren't enough data fields in the line to match up with the number of columns needed.
            // Trim in case some whitespace crept in.
            String lineVal = null;
            if (i < line.length) {
                lineVal = trim(line[i]);
            }

            // now check each of the main properties in turn to determine which one to set, otherwise set into props
            if (StringUtils.equals(importColumn.getColumnTitle(), IMPORT_USER_ID)) {
                grade.setStudentEid(lineVal);
                grade.setStudentUuid(userMap.get(lineVal));
            } else if (StringUtils.equals(importColumn.getColumnTitle(), IMPORT_USER_NAME)) {
                grade.setStudentName(lineVal);
            } else if (ImportColumn.TYPE_ITEM_WITH_POINTS == importColumn.getType()) {
                getOrCreateGradeItem(grade, importColumn.getColumnTitle()).setGradeItemScore(lineVal);
            } else if (ImportColumn.TYPE_ITEM_WITH_COMMENTS == importColumn.getType()) {
                getOrCreateGradeItem(grade, importColumn.getColumnTitle()).setGradeItemComment(lineVal);
            } else {
                // only add if not blank
                if (StringUtils.isNotBlank(lineVal)) {
                    p.addProperty(importColumn.getColumnTitle(), lineVal);
                }
            }
        }

        grade.setProperties(p);
        return grade;
    }

    /**
     * Fetches the row's grade item for the given assignment, creating and
     * registering it on the first access. (Score and comment columns for the
     * same assignment share one ImportedGradeItem.)
     */
    private static ImportedGradeItem getOrCreateGradeItem(final ImportedGrade grade, final String assignmentName) {
        ImportedGradeItem importedGradeItem = grade.getGradeItemMap().get(assignmentName);
        if (importedGradeItem == null) {
            importedGradeItem = new ImportedGradeItem();
            importedGradeItem.setGradeItemName(assignmentName);
            grade.getGradeItemMap().put(assignmentName, importedGradeItem);
        }
        return importedGradeItem;
    }

    /**
     * Reconciles the imported columns against the existing assignments and
     * grades, producing one ProcessedGradeItem per assignment with a status
     * (new / update / external / no-change) and the per-student details.
     *
     * @param importedGradeWrapper parsed import data
     * @param assignments the gradebook's current assignments
     * @param currentGrades the gradebook's current per-student grades
     * @return the processed items, one per imported assignment column pair
     */
    public static List<ProcessedGradeItem> processImportedGrades(final ImportedGradeWrapper importedGradeWrapper,
            final List<Assignment> assignments, final List<GbStudentGradeInfo> currentGrades) {

        final List<ProcessedGradeItem> processedGradeItems = new ArrayList<>();
        final Map<String, Assignment> assignmentNameMap = new HashMap<>();
        final Map<String, ProcessedGradeItem> assignmentProcessedGradeItemMap = new HashMap<>();

        final Map<Long, AssignmentStudentGradeInfo> transformedGradeMap = transformCurrentGrades(currentGrades);

        // Map the assignment name back to the Id
        for (final Assignment assignment : assignments) {
            assignmentNameMap.put(assignment.getName(), assignment);
        }

        for (final ImportColumn column : importedGradeWrapper.getColumns()) {
            boolean needsAdded = false;
            final String assignmentName = column.getColumnTitle();

            // Score and comment columns for the same assignment share one processed item.
            ProcessedGradeItem processedGradeItem = assignmentProcessedGradeItemMap.get(assignmentName);
            if (processedGradeItem == null) {
                processedGradeItem = new ProcessedGradeItem();
                needsAdded = true;
            }

            final Assignment assignment = assignmentNameMap.get(assignmentName);
            final ProcessedGradeItemStatus status = determineStatus(column, assignment, importedGradeWrapper, transformedGradeMap);

            if (column.getType() == ImportColumn.TYPE_ITEM_WITH_POINTS) {
                processedGradeItem.setItemTitle(assignmentName);
                processedGradeItem.setItemPointValue(column.getPoints());
                processedGradeItem.setStatus(status);
            } else if (column.getType() == ImportColumn.TYPE_ITEM_WITH_COMMENTS) {
                processedGradeItem.setCommentLabel(assignmentName + " Comments");
                processedGradeItem.setCommentStatus(status);
            } else {
                // Just get out
                log.warn("Bad column type - " + column.getType() + ". Skipping.");
                continue;
            }

            if (assignment != null) {
                processedGradeItem.setItemId(assignment.getId());
            }

            // Collect the per-student scores/comments for this assignment.
            final List<ProcessedGradeItemDetail> processedGradeItemDetails = new ArrayList<>();
            for (final ImportedGrade importedGrade : importedGradeWrapper.getImportedGrades()) {
                final ImportedGradeItem importedGradeItem = importedGrade.getGradeItemMap().get(assignmentName);
                if (importedGradeItem != null) {
                    final ProcessedGradeItemDetail processedGradeItemDetail = new ProcessedGradeItemDetail();
                    processedGradeItemDetail.setStudentEid(importedGrade.getStudentEid());
                    processedGradeItemDetail.setStudentUuid(importedGrade.getStudentUuid());
                    processedGradeItemDetail.setGrade(importedGradeItem.getGradeItemScore());
                    processedGradeItemDetail.setComment(importedGradeItem.getGradeItemComment());
                    processedGradeItemDetails.add(processedGradeItemDetail);
                }
            }
            processedGradeItem.setProcessedGradeItemDetails(processedGradeItemDetails);

            if (needsAdded) {
                processedGradeItems.add(processedGradeItem);
                assignmentProcessedGradeItemMap.put(assignmentName, processedGradeItem);
            }
        }

        return processedGradeItems;
    }

    /**
     * Determines the status of an imported column: NEW when the assignment does
     * not exist yet, EXTERNAL when it is owned by an external app, UPDATE when
     * any imported value differs from the current grade/comment, otherwise NA.
     */
    private static ProcessedGradeItemStatus determineStatus(final ImportColumn column, final Assignment assignment,
            final ImportedGradeWrapper importedGradeWrapper,
            final Map<Long, AssignmentStudentGradeInfo> transformedGradeMap) {

        ProcessedGradeItemStatus status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UNKNOWN);

        if (assignment == null) {
            status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_NEW);
        } else if (assignment.getExternalId() != null) {
            status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_EXTERNAL, assignment.getExternalAppName());
        } else {
            for (final ImportedGrade importedGrade : importedGradeWrapper.getImportedGrades()) {
                final AssignmentStudentGradeInfo assignmentStudentGradeInfo = transformedGradeMap.get(assignment.getId());
                final ImportedGradeItem importedGradeItem = importedGrade.getGradeItemMap().get(column.getColumnTitle());

                String actualScore = null;
                String actualComment = null;
                if (assignmentStudentGradeInfo != null) {
                    final GbGradeInfo actualGradeInfo = assignmentStudentGradeInfo.getStudentGrades().get(importedGrade.getStudentEid());
                    if (actualGradeInfo != null) {
                        actualScore = actualGradeInfo.getGrade();
                        actualComment = actualGradeInfo.getGradeComment();
                    }
                }

                String importedScore = null;
                String importedComment = null;
                if (importedGradeItem != null) {
                    importedScore = importedGradeItem.getGradeItemScore();
                    importedComment = importedGradeItem.getGradeItemComment();
                }

                if (column.getType() == ImportColumn.TYPE_ITEM_WITH_POINTS) {
                    // Strip a trailing ".0" so "5" and "5.0" compare equal.
                    final String trimmedImportedScore = StringUtils.removeEnd(importedScore, ".0");
                    final String trimmedActualScore = StringUtils.removeEnd(actualScore, ".0");
                    if (trimmedImportedScore != null && !trimmedImportedScore.equals(trimmedActualScore)) {
                        status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UPDATE);
                        break;
                    }
                } else if (column.getType() == ImportColumn.TYPE_ITEM_WITH_COMMENTS) {
                    if (importedComment != null && !importedComment.equals(actualComment)) {
                        status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_UPDATE);
                        break;
                    }
                }
            }

            // If we get here, must not have been any changes
            if (status.getStatusCode() == ProcessedGradeItemStatus.STATUS_UNKNOWN) {
                status = new ProcessedGradeItemStatus(ProcessedGradeItemStatus.STATUS_NA);
            }

            // TODO - What about if a user was added to the import file?
            // That probably means that actualGradeInfo from up above is null...but what do I do?
        }
        return status;
    }

    /**
     * Pivots the per-student grade list into a per-assignment map so that
     * determineStatus can look up a student's current grade by assignment id.
     */
    private static Map<Long, AssignmentStudentGradeInfo> transformCurrentGrades(final List<GbStudentGradeInfo> currentGrades) {
        final Map<Long, AssignmentStudentGradeInfo> assignmentMap = new HashMap<>();

        for (final GbStudentGradeInfo studentGradeInfo : currentGrades) {
            for (final Map.Entry<Long, GbGradeInfo> entry : studentGradeInfo.getGrades().entrySet()) {
                final Long assignmentId = entry.getKey();
                AssignmentStudentGradeInfo assignmentStudentGradeInfo = assignmentMap.get(assignmentId);
                if (assignmentStudentGradeInfo == null) {
                    assignmentStudentGradeInfo = new AssignmentStudentGradeInfo();
                    assignmentStudentGradeInfo.setAssignmemtId(assignmentId);
                    assignmentMap.put(assignmentId, assignmentStudentGradeInfo);
                }
                assignmentStudentGradeInfo.addGrade(studentGradeInfo.getStudentEid(), entry.getValue());
            }
        }

        return assignmentMap;
    }
}
apache-2.0
michael-simons/spring-boot
spring-boot-project/spring-boot-autoconfigure/src/test/java/org/springframework/boot/autoconfigure/jersey/JerseyAutoConfigurationDefaultFilterPathTests.java
3099
/*
 * Copyright 2012-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.boot.autoconfigure.jersey;

import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

import javax.ws.rs.GET;
import javax.ws.rs.Path;

import org.glassfish.jersey.server.ResourceConfig;
import org.junit.jupiter.api.Test;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.context.PropertyPlaceholderAutoConfiguration;
import org.springframework.boot.autoconfigure.web.servlet.ServletWebServerFactoryAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.annotation.DirtiesContext;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Tests for {@link JerseyAutoConfiguration} when using custom servlet paths.
 * Runs Jersey as a filter ({@code spring.jersey.type=filter}) against a real
 * server on a random port.
 *
 * @author Dave Syer
 */
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT, properties = "spring.jersey.type=filter")
@DirtiesContext
class JerseyAutoConfigurationDefaultFilterPathTests {

	@Autowired
	private TestRestTemplate restTemplate;

	// With no application path configured, the resource should be reachable
	// directly under the root, so GET /hello must return 200.
	@Test
	void contextLoads() {
		ResponseEntity<String> entity = this.restTemplate.getForEntity("/hello", String.class);
		assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
	}

	/**
	 * Minimal Jersey application: acts as both the {@link ResourceConfig}
	 * (registering itself) and the single {@code /hello} resource.
	 */
	@MinimalWebConfiguration
	@Path("/hello")
	public static class Application extends ResourceConfig {

		// Resolved from the environment; defaults to "World" when the
		// "message" property is not set.
		@Value("${message:World}")
		private String msg;

		Application() {
			register(Application.class);
		}

		@GET
		public String message() {
			return "Hello " + this.msg;
		}

		static void main(String[] args) {
			SpringApplication.run(Application.class, args);
		}

	}

	/**
	 * Meta-annotation pulling in only the auto-configurations this test needs
	 * (embedded servlet container, Jersey, property placeholders).
	 */
	@Target(ElementType.TYPE)
	@Retention(RetentionPolicy.RUNTIME)
	@Documented
	@Configuration
	@Import({ ServletWebServerFactoryAutoConfiguration.class, JerseyAutoConfiguration.class,
			PropertyPlaceholderAutoConfiguration.class })
	protected @interface MinimalWebConfiguration {

	}

}
apache-2.0
apache/flink
flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/MultipleInputSelectionHandler.java
8181
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.streaming.runtime.io;

import org.apache.flink.annotation.Internal;
import org.apache.flink.streaming.api.operators.InputSelectable;
import org.apache.flink.streaming.api.operators.InputSelection;

import javax.annotation.Nullable;

import java.io.IOException;

import static org.apache.flink.util.Preconditions.checkArgument;
import static org.apache.flink.util.Preconditions.checkState;

/**
 * This handler is mainly used for selecting the next available input index in {@link
 * StreamMultipleInputProcessor}.
 *
 * <p>All per-input state is kept as long bitmasks, with bit {@code i}
 * corresponding to input index {@code i}:
 *
 * <ul>
 *   <li>{@code selectedInputsMask} — inputs the (optional) {@link InputSelectable} currently wants
 *       to read from;
 *   <li>{@code availableInputsMask} — inputs that currently have data available;
 *   <li>{@code notFinishedInputsMask} — inputs that have not yet reached END_OF_INPUT;
 *   <li>{@code dataFinishedButNotPartition} — inputs that reported END_OF_DATA (or STOPPED) but not
 *       yet END_OF_INPUT.
 * </ul>
 */
@Internal
public class MultipleInputSelectionHandler {
    // if we directly use Long.SIZE, calculation of allSelectedMask will overflow
    public static final int MAX_SUPPORTED_INPUT_COUNT = Long.SIZE - 1;

    // Optional user-supplied selector; when null the handler always selects all inputs.
    @Nullable private final InputSelectable inputSelectable;

    // Inputs currently selected for reading; starts as "all inputs".
    private long selectedInputsMask = InputSelection.ALL.getInputMask();

    // Mask with one bit set per configured input; fixed at construction.
    private final long allSelectedMask;

    private long availableInputsMask;

    private long notFinishedInputsMask;

    private long dataFinishedButNotPartition;

    // Whether END_OF_DATA should drain (true) or stop (false); flipped to false
    // once any input reports STOPPED.
    private boolean drainOnEndOfData = true;

    /**
     * Tracks which selection strategy {@link #nextSelection()} must use; transitions happen in the
     * constructor and in {@link #updateModeOnEndOfData()}.
     */
    private enum OperatingMode {
        NO_INPUT_SELECTABLE,
        INPUT_SELECTABLE_PRESENT_NO_DATA_INPUTS_FINISHED,
        INPUT_SELECTABLE_PRESENT_SOME_DATA_INPUTS_FINISHED,
        ALL_DATA_INPUTS_FINISHED;
    }

    private OperatingMode operatingMode;

    /**
     * @param inputSelectable optional selector driving which inputs are read next; null means all
     *     inputs are always selected
     * @param inputCount number of inputs, at most {@link #MAX_SUPPORTED_INPUT_COUNT}
     */
    public MultipleInputSelectionHandler(
            @Nullable InputSelectable inputSelectable, int inputCount) {
        checkSupportedInputCount(inputCount);
        this.inputSelectable = inputSelectable;
        // Low inputCount bits set; safe because inputCount <= Long.SIZE - 1.
        this.allSelectedMask = (1L << inputCount) - 1;
        this.availableInputsMask = allSelectedMask;
        this.notFinishedInputsMask = allSelectedMask;
        this.dataFinishedButNotPartition = 0;
        if (inputSelectable != null) {
            this.operatingMode = OperatingMode.INPUT_SELECTABLE_PRESENT_NO_DATA_INPUTS_FINISHED;
        } else {
            this.operatingMode = OperatingMode.NO_INPUT_SELECTABLE;
        }
    }

    /** Validates that the input count fits in the bitmask representation. */
    public static void checkSupportedInputCount(int inputCount) {
        checkArgument(
                inputCount <= MAX_SUPPORTED_INPUT_COUNT,
                "Only up to %s inputs are supported at once, while encountered %s",
                MAX_SUPPORTED_INPUT_COUNT,
                inputCount);
    }

    /**
     * Folds the status reported by input {@code inputIndex} into the handler's state, refreshes the
     * selection, and returns the overall status of the whole multi-input processor.
     */
    public DataInputStatus updateStatusAndSelection(DataInputStatus inputStatus, int inputIndex)
            throws IOException {
        switch (inputStatus) {
            case MORE_AVAILABLE:
                nextSelection();
                // The input just produced data, so it must still be marked available.
                checkState(checkBitMask(availableInputsMask, inputIndex));
                return DataInputStatus.MORE_AVAILABLE;
            case NOTHING_AVAILABLE:
                availableInputsMask = unsetBitMask(availableInputsMask, inputIndex);
                break;
            case STOPPED:
                this.drainOnEndOfData = false;
                // fall through
            case END_OF_DATA:
                dataFinishedButNotPartition = setBitMask(dataFinishedButNotPartition, inputIndex);
                updateModeOnEndOfData();
                break;
            case END_OF_INPUT:
                // The partition itself is finished: clear both trackers for this input.
                dataFinishedButNotPartition =
                        unsetBitMask(dataFinishedButNotPartition, inputIndex);
                notFinishedInputsMask = unsetBitMask(notFinishedInputsMask, inputIndex);
                break;
            default:
                throw new UnsupportedOperationException("Unsupported inputStatus = " + inputStatus);
        }
        nextSelection();
        return calculateOverallStatus(inputStatus);
    }

    /**
     * Advances the operating mode after an END_OF_DATA/STOPPED: either all data inputs are now
     * finished, or (with a selector present) at least one of them is.
     */
    private void updateModeOnEndOfData() {
        // An input counts as data-finished if it reported END_OF_DATA or already END_OF_INPUT.
        boolean allDataInputsFinished =
                ((dataFinishedButNotPartition | ~notFinishedInputsMask) & allSelectedMask)
                        == allSelectedMask;
        if (allDataInputsFinished) {
            this.operatingMode = OperatingMode.ALL_DATA_INPUTS_FINISHED;
        } else if (this.operatingMode
                == OperatingMode.INPUT_SELECTABLE_PRESENT_NO_DATA_INPUTS_FINISHED) {
            this.operatingMode = OperatingMode.INPUT_SELECTABLE_PRESENT_SOME_DATA_INPUTS_FINISHED;
        }
    }

    /**
     * Derives the overall processor status from the per-input state after an update.
     *
     * @throws IOException if every selected input is already finished, which would deadlock the
     *     processor
     */
    private DataInputStatus calculateOverallStatus(DataInputStatus updatedStatus)
            throws IOException {
        if (areAllInputsFinished()) {
            return DataInputStatus.END_OF_INPUT;
        }

        if (updatedStatus == DataInputStatus.END_OF_DATA
                && this.operatingMode == OperatingMode.ALL_DATA_INPUTS_FINISHED) {
            // Last data input just finished: report drain vs. stop depending on
            // whether any input asked to stop without draining.
            return drainOnEndOfData ? DataInputStatus.END_OF_DATA : DataInputStatus.STOPPED;
        }

        if (isAnyInputAvailable()) {
            return DataInputStatus.MORE_AVAILABLE;
        } else {
            long selectedNotFinishedInputMask = selectedInputsMask & notFinishedInputsMask;
            if (selectedNotFinishedInputMask == 0) {
                throw new IOException(
                        "Can not make a progress: all selected inputs are already finished");
            }
            return DataInputStatus.NOTHING_AVAILABLE;
        }
    }

    /** Recomputes {@code selectedInputsMask} according to the current operating mode. */
    void nextSelection() {
        switch (operatingMode) {
            case NO_INPUT_SELECTABLE:
            case ALL_DATA_INPUTS_FINISHED:
                selectedInputsMask = InputSelection.ALL.getInputMask();
                break;
            case INPUT_SELECTABLE_PRESENT_NO_DATA_INPUTS_FINISHED:
                selectedInputsMask = inputSelectable.nextSelection().getInputMask();
                break;
            case INPUT_SELECTABLE_PRESENT_SOME_DATA_INPUTS_FINISHED:
                // Keep data-finished inputs selected so their remaining (non-data)
                // events can still be consumed, regardless of the user selection.
                selectedInputsMask =
                        (inputSelectable.nextSelection().getInputMask()
                                        | dataFinishedButNotPartition)
                                & allSelectedMask;
                break;
        }
    }

    /** Fairly picks the next readable input among selected, available, not-finished inputs. */
    int selectNextInputIndex(int lastReadInputIndex) {
        return InputSelection.fairSelectNextIndex(
                selectedInputsMask,
                availableInputsMask & notFinishedInputsMask,
                lastReadInputIndex);
    }

    /** True when some selected input is not currently marked available. */
    boolean shouldSetAvailableForAnotherInput() {
        return (selectedInputsMask & allSelectedMask & ~availableInputsMask) != 0;
    }

    void setAvailableInput(int inputIndex) {
        availableInputsMask = setBitMask(availableInputsMask, inputIndex);
    }

    void setUnavailableInput(int inputIndex) {
        availableInputsMask = unsetBitMask(availableInputsMask, inputIndex);
    }

    boolean isAnyInputAvailable() {
        return (selectedInputsMask & availableInputsMask & notFinishedInputsMask) != 0;
    }

    boolean isInputSelected(int inputIndex) {
        return checkBitMask(selectedInputsMask, inputIndex);
    }

    public boolean isInputFinished(int inputIndex) {
        return !checkBitMask(notFinishedInputsMask, inputIndex);
    }

    public boolean areAllInputsFinished() {
        return notFinishedInputsMask == 0;
    }

    public boolean areAllDataInputsFinished() {
        return this.operatingMode == OperatingMode.ALL_DATA_INPUTS_FINISHED;
    }

    // Bit helpers: bit inputIndex of the mask represents that input.
    long setBitMask(long mask, int inputIndex) {
        return mask | 1L << inputIndex;
    }

    long unsetBitMask(long mask, int inputIndex) {
        return mask & ~(1L << inputIndex);
    }

    boolean checkBitMask(long mask, int inputIndex) {
        return (mask & (1L << inputIndex)) != 0;
    }
}
apache-2.0
NSAmelchev/ignite
modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/GridCacheMixedModeSelfTest.java
2803
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed; import org.apache.ignite.IgniteCache; import org.apache.ignite.cache.CacheMode; import org.apache.ignite.configuration.CacheConfiguration; import org.apache.ignite.configuration.IgniteConfiguration; import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi; import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest; import org.junit.Test; /** * Tests cache puts in mixed mode. * * TODO IGNITE-10345: Remove test in ignite 3.0. */ public class GridCacheMixedModeSelfTest extends GridCommonAbstractTest { /** {@inheritDoc} */ @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception { IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName); ((TcpDiscoverySpi)cfg.getDiscoverySpi()).setForceServerMode(true); cfg.setCacheConfiguration(cacheConfiguration(igniteInstanceName)); return cfg; } /** * @param igniteInstanceName Ignite instance name. * @return Cache configuration. 
*/ private CacheConfiguration cacheConfiguration(String igniteInstanceName) { CacheConfiguration cfg = new CacheConfiguration(DEFAULT_CACHE_NAME); cfg.setCacheMode(CacheMode.PARTITIONED); return cfg; } /** {@inheritDoc} */ @Override protected void beforeTestsStarted() throws Exception { startGrids(3); startClientGrid(3); } /** * @throws Exception If failed. */ @Test public void testBasicOps() throws Exception { IgniteCache<Object, Object> cache = grid(3).cache(DEFAULT_CACHE_NAME); for (int i = 0; i < 1000; i++) cache.put(i, i); for (int i = 0; i < 1000; i++) assertEquals(i, cache.get(i)); for (int i = 0; i < 1000; i++) assertEquals(i, cache.getAndRemove(i)); for (int i = 0; i < 1000; i++) assertNull(cache.get(i)); } }
apache-2.0
lincoln-lil/flink
flink-table/flink-table-runtime/src/main/java/org/apache/flink/table/runtime/operators/join/temporal/TemporalRowTimeJoinOperator.java
18474
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.table.runtime.operators.join.temporal;

import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.api.common.state.MapState;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.Types;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.state.VoidNamespace;
import org.apache.flink.runtime.state.VoidNamespaceSerializer;
import org.apache.flink.streaming.api.operators.InternalTimer;
import org.apache.flink.streaming.api.operators.InternalTimerService;
import org.apache.flink.streaming.api.operators.TimestampedCollector;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.table.data.GenericRowData;
import org.apache.flink.table.data.RowData;
import org.apache.flink.table.data.util.RowDataUtil;
import org.apache.flink.table.data.utils.JoinedRowData;
import org.apache.flink.table.runtime.generated.GeneratedJoinCondition;
import org.apache.flink.table.runtime.generated.JoinCondition;
import org.apache.flink.table.runtime.typeutils.InternalTypeInfo;

import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Optional;
import java.util.TreeMap;

/**
 * The operator for temporal join (FOR SYSTEM_TIME AS OF o.rowtime) on row time, it has no
 * limitation about message types of the left input and right input, this means the operator deals
 * changelog well.
 *
 * <p>For Event-time temporal join, its probe side is a regular table, its build side is a versioned
 * table, the version of versioned table can extract from the build side state. This operator works
 * by keeping on the state collection of probe and build records to process on next watermark. The
 * idea is that between watermarks we are collecting those elements and once we are sure that there
 * will be no updates we emit the correct result and clean up the expired data in state.
 *
 * <p>Cleaning up the state drops all of the "old" values from the probe side, where "old" is
 * defined as older then the current watermark. Build side is also cleaned up in the similar
 * fashion, however we always keep at least one record - the latest one - even if it's past the last
 * watermark.
 *
 * <p>One more trick is how the emitting results and cleaning up is triggered. It is achieved by
 * registering timers for the keys. We could register a timer for every probe and build side
 * element's event time (when watermark exceeds this timer, that's when we are emitting and/or
 * cleaning up the state). However this would cause huge number of registered timers. For example
 * with following evenTimes of probe records accumulated: {1, 2, 5, 8, 9}, if we had received
 * Watermark(10), it would trigger 5 separate timers for the same key. To avoid that we always keep
 * only one single registered timer for any given key, registered for the minimal value. Upon
 * triggering it, we process all records with event times older then or equal to currentWatermark.
 */
public class TemporalRowTimeJoinOperator extends BaseTwoInputStreamOperatorWithStateRetention {

    private static final long serialVersionUID = 6642514795175288193L;

    private static final String NEXT_LEFT_INDEX_STATE_NAME = "next-index";
    private static final String LEFT_STATE_NAME = "left";
    private static final String RIGHT_STATE_NAME = "right";
    private static final String REGISTERED_TIMER_STATE_NAME = "timer";
    private static final String TIMERS_STATE_NAME = "timers";

    private final boolean isLeftOuterJoin;
    private final InternalTypeInfo<RowData> leftType;
    private final InternalTypeInfo<RowData> rightType;
    private final GeneratedJoinCondition generatedJoinCondition;
    // Field positions of the row-time attribute in the left/right rows.
    private final int leftTimeAttribute;
    private final int rightTimeAttribute;

    private final RowtimeComparator rightRowtimeComparator;

    /** Incremental index generator for {@link #leftState}'s keys. */
    private transient ValueState<Long> nextLeftIndex;

    /**
     * Mapping from artificial row index (generated by `nextLeftIndex`) into the left side `Row`. We
     * can not use List to accumulate Rows, because we need efficient deletes of the oldest rows.
     *
     * <p>TODO: this could be OrderedMultiMap[Jlong, Row] indexed by row's timestamp, to avoid full
     * map traversals (if we have lots of rows on the state that exceed `currentWatermark`).
     */
    private transient MapState<Long, RowData> leftState;

    /**
     * Mapping from timestamp to right side `Row`.
     *
     * <p>TODO: having `rightState` as an OrderedMapState would allow us to avoid sorting cost once
     * per watermark
     */
    private transient MapState<Long, RowData> rightState;

    // Long for correct handling of default null
    private transient ValueState<Long> registeredTimer;
    private transient TimestampedCollector<RowData> collector;
    private transient InternalTimerService<VoidNamespace> timerService;

    private transient JoinCondition joinCondition;
    // Reused output row and the all-null right-side padding row for left outer join.
    private transient JoinedRowData outRow;
    private transient GenericRowData rightNullRow;

    public TemporalRowTimeJoinOperator(
            InternalTypeInfo<RowData> leftType,
            InternalTypeInfo<RowData> rightType,
            GeneratedJoinCondition generatedJoinCondition,
            int leftTimeAttribute,
            int rightTimeAttribute,
            long minRetentionTime,
            long maxRetentionTime,
            boolean isLeftOuterJoin) {
        super(minRetentionTime, maxRetentionTime);
        this.leftType = leftType;
        this.rightType = rightType;
        this.generatedJoinCondition = generatedJoinCondition;
        this.leftTimeAttribute = leftTimeAttribute;
        this.rightTimeAttribute = rightTimeAttribute;
        this.rightRowtimeComparator = new RowtimeComparator(rightTimeAttribute);
        this.isLeftOuterJoin = isLeftOuterJoin;
    }

    @Override
    public void open() throws Exception {
        super.open();
        // Instantiate and open the code-generated join condition.
        joinCondition =
                generatedJoinCondition.newInstance(getRuntimeContext().getUserCodeClassLoader());
        joinCondition.setRuntimeContext(getRuntimeContext());
        joinCondition.open(new Configuration());

        nextLeftIndex =
                getRuntimeContext()
                        .getState(
                                new ValueStateDescriptor<>(NEXT_LEFT_INDEX_STATE_NAME, Types.LONG));
        leftState =
                getRuntimeContext()
                        .getMapState(
                                new MapStateDescriptor<>(LEFT_STATE_NAME, Types.LONG, leftType));
        rightState =
                getRuntimeContext()
                        .getMapState(
                                new MapStateDescriptor<>(RIGHT_STATE_NAME, Types.LONG, rightType));
        registeredTimer =
                getRuntimeContext()
                        .getState(
                                new ValueStateDescriptor<>(
                                        REGISTERED_TIMER_STATE_NAME, Types.LONG));

        timerService =
                getInternalTimerService(TIMERS_STATE_NAME, VoidNamespaceSerializer.INSTANCE, this);

        outRow = new JoinedRowData();
        rightNullRow = new GenericRowData(rightType.toRowType().getFieldCount());
        collector = new TimestampedCollector<>(output);
    }

    @Override
    public void processElement1(StreamRecord<RowData> element) throws Exception {
        // Probe (left) side: buffer the row keyed by an ever-increasing index so that
        // arrival order can be restored when emitting.
        RowData row = element.getValue();
        leftState.put(getNextLeftIndex(), row);
        registerSmallestTimer(getLeftTime(row)); // Timer to emit and clean up the state

        registerProcessingCleanupTimer();
    }

    @Override
    public void processElement2(StreamRecord<RowData> element) throws Exception {
        // Build (right/versioned) side: buffer the row keyed by its row time.
        RowData row = element.getValue();

        long rowTime = getRightTime(row);
        rightState.put(rowTime, row);
        registerSmallestTimer(rowTime); // Timer to clean up the state

        registerProcessingCleanupTimer();
    }

    @Override
    public void onEventTime(InternalTimer<Object, VoidNamespace> timer) throws Exception {
        registeredTimer.clear();
        long lastUnprocessedTime = emitResultAndCleanUpState(timerService.currentWatermark());
        // Some probe rows are still newer than the watermark: re-register for them.
        if (lastUnprocessedTime < Long.MAX_VALUE) {
            registerTimer(lastUnprocessedTime);
        }

        // if we have more state at any side, then update the timer, else clean it up.
        if (stateCleaningEnabled) {
            if (lastUnprocessedTime < Long.MAX_VALUE || !rightState.isEmpty()) {
                registerProcessingCleanupTimer();
            } else {
                cleanupLastTimer();
                nextLeftIndex.clear();
            }
        }
    }

    @Override
    public void close() throws Exception {
        if (joinCondition != null) {
            joinCondition.close();
        }
        super.close();
    }

    /**
     * @return a row time of the oldest unprocessed probe record or Long.MaxValue, if all records
     *     have been processed.
     */
    private long emitResultAndCleanUpState(long currentWatermark) throws Exception {
        List<RowData> rightRowsSorted = getRightRowSorted(rightRowtimeComparator);
        long lastUnprocessedTime = Long.MAX_VALUE;

        Iterator<Map.Entry<Long, RowData>> leftIterator = leftState.entries().iterator();
        // the output records' order should keep same with left input records arrival order
        final Map<Long, RowData> orderedLeftRecords = new TreeMap<>();

        while (leftIterator.hasNext()) {
            Map.Entry<Long, RowData> entry = leftIterator.next();
            Long leftSeq = entry.getKey();
            RowData leftRow = entry.getValue();
            long leftTime = getLeftTime(leftRow);
            if (leftTime <= currentWatermark) {
                // Triggered: move into the ordered map and drop from state.
                orderedLeftRecords.put(leftSeq, leftRow);
                leftIterator.remove();
            } else {
                // Not yet triggered: remember the minimum for timer re-registration.
                lastUnprocessedTime = Math.min(lastUnprocessedTime, leftTime);
            }
        }

        // iterate the triggered left records in the ascending order of the sequence key, i.e. the
        // arrival order.
        orderedLeftRecords.forEach(
                (leftSeq, leftRow) -> {
                    long leftTime = getLeftTime(leftRow);
                    Optional<RowData> rightRow = latestRightRowToJoin(rightRowsSorted, leftTime);
                    if (rightRow.isPresent() && RowDataUtil.isAccumulateMsg(rightRow.get())) {
                        if (joinCondition.apply(leftRow, rightRow.get())) {
                            collectJoinedRow(leftRow, rightRow.get());
                        } else {
                            if (isLeftOuterJoin) {
                                collectJoinedRow(leftRow, rightNullRow);
                            }
                        }
                    } else {
                        // No matching version (or a retraction): pad with nulls for outer join.
                        if (isLeftOuterJoin) {
                            collectJoinedRow(leftRow, rightNullRow);
                        }
                    }
                });
        orderedLeftRecords.clear();

        cleanupExpiredVersionInState(currentWatermark, rightRowsSorted);
        return lastUnprocessedTime;
    }

    private void collectJoinedRow(RowData leftSideRow, RowData rightRow) {
        // The output row kind follows the probe-side row kind.
        outRow.setRowKind(leftSideRow.getRowKind());
        outRow.replace(leftSideRow, rightRow);
        collector.collect(outRow);
    }

    /**
     * Removes all expired version in the versioned table's state according to current watermark.
     */
    private void cleanupExpiredVersionInState(long currentWatermark, List<RowData> rightRowsSorted)
            throws Exception {
        int i = 0;
        int indexToKeep = firstIndexToKeep(currentWatermark, rightRowsSorted);
        // clean old version data that behind current watermark
        while (i < indexToKeep) {
            long rightTime = getRightTime(rightRowsSorted.get(i));
            rightState.remove(rightTime);
            i += 1;
        }
    }

    /**
     * The method to be called when a cleanup timer fires.
     *
     * @param time The timestamp of the fired timer.
     */
    @Override
    public void cleanupState(long time) {
        leftState.clear();
        rightState.clear();
        nextLeftIndex.clear();
        registeredTimer.clear();
    }

    private int firstIndexToKeep(long timerTimestamp, List<RowData> rightRowsSorted) {
        int firstIndexNewerThenTimer =
                indexOfFirstElementNewerThanTimer(timerTimestamp, rightRowsSorted);

        if (firstIndexNewerThenTimer < 0) {
            // Everything is older than the timer: keep only the latest version.
            return rightRowsSorted.size() - 1;
        } else {
            return firstIndexNewerThenTimer - 1;
        }
    }

    private int indexOfFirstElementNewerThanTimer(long timerTimestamp, List<RowData> list) {
        ListIterator<RowData> iter = list.listIterator();
        while (iter.hasNext()) {
            if (getRightTime(iter.next()) > timerTimestamp) {
                return iter.previousIndex();
            }
        }
        return -1;
    }

    /**
     * Binary search {@code rightRowsSorted} to find the latest right row to join with {@code
     * leftTime}. Latest means a right row with largest time that is still smaller or equal to
     * {@code leftTime}. For example with: rightState = [1(+I), 4(+U), 7(+U), 9(-D), 12(I)],
     *
     * <p>If left time is 6, the valid period should be [4, 7), data 4(+U) should be joined.
     *
     * <p>If left time is 10, the valid period should be [9, 12), but data 9(-D) is a DELETE message
     * which means the correspond version has no data in period [9, 12), data 9(-D) should not be
     * correlated.
     *
     * @return found element or {@code Optional.empty} If such row was not found (either {@code
     *     rightRowsSorted} is empty or all {@code rightRowsSorted} are are newer).
     */
    private Optional<RowData> latestRightRowToJoin(List<RowData> rightRowsSorted, long leftTime) {
        return latestRightRowToJoin(rightRowsSorted, 0, rightRowsSorted.size() - 1, leftTime);
    }

    private Optional<RowData> latestRightRowToJoin(
            List<RowData> rightRowsSorted, int low, int high, long leftTime) {
        if (low > high) {
            // exact value not found, we are returning largest from the values smaller then leftTime
            if (low - 1 < 0) {
                return Optional.empty();
            } else {
                return Optional.of(rightRowsSorted.get(low - 1));
            }
        } else {
            int mid = (low + high) >>> 1;
            RowData midRow = rightRowsSorted.get(mid);
            long midTime = getRightTime(midRow);
            int cmp = Long.compare(midTime, leftTime);
            if (cmp < 0) {
                return latestRightRowToJoin(rightRowsSorted, mid + 1, high, leftTime);
            } else if (cmp > 0) {
                return latestRightRowToJoin(rightRowsSorted, low, mid - 1, leftTime);
            } else {
                return Optional.of(midRow);
            }
        }
    }

    // Keeps at most one event-time timer registered per key, for the smallest pending time.
    private void registerSmallestTimer(long timestamp) throws IOException {
        Long currentRegisteredTimer = registeredTimer.value();
        if (currentRegisteredTimer == null) {
            registerTimer(timestamp);
        } else if (currentRegisteredTimer > timestamp) {
            timerService.deleteEventTimeTimer(VoidNamespace.INSTANCE, currentRegisteredTimer);
            registerTimer(timestamp);
        }
    }

    private void registerTimer(long timestamp) throws IOException {
        registeredTimer.update(timestamp);
        timerService.registerEventTimeTimer(VoidNamespace.INSTANCE, timestamp);
    }

    private List<RowData> getRightRowSorted(RowtimeComparator rowtimeComparator) throws Exception {
        List<RowData> rightRows = new ArrayList<>();
        for (RowData row : rightState.values()) {
            rightRows.add(row);
        }
        rightRows.sort(rowtimeComparator);
        return rightRows;
    }

    // Returns the current index and advances the counter (starts at 0 on first use).
    private long getNextLeftIndex() throws IOException {
        Long index = nextLeftIndex.value();
        if (index == null) {
            index = 0L;
        }
        nextLeftIndex.update(index + 1);
        return index;
    }

    private long getLeftTime(RowData leftRow) {
        return leftRow.getLong(leftTimeAttribute);
    }

    private long getRightTime(RowData rightRow) {
        return rightRow.getLong(rightTimeAttribute);
    }

    // ------------------------------------------------------------------------------------------

    /** Orders rows by their row-time attribute (ascending). */
    private static class RowtimeComparator implements Comparator<RowData>, Serializable {

        private static final long serialVersionUID = 8160134014590716914L;

        private final int timeAttribute;

        private RowtimeComparator(int timeAttribute) {
            this.timeAttribute = timeAttribute;
        }

        @Override
        public int compare(RowData o1, RowData o2) {
            long o1Time = o1.getLong(timeAttribute);
            long o2Time = o2.getLong(timeAttribute);
            return Long.compare(o1Time, o2Time);
        }
    }

    @VisibleForTesting
    static String getNextLeftIndexStateName() {
        return NEXT_LEFT_INDEX_STATE_NAME;
    }

    @VisibleForTesting
    static String getRegisteredTimerStateName() {
        return REGISTERED_TIMER_STATE_NAME;
    }
}
apache-2.0
google/mr4c
java/src/java/com/google/mr4c/sources/DataFileSink.java
1237
/** * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.mr4c.sources; import java.io.File; import java.io.InputStream; import java.io.IOException; import java.io.OutputStream; public interface DataFileSink { OutputStream getFileOutputStream() throws IOException; void writeFile(byte[] bytes) throws IOException; void writeFile(InputStream input) throws IOException; /** * Returns the name of the file in a file system. If there is no actual file system, may return null */ String getFileName(); /** * Returns the file on local disk, or null if not on a local disk */ File getLocalFile(); String getDescription(); }
apache-2.0
bowenli86/flink
flink-runtime/src/main/java/org/apache/flink/runtime/checkpoint/StandaloneCheckpointIDCounter.java
1893
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.runtime.checkpoint;

import org.apache.flink.api.common.JobStatus;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;

import java.util.concurrent.atomic.AtomicLong;

/**
 * {@link CheckpointIDCounter} instances for JobManagers running in {@link HighAvailabilityMode#NONE}.
 *
 * <p>Simple wrapper around an {@link AtomicLong}.
 */
public class StandaloneCheckpointIDCounter implements CheckpointIDCounter {

	/** Holds the next checkpoint ID to hand out; checkpoint IDs start at 1. */
	private final AtomicLong counter = new AtomicLong(1);

	@Override
	public void start() throws Exception {
		// Nothing to start in standalone mode.
	}

	@Override
	public void shutdown(JobStatus jobStatus) throws Exception {
		// Nothing to shut down in standalone mode.
	}

	@Override
	public long getAndIncrement() throws Exception {
		return counter.getAndIncrement();
	}

	@Override
	public long get() {
		return counter.get();
	}

	@Override
	public void setCount(long newCount) {
		counter.set(newCount);
	}

	/**
	 * Returns the last checkpoint ID (current - 1).
	 *
	 * @return Last checkpoint ID.
	 */
	public long getLast() {
		return counter.get() - 1;
	}
}
apache-2.0
ThiagoGarciaAlves/intellij-community
plugins/eclipse/jps-plugin/src/org/jetbrains/jps/eclipse/model/JpsIdeaSpecificSettings.java
6924
/*
 * Copyright 2000-2016 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jetbrains.jps.eclipse.model;

import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.util.ArrayUtil;
import org.jdom.Element;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.idea.eclipse.IdeaXml;
import org.jetbrains.idea.eclipse.conversion.AbstractIdeaSpecificSettings;
import org.jetbrains.jps.model.JpsElement;
import org.jetbrains.jps.model.java.*;
import org.jetbrains.jps.model.library.sdk.JpsSdkType;
import org.jetbrains.jps.model.module.JpsDependenciesList;
import org.jetbrains.jps.model.module.JpsModule;
import org.jetbrains.jps.model.module.JpsModuleSourceRoot;
import org.jetbrains.jps.model.serialization.JpsMacroExpander;
import org.jetbrains.jps.model.serialization.java.JpsJavaModelSerializerExtension;
import org.jetbrains.jps.model.serialization.library.JpsSdkTableSerializer;

import java.io.File;
import java.util.Map;

/**
 * Reads IDEA-specific module settings (SDK, compiler outputs, language level, source/exclude
 * roots) from the supplementary ".eml" XML and applies them to a JPS module model.
 */
class JpsIdeaSpecificSettings extends AbstractIdeaSpecificSettings<JpsModule, String, JpsSdkType<?>> {
  // Expands path macros (e.g. $MODULE_DIR$) inside the settings XML; never reassigned.
  private final JpsMacroExpander myExpander;

  JpsIdeaSpecificSettings(JpsMacroExpander expander) {
    myExpander = expander;
  }

  @Override
  protected void readLibraryLevels(Element root, @NotNull Map<String, String> levels) {
    final Element levelsElement = root.getChild("levels");
    if (levelsElement != null) {
      for (Element element : levelsElement.getChildren("level")) {
        String libName = element.getAttributeValue("name");
        String libLevel = element.getAttributeValue("value");
        if (libName != null && libLevel != null) {
          levels.put(libName, libLevel);
        }
      }
    }
  }

  @Override
  protected String[] getEntries(JpsModule model) {
    return ArrayUtil.toStringArray(model.getContentRootsList().getUrls());
  }

  @Override
  protected String createContentEntry(JpsModule model, String url) {
    model.getContentRootsList().addUrl(url);
    return url;
  }

  @Override
  protected void setupLibraryRoots(Element root, JpsModule model) {}

  /**
   * Configures the module's SDK dependency: either inherits the project SDK or resolves a
   * module-specific SDK by name and type.
   */
  @Override
  protected void setupJdk(Element root, JpsModule model, @Nullable JpsSdkType<?> projectSdkType) {
    final String inheritJdk = root.getAttributeValue("inheritJdk");
    final JpsDependenciesList dependenciesList = model.getDependenciesList();
    if (inheritJdk != null && Boolean.parseBoolean(inheritJdk)) {
      dependenciesList.addSdkDependency(projectSdkType != null ? projectSdkType : JpsJavaSdkType.INSTANCE);
    }
    else {
      final String jdkName = root.getAttributeValue("jdk");
      if (jdkName != null) {
        String jdkType = root.getAttributeValue("jdk_type");
        JpsSdkType<?> sdkType = null;
        if (jdkType != null) {
          sdkType = JpsSdkTableSerializer.getSdkType(jdkType);
        }
        // Fall back to the plain Java SDK when the type is missing or unknown.
        if (sdkType == null) {
          sdkType = JpsJavaSdkType.INSTANCE;
        }
        dependenciesList.addSdkDependency(sdkType);
        JpsSdkTableSerializer.setSdkReference(model.getSdkReferencesTable(), jdkName, sdkType);
        // A wrapper SDK (e.g. a mobile SDK delegating to a JDK) also needs the Java SDK dependency.
        if (sdkType instanceof JpsJavaSdkTypeWrapper) {
          dependenciesList.addSdkDependency(JpsJavaSdkType.INSTANCE);
        }
      }
    }
  }

  @Override
  protected void setupCompilerOutputs(Element root, JpsModule model) {
    final JpsJavaModuleExtension extension = getService().getOrCreateModuleExtension(model);
    final Element testOutputElement = root.getChild(IdeaXml.OUTPUT_TEST_TAG);
    if (testOutputElement != null) {
      extension.setTestOutputUrl(testOutputElement.getAttributeValue(IdeaXml.URL_ATTR));
    }

    final String inheritedOutput = root.getAttributeValue(JpsJavaModelSerializerExtension.INHERIT_COMPILER_OUTPUT_ATTRIBUTE);
    if (inheritedOutput != null && Boolean.parseBoolean(inheritedOutput)) {
      extension.setInheritOutput(true);
    }
    extension.setExcludeOutput(root.getChild(IdeaXml.EXCLUDE_OUTPUT_TAG) != null);
  }

  @Override
  protected void readLanguageLevel(Element root, JpsModule model) {
    final String languageLevel = root.getAttributeValue("LANGUAGE_LEVEL");
    final JpsJavaModuleExtension extension = getService().getOrCreateModuleExtension(model);
    if (languageLevel != null) {
      extension.setLanguageLevel(LanguageLevel.valueOf(languageLevel));
    }
  }

  @Override
  protected void expandElement(Element root, JpsModule model) {
    myExpander.substitute(root, SystemInfo.isFileSystemCaseSensitive);
  }

  @Override
  protected void overrideModulesScopes(Element root, JpsModule model) {}

  /**
   * Applies test-folder, exclude-folder and package-prefix markers from the settings XML to the
   * source roots under {@code contentUrl}.
   */
  @Override
  public void readContentEntry(Element root, String contentUrl, JpsModule model) {
    // Consistently iterate getChildren() as List<Element> (as readLibraryLevels already does)
    // instead of raw Object loops with casts.
    for (Element testFolder : root.getChildren(IdeaXml.TEST_FOLDER_TAG)) {
      final String url = testFolder.getAttributeValue(IdeaXml.URL_ATTR);
      JpsModuleSourceRoot folderToBeTest = null;
      for (JpsModuleSourceRoot folder : model.getSourceRoots()) {
        if (Comparing.strEqual(folder.getUrl(), url)) {
          folderToBeTest = folder;
          break;
        }
      }
      // Re-register an existing production source root as a test source root.
      if (folderToBeTest != null) {
        model.removeSourceRoot(folderToBeTest.getUrl(), JavaSourceRootType.SOURCE);
      }
      model.addSourceRoot(url, JavaSourceRootType.TEST_SOURCE);
    }

    for (Element excludeFolder : root.getChildren(IdeaXml.EXCLUDE_FOLDER_TAG)) {
      final String excludeUrl = excludeFolder.getAttributeValue(IdeaXml.URL_ATTR);
      // Only accept exclude roots that actually live under this content root.
      if (FileUtil.isAncestor(new File(contentUrl), new File(excludeUrl), false)) {
        model.getExcludeRootsList().addUrl(excludeUrl);
      }
    }

    for (Element ppElement : root.getChildren(IdeaXml.PACKAGE_PREFIX_TAG)) {
      final String prefix = ppElement.getAttributeValue(IdeaXml.PACKAGE_PREFIX_VALUE_ATTR);
      final String url = ppElement.getAttributeValue(IdeaXml.URL_ATTR);
      for (JpsModuleSourceRoot sourceRoot : model.getSourceRoots()) {
        if (Comparing.strEqual(sourceRoot.getUrl(), url)) {
          JpsElement properties = sourceRoot.getProperties();
          if (properties instanceof JavaSourceRootProperties) {
            ((JavaSourceRootProperties)properties).setPackagePrefix(prefix);
          }
          break;
        }
      }
    }
  }

  private static JpsJavaExtensionService getService() {
    return JpsJavaExtensionService.getInstance();
  }
}
apache-2.0
zqq90/webit-editor
src/main/java/jsyntaxpane/actions/PythonIndentAction.java
2122
/*
 * Copyright 2009 Stefan Bechtold (stefan.bechtold@googlemail.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License
 * at http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package jsyntaxpane.actions;

import java.awt.event.ActionEvent;
import javax.swing.text.JTextComponent;
import javax.swing.text.TextAction;
import jsyntaxpane.SyntaxDocument;
import jsyntaxpane.util.Configuration;

/**
 * Inserts a newline plus Python-style indentation: the new line keeps the
 * current line's indent and is indented one extra tab stop when the text
 * before the caret (ignoring end-of-line comments) ends with a colon.
 */
public class PythonIndentAction extends DefaultSyntaxAction {

    /**
     * creates new PythonIndentAction.
     */
    public PythonIndentAction() {
        super("PYTHONINDENT");
    }

    /**
     * {@inheritDoc}
     * @param e
     */
    @Override
    public void actionPerformed(ActionEvent e) {
        JTextComponent editor = getTextComponent(e);
        if (editor == null) {
            return;
        }
        SyntaxDocument doc = ActionUtils.getSyntaxDocument(editor);
        int caretPos = editor.getCaretPosition();
        int lineStart = doc.getParagraphElement(caretPos).getStartOffset();
        String line = ActionUtils.getLine(editor);
        String beforeCaret = line.substring(0, caretPos - lineStart);
        String indent = ActionUtils.getIndent(line);
        int tabSize = ActionUtils.getTabSize(editor);
        // Does the text before the caret end with ':'? Check the raw text first,
        // then the text with end-of-line comments stripped.
        boolean opensBlock = beforeCaret.trim().endsWith(":");
        if (!opensBlock) {
            opensBlock = doc.getUncommentedText(lineStart, caretPos).trim().endsWith(":");
        }
        if (opensBlock) {
            indent += ActionUtils.SPACES.substring(0, tabSize);
        }
        editor.replaceSelection("\n" + indent);
    }
}
bsd-3-clause
krady/rain-workload-toolkit
src/radlab/rain/communication/RainPipe.java
4003
package radlab.rain.communication;

import java.io.IOException;
import java.net.ServerSocket;
import java.util.LinkedList;

/*
 * Singleton conduit into Rain driver from the outside world.
 * Listens on a socket and waits for commands from a controller
 */
public class RainPipe
{
	public static int DEFAULT_PORT        = 7851;
	public static int DEFAULT_NUM_THREADS = 3;

	private static Object _instLock = new Object();
	// volatile is required for double-checked locking to be safe under the Java
	// Memory Model: without it a thread could observe a non-null reference to a
	// not-yet-fully-constructed RainPipe.
	private static volatile RainPipe _instance = null;

	private int _port = RainPipe.DEFAULT_PORT; // Default port
	private ServerSocket _sck = null;
	private LinkedList<LFThread> _workers = new LinkedList<LFThread>();
	private long _numThreads = RainPipe.DEFAULT_NUM_THREADS;
	private boolean _threadpoolActive = false;

	/**
	 * Returns the process-wide RainPipe instance, creating it on first use.
	 * Uses double-checked locking (safe because _instance is volatile) to avoid
	 * taking the lock after initialization.
	 */
	public static RainPipe getInstance()
	{
		if( _instance == null )
		{
			synchronized( _instLock )
			{
				if( _instance == null )
					_instance = new RainPipe();
			}
		}
		return _instance;
	}

	private RainPipe() {}

	public int getPort() { return this._port; }
	public void setPort( int val ) { this._port = val; }
	public long getNumThreads() { return this._numThreads; }
	public void setNumThreads( long val ) { this._numThreads = val; }

	/**
	 * Prints per-worker and aggregate message statistics to stdout.
	 * Iterates the worker list itself (not 0.._numThreads) so a call to
	 * setNumThreads() after start() cannot cause an IndexOutOfBoundsException
	 * or skip workers.
	 */
	public void printThreadStats()
	{
		int leaders = 0;
		int followers = 0;
		int busy = 0;

		for( LFThread p : this._workers )
		{
			if( p.getLFThreadState() == LFThread.ThreadState.Leading )
				System.out.println( "[Comm Threadpool stats] " + p.getName() + " " + p.getMessagesProcessed() + " (leader)" );
			else
				System.out.println( "[Comm Threadpool stats] " + p.getName() + " " + p.getMessagesProcessed() );

			switch( p.getLFThreadState() )
			{
				case Busy: busy++; break;
				case Leading: leaders++; break;
				case Following: followers++; break;
			}
		}

		System.out.println( "[Comm Threadpool stats] Leaders: " + leaders + " Busy: " + busy + " Followers: " + followers );
		System.out.println( "[Comm Threadpool stats] Total Messages received : " + LFThread.messagesReceived );
		System.out.println( "[Comm Threadpool stats] Total Messages processed: " + LFThread.messagesProccessed );
		System.out.println( "[Comm Threadpool stats] Total Messages ignored : " + (LFThread.messagesReceived - LFThread.messagesProccessed) );
		System.out.println( " " );
	}

	/**
	 * Opens the server socket on the configured port and starts the worker
	 * thread pool that accepts client connections and reads messages.
	 *
	 * @throws IOException if the server socket cannot be opened
	 */
	public void start() throws IOException
	{
		// Create a new server socket for the pipe
		this._sck = new ServerSocket( this._port );
		// Now that the socket is connected, let the threads take waiting on
		// client connections and reading messages
		this.initializeThreadPool( this._sck );
	}

	// Creates and starts _numThreads workers; no-op if the pool is already up.
	private void initializeThreadPool( ServerSocket sck )
	{
		if( this._threadpoolActive )
			return;

		for( int i = 0; i < this._numThreads; i++ )
		{
			LFThread p = new LFThread( sck );
			p.setName( "Worker-" + Integer.toString(i) );
			p.start();
			this._workers.add( p );
		}
		this._threadpoolActive = true;
	}

	// Signals every created worker to finish and joins each (5s timeout).
	// Walks the worker list itself so every thread actually started is stopped,
	// even if _numThreads was modified after initialization.
	private void shutdownThreadPool()
	{
		if( !this._threadpoolActive )
			return;

		for( LFThread p : this._workers )
		{
			p.setDone( true );
			try
			{
				p.interrupt();
				p.join( 5000 );
			}
			catch( Exception e )
			{
				// Best-effort shutdown: keep stopping the remaining workers.
			}
		}
		this._threadpoolActive = false;
	}

	public boolean stop() { return this.disconnect(); }

	/**
	 * Closes the server socket (if open) and shuts down the worker pool.
	 * Always returns true; socket-close failures are intentionally ignored.
	 */
	public boolean disconnect()
	{
		// Close the socket
		if( this._sck != null )
		{
			try
			{
				if( !this._sck.isClosed() )
					this._sck.close();
			}
			catch( Exception e )
			{
				// Best-effort close; nothing useful to do on failure here.
			}
		}

		// Drain/shutdown threadpool
		this.shutdownThreadPool();
		return true;
	}
}
bsd-3-clause
TSavo/XChange
xchange-livecoin/src/main/java/org/knowm/xchange/livecoin/dto/marketdata/LivecoinTrade.java
1212
package org.knowm.xchange.livecoin.dto.marketdata;

import java.math.BigDecimal;

import com.fasterxml.jackson.annotation.JsonProperty;

/**
 * Immutable value object for one public trade reported by the Livecoin
 * market-data API.  Instances are built by Jackson from the JSON response.
 */
public class LivecoinTrade {

  // Trade timestamp as reported by the exchange.
  private final Long time;
  // Exchange-assigned trade identifier.
  private final Long id;
  private final BigDecimal price;
  private final BigDecimal quantity;
  // Trade side string as sent by the API (presumably "BUY"/"SELL" — TODO confirm).
  private final String type;

  public LivecoinTrade(
      @JsonProperty("time") Long time,
      @JsonProperty("id") Long id,
      @JsonProperty("price") BigDecimal price,
      @JsonProperty("quantity") BigDecimal quantity,
      @JsonProperty("type") String type) {
    this.time = time;
    this.id = id;
    this.price = price;
    this.quantity = quantity;
    this.type = type;
  }

  public Long getTime() {
    return time;
  }

  public Long getId() {
    return id;
  }

  public BigDecimal getPrice() {
    return price;
  }

  public BigDecimal getQuantity() {
    return quantity;
  }

  public String getType() {
    return type;
  }

  @Override
  public String toString() {
    // %s renders each field via toString(), so the output is byte-identical
    // to the original string concatenation (including "null" for null fields).
    return String.format(
        "LivecoinTrade [time=%s, id=%s, price=%s, quantity=%s, type=%s]",
        time, id, price, quantity, type);
  }
}
mit
vkolotov/smarthome
extensions/binding/org.eclipse.smarthome.binding.lifx/src/main/java/org/eclipse/smarthome/binding/lifx/internal/protocol/GetVersionRequest.java
1240
/**
 * Copyright (c) 2014-2017 by the respective copyright holders.
 * All rights reserved. This program and the accompanying materials
 * are made available under the terms of the Eclipse Public License v1.0
 * which accompanies this distribution, and is available at
 * http://www.eclipse.org/legal/epl-v10.html
 */
package org.eclipse.smarthome.binding.lifx.internal.protocol;

import java.nio.ByteBuffer;

/**
 * LIFX "GetVersion" request packet (type 0x20).  It carries no payload; the
 * device is expected to answer with a {@link StateVersionResponse}.
 *
 * @author Tim Buckley - Initial Contribution
 * @author Karel Goderis - Enhancement for the V2 LIFX Firmware and LAN Protocol Specification
 */
public class GetVersionRequest extends Packet {

    public static final int TYPE = 0x20;

    public GetVersionRequest() {
        // Unicast request: untagged, addressable, and a response is required.
        setTagged(false);
        setAddressable(true);
        setResponseRequired(true);
    }

    @Override
    public int packetType() {
        return TYPE;
    }

    @Override
    public int[] expectedResponses() {
        return new int[] { StateVersionResponse.TYPE };
    }

    @Override
    protected int packetLength() {
        // The payload is empty.
        return 0;
    }

    @Override
    protected void parsePacket(ByteBuffer bytes) {
        // Nothing to parse: this packet has no payload.
    }

    @Override
    protected ByteBuffer packetBytes() {
        return ByteBuffer.allocate(0);
    }
}
epl-1.0
md-5/jdk10
test/hotspot/jtreg/vmTestbase/vm/share/CommentedFileReader.java
3808
/*
 * Copyright (c) 2014, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package vm.share;

import java.io.*;
import java.util.LinkedList;

/**
 * Utility class intended to read file line by line and skip comments.
 */
public class CommentedFileReader {

    /**
     * Type of comments that should be removed from file.
     */
    public static enum CommentStyle {
        /**
         * Comments started with <i>#</i>.
         */
        BASH,
        /**
         * Comments started with <i>//</i>.
         */
        JAVA
    }

    /**
     * Get lines from specified file and filter out comments.
     * Only comments in BASH style will be filtered out.
     *
     * @param path to file that should be readed
     * @return filtered lines from file
     */
    public static String[] readFile(String path) throws IOException {
        return readFile(new File(path), CommentStyle.BASH);
    }

    /**
     * Get lines from specified file and filter out comments.
     * Only comments in BASH style will be filtered out.
     *
     * @param file that should be readed
     * @return filtered lines from file
     */
    public static String[] readFile(File file) throws IOException {
        return readFile(file, CommentStyle.BASH);
    }

    /**
     * Get lines from specified file without comments.
     *
     * @param path to file that should be readed
     * @param commentStyle describes what strings will be treated as comments
     * @return filtered lines from file
     */
    public static String[] readFile(String path, CommentStyle commentStyle) throws IOException {
        return readFile(new File(path), commentStyle);
    }

    /**
     * Get lines from specified file without comments.
     * Comments (from the style's marker to end of line) are stripped, lines are
     * trimmed, and lines that are empty after stripping are dropped.
     *
     * @param file that should be readed
     * @param commentStyle describes what strings will be treated as comments
     * @return filtered lines from file
     */
    public static String[] readFile(File file, CommentStyle commentStyle) throws IOException {
        String commentBeginning;
        switch (commentStyle) {
            case BASH:
                commentBeginning = "#";
                break;
            case JAVA:
                commentBeginning = "//";
                break;
            default:
                throw new IllegalArgumentException("Unknown comment style");
        }

        LinkedList<String> entries = new LinkedList<String>();
        // try-with-resources: the original never closed the reader, leaking the
        // file handle (always, and especially if readLine() threw).
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
            String entry;
            while ((entry = reader.readLine()) != null) {
                // Strip everything from the comment marker to end of line.
                // Both markers ("#" and "//") are regex-safe literals.
                entry = entry.replaceAll(commentBeginning + ".*", "").trim();
                if (entry.length() > 0) {
                    entries.add(entry);
                }
            }
        }
        return entries.toArray(new String[entries.size()]);
    }
}
gpl-2.0
md-5/jdk10
test/hotspot/jtreg/vmTestbase/nsk/jdi/TypeComponent/isProtected/isprotected001a.java
4383
/*
 * Copyright (c) 2002, 2018, Oracle and/or its affiliates. All rights reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
 * or visit www.oracle.com if you need additional information or have any
 * questions.
 */

package nsk.jdi.TypeComponent.isProtected;

import nsk.share.*;
import nsk.share.jpda.*;
import nsk.share.jdi.*;

// Debuggee half of the JDI TypeComponent.isProtected() test.  The field
// declarations below are the test fixture: the debugger side presumably
// inspects them via JDI and expects isProtected() to be true exactly for
// the members declared `protected` (the *P*-named fields) — field names and
// modifiers are therefore part of the test contract and must not change.
public class isprotected001a {
    // Primitive types: scalar, 1-D array and 2-D array variants,
    // with default (package-private) access...
    boolean z0, z1[] = {z0}, z2[][] = {z1};
    byte    b0, b1[] = {b0}, b2[][] = {b1};
    char    c0, c1[] = {c0}, c2[][] = {c1};
    double  d0, d1[] = {d0}, d2[][] = {d1};
    float   f0, f1[] = {f0}, f2[][] = {f1};
    int     i0, i1[] = {i0}, i2[][] = {i1};
    long    l0, l1[] = {l0}, l2[][] = {l1};
    short   r0, r1[] = {r0}, r2[][] = {r1};

    // ...and the same primitive shapes with the protected modifier.
    protected boolean zP0, zP1[] = {zP0}, zP2[][] = {zP1};
    protected byte    bP0, bP1[] = {bP0}, bP2[][] = {bP1};
    protected char    cP0, cP1[] = {cP0}, cP2[][] = {cP1};
    protected double  dP0, dP1[] = {dP0}, dP2[][] = {dP1};
    protected float   fP0, fP1[] = {fP0}, fP2[][] = {fP1};
    protected int     iP0, iP1[] = {iP0}, iP2[][] = {iP1};
    protected long    lP0, lP1[] = {lP0}, lP2[][] = {lP1};
    protected short   rP0, rP1[] = {rP0}, rP2[][] = {rP1};

    // Boxed wrapper types, package-private...
    Boolean   Z0, Z1[] = {Z0}, Z2[][] = {Z1};
    Byte      B0, B1[] = {B0}, B2[][] = {B1};
    Character C0, C1[] = {C0}, C2[][] = {C1};
    Double    D0, D1[] = {D0}, D2[][] = {D1};
    Float     F0, F1[] = {F0}, F2[][] = {F1};
    Integer   I0, I1[] = {I0}, I2[][] = {I1};
    Long      L0, L1[] = {L0}, L2[][] = {L1};
    Short     R0, R1[] = {R0}, R2[][] = {R1};

    // ...and protected boxed wrapper types.
    protected Boolean   ZP0, ZP1[] = {ZP0}, ZP2[][] = {ZP1};
    protected Byte      BP0, BP1[] = {BP0}, BP2[][] = {BP1};
    protected Character CP0, CP1[] = {CP0}, CP2[][] = {CP1};
    protected Double    DP0, DP1[] = {DP0}, DP2[][] = {DP1};
    protected Float     FP0, FP1[] = {FP0}, FP2[][] = {FP1};
    protected Integer   IP0, IP1[] = {IP0}, IP2[][] = {IP1};
    protected Long      LP0, LP1[] = {LP0}, LP2[][] = {LP1};
    protected Short     RP0, RP1[] = {RP0}, RP2[][] = {RP1};

    // Reference types with default and protected access.
    String s0, s1[] = {s0}, s2[][] = {s1};
    Object o0, o1[] = {o0}, o2[][] = {o1};
    protected String S0, S1[] = {S0}, S2[][] = {S1};
    protected Object O0, O1[] = {O0}, O2[][] = {O1};

    // Class type P and interface type M covering all four access levels.
    private   P u0, u1[] = {u0}, u2[][] = {u1};
    protected P v0, v1[] = {v0}, v2[][] = {v1};
    public    P w0, w1[] = {w0}, w2[][] = {w1};
    P p0, p1[] = {p0}, p2[][] = {p1}; // package private
    private   M h0, h1[] = {h0}, h2[][] = {h1};
    protected M j0, j1[] = {j0}, j2[][] = {j1};
    public    M k0, k1[] = {k0}, k2[][] = {k1};
    M m0, m1[] = {m0}, m2[][] = {m1}; // package private

    // Standard nsk debuggee handshake: announce readiness over the IO pipe,
    // then wait for the debugger's "quit" signal and exit with the agreed
    // status code.  (Typos in the log strings are preserved: they are runtime
    // output, not comments.)
    public static void main (String argv[]) {
        ArgumentHandler argHandler = new ArgumentHandler(argv);
        Log log = argHandler.createDebugeeLog();
        IOPipe pipe = argHandler.createDebugeeIOPipe(log);

        // Instantiated so the fields above exist in the debuggee VM.
        isprotected001a isprotected001a_ = new isprotected001a();

        log.display(" debuggee started.");
        pipe.println("ready");

        String instruction = pipe.readln();
        if (instruction.equals("quit")) {
            log.display("debuggee > \"quit\" signal recieved.");
            log.display("debuggee > completed succesfully.");
            System.exit(Consts.TEST_PASSED + Consts.JCK_STATUS_BASE);
        }
        // Any other instruction is a protocol violation: fail the test.
        log.complain("debuggee > unexpected signal (not \"quit\") - " + instruction);
        log.complain("debuggee > TEST FAILED");
        System.exit(Consts.TEST_FAILED + Consts.JCK_STATUS_BASE);
    }
}

// Helper class and interface used as field types above.
class P {}
interface M {}
gpl-2.0
greenlion/mysql-server
storage/ndb/clusterj/clusterj-core/src/main/java/com/mysql/clusterj/core/query/LikePredicateImpl.java
2222
/* Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License, version 2.0, as published by the Free Software Foundation. This program is also distributed with certain software (including but not limited to OpenSSL) that is licensed under separate terms, as designated in a particular file or component or in included license documentation. The authors of MySQL hereby grant you an additional permission to link the program and your derivative works with the separately licensed software that they have included with MySQL. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License, version 2.0, for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ package com.mysql.clusterj.core.query; import com.mysql.clusterj.core.spi.QueryExecutionContext; import com.mysql.clusterj.core.store.ScanFilter; import com.mysql.clusterj.core.store.ScanOperation; public class LikePredicateImpl extends ComparativePredicateImpl { public LikePredicateImpl(QueryDomainTypeImpl<?> dobj, PropertyImpl property, ParameterImpl param) { super(dobj, property, param); } @Override public void markBoundsForCandidateIndices(QueryExecutionContext context, CandidateIndexImpl[] candidateIndices) { // like does not support index bounds } /** Set the condition into the filter. 
* @param context the query execution context with the parameter values * @param op the operation * @param filter the filter */ @Override public void filterCmpValue(QueryExecutionContext context, ScanOperation op, ScanFilter filter) { property.filterCmpValue(param.getParameterValue(context), ScanFilter.BinaryCondition.COND_LIKE, filter); } }
gpl-2.0
AdalbertoJoseToledoEscalona/catalogo_productos_old_version
pantallaMT v0.95/MT4j/examples/advanced/mtShell/StartMTShell.java
351
package advanced.mtShell;

import org.mt4j.MTApplication;

/**
 * Entry point for the multi-touch shell example: boots the MT4j application
 * framework and registers the shell scene on startup.
 */
public class StartMTShell extends MTApplication {

    private static final long serialVersionUID = 1L;

    /** Launches the MT4j application (which later calls {@link #startUp()}). */
    public static void main(String[] args) {
        initialize();
    }

    /** Framework callback: installs the single scene shown by this example. */
    @Override
    public void startUp() {
        addScene(new MTShellScene(this, "Multi-Touch Shell Scene"));
    }
}
gpl-2.0
psoreide/bnd
biz.aQute.bndlib.tests/compilerversions/src/jdk_9_0/ClassRef.java
189
package jdk_9_0;

// Compiler-version test fixture (compiled under JDK 9.0): exercises class
// literal references so the resulting class file contains the expected
// constant-pool entries.  The exact shape of this code is the point of the
// fixture — do not "clean it up".
public class ClassRef {

    // Inner class referenced only via its class literal below.
    class Inner {
    };

    // Static initializer holding a reference to the nested class.
    static {
        System.out.println(Inner.class);
    }

    // NOTE(review): main() deliberately(?) takes no arguments, so it is not a
    // launchable entry point — presumably only its class reference to
    // javax.swing.Box matters here; confirm against the bnd test harness.
    public static void main() {
        System.out.println(javax.swing.Box.class);
    }
}
apache-2.0
maio/closure-compiler
test/com/google/javascript/jscomp/InlineFunctionsTest.java
75954
/* * Copyright 2008 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; /** * Inline function tests. * @author johnlenz@google.com (john lenz) */ public final class InlineFunctionsTest extends CompilerTestCase { boolean allowGlobalFunctionInlining = true; boolean allowBlockInlining = true; final boolean allowExpressionDecomposition = true; final boolean allowFunctionExpressionInlining = true; final boolean allowLocalFunctionInlining = true; boolean assumeStrictThis = false; boolean assumeMinimumCapture = false; int maxSizeAfterInlining = CompilerOptions.UNLIMITED_FUN_SIZE_AFTER_INLINING; final static String EXTERNS = "/** @nosideeffects */ function nochg(){}\n" + "function chg(){}\n"; public InlineFunctionsTest() { super(EXTERNS); this.enableNormalize(); this.enableComputeSideEffects(); } @Override protected void setUp() throws Exception { super.setUp(); super.enableLineNumberCheck(true); enableInferConsts(true); allowGlobalFunctionInlining = true; allowBlockInlining = true; assumeStrictThis = false; assumeMinimumCapture = false; } @Override protected CompilerPass getProcessor(Compiler compiler) { compiler.resetUniqueNameId(); return new InlineFunctions( compiler, compiler.getUniqueNameIdSupplier(), allowGlobalFunctionInlining, allowLocalFunctionInlining, allowBlockInlining, assumeStrictThis, assumeMinimumCapture, maxSizeAfterInlining); } /** * Returns the number of times the pass should be run before results are * 
verified. */ @Override protected int getNumRepetitions() { // Some inlining can only be done in multiple passes. return 3; } public void testInlineEmptyFunction1() { // Empty function, no params. test("function foo(){}" + "foo();", "void 0;"); } public void testInlineEmptyFunction2() { // Empty function, params with no side-effects. test("function foo(){}" + "foo(1, new Date, function(){});", "void 0;"); } public void testInlineEmptyFunction3() { // Empty function, multiple references. test("function foo(){}" + "foo();foo();foo();", "void 0;void 0;void 0"); } public void testInlineEmptyFunction4() { // Empty function, params with side-effects forces block inlining. test("function foo(){}" + "foo(x());", "{var JSCompiler_inline_anon_param_0=x();}"); } public void testInlineEmptyFunction5() { // Empty function, call params with side-effects in expression can not // be inlined. allowBlockInlining = false; testSame("function foo(){}" + "foo(x());"); } public void testInlineFunctions1() { // As simple a test as we can get. test("function foo(){ return 4 }" + "foo();", "4"); } public void testInlineFunctions2() { // inline simple constants // NOTE: CD is not inlined. test("var t;var AB=function(){return 4};" + "function BC(){return 6;}" + "CD=function(x){return x + 5};x=CD(3);y=AB();z=BC();", "var t;CD=function(x){return x+5};x=CD(3);y=4;z=6" ); } public void testInlineFunctions3() { // inline simple constants test("var t;var AB=function(){return 4};" + "function BC(){return 6;}" + "var CD=function(x){return x + 5};x=CD(3);y=AB();z=BC();", "var t;x=3+5;y=4;z=6"); } public void testInlineFunctions4() { // don't inline if there are multiple definitions (need DFA for that). 
test("var t; var AB = function() { return 4 }; " + "function BC() { return 6; }" + "CD = 0;" + "CD = function(x) { return x + 5 }; x = CD(3); y = AB(); z = BC();", "var t;CD=0;CD=function(x){return x+5};x=CD(3);y=4;z=6"); } public void testInlineFunctions5() { // inline additions test("var FOO_FN=function(x,y) { return \"de\" + x + \"nu\" + y };" + "var a = FOO_FN(\"ez\", \"ts\")", "var a=\"de\"+\"ez\"+\"nu\"+\"ts\""); } public void testInlineFunctions6() { // more complex inlines test("function BAR_FN(x, y, z) { return z(nochg(x + y)) }" + "alert(BAR_FN(1, 2, baz))", "alert(baz(nochg(1+2)))"); } public void testInlineFunctions7() { // inlines appearing multiple times test("function FN(x,y,z){return x+x+y}" + "var b=FN(1,2,3)", "var b=1+1+2"); } public void testInlineFunctions8() { // check correct parenthesization test("function MUL(x,y){return x*y}function ADD(x,y){return x+y}" + "var a=1+MUL(2,3);var b=2*ADD(3,4)", "var a=1+2*3;var b=2*(3+4)"); } public void testInlineFunctions9() { // don't inline if the input parameter is modified. test("function INC(x){return x++}" + "var y=INC(i)", "var y;{var x$$inline_0=i;" + "y=x$$inline_0++}"); } public void testInlineFunctions10() { test("function INC(x){return x++}" + "var y=INC(i);y=INC(i)", "var y;" + "{var x$$inline_0=i;" + "y=x$$inline_0++}" + "{var x$$inline_2=i;" + "y=x$$inline_2++}"); } public void testInlineFunctions11() { test("function f(x){return x}" + "var y=f(i)", "var y=i"); } public void testInlineFunctions12() { // don't inline if the input parameter has side-effects. allowBlockInlining = false; test("function f(x){return x}" + "var y=f(i)", "var y=i"); testSame("function f(x){return x}" + "var y=f(i++)"); } public void testInlineFunctions13() { // inline as block if the input parameter has side-effects. 
test("function f(x){return x}" + "var y=f(i++)", "var y;{var x$$inline_0=i++;y=x$$inline_0}"); } public void testInlineFunctions14() { // don't remove functions that are referenced on other ways test("function FOO(x){return x}var BAR=function(y){return y}" + ";b=FOO;a(BAR);x=FOO(1);y=BAR(2)", "function FOO(x){return x}var BAR=function(y){return y}" + ";b=FOO;a(BAR);x=1;y=2"); } public void testInlineFunctions15a() { // closure factories: do inline into global scope. test("function foo(){return function(a){return a+1}}" + "var b=function(){return c};" + "var d=b()+foo()", "var d=c+function(a){return a+1}"); } public void testInlineFunctions15b() { assumeMinimumCapture = false; // closure factories: don't inline closure with locals into global scope. test("function foo(){var x;return function(a){return a+1}}" + "var b=function(){return c};" + "var d=b()+foo()", "function foo(){var x;return function(a){return a+1}}" + "var d=c+foo()"); assumeMinimumCapture = true; test("function foo(){var x;return function(a){return a+1}}" + "var b=function(){return c};" + "var d=b()+foo()", "var JSCompiler_inline_result$$0;" + "{var x$$inline_1;" + "JSCompiler_inline_result$$0=function(a$$inline_2){return a$$inline_2+1};}" + "var d=c+JSCompiler_inline_result$$0"); } public void testInlineFunctions15c() { assumeMinimumCapture = false; // closure factories: don't inline into non-global scope. test("function foo(){return function(a){return a+1}}" + "var b=function(){return c};" + "function _x(){ var d=b()+foo() }", "function foo(){return function(a){return a+1}}" + "function _x(){ var d=c+foo() }"); assumeMinimumCapture = true; // closure factories: don't inline into non-global scope. test("function foo(){return function(a){return a+1}}" + "var b=function(){return c};" + "function _x(){ var d=b()+foo() }", "function _x(){var d=c+function(a){return a+1}}"); } public void testInlineFunctions15d() { assumeMinimumCapture = false; // closure factories: don't inline functions with vars. 
test("function foo(){var x; return function(a){return a+1}}" + "var b=function(){return c};" + "function _x(){ var d=b()+foo() }", "function foo(){var x; return function(a){return a+1}}" + "function _x(){ var d=c+foo() }"); assumeMinimumCapture = true; // closure factories: inline functions with vars. test("function foo(){var x; return function(a){return a+1}}" + "var b=function(){return c};" + "function _x(){ var d=b()+foo() }", "function _x() {" + " var JSCompiler_inline_result$$0;" + " {" + " var x$$inline_1;" + " JSCompiler_inline_result$$0=function(a$$inline_2) {" + " return a$$inline_2+1};" + " }" + " var d=c+JSCompiler_inline_result$$0" + "}"); } public void testInlineFunctions16a() { assumeMinimumCapture = false; testSame("function foo(b){return window.bar(function(){c(b)})}" + "var d=foo(e)"); assumeMinimumCapture = true; test( "function foo(b){return window.bar(function(){c(b)})}" + "var d=foo(e)", "var d;{var b$$inline_0=e;" + "d=window.bar(function(){c(b$$inline_0)})}"); } public void testInlineFunctions16b() { test("function foo(){return window.bar(function(){c()})}" + "var d=foo(e)", "var d=window.bar(function(){c()})"); } public void testInlineFunctions17() { // don't inline recursive functions testSame("function foo(x){return x*x+foo(3)}var bar=foo(4)"); } public void testInlineFunctions18() { // TRICKY ... test nested inlines allowBlockInlining = false; test("function foo(a, b){return a+b}" + "function bar(d){return c}" + "var d=foo(bar(1),e)", "var d=c+e"); } public void testInlineFunctions19() { // TRICKY ... 
test nested inlines // with block inlining possible test("function foo(a, b){return a+b}" + "function bar(d){return c}" + "var d=foo(bar(1),e)", "var d=c+e;"); } public void testInlineFunctions20() { // Make sure both orderings work allowBlockInlining = false; test("function foo(a, b){return a+b}" + "function bar(d){return c}" + "var d=bar(foo(1,e));", "var d=c"); } public void testInlineFunctions21() { // with block inlining possible test("function foo(a, b){return a+b}" + "function bar(d){return c}" + "var d=bar(foo(1,e))", "var d=c"); } public void testInlineFunctions22() { // Another tricky case ... test nested compiler inlines test("function plex(a){if(a) return 0;else return 1;}" + "function foo(a, b){return bar(a+b)}" + "function bar(d){return plex(d)}" + "var d=foo(1,2)", "var d;{JSCompiler_inline_label_plex_1:{" + "if(1+2){" + "d=0;break JSCompiler_inline_label_plex_1}" + "else{" + "d=1;break JSCompiler_inline_label_plex_1}d=void 0}}"); } public void testInlineFunctions23() { // Test both orderings again test("function complex(a){if(a) return 0;else return 1;}" + "function bar(d){return complex(d)}" + "function foo(a, b){return bar(a+b)}" + "var d=foo(1,2)", "var d;{JSCompiler_inline_label_complex_1:{" + "if(1+2){" + "d=0;break JSCompiler_inline_label_complex_1" + "}else{" + "d=1;break JSCompiler_inline_label_complex_1" + "}d=void 0}}"); } public void testInlineFunctions24() { // Don't inline functions with 'arguments' or 'this' testSame("function foo(x){return this}foo(1)"); } public void testInlineFunctions25() { testSame("function foo(){return arguments[0]}foo()"); } public void testInlineFunctions26() { // Don't inline external functions testSame("function _foo(x){return x}_foo(1)"); } public void testInlineFunctions27() { test("var window = {}; function foo(){window.bar++; return 3;}" + "var x = {y: 1, z: foo(2)};", "var window={};" + "var JSCompiler_inline_result$$0;" + "{" + " window.bar++;" + " JSCompiler_inline_result$$0 = 3;" + "}" + "var x = {y: 
1, z: JSCompiler_inline_result$$0};"); } public void testInlineFunctions28() { test("var window = {}; function foo(){window.bar++; return 3;}" + "var x = {y: alert(), z: foo(2)};", "var window = {};" + "var JSCompiler_temp_const$$0 = alert();" + "var JSCompiler_inline_result$$1;" + "{" + " window.bar++;" + " JSCompiler_inline_result$$1 = 3;}" + "var x = {" + " y: JSCompiler_temp_const$$0," + " z: JSCompiler_inline_result$$1" + "};"); } public void testInlineFunctions29() { test("var window = {}; function foo(){window.bar++; return 3;}" + "var x = {a: alert(), b: alert2(), c: foo(2)};", "var window = {};" + "var JSCompiler_temp_const$$1 = alert();" + "var JSCompiler_temp_const$$0 = alert2();" + "var JSCompiler_inline_result$$2;" + "{" + " window.bar++;" + " JSCompiler_inline_result$$2 = 3;}" + "var x = {" + " a: JSCompiler_temp_const$$1," + " b: JSCompiler_temp_const$$0," + " c: JSCompiler_inline_result$$2" + "};"); } public void testInlineFunctions30() { // As simple a test as we can get. testSame("function foo(){ return eval() }" + "foo();"); } public void testInlineFunctions31() { // Don't introduce a duplicate label in the same scope test("function foo(){ lab:{4;} }" + "lab:{foo();}", "lab:{{JSCompiler_inline_label_0:{4}}}"); } public void testMixedModeInlining1() { // Base line tests, direct inlining test("function foo(){return 1}" + "foo();", "1;"); } public void testMixedModeInlining2() { // Base line tests, block inlining. Block inlining is needed by // possible-side-effect parameter. test("function foo(){return 1}" + "foo(x());", "{var JSCompiler_inline_anon_param_0=x();1}"); } public void testMixedModeInlining3() { // Inline using both modes. test("function foo(){return 1}" + "foo();foo(x());", "1;{var JSCompiler_inline_anon_param_0=x();1}"); } public void testMixedModeInlining4() { // Inline using both modes. Alternating. Second call of each type has // side-effect-less parameter, this is thrown away. 
test("function foo(){return 1}" + "foo();foo(x());" + "foo(1);foo(1,x());", "1;{var JSCompiler_inline_anon_param_0=x();1}" + "1;{var JSCompiler_inline_anon_param_4=x();1}"); } public void testMixedModeInliningCosting1() { // Inline using both modes. Costing estimates. // Base line. test( "function foo(a,b){return a+b+a+b+4+5+6+7+8+9+1+2+3+4+5}" + "foo(1,2);" + "foo(2,3)", "1+2+1+2+4+5+6+7+8+9+1+2+3+4+5;" + "2+3+2+3+4+5+6+7+8+9+1+2+3+4+5"); } public void testMixedModeInliningCosting2() { // Don't inline here because the function definition can not be eliminated. // TODO(johnlenz): Should we add constant removing to the unit test? testSame( "function foo(a,b){return a+b+a+b+4+5+6+7+8+9+1+2+3+4+5}" + "foo(1,2);" + "foo(2,3,x())"); } public void testMixedModeInliningCosting3() { // Do inline here because the function definition can be eliminated. test( "function foo(a,b){return a+b+a+b+4+5+6+7+8+9+1+2+3+10}" + "foo(1,2);" + "foo(2,3,x())", "1+2+1+2+4+5+6+7+8+9+1+2+3+10;" + "{var JSCompiler_inline_anon_param_2=x();" + "2+3+2+3+4+5+6+7+8+9+1+2+3+10}"); } public void testMixedModeInliningCosting4() { // Threshold test. testSame( "function foo(a,b){return a+b+a+b+4+5+6+7+8+9+1+2+3+4+101}" + "foo(1,2);" + "foo(2,3,x())"); } public void testNoInlineIfParametersModified1() { // Assignment test("function f(x){return x=1}f(undefined)", "{var x$$inline_0=undefined;" + "x$$inline_0=1}"); } public void testNoInlineIfParametersModified2() { test("function f(x){return (x)=1;}f(2)", "{var x$$inline_0=2;" + "x$$inline_0=1}"); } public void testNoInlineIfParametersModified3() { // Assignment variant. test("function f(x){return x*=2}f(2)", "{var x$$inline_0=2;" + "x$$inline_0*=2}"); } public void testNoInlineIfParametersModified4() { // Assignment in if. 
test("function f(x){return x?(x=2):0}f(2)", "{var x$$inline_0=2;" + "x$$inline_0?(" + "x$$inline_0=2):0}"); } public void testNoInlineIfParametersModified5() { // Assignment in if, multiple params test("function f(x,y){return x?(y=2):0}f(2,undefined)", "{var y$$inline_1=undefined;2?(" + "y$$inline_1=2):0}"); } public void testNoInlineIfParametersModified6() { test("function f(x,y){return x?(y=2):0}f(2)", "{var y$$inline_1=void 0;2?(" + "y$$inline_1=2):0}"); } public void testNoInlineIfParametersModified7() { // Increment test("function f(a){return++a<++a}f(1)", "{var a$$inline_0=1;" + "++a$$inline_0<" + "++a$$inline_0}"); } public void testInlineIfParametersModified8() { // OK, object parameter modified. test("function f(a){return a.x=2}f(o)", "o.x=2"); } public void testInlineIfParametersModified9() { // OK, array parameter modified. test("function f(a){return a[2]=2}f(o)", "o[2]=2"); } public void testInlineNeverPartialSubtitution1() { test("function f(z){return x.y.z;}f(1)", "x.y.z"); } public void testInlineNeverPartialSubtitution2() { test("function f(z){return x.y[z];}f(a)", "x.y[a]"); } public void testInlineNeverMutateConstants() { test("function f(x){return x=1}f(undefined)", "{var x$$inline_0=undefined;" + "x$$inline_0=1}"); } public void testInlineNeverOverrideNewValues() { test("function f(a){return++a<++a}f(1)", "{var a$$inline_0=1;" + "++a$$inline_0<++a$$inline_0}"); } public void testInlineMutableArgsReferencedOnce() { test("function foo(x){return x;}foo([])", "{" + " var x$$inline_0=[];" + " x$$inline_0; " + "}"); } public void testInlineMutableArgsReferencedOnce2() { this.assumeMinimumCapture = true; // Don't inline a mutable value that will be reused. 
test(
    "function foo(x){return function(){ return x; }} repeat(foo([]))",

    "var JSCompiler_inline_result$$0;" +
    "{ " +
    "var x$$inline_1=[];" +
    "JSCompiler_inline_result$$0=function(){return x$$inline_1}; " +
    "}" +
    "repeat(JSCompiler_inline_result$$0)");
  }

  public void testInlineMutableArgsReferencedOnce3() {
    this.assumeMinimumCapture = true;
    // Don't inline a mutable value that will be reused.
    test(
        "function f(a) {\n" +
        " for(var i=0; i<0; i++) {\n" +
        " g(a);\n" +
        " }\n" +
        "}\n" +
        "f([]);",

        "{" +
        "var a$$inline_0=[];" +
        "var i$$inline_1=0;" +
        "for(;i$$inline_1<0;i$$inline_1++) {" +
        " g(a$$inline_0)" +
        "}" +
        "}");
  }

  // With block inlining disabled, calls with mutable arguments referenced
  // more than once must not be inlined.
  public void testNoInlineMutableArgs1() {
    allowBlockInlining = false;
    testSame("function foo(x){return x+x} foo([])");
  }

  public void testNoInlineMutableArgs2() {
    allowBlockInlining = false;
    testSame("function foo(x){return x+x} foo(new Date)");
  }

  public void testNoInlineMutableArgs3() {
    allowBlockInlining = false;
    testSame("function foo(x){return x+x} foo(true&&new Date)");
  }

  public void testNoInlineMutableArgs4() {
    allowBlockInlining = false;
    testSame("function foo(x){return x+x} foo({})");
  }

  // Block inlining copies a mutable argument into a temp once and reuses it.
  public void testInlineBlockMutableArgs1() {
    test("function foo(x){x+x}foo([])",
        "{var x$$inline_0=[];" +
        "x$$inline_0+x$$inline_0}");
  }

  public void testInlineBlockMutableArgs2() {
    test("function foo(x){x+x}foo(new Date)",
        "{var x$$inline_0=new Date;" +
        "x$$inline_0+x$$inline_0}");
  }

  public void testInlineBlockMutableArgs3() {
    test("function foo(x){x+x}foo(true&&new Date)",
        "{var x$$inline_0=true&&new Date;" +
        "x$$inline_0+x$$inline_0}");
  }

  public void testInlineBlockMutableArgs4() {
    test("function foo(x){x+x}foo({})",
        "{var x$$inline_0={};" +
        "x$$inline_0+x$$inline_0}");
  }

  public void testShadowVariables1() {
    // The Normalize pass now guarantees that that globals are never shadowed
    // by locals.

    // "foo" is inlined here as its parameter "a" doesn't conflict.
    // "bar" is assigned a new name.
test("var a=0;" +
    "function foo(a){return 3+a}" +
    "function bar(){var a=foo(4)}" +
    "bar();",

    "var a=0;" +
    "{var a$$inline_0=3+4}");
  }

  public void testShadowVariables2() {
    // "foo" is inlined here as its parameter "a" doesn't conflict.
    // "bar" is inlined as its uses global "a", and does introduce any new
    // globals.
    test("var a=0;" +
        "function foo(a){return 3+a}" +
        "function bar(){a=foo(4)}" +
        "bar()",

        "var a=0;" +
        "{a=3+4}");
  }

  public void testShadowVariables3() {
    // "foo" is inlined into exported "_bar", aliasing foo's "a".
    test("var a=0;" +
        "function foo(){var a=2;return 3+a}" +
        "function _bar(){a=foo()}",

        "var a=0;" +
        "function _bar(){{var a$$inline_0=2;" +
        "a=3+a$$inline_0}}");
  }

  public void testShadowVariables4() {
    // "foo" is inlined.
    // block access to global "a".
    test("var a=0;" +
        "function foo(){return 3+a}" +
        "function _bar(a){a=foo(4)+a}",

        "var a=0;function _bar(a$$1){" +
        "a$$1=" +
        "3+a+a$$1}");
  }

  public void testShadowVariables5() {
    // Can't yet inline multiple statements functions into expressions
    // (though some are possible using the COMMA operator).
allowBlockInlining = false;
    testSame("var a=0;" +
        "function foo(){var a=4;return 3+a}" +
        "function _bar(a){a=foo(4)+a}");
  }

  public void testShadowVariables6() {
    test("var a=0;" +
        "function foo(){var a=4;return 3+a}" +
        "function _bar(a){a=foo(4)}",

        "var a=0;function _bar(a$$2){{" +
        "var a$$inline_0=4;" +
        "a$$2=3+a$$inline_0}}");
  }

  public void testShadowVariables7() {
    // Inlining into a nested function scope; the result differs only in the
    // generated temp name depending on the minimum-capture assumption.
    assumeMinimumCapture = false;
    test("var a=3;" +
        "function foo(){return a}" +
        "(function(){var a=5;(function(){foo()})()})()",
        "var a=3;" +
        "{var a$$inline_0=5;{a}}"
        );
    assumeMinimumCapture = true;
    test("var a=3;" +
        "function foo(){return a}" +
        "(function(){var a=5;(function(){foo()})()})()",
        "var a=3;" +
        "{var a$$inline_1=5;{a}}"
        );
  }

  public void testShadowVariables8() {
    // this should be inlined
    test("var a=0;" +
        "function foo(){return 3}" +
        "function _bar(){var a=foo()}",

        "var a=0;" +
        "function _bar(){var a=3}");
  }

  public void testShadowVariables9() {
    // this should be inlined too [even if the global is not declared]
    test("function foo(){return 3}" +
        "function _bar(){var a=foo()}",

        "function _bar(){var a=3}");
  }

  public void testShadowVariables10() {
    // callee var must be renamed.
test("var a;function foo(){return a}" +
    "function _bar(){var a=foo()}",

    "var a;function _bar(){var a$$1=a}");
  }

  public void testShadowVariables11() {
    // The call has a local variable
    // which collides with the function being inlined
    test("var a=0;var b=1;" +
        "function foo(){return a+a}" +
        "function _bar(){var a=foo();alert(a)}",

        "var a=0;var b=1;" +
        "function _bar(){var a$$1=a+a;" +
        "alert(a$$1)}"
        );
  }

  public void testShadowVariables12() {
    // 2 globals colliding
    test("var a=0;var b=1;" +
        "function foo(){return a+b}" +
        "function _bar(){var a=foo(),b;alert(a)}",

        "var a=0;var b=1;" +
        "function _bar(){var a$$1=a+b," +
        "b$$1;" +
        "alert(a$$1)}");
  }

  public void testShadowVariables13() {
    // The only change is to remove the collision
    test("var a=0;var b=1;" +
        "function foo(){return a+a}" +
        "function _bar(){var c=foo();alert(c)}",

        "var a=0;var b=1;" +
        "function _bar(){var c=a+a;alert(c)}");
  }

  public void testShadowVariables14() {
    // There is a collision even though it is not read.
    test("var a=0;var b=1;" +
        "function foo(){return a+b}" +
        "function _bar(){var c=foo(),b;alert(c)}",

        "var a=0;var b=1;" +
        "function _bar(){var c=a+b," +
        "b$$1;alert(c)}");
  }

  public void testShadowVariables15() {
    // Both parent and child reference a global
    test("var a=0;var b=1;" +
        "function foo(){return a+a}" +
        "function _bar(){var c=foo();alert(c+a)}",

        "var a=0;var b=1;" +
        "function _bar(){var c=a+a;alert(c+a)}");
  }

  public void testShadowVariables16() {
    assumeMinimumCapture = false;
    // Inline functions defined as a child of the CALL node.
    test("var a=3;" +
        "function foo(){return a}" +
        "(function(){var a=5;(function(){foo()})()})()",
        "var a=3;" +
        "{var a$$inline_0=5;{a}}"
        );
    assumeMinimumCapture = true;
    // Inline functions defined as a child of the CALL node.
test("var a=3;" +
    "function foo(){return a}" +
    "(function(){var a=5;(function(){foo()})()})()",
    "var a=3;" +
    "{var a$$inline_1=5;{a}}"
    );
  }

  public void testShadowVariables17() {
    // Transitive inlining: foo() -> bar() collapses into the caller body.
    test("var a=0;" +
        "function bar(){return a+a}" +
        "function foo(){return bar()}" +
        "function _goo(){var a=2;var x=foo();}",

        "var a=0;" +
        "function _goo(){var a$$1=2;var x=a+a}");
  }

  public void testShadowVariables18() {
    test("var a=0;" +
        "function bar(){return a+a}" +
        "function foo(){var a=3;return bar()}" +
        "function _goo(){var a=2;var x=foo();}",

        "var a=0;" +
        "function _goo(){var a$$2=2;var x;" +
        "{var a$$inline_0=3;x=a+a}}");
  }

  public void testCostBasedInlining1() {
    // The reassignment via "new Function" makes foo unsafe to inline.
    testSame(
        "function foo(a){return a}" +
        "foo=new Function(\"return 1\");" +
        "foo(1)");
  }

  public void testCostBasedInlining2() {
    // Baseline complexity tests.
    // Single call, function not removed.
    test(
        "function foo(a){return a}" +
        "var b=foo;" +
        "function _t1(){return foo(1)}",

        "function foo(a){return a}" +
        "var b=foo;" +
        "function _t1(){return 1}");
  }

  public void testCostBasedInlining3() {
    // Two calls, function not removed.
    test(
        "function foo(a,b){return a+b}" +
        "var b=foo;" +
        "function _t1(){return foo(1,2)}" +
        "function _t2(){return foo(2,3)}",

        "function foo(a,b){return a+b}" +
        "var b=foo;" +
        "function _t1(){return 1+2}" +
        "function _t2(){return 2+3}");
  }

  public void testCostBasedInlining4() {
    // Two calls, function not removed.
    // Here there isn't enough savings to justify inlining.
    testSame(
        "function foo(a,b){return a+b+a+b}" +
        "var b=foo;" +
        "function _t1(){return foo(1,2)}" +
        "function _t2(){return foo(2,3)}");
  }

  public void testCostBasedInlining5() {
    // Here there is enough savings to justify inlining.
    test(
        "function foo(a,b){return a+b+a+b}" +
        "function _t1(){return foo(1,2)}" +
        "function _t2(){return foo(2,3)}",

        "function _t1(){return 1+2+1+2}" +
        "function _t2(){return 2+3+2+3}");
  }

  public void testCostBasedInlining6() {
    // Here we have a threshold test.
// Do inline here: test( "function foo(a,b){return a+b+a+b+a+b+a+b+4+5+6+7+8+9+1+2+3+4+5}" + "function _t1(){return foo(1,2)}" + "function _t2(){return foo(2,3)}", "function _t1(){return 1+2+1+2+1+2+1+2+4+5+6+7+8+9+1+2+3+4+5}" + "function _t2(){return 2+3+2+3+2+3+2+3+4+5+6+7+8+9+1+2+3+4+5}"); } public void testCostBasedInlining7() { // Don't inline here (not enough savings): testSame( "function foo(a,b){" + " return a+b+a+b+a+b+a+b+4+5+6+7+8+9+1+2+3+4+5+6}" + "function _t1(){return foo(1,2)}" + "function _t2(){return foo(2,3)}"); } public void testCostBasedInlining8() { // Verify multiple references in the same statement: // Here "f" is not known to be removable, as it is a used as parameter // and is not known to be side-effect free. The first call to f() can // not be inlined on the first pass (as the call to f() as a parameter // prevents this). However, the call to f() would be inlinable, if it // is small enough to be inlined without removing the function declaration. // but it is not in this first test. allowBlockInlining = false; testSame("function f(a){return chg() + a + a;}" + "var a = f(f(1));"); } public void testCostBasedInlining9() { // Here both direct and block inlining is used. The call to f as a // parameter is inlined directly, which the call to f with f as a parameter // is inlined using block inlining. test("function f(a){return chg() + a + a;}" + "var a = f(f(1));", "var a;" + "{var a$$inline_0=chg()+1+1;" + "a=chg()+a$$inline_0+a$$inline_0}"); } public void testCostBasedInlining10() { allowBlockInlining = false; // The remaining use of 'f' would be inlined after the constants are folded. 
test("function f(a){return a + a;}" +
    "var a = f(f(1));",

    "function f(a$$1){return a$$1+a$$1}var a=f(1+1)");
  }

  public void testCostBasedInlining11() {
    // With block inlining
    test("function f(a){return chg() + a + a;}" +
        "var a = f(f(1))",

        "var a;" +
        "{var a$$inline_0=chg()+1+1;" +
        "a=chg()+a$$inline_0+a$$inline_0}");
  }

  public void testCostBasedInlining12() {
    test("function f(a){return 1 + a + a;}" +
        "var a = f(1) + f(2);",

        "var a=1+1+1+(1+2+2)");
  }

  // "Complex" variants: callees with no return value (statement bodies).
  public void testCostBasedInliningComplex1() {
    testSame(
        "function foo(a){a()}" +
        "foo=new Function(\"return 1\");" +
        "foo(1)");
  }

  public void testCostBasedInliningComplex2() {
    // Baseline complexity tests.
    // Single call, function not removed.
    test(
        "function foo(a){a()}" +
        "var b=foo;" +
        "function _t1(){foo(x)}",

        "function foo(a){a()}" +
        "var b=foo;" +
        "function _t1(){{x()}}");
  }

  public void testCostBasedInliningComplex3() {
    // Two calls, function not removed.
    test(
        "function foo(a,b){a+b}" +
        "var b=foo;" +
        "function _t1(){foo(1,2)}" +
        "function _t2(){foo(2,3)}",

        "function foo(a,b){a+b}" +
        "var b=foo;" +
        "function _t1(){{1+2}}" +
        "function _t2(){{2+3}}");
  }

  public void testCostBasedInliningComplex4() {
    // Two calls, function not removed.
    // Here there isn't enough savings to justify inlining.
    testSame(
        "function foo(a,b){a+b+a+b}" +
        "var b=foo;" +
        "function _t1(){foo(1,2)}" +
        "function _t2(){foo(2,3)}");
  }

  public void testCostBasedInliningComplex5() {
    // Here there is enough savings to justify inlining.
    test(
        "function foo(a,b){a+b+a+b}" +
        "function _t1(){foo(1,2)}" +
        "function _t2(){foo(2,3)}",

        "function _t1(){{1+2+1+2}}" +
        "function _t2(){{2+3+2+3}}");
  }

  public void testCostBasedInliningComplex6() {
    // Here we have a threshold test.
// Do inline here: test( "function foo(a,b){a+b+a+b+a+b+a+b+4+5+6+7+8+9+1}" + "function _t1(){foo(1,2)}" + "function _t2(){foo(2,3)}", "function _t1(){{1+2+1+2+1+2+1+2+4+5+6+7+8+9+1}}" + "function _t2(){{2+3+2+3+2+3+2+3+4+5+6+7+8+9+1}}"); } public void testCostBasedInliningComplex7() { // Don't inline here (not enough savings): testSame( "function foo(a,b){a+b+a+b+a+b+a+b+4+5+6+7+8+9+1+2}" + "function _t1(){foo(1,2)}" + "function _t2(){foo(2,3)}"); } public void testCostBasedInliningComplex8() { // Verify multiple references in the same statement. testSame("function _f(a){1+a+a}" + "a=_f(1)+_f(1)"); } public void testCostBasedInliningComplex9() { test("function f(a){1 + a + a;}" + "f(1);f(2);", "{1+1+1}{1+2+2}"); } public void testDoubleInlining1() { allowBlockInlining = false; test("var foo = function(a) { return nochg(a); };" + "var bar = function(b) { return b; };" + "foo(bar(x));", "nochg(x)"); } public void testDoubleInlining2() { test("var foo = function(a) { return getWindow(a); };" + "var bar = function(b) { return b; };" + "foo(bar(x));", "getWindow(x)"); } public void testNoInlineOfNonGlobalFunction1() { test("var g;function _f(){function g(){return 0}}" + "function _h(){return g()}", "var g;function _f(){}" + "function _h(){return g()}"); } public void testNoInlineOfNonGlobalFunction2() { test("var g;function _f(){var g=function(){return 0}}" + "function _h(){return g()}", "var g;function _f(){}" + "function _h(){return g()}"); } public void testNoInlineOfNonGlobalFunction3() { test("var g;function _f(){var g=function(){return 0}}" + "function _h(){return g()}", "var g;function _f(){}" + "function _h(){return g()}"); } public void testNoInlineOfNonGlobalFunction4() { test("var g;function _f(){function g(){return 0}}" + "function _h(){return g()}", "var g;function _f(){}" + "function _h(){return g()}"); } public void testNoInlineMaskedFunction() { // Normalization makes this test of marginal value. // The unreferenced function is removed. 
test("var g=function(){return 0};" +
    "function _f(g){return g()}",

    "function _f(g$$1){return g$$1()}");
  }

  public void testNoInlineNonFunction() {
    testSame("var g=3;function _f(){return g()}");
  }

  public void testInlineCall() {
    test("function f(g) { return g.h(); } f('x');",
        "\"x\".h()");
  }

  // Missing arguments are substituted as "void 0"; extra arguments are
  // dropped (when side-effect free).
  public void testInlineFunctionWithArgsMismatch1() {
    test("function f(g) { return g; } f();",
        "void 0");
  }

  public void testInlineFunctionWithArgsMismatch2() {
    test("function f() { return 0; } f(1);",
        "0");
  }

  public void testInlineFunctionWithArgsMismatch3() {
    test("function f(one, two, three) { return one + two + three; } f(1);",
        "1+void 0+void 0");
  }

  public void testInlineFunctionWithArgsMismatch4() {
    test("function f(one, two, three) { return one + two + three; }" +
        "f(1,2,3,4,5);",
        "1+2+3");
  }

  public void testArgumentsWithSideEffectsNeverInlined1() {
    allowBlockInlining = false;
    testSame("function f(){return 0} f(new goo());");
  }

  public void testArgumentsWithSideEffectsNeverInlined2() {
    allowBlockInlining = false;
    testSame("function f(g,h){return h+g}f(g(),h());");
  }

  public void testOneSideEffectCallDoesNotRuinOthers() {
    allowBlockInlining = false;
    test("function f(){return 0}f(new goo());f()",
        "function f(){return 0}f(new goo());0");
  }

  public void testComplexInlineNoResultNoParamCall1() {
    test("function f(){a()}f()",
        "{a()}");
  }

  public void testComplexInlineNoResultNoParamCall2() {
    // Early returns become labeled breaks when block-inlined.
    test("function f(){if (true){return;}else;} f();",
        "{JSCompiler_inline_label_f_0:{" +
        "if(true)break JSCompiler_inline_label_f_0;else;}}");
  }

  public void testComplexInlineNoResultNoParamCall3() {
    // We now allow vars in the global space.
    // Don't inline into vars into global scope.
// testSame("function f(){a();b();var z=1+1}f()");

    // But do inline into functions
    test("function f(){a();b();var z=1+1}function _foo(){f()}",
        "function _foo(){{a();b();var z$$inline_0=1+1}}");
  }

  public void testComplexInlineNoResultWithParamCall1() {
    test("function f(x){a(x)}f(1)",
        "{a(1)}");
  }

  public void testComplexInlineNoResultWithParamCall2() {
    test("function f(x,y){a(x)}var b=1;f(1,b)",
        "var b=1;{a(1)}");
  }

  public void testComplexInlineNoResultWithParamCall3() {
    test("function f(x,y){if (x) y(); return true;}var b=1;f(1,b)",
        "var b=1;{if(1)b();true}");
  }

  // Calls whose result is assigned: returns become assignments to the target.
  public void testComplexInline1() {
    test("function f(){if (true){return;}else;} z=f();",
        "{JSCompiler_inline_label_f_0:" +
        "{if(true){z=void 0;" +
        "break JSCompiler_inline_label_f_0}else;z=void 0}}");
  }

  public void testComplexInline2() {
    test("function f(){if (true){return;}else return;} z=f();",
        "{JSCompiler_inline_label_f_0:{if(true){z=void 0;" +
        "break JSCompiler_inline_label_f_0}else{z=void 0;" +
        "break JSCompiler_inline_label_f_0}z=void 0}}");
  }

  public void testComplexInline3() {
    test("function f(){if (true){return 1;}else return 0;} z=f();",
        "{JSCompiler_inline_label_f_0:{if(true){z=1;" +
        "break JSCompiler_inline_label_f_0}else{z=0;" +
        "break JSCompiler_inline_label_f_0}z=void 0}}");
  }

  public void testComplexInline4() {
    test("function f(x){a(x)} z = f(1)",
        "{a(1);z=void 0}");
  }

  public void testComplexInline5() {
    test("function f(x,y){a(x)}var b=1;z=f(1,b)",
        "var b=1;{a(1);z=void 0}");
  }

  public void testComplexInline6() {
    test("function f(x,y){if (x) y(); return true;}var b=1;z=f(1,b)",
        "var b=1;{if(1)b();z=true}");
  }

  public void testComplexInline7() {
    test("function f(x,y){if (x) return y(); else return true;}" +
        "var b=1;z=f(1,b)",

        "var b=1;{JSCompiler_inline_label_f_2:{if(1){z=b();" +
        "break JSCompiler_inline_label_f_2}else{z=true;" +
        "break JSCompiler_inline_label_f_2}z=void 0}}");
  }

  public void testComplexInline8() {
    test("function f(x){a(x)}var z=f(1)",
        "var z;{a(1);z=void 0}");
  }

  // As above, but the result initializes a var declaration: the declaration
  // is hoisted and the initializer becomes an assignment inside the block.
  public void testComplexInlineVars1() {
    test("function f(){if (true){return;}else;}var z=f();",
        "var z;{JSCompiler_inline_label_f_0:{" +
        "if(true){z=void 0;break JSCompiler_inline_label_f_0}else;z=void 0}}");
  }

  public void testComplexInlineVars2() {
    test("function f(){if (true){return;}else return;}var z=f();",
        "var z;{JSCompiler_inline_label_f_0:{" +
        "if(true){z=void 0;break JSCompiler_inline_label_f_0" +
        "}else{" +
        "z=void 0;break JSCompiler_inline_label_f_0}z=void 0}}");
  }

  public void testComplexInlineVars3() {
    test("function f(){if (true){return 1;}else return 0;}var z=f();",
        "var z;{JSCompiler_inline_label_f_0:{if(true){" +
        "z=1;break JSCompiler_inline_label_f_0" +
        "}else{" +
        "z=0;break JSCompiler_inline_label_f_0}z=void 0}}");
  }

  public void testComplexInlineVars4() {
    test("function f(x){a(x)}var z = f(1)",
        "var z;{a(1);z=void 0}");
  }

  public void testComplexInlineVars5() {
    test("function f(x,y){a(x)}var b=1;var z=f(1,b)",
        "var b=1;var z;{a(1);z=void 0}");
  }

  public void testComplexInlineVars6() {
    test("function f(x,y){if (x) y(); return true;}var b=1;var z=f(1,b)",
        "var b=1;var z;{if(1)b();z=true}");
  }

  public void testComplexInlineVars7() {
    test("function f(x,y){if (x) return y(); else return true;}" +
        "var b=1;var z=f(1,b)",

        "var b=1;var z;" +
        "{JSCompiler_inline_label_f_2:{if(1){z=b();" +
        "break JSCompiler_inline_label_f_2" +
        "}else{" +
        "z=true;break JSCompiler_inline_label_f_2}z=void 0}}");
  }

  public void testComplexInlineVars8() {
    test("function f(x){a(x)}var x;var z=f(1)",
        "var x;var z;{a(1);z=void 0}");
  }

  public void testComplexInlineVars9() {
    test("function f(x){a(x)}var x;var z=f(1);var y",
        "var x;var z;{a(1);z=void 0}var y");
  }

  public void testComplexInlineVars10() {
    test("function f(x){a(x)}var x=blah();var z=f(1);var y=blah();",
        "var x=blah();var z;{a(1);z=void 0}var y=blah()");
  }

  public void testComplexInlineVars11() {
    test("function f(x){a(x)}var x=blah();var z=f(1);var y;",
        "var x=blah();var z;{a(1);z=void 0}var y");
  }

  public void testComplexInlineVars12() {
test("function f(x){a(x)}var x;var z=f(1);var y=blah();",
    "var x;var z;{a(1);z=void 0}var y=blah()");
  }

  // Inlining into larger expressions: the call result is captured in a
  // JSCompiler_inline_result temp that replaces the call site.
  public void testComplexInlineInExpressionss1() {
    test("function f(){a()}var z=f()",
        "var z;{a();z=void 0}");
  }

  public void testComplexInlineInExpressionss2() {
    test("function f(){a()}c=z=f()",
        "var JSCompiler_inline_result$$0;" +
        "{a();JSCompiler_inline_result$$0=void 0;}" +
        "c=z=JSCompiler_inline_result$$0");
  }

  public void testComplexInlineInExpressionss3() {
    test("function f(){a()}c=z=f()",
        "var JSCompiler_inline_result$$0;" +
        "{a();JSCompiler_inline_result$$0=void 0;}" +
        "c=z=JSCompiler_inline_result$$0");
  }

  public void testComplexInlineInExpressionss4() {
    test("function f(){a()}if(z=f());",
        "var JSCompiler_inline_result$$0;" +
        "{a();JSCompiler_inline_result$$0=void 0;}" +
        "if(z=JSCompiler_inline_result$$0);");
  }

  public void testComplexInlineInExpressionss5() {
    test("function f(){a()}if(z.y=f());",
        "var JSCompiler_temp_const$$0=z;" +
        "var JSCompiler_inline_result$$1;" +
        "{a();JSCompiler_inline_result$$1=void 0;}" +
        "if(JSCompiler_temp_const$$0.y=JSCompiler_inline_result$$1);");
  }

  // Loop conditions cannot be decomposed, so these calls stay as-is.
  public void testComplexNoInline1() {
    testSame("function f(){a()}while(z=f())continue");
  }

  public void testComplexNoInline2() {
    testSame("function f(){a()}do;while(z=f())");
  }

  public void testComplexSample() {
    String result = "" +
        "{{" +
        "var styleSheet$$inline_2=null;" +
        "if(goog$userAgent$IE)" +
        "styleSheet$$inline_2=0;" +
        "else " +
        "var head$$inline_3=0;" +
        "{" +
        "var element$$inline_0=" +
        "styleSheet$$inline_2;" +
        "var stylesString$$inline_1=a;" +
        "if(goog$userAgent$IE)" +
        "element$$inline_0.cssText=" +
        "stylesString$$inline_1;" +
        "else " +
        "{" +
        "var propToSet$$inline_2=" +
        "\"innerText\";" +
        "element$$inline_0[" +
        "propToSet$$inline_2]=" +
        "stylesString$$inline_1" +
        "}" +
        "}" +
        "styleSheet$$inline_2" +
        "}}";

    test("var foo = function(stylesString, opt_element) { " +
        "var styleSheet = null;" +
        "if (goog$userAgent$IE)" +
        "styleSheet = 0;" +
        "else " +
        "var head = 0;" +
        "" +
"goo$zoo(styleSheet, stylesString);" +
        "return styleSheet;" +
        " };\n " +
        "var goo$zoo = function(element, stylesString) {" +
        "if (goog$userAgent$IE)" +
        "element.cssText = stylesString;" +
        "else {" +
        "var propToSet = 'innerText';" +
        "element[propToSet] = stylesString;" +
        "}" +
        "};" +
        "(function(){foo(a,b);})();",
        result);
  }

  public void testComplexSampleNoInline() {
    // Same shape as testComplexSample, but the WEBKIT/IE ternary makes the
    // callee too large to inline profitably.
    testSame(
        "foo=function(stylesString,opt_element){" +
        "var styleSheet=null;" +
        "if(goog$userAgent$IE)" +
        "styleSheet=0;" +
        "else " +
        "var head=0;" +
        "" +
        "goo$zoo(styleSheet,stylesString);" +
        "return styleSheet" +
        "};" +
        "goo$zoo=function(element,stylesString){" +
        "if(goog$userAgent$IE)" +
        "element.cssText=stylesString;" +
        "else{" +
        "var propToSet=goog$userAgent$WEBKIT?\"innerText\":\"innerHTML\";" +
        "element[propToSet]=stylesString" +
        "}" +
        "}");
  }

  // Test redefinition of parameter name.
  public void testComplexNoVarSub() {
    test(
        "function foo(x){" +
        "var x;" +
        "y=x" +
        "}" +
        "foo(1)",

        "{y=1}"
        );
  }

  public void testComplexFunctionWithFunctionDefinition1() {
    test("function f(){call(function(){return})}f()",
        "{call(function(){return})}");
  }

  public void testComplexFunctionWithFunctionDefinition2() {
    assumeMinimumCapture = false;

    // Don't inline if local names might be captured.
    testSame("function f(a){call(function(){return})}f()");

    assumeMinimumCapture = true;

    test("(function(){" +
        "var f = function(a){call(function(){return a})};f()})()",
        "{{var a$$inline_0=void 0;call(function(){return a$$inline_0})}}");
  }

  public void testComplexFunctionWithFunctionDefinition2a() {
    assumeMinimumCapture = false;

    // Don't inline if local names might be captured.
testSame("(function(){" +
    "var f = function(a){call(function(){return a})};f()})()");

  assumeMinimumCapture = true;

  test("(function(){" +
      "var f = function(a){call(function(){return a})};f()})()",
      "{{var a$$inline_0=void 0;call(function(){return a$$inline_0})}}");
  }

  public void testComplexFunctionWithFunctionDefinition3() {
    assumeMinimumCapture = false;

    // Don't inline if local names might need to be captured.
    testSame("function f(){var a; call(function(){return a})}f()");

    assumeMinimumCapture = true;

    test("function f(){var a; call(function(){return a})}f()",
        "{var a$$inline_0;call(function(){return a$$inline_0})}");
  }

  public void testDecomposePlusEquals() {
    // A compound assignment target is evaluated into a temp before the call
    // is block-inlined.
    test("function f(){a=1;return 1} var x = 1; x += f()",
        "var x = 1;" +
        "var JSCompiler_temp_const$$0 = x;" +
        "var JSCompiler_inline_result$$1;" +
        "{a=1;" +
        " JSCompiler_inline_result$$1=1}" +
        "x = JSCompiler_temp_const$$0 + JSCompiler_inline_result$$1;");
  }

  public void testDecomposeFunctionExpressionInCall() {
    test(
        "(function(map){descriptions_=map})(\n" +
        "function(){\n" +
        "var ret={};\n" +
        "ret[ONE]='a';\n" +
        "ret[TWO]='b';\n" +
        "return ret\n" +
        "}()\n" +
        ");",
        "var JSCompiler_inline_result$$0;" +
        "{" +
        "var ret$$inline_1={};\n" +
        "ret$$inline_1[ONE]='a';\n" +
        "ret$$inline_1[TWO]='b';\n" +
        "JSCompiler_inline_result$$0 = ret$$inline_1;\n" +
        "}" +
        "{" +
        "descriptions_=JSCompiler_inline_result$$0;" +
        "}"
        );
  }

  // ".call(this)" constructor-style invocations.
  public void testInlineConstructor1() {
    test("function f() {} function _g() {f.call(this)}",
        "function _g() {void 0}");
  }

  public void testInlineConstructor2() {
    test("function f() {} f.prototype.a = 0; function _g() {f.call(this)}",
        "function f() {} f.prototype.a = 0; function _g() {void 0}");
  }

  public void testInlineConstructor3() {
    test("function f() {x.call(this)} f.prototype.a = 0;" +
        "function _g() {f.call(this)}",
        "function f() {x.call(this)} f.prototype.a = 0;" +
        "function _g() {{x.call(this)}}");
  }

  public void testInlineConstructor4() {
    test("function f() {x.call(this)} f.prototype.a = 0;" +
        "function _g() {var t = f.call(this)}",
        "function f() {x.call(this)} f.prototype.a = 0;" +
        "function _g() {var t; {x.call(this); t = void 0}}");
  }

  // Immediately-invoked function expressions.
  public void testFunctionExpressionInlining1() {
    test("(function(){})()",
        "void 0");
  }

  public void testFunctionExpressionInlining2() {
    test("(function(){foo()})()",
        "{foo()}");
  }

  public void testFunctionExpressionInlining3() {
    test("var a = (function(){return foo()})()",
        "var a = foo()");
  }

  public void testFunctionExpressionInlining4() {
    test("var a; a = 1 + (function(){return foo()})()",
        "var a; a = 1 + foo()");
  }

  public void testFunctionExpressionCallInlining1() {
    test("(function(){}).call(this)",
        "void 0");
  }

  public void testFunctionExpressionCallInlining2() {
    test("(function(){foo(this)}).call(this)",
        "{foo(this)}");
  }

  public void testFunctionExpressionCallInlining3() {
    test("var a = (function(){return foo(this)}).call(this)",
        "var a = foo(this)");
  }

  public void testFunctionExpressionCallInlining4() {
    test("var a; a = 1 + (function(){return foo(this)}).call(this)",
        "var a; a = 1 + foo(this)");
  }

  public void testFunctionExpressionCallInlining5() {
    test("a:(function(){return foo()})()",
        "a:foo()");
  }

  public void testFunctionExpressionCallInlining6() {
    test("a:(function(){return foo()}).call(this)",
        "a:foo()");
  }

  public void testFunctionExpressionCallInlining7() {
    test("a:(function(){})()",
        "a:void 0");
  }

  public void testFunctionExpressionCallInlining8() {
    test("a:(function(){}).call(this)",
        "a:void 0");
  }

  public void testFunctionExpressionCallInlining9() {
    // ... with unused recursive name.
    test("(function foo(){})()",
        "void 0");
  }

  public void testFunctionExpressionCallInlining10() {
    // ... with unused recursive name.
    test("(function foo(){}).call(this)",
        "void 0");
  }

  public void testFunctionExpressionCallInlining11a() {
    // Inline functions that return inner functions.
test("((function(){return function(){foo()}})())();", "{foo()}");
  }

  public void testFunctionExpressionCallInlining11b() {
    assumeMinimumCapture = false;
    // Can't inline functions that return inner functions and have local names.
    testSame("((function(){var a; return function(){foo()}})())();");

    assumeMinimumCapture = true;
    test(
        "((function(){var a; return function(){foo()}})())();",
        "var JSCompiler_inline_result$$0;" +
        "{var a$$inline_1;" +
        "JSCompiler_inline_result$$0=function(){foo()};}" +
        "JSCompiler_inline_result$$0()");
  }

  public void testFunctionExpressionCallInlining11c() {
    // TODO(johnlenz): Can inline, not temps needed.
    assumeMinimumCapture = false;
    testSame("function _x() {" +
        " ((function(){return function(){foo()}})())();" +
        "}");

    assumeMinimumCapture = true;
    test(
        "function _x() {" +
        " ((function(){return function(){foo()}})())();" +
        "}",
        "function _x() {" +
        " {foo()}" +
        "}");
  }

  public void testFunctionExpressionCallInlining11d() {
    // TODO(johnlenz): Can inline into a function containing eval, if
    // no names are introduced.
    assumeMinimumCapture = false;
    testSame("function _x() {" +
        " eval();" +
        " ((function(){return function(){foo()}})())();" +
        "}");

    assumeMinimumCapture = true;
    test(
        "function _x() {" +
        " eval();" +
        " ((function(){return function(){foo()}})())();" +
        "}",
        "function _x() {" +
        " eval();" +
        " {foo()}" +
        "}");
  }

  public void testFunctionExpressionCallInlining11e() {
    // No, don't inline into a function containing eval,
    // if temps are introduced.
    assumeMinimumCapture = false;
    testSame("function _x() {" +
        " eval();" +
        " ((function(a){return function(){foo()}})())();" +
        "}");

    assumeMinimumCapture = true;
    test("function _x() {" +
        " eval();" +
        " ((function(a){return function(){foo()}})())();" +
        "}",
        "function _x() {" +
        " eval();" +
        " {foo();}" +
        "}");
  }

  public void testFunctionExpressionCallInlining12() {
    // Can't inline functions that recurse.
    testSame("(function foo(){foo()})()");
  }

  public void testFunctionExpressionOmega() {
    // ... with unused recursive name.
    test("(function (f){f(f)})(function(f){f(f)})",
        "{var f$$inline_0=function(f$$1){f$$1(f$$1)};" +
        "{{f$$inline_0(f$$inline_0)}}}");
  }

  // Functions declared inside another function.
  public void testLocalFunctionInlining1() {
    test("function _f(){ function g() {} g() }",
        "function _f(){ void 0 }");
  }

  public void testLocalFunctionInlining2() {
    test("function _f(){ function g() {foo(); bar();} g() }",
        "function _f(){ {foo(); bar();} }");
  }

  public void testLocalFunctionInlining3() {
    test("function _f(){ function g() {foo(); bar();} g() }",
        "function _f(){ {foo(); bar();} }");
  }

  public void testLocalFunctionInlining4() {
    test("function _f(){ function g() {return 1} return g() }",
        "function _f(){ return 1 }");
  }

  public void testLocalFunctionInlining5() {
    testSame("function _f(){ function g() {this;} g() }");
  }

  public void testLocalFunctionInlining6() {
    testSame("function _f(){ function g() {this;} return g; }");
  }

  // With global inlining off, only calls inside a function body are inlined.
  public void testLocalFunctionInliningOnly1() {
    this.allowGlobalFunctionInlining = true;
    test("function f(){} f()", "void 0;");

    this.allowGlobalFunctionInlining = false;
    testSame("function f(){} f()");
  }

  public void testLocalFunctionInliningOnly2() {
    this.allowGlobalFunctionInlining = false;
    testSame("function f(){} f()");

    test("function f(){ function g() {return 1} return g() }; f();",
        "function f(){ return 1 }; f();");
  }

  public void testLocalFunctionInliningOnly3() {
    this.allowGlobalFunctionInlining = false;
    testSame("function f(){} f()");

    test("(function(){ function g() {return 1} return g() })();",
        "(function(){ return 1 })();");
  }

  public void testLocalFunctionInliningOnly4() {
    this.allowGlobalFunctionInlining = false;
    testSame("function f(){} f()");

    test("(function(){ return (function() {return 1})() })();",
        "(function(){ return 1 })();");
  }

  public void testInlineWithThis1() {
    assumeStrictThis = false;
    // If no "this" is provided it might need to be coerced to the global
    // "this".
testSame("function f(){} f.call();");
    testSame("function f(){this} f.call();");

    assumeStrictThis = true;
    // In strict mode, "this" is never coerced so we can use the provided value.
    test("function f(){} f.call();", "{}");
    test("function f(){this} f.call();", "{void 0;}");
  }

  public void testInlineWithThis2() {
    // "this" can always be replaced with "this"
    assumeStrictThis = false;
    test("function f(){} f.call(this);", "void 0");

    assumeStrictThis = true;
    test("function f(){} f.call(this);", "void 0");
  }

  public void testInlineWithThis3() {
    assumeStrictThis = false;
    // If no "this" is provided it might need to be coerced to the global
    // "this".
    testSame("function f(){} f.call([]);");

    assumeStrictThis = true;
    // In strict mode, "this" is never coerced so we can use the provided value.
    test("function f(){} f.call([]);", "{}");
  }

  public void testInlineWithThis4() {
    assumeStrictThis = false;
    // If no "this" is provided it might need to be coerced to the global
    // "this".
    testSame("function f(){} f.call(new g);");

    assumeStrictThis = true;
    // In strict mode, "this" is never coerced so we can use the provided value.
    test("function f(){} f.call(new g);",
        "{var JSCompiler_inline_this_0=new g}");
  }

  public void testInlineWithThis5() {
    assumeStrictThis = false;
    // If no "this" is provided it might need to be coerced to the global
    // "this".
    testSame("function f(){} f.call(g());");

    assumeStrictThis = true;
    // In strict mode, "this" is never coerced so we can use the provided value.
    test("function f(){} f.call(g());",
        "{var JSCompiler_inline_this_0=g()}");
  }

  public void testInlineWithThis6() {
    assumeStrictThis = false;
    // If no "this" is provided it might need to be coerced to the global
    // "this".
    testSame("function f(){this} f.call(new g);");

    assumeStrictThis = true;
    // In strict mode, "this" is never coerced so we can use the provided value.
test("function f(){this} f.call(new g);", "{var JSCompiler_inline_this_0=new g;JSCompiler_inline_this_0}"); } public void testInlineWithThis7() { assumeStrictThis = true; // In strict mode, "this" is never coerced so we can use the provided value. test("function f(a){a=1;this} f.call();", "{var a$$inline_0=void 0; a$$inline_0=1; void 0;}"); test("function f(a){a=1;this} f.call(x, x);", "{var a$$inline_0=x; a$$inline_0=1; x;}"); } // http://en.wikipedia.org/wiki/Fixed_point_combinator#Y_combinator public void testFunctionExpressionYCombinator() { assumeMinimumCapture = false; testSame( "var factorial = ((function(M) {\n" + " return ((function(f) {\n" + " return M(function(arg) {\n" + " return (f(f))(arg);\n" + " })\n" + " })\n" + " (function(f) {\n" + " return M(function(arg) {\n" + " return (f(f))(arg);\n" + " })\n" + " }));\n" + " })\n" + " (function(f) {\n" + " return function(n) {\n" + " if (n === 0)\n" + " return 1;\n" + " else\n" + " return n * f(n - 1);\n" + " };\n" + " }));\n" + "\n" + "factorial(5)\n"); assumeMinimumCapture = true; test( "var factorial = ((function(M) {\n" + " return ((function(f) {\n" + " return M(function(arg) {\n" + " return (f(f))(arg);\n" + " })\n" + " })\n" + " (function(f) {\n" + " return M(function(arg) {\n" + " return (f(f))(arg);\n" + " })\n" + " }));\n" + " })\n" + " (function(f) {\n" + " return function(n) {\n" + " if (n === 0)\n" + " return 1;\n" + " else\n" + " return n * f(n - 1);\n" + " };\n" + " }));\n" + "\n" + "factorial(5)\n", "var factorial;\n" + "{\n" + "var M$$inline_4 = function(f$$2) {\n" + " return function(n){if(n===0)return 1;else return n*f$$2(n-1)}\n" + "};\n" + "{\n" + "var f$$inline_0=function(f$$inline_7){\n" + " return M$$inline_4(\n" + " function(arg$$inline_8){\n" + " return f$$inline_7(f$$inline_7)(arg$$inline_8)\n" + " })\n" + "};\n" + "factorial=M$$inline_4(\n" + " function(arg$$inline_1){\n" + " return f$$inline_0(f$$inline_0)(arg$$inline_1)\n" + "});\n" + "}\n" + "}" + "factorial(5)"); } public void 
testRenamePropertyFunction() { testSame("function JSCompiler_renameProperty(x) {return x} " + "JSCompiler_renameProperty('foo')"); } public void testReplacePropertyFunction() { // baseline: an alias doesn't prevents declaration removal, but not // inlining. test("function f(x) {return x} " + "foo(window, f); f(1)", "function f(x) {return x} " + "foo(window, f); 1"); // a reference passed to JSCompiler_ObjectPropertyString prevents inlining // as well. testSame("function f(x) {return x} " + "new JSCompiler_ObjectPropertyString(window, f); f(1)"); } public void testInlineWithClosureContainingThis() { test("(function (){return f(function(){return this})})();", "f(function(){return this})"); } public void testIssue5159924a() { test("function f() { if (x()) return y() }\n" + "while(1){ var m = f() || z() }", "for(;1;) {" + " var JSCompiler_inline_result$$0;" + " {" + " JSCompiler_inline_label_f_1: {" + " if(x()) {" + " JSCompiler_inline_result$$0 = y();" + " break JSCompiler_inline_label_f_1" + " }" + " JSCompiler_inline_result$$0 = void 0;" + " }" + " }" + " var m=JSCompiler_inline_result$$0 || z()" + "}"); } public void testIssue5159924b() { test("function f() { if (x()) return y() }\n" + "while(1){ var m = f() }", "for(;1;){" + " var m;" + " {" + " JSCompiler_inline_label_f_0: { " + " if(x()) {" + " m = y();" + " break JSCompiler_inline_label_f_0" + " }" + " m = void 0" + " }" + " }" + "}"); } public void testInlineObject() { new StringCompare().testInlineObject(); } private static class StringCompare extends CompilerTestCase { private boolean allowGlobalFunctionInlining = true; StringCompare() { super("", false); this.enableNormalize(); this.enableMarkNoSideEffects(); } @Override public void setUp() throws Exception { super.setUp(); super.enableLineNumberCheck(true); allowGlobalFunctionInlining = true; } @Override protected CompilerPass getProcessor(Compiler compiler) { compiler.resetUniqueNameId(); return new InlineFunctions( compiler, 
compiler.getUniqueNameIdSupplier(), allowGlobalFunctionInlining, true, // allowLocalFunctionInlining true, // allowBlockInlining true, // assumeStrictThis true, // assumeMinimumCapture CompilerOptions.UNLIMITED_FUN_SIZE_AFTER_INLINING); } public void testInlineObject() { allowGlobalFunctionInlining = false; // TODO(johnlenz): normalize the AST so an AST comparison can be done. // As is, the expected AST does not match the actual correct result: // The AST matches "g.a()" with a FREE_CALL annotation, but this as // expected string would fail as it won't be mark as a free call. // "(0,g.a)()" matches the output, but not the resulting AST. test("function inner(){function f(){return g.a}(f())()}", "function inner(){(0,g.a)()}"); } } public void testBug4944818() { test( "var getDomServices_ = function(self) {\n" + " if (!self.domServices_) {\n" + " self.domServices_ = goog$component$DomServices.get(" + " self.appContext_);\n" + " }\n" + "\n" + " return self.domServices_;\n" + "};\n" + "\n" + "var getOwnerWin_ = function(self) {\n" + " return getDomServices_(self).getDomHelper().getWindow();\n" + "};\n" + "\n" + "HangoutStarter.prototype.launchHangout = function() {\n" + " var self = a.b;\n" + " var myUrl = new goog.Uri(getOwnerWin_(self).location.href);\n" + "};", "HangoutStarter.prototype.launchHangout = function() { " + " var self$$2 = a.b;" + " var JSCompiler_temp_const$$0 = goog.Uri;" + " var JSCompiler_inline_result$$1;" + " {" + " var self$$inline_2 = self$$2;" + " if (!self$$inline_2.domServices_) {" + " self$$inline_2.domServices_ = goog$component$DomServices.get(" + " self$$inline_2.appContext_);" + " }" + " JSCompiler_inline_result$$1=self$$inline_2.domServices_;" + " }" + " var myUrl = new JSCompiler_temp_const$$0(" + " JSCompiler_inline_result$$1.getDomHelper()." 
+ " getWindow().location.href)" + "}"); } public void testIssue423() { assumeMinimumCapture = false; test( "(function($) {\n" + " $.fn.multicheck = function(options) {\n" + " initialize.call(this, options);\n" + " };\n" + "\n" + " function initialize(options) {\n" + " options.checkboxes = $(this).siblings(':checkbox');\n" + " preload_check_all.call(this);\n" + " }\n" + "\n" + " function preload_check_all() {\n" + " $(this).data('checkboxes');\n" + " }\n" + "})(jQuery)", "(function($){" + " $.fn.multicheck=function(options$$1){" + " {" + " options$$1.checkboxes=$(this).siblings(\":checkbox\");" + " {" + " $(this).data(\"checkboxes\")" + " }" + " }" + " }" + "})(jQuery)"); assumeMinimumCapture = true; test( "(function($) {\n" + " $.fn.multicheck = function(options) {\n" + " initialize.call(this, options);\n" + " };\n" + "\n" + " function initialize(options) {\n" + " options.checkboxes = $(this).siblings(':checkbox');\n" + " preload_check_all.call(this);\n" + " }\n" + "\n" + " function preload_check_all() {\n" + " $(this).data('checkboxes');\n" + " }\n" + "})(jQuery)", "{var $$$inline_0=jQuery;\n" + "$$$inline_0.fn.multicheck=function(options$$inline_4){\n" + " {options$$inline_4.checkboxes=" + "$$$inline_0(this).siblings(\":checkbox\");\n" + " {$$$inline_0(this).data(\"checkboxes\")}" + " }\n" + "}\n" + "}"); } public void testIssue728() { String f = "var f = function() { return false; };"; StringBuilder calls = new StringBuilder(); StringBuilder folded = new StringBuilder(); for (int i = 0; i < 30; i++) { calls.append("if (!f()) alert('x');"); folded.append("if (!false) alert('x');"); } test(f + calls, folded.toString()); } public void testAnonymous1() { assumeMinimumCapture = false; test("(function(){var a=10;(function(){var b=a;a++;alert(b)})()})();", "{var a$$inline_0=10;" + "{var b$$inline_1=a$$inline_0;" + "a$$inline_0++;alert(b$$inline_1)}}"); assumeMinimumCapture = true; test("(function(){var a=10;(function(){var b=a;a++;alert(b)})()})();", "{var 
a$$inline_2=10;" + "{var b$$inline_0=a$$inline_2;" + "a$$inline_2++;alert(b$$inline_0)}}"); } public void testAnonymous2() { testSame("(function(){eval();(function(){var b=a;a++;alert(b)})()})();"); } public void testAnonymous3() { // Introducing a new value into is tricky assumeMinimumCapture = false; testSame("(function(){var a=10;(function(){arguments;})()})();"); assumeMinimumCapture = true; test("(function(){var a=10;(function(){arguments;})()})();", "{var a$$inline_0=10;(function(){arguments;})();}"); test("(function(){(function(){arguments;})()})();", "{(function(){arguments;})()}"); } public void testLoopWithFunctionWithFunction() { assumeMinimumCapture = true; test("function _testLocalVariableInLoop_() {\n" + " var result = 0;\n" + " function foo() {\n" + " var arr = [1, 2, 3, 4, 5];\n" + " for (var i = 0, l = arr.length; i < l; i++) {\n" + " var j = arr[i];\n" + // don't inline this function, because the correct behavior depends // captured values. " (function() {\n" + " var k = j;\n" + " setTimeout(function() { result += k; }, 5 * i);\n" + " })();\n" + " }\n" + " }\n" + " foo();\n" + "}", "function _testLocalVariableInLoop_(){\n" + " var result=0;\n" + " {" + " var arr$$inline_0=[1,2,3,4,5];\n" + " var i$$inline_1=0;\n" + " var l$$inline_2=arr$$inline_0.length;\n" + " for(;i$$inline_1<l$$inline_2;i$$inline_1++){\n" + " var j$$inline_3=arr$$inline_0[i$$inline_1];\n" + " (function(){\n" + " var k$$inline_4=j$$inline_3;\n" + " setTimeout(function(){result+=k$$inline_4},5*i$$inline_1)\n" + " })()\n" + " }\n" + " }\n" + "}"); } public void testMethodWithFunctionWithFunction() { assumeMinimumCapture = true; test("function _testLocalVariable_() {\n" + " var result = 0;\n" + " function foo() {\n" + " var j = [i];\n" + " (function(j) {\n" + " setTimeout(function() { result += j; }, 5 * i);\n" + " })(j);\n" + " j = null;" + " }\n" + " foo();\n" + "}", "function _testLocalVariable_(){\n" + " var result=0;\n" + " {\n" + " var j$$inline_2=[i];\n" + " {\n" + " var 
j$$inline_0=j$$inline_2;\n" + // this temp is needed. " setTimeout(function(){result+=j$$inline_0},5*i);\n" + " }\n" + " j$$inline_2=null\n" + // because this value can be modified later. " }\n" + "}"); } // Inline a single reference function into deeper modules public void testCrossModuleInlining1() { test(createModuleChain( // m1 "function foo(){return f(1)+g(2)+h(3);}", // m2 "foo()" ), new String[] { // m1 "", // m2 "f(1)+g(2)+h(3);" } ); } // Inline a single reference function into shallow modules, only if it // is cheaper than the call itself. public void testCrossModuleInlining2() { testSame(createModuleChain( // m1 "foo()", // m2 "function foo(){return f(1)+g(2)+h(3);}" ) ); test(createModuleChain( // m1 "foo()", // m2 "function foo(){return f();}" ), new String[] { // m1 "f();", // m2 "" } ); } // Inline a multi-reference functions into shallow modules, only if it // is cheaper than the call itself. public void testCrossModuleInlining3() { testSame(createModuleChain( // m1 "foo()", // m2 "function foo(){return f(1)+g(2)+h(3);}", // m3 "foo()" ) ); test(createModuleChain( // m1 "foo()", // m2 "function foo(){return f();}", // m3 "foo()" ), new String[] { // m1 "f();", // m2 "", // m3 "f();" } ); } public void test6671158() { enableInferConsts(false); test( "function f() {return g()}" + "function Y(a){a.loader_()}" + "function _Z(){}" + "function _X() { new _Z(a,b, Y(singleton), f()) }", "function _Z(){}" + "function _X(){" + " var JSCompiler_temp_const$$2=_Z;" + " var JSCompiler_temp_const$$1=a;" + " var JSCompiler_temp_const$$0=b;" + " var JSCompiler_inline_result$$3;" + " {" + " singleton.loader_();" + " JSCompiler_inline_result$$3=void 0;" + " }" + " new JSCompiler_temp_const$$2(" + " JSCompiler_temp_const$$1," + " JSCompiler_temp_const$$0," + " JSCompiler_inline_result$$3," + " g())}"); } public void test6671158b() { test( "function f() {return g()}" + "function Y(a){a.loader_()}" + "function _Z(){}" + "function _X() { new _Z(a,b, Y(singleton), f()) }", 
"function _Z(){}" + "function _X(){" + " var JSCompiler_temp_const$$1=a;" + " var JSCompiler_temp_const$$0=b;" + " var JSCompiler_inline_result$$2;" + " {" + " singleton.loader_();" + " JSCompiler_inline_result$$2=void 0;" + " }" + " new _Z(" + " JSCompiler_temp_const$$1," + " JSCompiler_temp_const$$0," + " JSCompiler_inline_result$$2," + " g())}"); } public void test8609285a() { test( "function f(x){ for(x in y){} } f()", "{var x$$inline_0=void 0;for(x$$inline_0 in y);}"); } public void test8609285b() { test( "function f(x){ for(var x in y){} } f()", "{var x$$inline_0=void 0;for(x$$inline_0 in y);}"); } public void testIssue1101() { test( "var x = (function (saved) {" + " return modifyObjProp(obj) + saved;" + " })(obj[\"prop\"]);", "var x;" + "{" + " var saved$$inline_0=obj[\"prop\"];x=modifyObjProp(obj)+\n" + " saved$$inline_0" + "}"); } public void testMaxFunSizeAfterInlining() { this.maxSizeAfterInlining = 1; test(// Always inline single-statement functions "function g() { return 123; }\n" + "function f() { g(); }", "function f() { 123; }"); this.maxSizeAfterInlining = 10; test(// Always inline at the top level "function g() { 123; return 123; }\n" + "g();", "{ 123; 123; }"); this.maxSizeAfterInlining = 1; testSame(// g is too big to be inlined "function g() { 123; return 123; }\n" + "g();"); this.maxSizeAfterInlining = 20; test( "function g() { 123; return 123; }\n" + "function f() {\n" + " g();\n" + "}", ""); // g's size ends up exceeding the max size because all inlining decisions // were made in the same inlining round. this.maxSizeAfterInlining = 25; test( "function f1() { 1; return 1; }\n" + "function f2() { 2; return 2; }\n" + "function f3() { 3; return 3; }\n" + "function f4() { 4; return 4; }\n" + "function g() {\n" + " f1(); f2(); f3(); f4();\n" + "}\n" + "g(); g(); g();", "function g() { {1; 1;} {2; 2;} {3; 3;} {4; 4;} }\n" + "g(); g(); g();"); this.maxSizeAfterInlining = CompilerOptions.UNLIMITED_FUN_SIZE_AFTER_INLINING; } }
apache-2.0
allotria/intellij-community
plugins/InspectionGadgets/test/com/siyeh/igfixes/controlflow/enumswitch/afterWithoutBracesDfaJava13.java
205
// "Create missing switch branch 'E2'" "true" class Foo { void foo(E e) { if (e != E.E1) { switch (e) { case E2: break; } } } } enum E { E1, E2; }
apache-2.0
upthewaterspout/incubator-geode
gemfire-core/src/main/java/com/gemstone/gemfire/cache/query/internal/IndexInfo.java
2361
/* * ========================================================================= * Copyright Copyright (c) 2000-2014 Pivotal Software, Inc. All Rights Reserved. * This product is protected by U.S. and international copyright * and intellectual property laws. Pivotal products are covered by * more patents listed at http://www.pivotal.io/patents. * $Id: CompiledComparison.java,v 1.1 2005/01/27 06:26:33 vaibhav Exp $ * ========================================================================= */ package com.gemstone.gemfire.cache.query.internal; import com.gemstone.gemfire.cache.query.FunctionDomainException; import com.gemstone.gemfire.cache.query.IndexType; import com.gemstone.gemfire.cache.query.NameResolutionException; import com.gemstone.gemfire.cache.query.QueryInvocationTargetException; import com.gemstone.gemfire.cache.query.TypeMismatchException; import com.gemstone.gemfire.cache.query.internal.index.AbstractIndex; import com.gemstone.gemfire.cache.query.internal.index.IndexProtocol; import com.gemstone.gemfire.cache.query.internal.parse.OQLLexerTokenTypes; public class IndexInfo { final private CompiledValue _key; final CompiledValue _path; final int _operator; final IndexProtocol _index; final int _matchLevel; final int[] mapping; IndexInfo(CompiledValue key, CompiledValue path, IndexProtocol index, int matchLevel, int mapping[], int op) { _key = key; _path = path; _operator = op; _index = index; _matchLevel = matchLevel; this.mapping = mapping; } Object evaluateIndexKey(ExecutionContext context) throws FunctionDomainException, TypeMismatchException, NameResolutionException, QueryInvocationTargetException { if(((AbstractIndex)_index).isMapType()) { //assert _path.getType() == OQLLexerTokenTypes.METHOD_INV; //Get the map key & value. both need to be passed as index key. 
CompiledValue mapKey = ((MapIndexable)this._path).getMapLookupKey(); return new Object[]{ this._key.evaluate(context),mapKey.evaluate(context)}; }else { return _key.evaluate(context); } } public CompiledValue _key() { return this._key; } public CompiledValue _path() { return this._path; } public int _operator() { return this._operator; } public IndexProtocol _getIndex() { return _index; } }
apache-2.0
eljefe6a/kafka
streams/src/main/java/org/apache/kafka/streams/kstream/Predicate.java
1062
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.streams.kstream; /** * Represents a predicate (boolean-valued function) of two arguments. * * @param <K> the type of key * @param <V> the type of value */ public interface Predicate<K, V> { boolean test(K key, V value); }
apache-2.0
stillalex/jackrabbit-oak
oak-store-document/src/main/java/org/apache/jackrabbit/oak/plugins/document/persistentCache/async/CacheAction.java
1157
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.plugins.document.persistentCache.async; /** * Object represents an action on the cache (eg. put or invalidate). */ interface CacheAction { /** * Execute the action */ void execute(); /** * @return the size of the memory in bytes this cache action occupies. */ int getMemory(); }
apache-2.0
apache/flink
flink-python/src/main/java/org/apache/flink/streaming/api/functions/python/DataStreamPythonFunctionInfo.java
2300
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.flink.streaming.api.functions.python;

import org.apache.flink.annotation.Internal;
import org.apache.flink.table.functions.python.PythonFunction;
import org.apache.flink.table.functions.python.PythonFunctionInfo;

/**
 * A {@link PythonFunctionInfo} that additionally carries an integer function
 * type, used for Python functions applied on DataStream programs. At most one
 * upstream {@link DataStreamPythonFunctionInfo} input is supported (see the
 * two-argument constructor).
 */
@Internal
public class DataStreamPythonFunctionInfo extends PythonFunctionInfo {

    private static final long serialVersionUID = 2L;

    // Shared zero-length input array for chain heads (no upstream function).
    private static final Object[] NO_INPUTS = new Object[0];

    // Discriminates the kind of Python function; values are defined by callers.
    private final int functionType;

    /**
     * Creates an info with no upstream input (the head of a chain).
     */
    public DataStreamPythonFunctionInfo(PythonFunction pythonFunction, int functionType) {
        super(pythonFunction, NO_INPUTS);
        this.functionType = functionType;
    }

    /**
     * Creates an info chained after the given upstream function info.
     */
    public DataStreamPythonFunctionInfo(
            PythonFunction pythonFunction,
            DataStreamPythonFunctionInfo input,
            int functionType) {
        super(pythonFunction, new DataStreamPythonFunctionInfo[] {input});
        this.functionType = functionType;
    }

    public int getFunctionType() {
        return this.functionType;
    }

    /**
     * Returns a copy of this info; the upstream chain, if present, is copied
     * recursively while the {@link PythonFunction} itself is shared.
     */
    public DataStreamPythonFunctionInfo copy() {
        if (getInputs().length > 0) {
            // By construction (see constructors) the single input is always a
            // DataStreamPythonFunctionInfo, so this cast is safe.
            final DataStreamPythonFunctionInfo upstream =
                    (DataStreamPythonFunctionInfo) getInputs()[0];
            return new DataStreamPythonFunctionInfo(
                    getPythonFunction(), upstream.copy(), this.functionType);
        }
        return new DataStreamPythonFunctionInfo(getPythonFunction(), this.functionType);
    }
}
apache-2.0
perojonsson/bridgepoint
src/org.xtuml.bp.xtext.masl.parent/org.xtuml.bp.xtext.masl/emf-gen/org/xtuml/bp/xtext/masl/masl/behavior/CreateArgument.java
3678
/**
 * generated by Xtext 2.9.2
 */
package org.xtuml.bp.xtext.masl.masl.behavior;

import org.eclipse.emf.ecore.EObject;
import org.xtuml.bp.xtext.masl.masl.structure.AttributeDefinition;
import org.xtuml.bp.xtext.masl.masl.structure.StateDeclaration;

/**
 * <!-- begin-user-doc -->
 * A representation of the model object '<em><b>Create Argument</b></em>'.
 *
 * NOTE(review): this file is Xtext/EMF-generated; hand edits outside the
 * begin-user-doc/end-user-doc regions will be overwritten on regeneration.
 * <!-- end-user-doc -->
 *
 * <p>
 * The following features are supported:
 * </p>
 * <ul>
 *   <li>{@link org.xtuml.bp.xtext.masl.masl.behavior.CreateArgument#getAttribute <em>Attribute</em>}</li>
 *   <li>{@link org.xtuml.bp.xtext.masl.masl.behavior.CreateArgument#getValue <em>Value</em>}</li>
 *   <li>{@link org.xtuml.bp.xtext.masl.masl.behavior.CreateArgument#getCurrentState <em>Current State</em>}</li>
 * </ul>
 *
 * @see org.xtuml.bp.xtext.masl.masl.behavior.BehaviorPackage#getCreateArgument()
 * @model
 * @generated
 */
public interface CreateArgument extends EObject
{
  /**
   * Returns the value of the '<em><b>Attribute</b></em>' reference.
   * <!-- begin-user-doc -->
   * <p>
   * The attribute being initialised by this argument — presumably one
   * argument of a MASL {@code create} expression; confirm against the
   * grammar rule that produces {@code CreateArgument}.
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>Attribute</em>' reference.
   * @see #setAttribute(AttributeDefinition)
   * @see org.xtuml.bp.xtext.masl.masl.behavior.BehaviorPackage#getCreateArgument_Attribute()
   * @model
   * @generated
   */
  AttributeDefinition getAttribute();

  /**
   * Sets the value of the '{@link org.xtuml.bp.xtext.masl.masl.behavior.CreateArgument#getAttribute <em>Attribute</em>}' reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>Attribute</em>' reference.
   * @see #getAttribute()
   * @generated
   */
  void setAttribute(AttributeDefinition value);

  /**
   * Returns the value of the '<em><b>Value</b></em>' containment reference.
   * <!-- begin-user-doc -->
   * <p>
   * The expression whose result initialises {@link #getAttribute() the
   * attribute}. This is a containment reference, so the expression is owned
   * by this argument.
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>Value</em>' containment reference.
   * @see #setValue(Expression)
   * @see org.xtuml.bp.xtext.masl.masl.behavior.BehaviorPackage#getCreateArgument_Value()
   * @model containment="true"
   * @generated
   */
  Expression getValue();

  /**
   * Sets the value of the '{@link org.xtuml.bp.xtext.masl.masl.behavior.CreateArgument#getValue <em>Value</em>}' containment reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>Value</em>' containment reference.
   * @see #getValue()
   * @generated
   */
  void setValue(Expression value);

  /**
   * Returns the value of the '<em><b>Current State</b></em>' reference.
   * <!-- begin-user-doc -->
   * <p>
   * The initial state assigned to the created instance — presumably the
   * MASL {@code Current_State} pseudo-attribute; confirm against the
   * grammar/behavior semantics.
   * </p>
   * <!-- end-user-doc -->
   * @return the value of the '<em>Current State</em>' reference.
   * @see #setCurrentState(StateDeclaration)
   * @see org.xtuml.bp.xtext.masl.masl.behavior.BehaviorPackage#getCreateArgument_CurrentState()
   * @model
   * @generated
   */
  StateDeclaration getCurrentState();

  /**
   * Sets the value of the '{@link org.xtuml.bp.xtext.masl.masl.behavior.CreateArgument#getCurrentState <em>Current State</em>}' reference.
   * <!-- begin-user-doc -->
   * <!-- end-user-doc -->
   * @param value the new value of the '<em>Current State</em>' reference.
   * @see #getCurrentState()
   * @generated
   */
  void setCurrentState(StateDeclaration value);

} // CreateArgument
apache-2.0
apache/flink
flink-runtime/src/test/java/org/apache/flink/runtime/state/filesystem/AbstractFileCheckpointStorageAccessTestBase.java
14220
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.state.filesystem; import org.apache.flink.core.fs.FSDataInputStream; import org.apache.flink.core.fs.FSDataOutputStream; import org.apache.flink.core.fs.FileStatus; import org.apache.flink.core.fs.FileSystem; import org.apache.flink.core.fs.FileSystem.WriteMode; import org.apache.flink.core.fs.Path; import org.apache.flink.runtime.state.CheckpointMetadataOutputStream; import org.apache.flink.runtime.state.CheckpointStorageAccess; import org.apache.flink.runtime.state.CheckpointStorageLocation; import org.apache.flink.runtime.state.CompletedCheckpointStorageLocation; import org.apache.flink.runtime.state.StreamStateHandle; import org.apache.flink.runtime.state.memory.MemoryBackendCheckpointStorageAccess; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import javax.annotation.Nullable; import java.io.EOFException; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.util.Random; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static 
org.junit.Assert.fail; /** * Test base for file-system-based checkoint storage, such as the {@link * MemoryBackendCheckpointStorageAccess} and the {@link FsCheckpointStorageAccess}. */ public abstract class AbstractFileCheckpointStorageAccessTestBase { @Rule public final TemporaryFolder tmp = new TemporaryFolder(); // ------------------------------------------------------------------------ // factories for the actual state storage to be tested // ------------------------------------------------------------------------ protected abstract CheckpointStorageAccess createCheckpointStorage(Path checkpointDir) throws Exception; protected abstract CheckpointStorageAccess createCheckpointStorageWithSavepointDir( Path checkpointDir, Path savepointDir) throws Exception; // ------------------------------------------------------------------------ // pointers // ------------------------------------------------------------------------ @Test public void testPointerPathResolution() throws Exception { final FileSystem fs = FileSystem.getLocalFileSystem(); final Path metadataFile = new Path( Path.fromLocalFile(tmp.newFolder()), AbstractFsCheckpointStorageAccess.METADATA_FILE_NAME); final String basePointer = metadataFile.getParent().toString(); final String pointer1 = metadataFile.toString(); final String pointer2 = metadataFile.getParent().toString(); final String pointer3 = metadataFile.getParent().toString() + '/'; // create the storage for some random checkpoint directory final CheckpointStorageAccess storage = createCheckpointStorage(randomTempPath()); final byte[] data = new byte[23686]; new Random().nextBytes(data); try (FSDataOutputStream out = fs.create(metadataFile, WriteMode.NO_OVERWRITE)) { out.write(data); } CompletedCheckpointStorageLocation completed1 = storage.resolveCheckpoint(pointer1); CompletedCheckpointStorageLocation completed2 = storage.resolveCheckpoint(pointer2); CompletedCheckpointStorageLocation completed3 = storage.resolveCheckpoint(pointer3); 
assertEquals(basePointer, completed1.getExternalPointer()); assertEquals(basePointer, completed2.getExternalPointer()); assertEquals(basePointer, completed3.getExternalPointer()); StreamStateHandle handle1 = completed1.getMetadataHandle(); StreamStateHandle handle2 = completed2.getMetadataHandle(); StreamStateHandle handle3 = completed3.getMetadataHandle(); assertNotNull(handle1); assertNotNull(handle2); assertNotNull(handle3); validateContents(handle1, data); validateContents(handle2, data); validateContents(handle3, data); } @Test public void testFailingPointerPathResolution() throws Exception { // create the storage for some random checkpoint directory final CheckpointStorageAccess storage = createCheckpointStorage(randomTempPath()); // null value try { storage.resolveCheckpoint(null); fail("expected exception"); } catch (NullPointerException ignored) { } // empty string try { storage.resolveCheckpoint(""); fail("expected exception"); } catch (IllegalArgumentException ignored) { } // not a file path at all try { storage.resolveCheckpoint("this-is_not/a#filepath.at.all"); fail("expected exception"); } catch (IOException ignored) { } // non-existing file try { storage.resolveCheckpoint(tmp.newFile().toURI().toString() + "_not_existing"); fail("expected exception"); } catch (IOException ignored) { } } // ------------------------------------------------------------------------ // checkpoints // ------------------------------------------------------------------------ /** * Validates that multiple checkpoints from different jobs with the same checkpoint ID do not * interfere with each other. 
*/ @Test public void testPersistMultipleMetadataOnlyCheckpoints() throws Exception { final FileSystem fs = FileSystem.getLocalFileSystem(); final Path checkpointDir = new Path(tmp.newFolder().toURI()); final long checkpointId = 177; final CheckpointStorageAccess storage1 = createCheckpointStorage(checkpointDir); storage1.initializeBaseLocationsForCheckpoint(); final CheckpointStorageAccess storage2 = createCheckpointStorage(checkpointDir); storage2.initializeBaseLocationsForCheckpoint(); final CheckpointStorageLocation loc1 = storage1.initializeLocationForCheckpoint(checkpointId); final CheckpointStorageLocation loc2 = storage2.initializeLocationForCheckpoint(checkpointId); final byte[] data1 = {77, 66, 55, 99, 88}; final byte[] data2 = {1, 3, 2, 5, 4}; final CompletedCheckpointStorageLocation completedLocation1; try (CheckpointMetadataOutputStream out = loc1.createMetadataOutputStream()) { out.write(data1); completedLocation1 = out.closeAndFinalizeCheckpoint(); } final String result1 = completedLocation1.getExternalPointer(); final CompletedCheckpointStorageLocation completedLocation2; try (CheckpointMetadataOutputStream out = loc2.createMetadataOutputStream()) { out.write(data2); completedLocation2 = out.closeAndFinalizeCheckpoint(); } final String result2 = completedLocation2.getExternalPointer(); // check that this went to a file, but in a nested directory structure // one directory per storage FileStatus[] files = fs.listStatus(checkpointDir); assertEquals(2, files.length); // in each per-storage directory, one for the checkpoint FileStatus[] job1Files = fs.listStatus(files[0].getPath()); FileStatus[] job2Files = fs.listStatus(files[1].getPath()); assertTrue(job1Files.length >= 1); assertTrue(job2Files.length >= 1); assertTrue( fs.exists(new Path(result1, AbstractFsCheckpointStorageAccess.METADATA_FILE_NAME))); assertTrue( fs.exists(new Path(result2, AbstractFsCheckpointStorageAccess.METADATA_FILE_NAME))); // check that both storages can resolve each others 
contents validateContents(storage1.resolveCheckpoint(result1).getMetadataHandle(), data1); validateContents(storage1.resolveCheckpoint(result2).getMetadataHandle(), data2); validateContents(storage2.resolveCheckpoint(result1).getMetadataHandle(), data1); validateContents(storage2.resolveCheckpoint(result2).getMetadataHandle(), data2); } @Test public void writeToAlreadyExistingCheckpointFails() throws Exception { final byte[] data = {8, 8, 4, 5, 2, 6, 3}; final long checkpointId = 177; final CheckpointStorageAccess storage = createCheckpointStorage(randomTempPath()); storage.initializeBaseLocationsForCheckpoint(); final CheckpointStorageLocation loc = storage.initializeLocationForCheckpoint(checkpointId); // write to the metadata file for the checkpoint try (CheckpointMetadataOutputStream out = loc.createMetadataOutputStream()) { out.write(data); out.closeAndFinalizeCheckpoint(); } // create another writer to the metadata file for the checkpoint try { loc.createMetadataOutputStream(); fail("this should fail with an exception"); } catch (IOException ignored) { } } // ------------------------------------------------------------------------ // savepoints // ------------------------------------------------------------------------ @Test public void testSavepointPathConfiguredAndTarget() throws Exception { final Path savepointDir = randomTempPath(); final Path customDir = randomTempPath(); testSavepoint(savepointDir, customDir, customDir); } @Test public void testSavepointPathConfiguredNoTarget() throws Exception { final Path savepointDir = randomTempPath(); testSavepoint(savepointDir, null, savepointDir); } @Test public void testNoSavepointPathConfiguredAndTarget() throws Exception { final Path customDir = Path.fromLocalFile(tmp.newFolder()); testSavepoint(null, customDir, customDir); } @Test public void testNoSavepointPathConfiguredNoTarget() throws Exception { final CheckpointStorageAccess storage = createCheckpointStorage(randomTempPath()); try { 
storage.initializeLocationForSavepoint(1337, null); fail("this should fail with an exception"); } catch (IllegalArgumentException ignored) { } } private void testSavepoint( @Nullable Path savepointDir, @Nullable Path customDir, Path expectedParent) throws Exception { final CheckpointStorageAccess storage = savepointDir == null ? createCheckpointStorage(randomTempPath()) : createCheckpointStorageWithSavepointDir(randomTempPath(), savepointDir); final String customLocation = customDir == null ? null : customDir.toString(); final CheckpointStorageLocation savepointLocation = storage.initializeLocationForSavepoint(52452L, customLocation); final byte[] data = {77, 66, 55, 99, 88}; final CompletedCheckpointStorageLocation completed; try (CheckpointMetadataOutputStream out = savepointLocation.createMetadataOutputStream()) { out.write(data); completed = out.closeAndFinalizeCheckpoint(); } // we need to do this step to make sure we have a slash (or not) in the same way as the // expected path has it final Path normalizedWithSlash = Path.fromLocalFile( new File(new Path(completed.getExternalPointer()).getParent().getPath())); assertEquals(expectedParent, normalizedWithSlash); validateContents(completed.getMetadataHandle(), data); // validate that the correct directory was used FileStateHandle fileStateHandle = (FileStateHandle) completed.getMetadataHandle(); // we need to recreate that path in the same way as the "expected path" (via File and URI) // to make // sure the either both use '/' suffixes, or neither uses them (a bit of an annoying // ambiguity) Path usedSavepointDir = new Path( new File(fileStateHandle.getFilePath().getParent().getParent().getPath()) .toURI()); assertEquals(expectedParent, usedSavepointDir); } // ------------------------------------------------------------------------ // utilities // ------------------------------------------------------------------------ public Path randomTempPath() throws IOException { return Path.fromLocalFile(tmp.newFolder()); 
} private static void validateContents(StreamStateHandle handle, byte[] expected) throws IOException { try (FSDataInputStream in = handle.openInputStream()) { validateContents(in, expected); } } private static void validateContents(InputStream in, byte[] expected) throws IOException { final byte[] buffer = new byte[expected.length]; int pos = 0; int remaining = expected.length; while (remaining > 0) { int read = in.read(buffer, pos, remaining); if (read == -1) { throw new EOFException(); } pos += read; remaining -= read; } assertArrayEquals(expected, buffer); } }
apache-2.0
trekawek/jackrabbit-oak
oak-core/src/test/java/org/apache/jackrabbit/oak/security/authorization/composite/CompositeProviderCoverageTest.java
20007
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.jackrabbit.oak.security.authorization.composite;

import java.security.Principal;
import java.util.List;
import java.util.Set;
import javax.jcr.Session;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.plugins.tree.TreeLocation;
import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.AggregatedPermissionProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.RepositoryPermission;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission;
import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal;
import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

/**
 * Test the {@link org.apache.jackrabbit.oak.security.authorization.composite.CompositePermissionProvider}
 * where permissions are granted but not all permissions/privileges have been
 * covered by the call. This might be the case when combining different
 * providers that don't cover the full set of permissions/privileges.
 *
 * For simplicity the test only lists a single provider which only supports a
 * limited set of permissions|privileges ({@code READ_NODE} on trees and
 * {@code NAMESPACE_MANAGEMENT} at the repository level; see
 * {@link LimitCoverageProvider} below).
 *
 * The expected result is:
 * - testing for the supported privileges|permissions must reveal that it is granted
 * - any other combination of permissions|privileges must be denied.
 */
public class CompositeProviderCoverageTest extends AbstractCompositeProviderTest {

    // Composite provider built with the default (AND) aggregation.
    private CompositePermissionProvider cpp;
    // Composite provider built with the OR aggregation; every test exercises both.
    private CompositePermissionProvider cppO;

    @Override
    public void before() throws Exception {
        super.before();
        cpp = createPermissionProvider();
        cppO = createPermissionProviderOR();
    }

    /** The single aggregated provider under test: limited-coverage only. */
    @Override
    AggregatedPermissionProvider getTestPermissionProvider() {
        return new LimitCoverageProvider(root);
    }

    /**
     * Deliberately returns ONLY the limited-coverage provider so the composite
     * never sees full permission/privilege coverage (the scenario this test is about).
     */
    @Override
    List<AggregatedPermissionProvider> getAggregatedProviders(@NotNull String workspaceName, @NotNull AuthorizationConfiguration config, @NotNull Set<Principal> principals) {
        return ImmutableList.of(getTestPermissionProvider());
    }

    /**
     * With a single aggregated provider the composite is expected to hand out
     * the provider's own {@link LimitedTreePermission} instances (overrides the
     * base-class expectation of a composite tree permission).
     */
    @Override
    @Test
    public void testGetTreePermissionInstance() throws Exception {
        PermissionProvider pp = createPermissionProvider(EveryonePrincipal.getInstance());
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
            assertTrue(tp instanceof LimitedTreePermission);
            // tree permissions chain parent -> child, so carry the result forward
            parentPermission = tp;
        }
    }

    /** Same as {@link #testGetTreePermissionInstance()} for the OR-composite. */
    @Override
    @Test
    public void testGetTreePermissionInstanceOR() throws Exception {
        PermissionProvider pp = createPermissionProviderOR(EveryonePrincipal.getInstance());
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            TreePermission tp = pp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
            assertTrue(tp instanceof LimitedTreePermission);
            parentPermission = tp;
        }
    }

    /**
     * Descending via {@link TreePermission#getChildPermission(String, NodeState)}
     * must keep producing {@link LimitedTreePermission} instances, including for
     * a non-existing child ("nonexisting").
     */
    @Override
    @Test
    public void testTreePermissionGetChild() throws Exception {
        List<String> childNames = ImmutableList.of("test", "a", "b", "c", "nonexisting");
        Tree rootTree = readOnlyRoot.getTree(ROOT_PATH);
        NodeState ns = getTreeProvider().asNodeState(rootTree);
        TreePermission tp = createPermissionProvider().getTreePermission(rootTree, TreePermission.EMPTY);
        for (String cName : childNames) {
            ns = ns.getChildNode(cName);
            tp = tp.getChildPermission(cName, ns);
            assertTrue(tp instanceof LimitedTreePermission);
        }
    }

    /** Same as {@link #testTreePermissionGetChild()} for the OR-composite. */
    @Override
    @Test
    public void testTreePermissionGetChildOR() throws Exception {
        List<String> childNames = ImmutableList.of("test", "a", "b", "c", "nonexisting");
        Tree rootTree = readOnlyRoot.getTree(ROOT_PATH);
        NodeState ns = getTreeProvider().asNodeState(rootTree);
        TreePermission tp = createPermissionProviderOR().getTreePermission(rootTree, TreePermission.EMPTY);
        for (String cName : childNames) {
            ns = ns.getChildNode(cName);
            tp = tp.getChildPermission(cName, ns);
            assertTrue(tp instanceof LimitedTreePermission);
        }
    }

    /** Only rep:readNodes is reported as granted on any node. */
    @Test
    public void testGetPrivileges() throws Exception {
        for (String p : NODE_PATHS) {
            assertEquals(ImmutableSet.of(REP_READ_NODES), cpp.getPrivileges(readOnlyRoot.getTree(p)));
            assertEquals(ImmutableSet.of(REP_READ_NODES), cppO.getPrivileges(readOnlyRoot.getTree(p)));
        }
    }

    /** At the repository level (null tree) only jcr:namespaceManagement is granted. */
    @Test
    public void testGetPrivilegesOnRepo() throws Exception {
        assertEquals(ImmutableSet.of(JCR_NAMESPACE_MANAGEMENT), cpp.getPrivileges(null));
        assertEquals(ImmutableSet.of(JCR_NAMESPACE_MANAGEMENT), cppO.getPrivileges(null));
    }

    /**
     * The exactly-supported privilege is granted; any aggregate that includes an
     * unsupported privilege (jcr:read, jcr:write, jcr:all) must be denied.
     */
    @Test
    public void testHasPrivileges() throws Exception {
        for (String p : NODE_PATHS) {
            Tree tree = readOnlyRoot.getTree(p);

            assertTrue(cpp.hasPrivileges(tree, REP_READ_NODES));
            assertFalse(cpp.hasPrivileges(tree, JCR_READ));
            assertFalse(cpp.hasPrivileges(tree, JCR_WRITE));
            assertFalse(cpp.hasPrivileges(tree, JCR_ALL));

            assertTrue(cppO.hasPrivileges(tree, REP_READ_NODES));
            assertFalse(cppO.hasPrivileges(tree, JCR_READ));
            assertFalse(cppO.hasPrivileges(tree, JCR_WRITE));
            assertFalse(cppO.hasPrivileges(tree, JCR_ALL));
        }
    }

    /** Repository-level variant of {@link #testHasPrivileges()}. */
    @Test
    public void testHasPrivilegesOnRepo() throws Exception {
        assertTrue(cpp.hasPrivileges(null, JCR_NAMESPACE_MANAGEMENT));
        assertFalse(cpp.hasPrivileges(null, JCR_NODE_TYPE_DEFINITION_MANAGEMENT));
        assertFalse(cpp.hasPrivileges(null, JCR_ALL));

        assertTrue(cppO.hasPrivileges(null, JCR_NAMESPACE_MANAGEMENT));
        assertFalse(cppO.hasPrivileges(null, JCR_NODE_TYPE_DEFINITION_MANAGEMENT));
        assertFalse(cppO.hasPrivileges(null, JCR_ALL));
    }

    /**
     * READ_NODE alone is granted; any permission set that is not exactly
     * READ_NODE (including READ_NODE combined with an unsupported bit) is denied.
     */
    @Test
    public void testIsGranted() throws Exception {
        for (String p : NODE_PATHS) {
            Tree tree = readOnlyRoot.getTree(p);

            assertTrue(cpp.isGranted(tree, null, Permissions.READ_NODE));
            assertFalse(cpp.isGranted(tree, null, Permissions.LOCK_MANAGEMENT));
            assertFalse(cpp.isGranted(tree, null, Permissions.ALL));
            assertFalse(cpp.isGranted(tree, null, Permissions.READ_NODE | Permissions.LOCK_MANAGEMENT));

            assertTrue(cppO.isGranted(tree, null, Permissions.READ_NODE));
            assertFalse(cppO.isGranted(tree, null, Permissions.LOCK_MANAGEMENT));
            assertFalse(cppO.isGranted(tree, null, Permissions.ALL));
            assertFalse(cppO.isGranted(tree, null, Permissions.READ_NODE | Permissions.LOCK_MANAGEMENT));
        }
    }

    /**
     * Property variant: even with a property state present only READ_NODE is
     * granted — property reads (READ_PROPERTY) are not covered by the provider.
     */
    @Test
    public void testIsGrantedProperty() throws Exception {
        for (String p : NODE_PATHS) {
            Tree tree = readOnlyRoot.getTree(p);

            assertTrue(cpp.isGranted(tree, PROPERTY_STATE, Permissions.READ_NODE));
            assertFalse(cpp.isGranted(tree, PROPERTY_STATE, Permissions.READ_PROPERTY));
            assertFalse(cpp.isGranted(tree, PROPERTY_STATE, Permissions.LOCK_MANAGEMENT));
            assertFalse(cpp.isGranted(tree, PROPERTY_STATE, Permissions.ALL));
            assertFalse(cpp.isGranted(tree, PROPERTY_STATE, Permissions.READ_NODE | Permissions.LOCK_MANAGEMENT));

            assertTrue(cppO.isGranted(tree, PROPERTY_STATE, Permissions.READ_NODE));
            assertFalse(cppO.isGranted(tree, PROPERTY_STATE, Permissions.READ_PROPERTY));
            assertFalse(cppO.isGranted(tree, PROPERTY_STATE, Permissions.LOCK_MANAGEMENT));
            assertFalse(cppO.isGranted(tree, PROPERTY_STATE, Permissions.ALL));
            assertFalse(cppO.isGranted(tree, PROPERTY_STATE, Permissions.READ_NODE | Permissions.LOCK_MANAGEMENT));
        }
    }

    /**
     * JCR action-string based checks: reading a node is granted, everything
     * else (property read, removal, access-control actions, and any action on
     * a non-existing item) is denied.
     */
    @Test
    public void testIsGrantedAction() throws Exception {
        for (String nodePath : NODE_PATHS) {
            String propPath = PathUtils.concat(nodePath, JcrConstants.JCR_PRIMARYTYPE);

            assertTrue(cpp.isGranted(nodePath, Session.ACTION_READ));
            assertFalse(cpp.isGranted(propPath, Session.ACTION_READ));
            assertFalse(cpp.isGranted(nodePath, Session.ACTION_REMOVE));
            assertFalse(cpp.isGranted(propPath, JackrabbitSession.ACTION_MODIFY_PROPERTY));
            assertFalse(cpp.isGranted(nodePath, getActionString(JackrabbitSession.ACTION_MODIFY_ACCESS_CONTROL, JackrabbitSession.ACTION_READ_ACCESS_CONTROL)));

            assertTrue(cppO.isGranted(nodePath, Session.ACTION_READ));
            assertFalse(cppO.isGranted(propPath, Session.ACTION_READ));
            assertFalse(cppO.isGranted(nodePath, Session.ACTION_REMOVE));
            assertFalse(cppO.isGranted(propPath, JackrabbitSession.ACTION_MODIFY_PROPERTY));
            assertFalse(cppO.isGranted(nodePath, getActionString(JackrabbitSession.ACTION_MODIFY_ACCESS_CONTROL, JackrabbitSession.ACTION_READ_ACCESS_CONTROL)));

            String nonExisting = PathUtils.concat(nodePath, "nonExisting");
            assertFalse(cpp.isGranted(nonExisting, Session.ACTION_READ));
            assertFalse(cpp.isGranted(nonExisting, JackrabbitSession.ACTION_ADD_PROPERTY));
            assertFalse(cpp.isGranted(nonExisting, Session.ACTION_ADD_NODE));

            assertFalse(cppO.isGranted(nonExisting, Session.ACTION_READ));
            assertFalse(cppO.isGranted(nonExisting, JackrabbitSession.ACTION_ADD_PROPERTY));
            assertFalse(cppO.isGranted(nonExisting, Session.ACTION_ADD_NODE));
        }
    }

    /** Repository permission: only NAMESPACE_MANAGEMENT (exactly) is granted. */
    @Test
    public void testRepositoryPermissionsIsGranted() throws Exception {
        RepositoryPermission rp = cpp.getRepositoryPermission();
        assertTrue(rp.isGranted(Permissions.NAMESPACE_MANAGEMENT));
        assertFalse(rp.isGranted(Permissions.NODE_TYPE_DEFINITION_MANAGEMENT));
        assertFalse(rp.isGranted(Permissions.ALL));

        RepositoryPermission rpO = cppO.getRepositoryPermission();
        assertTrue(rpO.isGranted(Permissions.NAMESPACE_MANAGEMENT));
        assertFalse(rpO.isGranted(Permissions.NODE_TYPE_DEFINITION_MANAGEMENT));
        assertFalse(rpO.isGranted(Permissions.ALL));
    }

    /** TreePermission.isGranted: READ_NODE only (see LimitedTreePermission). */
    @Test
    public void testTreePermissionIsGranted() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            TreePermission tp = cpp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
            assertTrue(tp.isGranted(Permissions.READ_NODE));
            assertFalse(tp.isGranted(Permissions.REMOVE_NODE));
            assertFalse(tp.isGranted(Permissions.READ));
            assertFalse(tp.isGranted(Permissions.ALL));
            parentPermission = tp;
        }
    }

    /** Same as {@link #testTreePermissionIsGranted()} for the OR-composite. */
    @Test
    public void testTreePermissionIsGrantedOR() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            TreePermission tp = cppO.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
            assertTrue(tp.isGranted(Permissions.READ_NODE));
            assertFalse(tp.isGranted(Permissions.REMOVE_NODE));
            assertFalse(tp.isGranted(Permissions.READ));
            assertFalse(tp.isGranted(Permissions.ALL));
            parentPermission = tp;
        }
    }

    /** Property-targeted TreePermission.isGranted is always denied. */
    @Test
    public void testTreePermissionIsGrantedProperty() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            TreePermission tp = cpp.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
            assertFalse(tp.isGranted(Permissions.READ_PROPERTY, PROPERTY_STATE));
            assertFalse(tp.isGranted(Permissions.REMOVE_PROPERTY, PROPERTY_STATE));
            assertFalse(tp.isGranted(Permissions.READ, PROPERTY_STATE));
            assertFalse(tp.isGranted(Permissions.ALL, PROPERTY_STATE));
            parentPermission = tp;
        }
    }

    /** Same as {@link #testTreePermissionIsGrantedProperty()} for the OR-composite. */
    @Test
    public void testTreePermissionIsGrantedPropertyOR() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            TreePermission tp = cppO.getTreePermission(readOnlyRoot.getTree(path), parentPermission);
            assertFalse(tp.isGranted(Permissions.READ_PROPERTY, PROPERTY_STATE));
            assertFalse(tp.isGranted(Permissions.REMOVE_PROPERTY, PROPERTY_STATE));
            assertFalse(tp.isGranted(Permissions.READ, PROPERTY_STATE));
            assertFalse(tp.isGranted(Permissions.ALL, PROPERTY_STATE));
            parentPermission = tp;
        }
    }

    /** canRead() on a node is granted along the whole ancestor chain. */
    @Test
    public void testTreePermissionCanRead() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            Tree t = readOnlyRoot.getTree(path);
            TreePermission tp = cpp.getTreePermission(t, parentPermission);
            assertTrue(tp.canRead());
            parentPermission = tp;
        }
    }

    /** Same as {@link #testTreePermissionCanRead()} for the OR-composite. */
    @Test
    public void testTreePermissionCanReadOR() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            Tree t = readOnlyRoot.getTree(path);
            TreePermission tp = cppO.getTreePermission(t, parentPermission);
            assertTrue(tp.canRead());
            parentPermission = tp;
        }
    }

    /** canRead(property) is always denied — property reads are not covered. */
    @Test
    public void testTreePermissionCanReadProperty() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            Tree t = readOnlyRoot.getTree(path);
            TreePermission tp = cpp.getTreePermission(t, parentPermission);
            assertFalse(tp.canRead(PROPERTY_STATE));
            parentPermission = tp;
        }
    }

    /** Same as {@link #testTreePermissionCanReadProperty()} for the OR-composite. */
    @Test
    public void testTreePermissionCanReadPropertyOR() throws Exception {
        TreePermission parentPermission = TreePermission.EMPTY;
        for (String path : TP_PATHS) {
            Tree t = readOnlyRoot.getTree(path);
            TreePermission tp = cppO.getTreePermission(t, parentPermission);
            assertFalse(tp.canRead(PROPERTY_STATE));
            parentPermission = tp;
        }
    }

    /**
     * The single aggregated provider used by this test: it supports (and grants)
     * ONLY rep:readNodes / READ_NODE on trees and jcr:namespaceManagement /
     * NAMESPACE_MANAGEMENT at the repository level. Everything else is outside
     * its declared coverage.
     */
    private final class LimitCoverageProvider extends AbstractAggrProvider {

        LimitCoverageProvider(Root root) {
            super(root);
        }

        @NotNull
        @Override
        public PrivilegeBits supportedPrivileges(@Nullable Tree tree, @Nullable PrivilegeBits privilegeBits) {
            // supported set = namespaceMgt + readNodes; intersect with the query if given
            PrivilegeBits supported = PrivilegeBits.getInstance(
                    PrivilegeBits.BUILT_IN.get(JCR_NAMESPACE_MANAGEMENT),
                    PrivilegeBits.BUILT_IN.get(REP_READ_NODES));
            if (privilegeBits != null) {
                return PrivilegeBits.getInstance(privilegeBits).retain(supported);
            } else {
                return supported;
            }
        }

        @Override
        public long supportedPermissions(@Nullable Tree tree, @Nullable PropertyState property, long permissions) {
            // null tree == repository level
            if (tree == null) {
                return permissions & Permissions.NAMESPACE_MANAGEMENT;
            } else {
                return permissions & Permissions.READ_NODE;
            }
        }

        @Override
        public long supportedPermissions(@NotNull TreeLocation location, long permissions) {
            return permissions & Permissions.READ_NODE;
        }

        @Override
        public long supportedPermissions(@NotNull TreePermission treePermission, @Nullable PropertyState property, long permissions) {
            return permissions & Permissions.READ_NODE;
        }

        @Override
        public boolean isGranted(@NotNull TreeLocation location, long permissions) {
            // note: exact-match semantics, not a bitmask test
            return permissions == Permissions.READ_NODE;
        }

        @NotNull
        @Override
        public Set<String> getPrivileges(@Nullable Tree tree) {
            return (tree == null) ? ImmutableSet.of(JCR_NAMESPACE_MANAGEMENT) : ImmutableSet.of(REP_READ_NODES);
        }

        @Override
        public boolean hasPrivileges(@Nullable Tree tree, @NotNull String... privilegeNames) {
            // unconditionally true: the composite is expected to mask this with
            // the supported-privileges coverage check
            return true;
        }

        @NotNull
        @Override
        public RepositoryPermission getRepositoryPermission() {
            return new RepositoryPermission() {
                @Override
                public boolean isGranted(long repositoryPermissions) {
                    return Permissions.NAMESPACE_MANAGEMENT == repositoryPermissions;
                }
            };
        }

        @NotNull
        @Override
        public TreePermission getTreePermission(@NotNull Tree tree, @NotNull TreePermission parentPermission) {
            return new LimitedTreePermission();
        }

        @Override
        public boolean isGranted(@NotNull Tree tree, @Nullable PropertyState property, long permissions) {
            return permissions == Permissions.READ_NODE;
        }

        @Override
        public boolean isGranted(@NotNull String oakPath, @NotNull String jcrActions) {
            // unconditionally true: see hasPrivileges above
            return true;
        }
    }

    /**
     * TreePermission granting exactly READ_NODE on nodes and nothing on
     * properties; returns itself for all children.
     */
    private static final class LimitedTreePermission implements TreePermission {
        @NotNull
        @Override
        public TreePermission getChildPermission(@NotNull String childName, @NotNull NodeState childState) {
            return this;
        }

        @Override
        public boolean canRead() {
            return true;
        }

        @Override
        public boolean canRead(@NotNull PropertyState property) {
            return false;
        }

        @Override
        public boolean canReadAll() {
            return false;
        }

        @Override
        public boolean canReadProperties() {
            return false;
        }

        @Override
        public boolean isGranted(long permissions) {
            // exact match only: READ_NODE combined with anything else is denied
            return Permissions.READ_NODE == permissions;
        }

        @Override
        public boolean isGranted(long permissions, @NotNull PropertyState property) {
            return false;
        }
    }
}
apache-2.0
MichaelEvans/assertj-android
assertj-android/src/main/java/org/assertj/android/api/view/WindowAssert.java
3985
package org.assertj.android.api.view; import android.annotation.TargetApi; import android.view.View; import android.view.Window; import org.assertj.core.api.AbstractAssert; import static android.os.Build.VERSION_CODES.HONEYCOMB; import static android.view.Window.FEATURE_ACTION_BAR; import static android.view.Window.FEATURE_ACTION_BAR_OVERLAY; import static android.view.Window.FEATURE_ACTION_MODE_OVERLAY; import static android.view.Window.FEATURE_CONTEXT_MENU; import static android.view.Window.FEATURE_CUSTOM_TITLE; import static android.view.Window.FEATURE_INDETERMINATE_PROGRESS; import static android.view.Window.FEATURE_LEFT_ICON; import static android.view.Window.FEATURE_NO_TITLE; import static android.view.Window.FEATURE_OPTIONS_PANEL; import static android.view.Window.FEATURE_PROGRESS; import static android.view.Window.FEATURE_RIGHT_ICON; import static android.view.Window.FEATURE_SWIPE_TO_DISMISS; import static org.assertj.android.internal.IntegerUtils.buildNamedValueString; import static org.assertj.core.api.Assertions.assertThat; /** Assertions for {@link Window} instances. 
*/ public class WindowAssert extends AbstractAssert<WindowAssert, Window> { public WindowAssert(Window actual) { super(actual, WindowAssert.class); } public WindowAssert hasCurrentFocus(View view) { isNotNull(); View actualView = actual.getCurrentFocus(); assertThat(actualView) // .overridingErrorMessage("Expected current focused view <%s> but was <%s>.", view, actualView) // .isSameAs(view); return this; } public WindowAssert hasChildren() { isNotNull(); assertThat(actual.hasChildren()) // .overridingErrorMessage("Expected children but had no children.") // .isTrue(); return this; } public WindowAssert hasNoChildren() { isNotNull(); assertThat(actual.hasChildren()) // .overridingErrorMessage("Expected no children but had children.") // .isFalse(); return this; } @TargetApi(HONEYCOMB) public WindowAssert hasFeature(@WindowFeature int feature) { isNotNull(); //noinspection ResourceType assertThat(actual.hasFeature(feature)) // .overridingErrorMessage("Expected feature <%s> but was not present.", featureToString(feature)) // .isTrue(); return this; } public WindowAssert isActive() { isNotNull(); assertThat(actual.isActive()) // .overridingErrorMessage("Expected to be active but was not active.") // .isTrue(); return this; } public WindowAssert isNotActive() { isNotNull(); assertThat(actual.isActive()) // .overridingErrorMessage("Expected to not be active but was active.") // .isFalse(); return this; } public WindowAssert isFloating() { isNotNull(); assertThat(actual.isFloating()) // .overridingErrorMessage("Expected to be floating but was not floating.") // .isTrue(); return this; } public WindowAssert isNotFloating() { isNotNull(); assertThat(actual.isFloating()) // .overridingErrorMessage("Expected to not be floating but was floating.") // .isFalse(); return this; } public static String featureToString(@WindowFeature int feature) { return buildNamedValueString(feature) .value(FEATURE_ACTION_BAR, "actionBar") .value(FEATURE_ACTION_BAR_OVERLAY, "actionBarOverlay") 
.value(FEATURE_ACTION_MODE_OVERLAY, "actionModeOverlay") .value(FEATURE_CONTEXT_MENU, "contextMenu") .value(FEATURE_CUSTOM_TITLE, "customTitle") .value(FEATURE_INDETERMINATE_PROGRESS, "indeterminateProgress") .value(FEATURE_LEFT_ICON, "leftIcon") .value(FEATURE_NO_TITLE, "noTitle") .value(FEATURE_OPTIONS_PANEL, "optionalPanel") .value(FEATURE_PROGRESS, "progress") .value(FEATURE_RIGHT_ICON, "rightIcon") .value(FEATURE_SWIPE_TO_DISMISS, "swipeToDismiss") .get(); } }
apache-2.0
lincoln-lil/flink
flink-tests/src/test/java/org/apache/flink/test/state/StatefulOperatorChainedTaskTest.java
10989
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.flink.test.state;

import org.apache.flink.api.common.state.ValueState;
import org.apache.flink.api.common.state.ValueStateDescriptor;
import org.apache.flink.api.common.typeinfo.BasicTypeInfo;
import org.apache.flink.api.common.typeutils.base.LongSerializer;
import org.apache.flink.api.common.typeutils.base.StringSerializer;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.checkpoint.CheckpointMetaData;
import org.apache.flink.runtime.checkpoint.CheckpointOptions;
import org.apache.flink.runtime.checkpoint.JobManagerTaskRestore;
import org.apache.flink.runtime.checkpoint.TaskStateSnapshot;
import org.apache.flink.runtime.jobgraph.OperatorID;
import org.apache.flink.runtime.operators.testutils.MockInputSplitProvider;
import org.apache.flink.runtime.state.StateInitializationContext;
import org.apache.flink.runtime.state.StateSnapshotContext;
import org.apache.flink.runtime.state.TestTaskStateManager;
import org.apache.flink.runtime.util.TestingTaskManagerRuntimeInfo;
import org.apache.flink.streaming.api.operators.AbstractStreamOperator;
import org.apache.flink.streaming.api.operators.OneInputStreamOperator;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTask;
import org.apache.flink.streaming.runtime.tasks.OneInputStreamTaskTestHarness;
import org.apache.flink.streaming.runtime.tasks.StreamMockEnvironment;
import org.apache.flink.streaming.util.TestHarnessUtil;

import org.junit.Before;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;

import static org.apache.flink.configuration.CheckpointingOptions.CHECKPOINTS_DIRECTORY;
import static org.apache.flink.configuration.CheckpointingOptions.INCREMENTAL_CHECKPOINTS;
import static org.apache.flink.configuration.StateBackendOptions.STATE_BACKEND;
import static org.junit.Assert.assertEquals;

/**
 * Tests that a chain of stateful operators inside one task can take a
 * checkpoint and later be restored from it, with every chained operator
 * reporting that it was restored.
 */
public class StatefulOperatorChainedTaskTest {

    // Collects the IDs of all operators whose initializeState() observed a
    // restore; shared static because operators are (de)serialized by the harness.
    private static final Set<OperatorID> RESTORED_OPERATORS = ConcurrentHashMap.newKeySet();

    private TemporaryFolder temporaryFolder;

    @Before
    public void setup() throws IOException {
        RESTORED_OPERATORS.clear();
        // created manually (not as a @Rule), so folders live until JVM exit cleanup
        temporaryFolder = new TemporaryFolder();
        temporaryFolder.create();
    }

    /**
     * Runs a head->tail chain of two stateful operators, snapshots it, then
     * runs a fresh chain restored from that snapshot and verifies both
     * operators saw the restore.
     */
    @Test
    public void testMultipleStatefulOperatorChainedSnapshotAndRestore() throws Exception {

        OperatorID headOperatorID = new OperatorID(42L, 42L);
        OperatorID tailOperatorID = new OperatorID(44L, 44L);

        // first run: no restore, produces the checkpoint
        JobManagerTaskRestore restore =
                createRunAndCheckpointOperatorChain(
                        headOperatorID,
                        new CounterOperator("head"),
                        tailOperatorID,
                        new CounterOperator("tail"),
                        Optional.empty());

        TaskStateSnapshot stateHandles = restore.getTaskStateSnapshot();

        // one subtask-state entry per chained operator
        assertEquals(2, stateHandles.getSubtaskStateMappings().size());

        // second run: restore from the snapshot of the first run
        createRunAndCheckpointOperatorChain(
                headOperatorID,
                new CounterOperator("head"),
                tailOperatorID,
                new CounterOperator("tail"),
                Optional.of(restore));

        // both operators must have gone through the restore path
        assertEquals(
                new HashSet<>(Arrays.asList(headOperatorID, tailOperatorID)),
                RESTORED_OPERATORS);
    }

    /**
     * Builds a two-operator chain in a test harness, optionally restores it
     * from {@code restore}, processes a few records, triggers a checkpoint and
     * returns a restore handle for that checkpoint.
     */
    private JobManagerTaskRestore createRunAndCheckpointOperatorChain(
            OperatorID headId,
            OneInputStreamOperator<String, String> headOperator,
            OperatorID tailId,
            OneInputStreamOperator<String, String> tailOperator,
            Optional<JobManagerTaskRestore> restore)
            throws Exception {

        File localRootDir = temporaryFolder.newFolder();
        final OneInputStreamTaskTestHarness<String, String> testHarness =
                new OneInputStreamTaskTestHarness<>(
                        OneInputStreamTask::new,
                        1,
                        1,
                        BasicTypeInfo.STRING_TYPE_INFO,
                        BasicTypeInfo.STRING_TYPE_INFO,
                        localRootDir);

        testHarness
                .setupOperatorChain(headId, headOperator)
                .chain(tailId, tailOperator, StringSerializer.INSTANCE, true)
                .finish();

        if (restore.isPresent()) {
            JobManagerTaskRestore taskRestore = restore.get();
            testHarness.setTaskStateSnapshot(
                    taskRestore.getRestoreCheckpointId(), taskRestore.getTaskStateSnapshot());
        }

        StreamMockEnvironment environment =
                new StreamMockEnvironment(
                        testHarness.jobConfig,
                        testHarness.taskConfig,
                        testHarness.getExecutionConfig(),
                        testHarness.memorySize,
                        new MockInputSplitProvider(),
                        testHarness.bufferSize,
                        testHarness.getTaskStateManager());

        // incremental RocksDB backend writing checkpoints into a temp directory
        Configuration configuration = new Configuration();
        configuration.setString(STATE_BACKEND.key(), "rocksdb");
        File file = temporaryFolder.newFolder();
        configuration.setString(CHECKPOINTS_DIRECTORY.key(), file.toURI().toString());
        configuration.setString(INCREMENTAL_CHECKPOINTS.key(), "true");
        environment.setTaskManagerInfo(
                new TestingTaskManagerRuntimeInfo(
                        configuration,
                        System.getProperty("java.io.tmpdir").split(",|" + File.pathSeparator)));

        testHarness.invoke(environment);
        testHarness.waitForTaskRunning();

        OneInputStreamTask<String, String> streamTask = testHarness.getTask();

        processRecords(testHarness);

        triggerCheckpoint(testHarness, streamTask);

        TestTaskStateManager taskStateManager = testHarness.getTaskStateManager();

        // capture what was reported to the (test) job manager for the next run
        JobManagerTaskRestore jobManagerTaskRestore =
                new JobManagerTaskRestore(
                        taskStateManager.getReportedCheckpointId(),
                        taskStateManager.getLastJobManagerTaskStateSnapshot());

        testHarness.endInput();
        testHarness.waitForTaskCompletion();
        return jobManagerTaskRestore;
    }

    /**
     * Triggers checkpoint 1 on the task (retrying until the async trigger is
     * accepted) and waits until its state report reaches the state manager.
     */
    private void triggerCheckpoint(
            OneInputStreamTaskTestHarness<String, String> testHarness,
            OneInputStreamTask<String, String> streamTask)
            throws Exception {
        long checkpointId = 1L;
        CheckpointMetaData checkpointMetaData = new CheckpointMetaData(checkpointId, 1L);

        testHarness.getTaskStateManager().getWaitForReportLatch().reset();

        // triggerCheckpointAsync may decline while the task is busy; retry
        while (!streamTask
                .triggerCheckpointAsync(
                        checkpointMetaData, CheckpointOptions.forCheckpointWithDefaultLocation())
                .get()) {}

        testHarness.getTaskStateManager().getWaitForReportLatch().await();
        long reportedCheckpointId = testHarness.getTaskStateManager().getReportedCheckpointId();

        assertEquals(checkpointId, reportedCheckpointId);
    }

    /** Feeds three records through the chain and checks they pass through unchanged. */
    private void processRecords(OneInputStreamTaskTestHarness<String, String> testHarness)
            throws Exception {
        ConcurrentLinkedQueue<Object> expectedOutput = new ConcurrentLinkedQueue<>();

        testHarness.processElement(new StreamRecord<>("10"), 0, 0);
        testHarness.processElement(new StreamRecord<>("20"), 0, 0);
        testHarness.processElement(new StreamRecord<>("30"), 0, 0);

        testHarness.waitForInputProcessing();

        expectedOutput.add(new StreamRecord<>("10"));
        expectedOutput.add(new StreamRecord<>("20"));
        expectedOutput.add(new StreamRecord<>("30"));
        TestHarnessUtil.assertOutputEquals(
                "Output was not correct.", expectedOutput, testHarness.getOutput());
    }

    /**
     * Base operator that records its own OperatorID in {@link #RESTORED_OPERATORS}
     * whenever it is initialized from restored state.
     */
    private abstract static class RestoreWatchOperator<IN, OUT> extends AbstractStreamOperator<OUT>
            implements OneInputStreamOperator<IN, OUT> {
        @Override
        public void initializeState(StateInitializationContext context) throws Exception {
            if (context.isRestored()) {
                RESTORED_OPERATORS.add(getOperatorID());
            }
        }
    }

    /** Operator that counts processed messages and keeps result on state. */
    private static class CounterOperator extends RestoreWatchOperator<String, String> {
        private static final long serialVersionUID = 2048954179291813243L;

        // counter value captured at the last snapshot; static so the restored
        // instance (a different object) can compare against it
        private static long snapshotOutData = 0L;

        private ValueState<Long> counterState;
        private long counter = 0;
        private String prefix;

        CounterOperator(String prefix) {
            this.prefix = prefix;
        }

        @Override
        public void processElement(StreamRecord<String> element) throws Exception {
            counter++;
            output.collect(element);
        }

        @Override
        public void initializeState(StateInitializationContext context) throws Exception {
            super.initializeState(context);

            counterState =
                    context.getKeyedStateStore()
                            .getState(
                                    new ValueStateDescriptor<>(
                                            prefix + "counter-state", LongSerializer.INSTANCE));

            // set key manually to make RocksDBListState get the serialized key.
            setCurrentKey("10");

            if (context.isRestored()) {
                counter = counterState.value();
                // restored value must equal what the previous run snapshotted
                assertEquals(snapshotOutData, counter);
                counterState.clear();
            }
        }

        @Override
        public void snapshotState(StateSnapshotContext context) throws Exception {
            counterState.update(counter);
            snapshotOutData = counter;
        }
    }
}
apache-2.0
dahlstrom-g/intellij-community
platform/lang-impl/src/com/intellij/psi/impl/source/tree/injected/InjectedCaret.java
7344
// Copyright 2000-2017 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.psi.impl.source.tree.injected; import com.intellij.injected.editor.EditorWindow; import com.intellij.openapi.editor.*; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.ProperTextRange; import com.intellij.openapi.util.TextRange; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; public class InjectedCaret implements Caret { private final EditorWindow myEditorWindow; final Caret myDelegate; InjectedCaret(EditorWindow window, Caret delegate) { myEditorWindow = window; myDelegate = delegate; } @NotNull @Override public Editor getEditor() { return myEditorWindow; } @NotNull @Override public CaretModel getCaretModel() { return myEditorWindow.getCaretModel(); } public Caret getDelegate() { return myDelegate; } @Override public boolean isValid() { return myDelegate.isValid(); } @Override public void moveCaretRelatively(int columnShift, int lineShift, boolean withSelection, boolean scrollToCaret) { myDelegate.moveCaretRelatively(columnShift, lineShift, withSelection, scrollToCaret); } @Override public void moveToLogicalPosition(@NotNull LogicalPosition pos) { LogicalPosition hostPos = myEditorWindow.injectedToHost(pos); myDelegate.moveToLogicalPosition(hostPos); } @Override public void moveToVisualPosition(@NotNull VisualPosition pos) { LogicalPosition hostPos = myEditorWindow.injectedToHost(myEditorWindow.visualToLogicalPosition(pos)); myDelegate.moveToLogicalPosition(hostPos); } @Override public void moveToOffset(int offset) { moveToOffset(offset, false); } @Override public void moveToOffset(int offset, boolean locateBeforeSoftWrap) { int hostOffset = myEditorWindow.getDocument().injectedToHost(offset); myDelegate.moveToOffset(hostOffset, locateBeforeSoftWrap); } @Override public boolean isUpToDate() { return myDelegate.isUpToDate(); } @NotNull @Override 
public LogicalPosition getLogicalPosition() { LogicalPosition hostPos = myDelegate.getLogicalPosition(); return myEditorWindow.hostToInjected(hostPos); } @NotNull @Override public VisualPosition getVisualPosition() { LogicalPosition logicalPosition = getLogicalPosition(); return myEditorWindow.logicalToVisualPosition(logicalPosition); } @Override public int getOffset() { return myEditorWindow.getDocument().hostToInjected(myDelegate.getOffset()); } @Override public int getVisualLineStart() { return myEditorWindow.getDocument().hostToInjected(myDelegate.getVisualLineStart()); } @Override public int getVisualLineEnd() { return myEditorWindow.getDocument().hostToInjected(myDelegate.getVisualLineEnd()); } @Override public int getSelectionStart() { return myEditorWindow.getDocument().hostToInjected(myDelegate.getSelectionStart()); } @NotNull @Override public VisualPosition getSelectionStartPosition() { return myDelegate.getSelectionStartPosition(); } @Override public int getSelectionEnd() { return myEditorWindow.getDocument().hostToInjected(myDelegate.getSelectionEnd()); } @NotNull @Override public VisualPosition getSelectionEndPosition() { return myDelegate.getSelectionEndPosition(); } @Nullable @Override public String getSelectedText() { return myDelegate.getSelectedText(); } @Override public int getLeadSelectionOffset() { return myEditorWindow.getDocument().hostToInjected(myDelegate.getLeadSelectionOffset()); } @NotNull @Override public VisualPosition getLeadSelectionPosition() { return myDelegate.getLeadSelectionPosition(); } @Override public boolean hasSelection() { return myDelegate.hasSelection(); } @Override public void setSelection(int startOffset, int endOffset) { TextRange hostRange = myEditorWindow.getDocument().injectedToHost(new ProperTextRange(startOffset, endOffset)); myDelegate.setSelection(hostRange.getStartOffset(), hostRange.getEndOffset()); } @Override public void setSelection(int startOffset, int endOffset, boolean updateSystemSelection) { TextRange 
hostRange = myEditorWindow.getDocument().injectedToHost(new ProperTextRange(startOffset, endOffset)); myDelegate.setSelection(hostRange.getStartOffset(), hostRange.getEndOffset(), updateSystemSelection); } @Override public void setSelection(int startOffset, @Nullable VisualPosition endPosition, int endOffset) { TextRange hostRange = myEditorWindow.getDocument().injectedToHost(new ProperTextRange(startOffset, endOffset)); myDelegate.setSelection(hostRange.getStartOffset(), endPosition, hostRange.getEndOffset()); } @Override public void setSelection(@Nullable VisualPosition startPosition, int startOffset, @Nullable VisualPosition endPosition, int endOffset) { TextRange hostRange = myEditorWindow.getDocument().injectedToHost(new ProperTextRange(startOffset, endOffset)); myDelegate.setSelection(startPosition, hostRange.getStartOffset(), endPosition, hostRange.getEndOffset()); } @Override public void setSelection(@Nullable VisualPosition startPosition, int startOffset, @Nullable VisualPosition endPosition, int endOffset, boolean updateSystemSelection) { TextRange hostRange = myEditorWindow.getDocument().injectedToHost(new ProperTextRange(startOffset, endOffset)); myDelegate.setSelection(startPosition, hostRange.getStartOffset(), endPosition, hostRange.getEndOffset(), updateSystemSelection); } @Override public void removeSelection() { myDelegate.removeSelection(); } @Override public void selectLineAtCaret() { myDelegate.selectLineAtCaret(); } @Override public void selectWordAtCaret(boolean honorCamelWordsSettings) { myDelegate.selectWordAtCaret(honorCamelWordsSettings); } @Nullable @Override public Caret clone(boolean above) { Caret clone = myDelegate.clone(above); return clone == null ? 
null : new InjectedCaret(myEditorWindow, clone); } @Override public void dispose() { //noinspection SSBasedInspection myDelegate.dispose(); } @NotNull @Override public <T> T putUserDataIfAbsent(@NotNull Key<T> key, @NotNull T value) { return myDelegate.putUserDataIfAbsent(key, value); } @Override public <T> boolean replace(@NotNull Key<T> key, @Nullable T oldValue, @Nullable T newValue) { return myDelegate.replace(key, oldValue, newValue); } @Nullable @Override public <T> T getUserData(@NotNull Key<T> key) { return myDelegate.getUserData(key); } @Override public <T> void putUserData(@NotNull Key<T> key, @Nullable T value) { myDelegate.putUserData(key, value); } @Override public boolean isAtRtlLocation() { return myDelegate.isAtRtlLocation(); } @Override public boolean isAtBidiRunBoundary() { return myDelegate.isAtBidiRunBoundary(); } @NotNull @Override public CaretVisualAttributes getVisualAttributes() { return myDelegate.getVisualAttributes(); } @Override public void setVisualAttributes(@NotNull CaretVisualAttributes attributes) { myDelegate.setVisualAttributes(attributes); } }
apache-2.0
markcoble/droolsjbpm-integration
kie-osgi/kie-osgi-integration/src/main/test/org/kie/osgi/compiler/OsgiKieModuleTest.java
4248
/* * Copyright 2015 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kie.osgi.compiler; import org.assertj.core.api.Assertions; import org.junit.Assert; import org.junit.Test; import org.kie.api.KieServices; import org.kie.api.builder.ReleaseId; import org.mockito.Mockito; import org.osgi.framework.Bundle; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Enumeration; import java.util.List; public class OsgiKieModuleTest { @Test public void testAcceptsValidOsgiBundleURLs() { // Felix format assertAcceptsStringAsOsgiBundleUrL("bundle://something"); // Equinox format assertAcceptsStringAsOsgiBundleUrL("bundleresource://something"); // Equinox + blueprint format assertAcceptsStringAsOsgiBundleUrL("bundleentry://something"); // other possible formats starting with "bundle" assertAcceptsStringAsOsgiBundleUrL("bundle-something://something-else"); } @Test public void testRejectsInvalidOsgiBundleUrls() { assertRejectsStringAsOsgiBundleUrRL("something://else"); assertRejectsStringAsOsgiBundleUrRL("mybundle://invalid"); } @Test public void testParsingOfBundleIdFromOsgiURL() { // Felix bundle URL assertBundleIdCorrectlyParsed("bundle://130.0:1/", "130"); // Equinox bundle URL assertBundleIdCorrectlyParsed("bundleresource://151.fwk495985218/", "151"); // invalid bundle URL results in "null" assertBundleIdCorrectlyParsed("invalid-bundle-url", null); } @Test public void testGatherResourcesFromWARBundle() throws Exception { KieServices 
kieServices = KieServices.Factory.get(); Bundle bundleMock = Mockito.mock(Bundle.class); final List<URL> urls = new ArrayList<URL>(); // we only care about the path, the file does not have to exist // can't mock java.net.URL, because it is a final class urls.add(new URL("file:///META-INF/kmodule.xml")); urls.add(new URL("file:///WEB-INF/classes/org/org.kie/osgi/SomePOJO.class")); urls.add(new URL("file:///WEB-INF/classes/org/org.kie/osgi/some-process.bpmn2")); Enumeration<URL> resourcesEnumMock = new Enumeration<URL>() { int currentIndex = 0; @Override public boolean hasMoreElements() { return currentIndex < urls.size(); } @Override public URL nextElement() { return urls.get(currentIndex++); } }; Mockito.when(bundleMock.findEntries("", "*", true)).thenReturn(resourcesEnumMock); ReleaseId releaseId = kieServices.newReleaseId("org.org.kie.osgi.compiler", "osgi-kie-module-test", "1.0.0.Final"); OsgiKieModule kmodule = OsgiKieModule.create(releaseId, null, bundleMock); Collection<String> fileNames = kmodule.getFileNames(); Assertions.assertThat(fileNames).hasSize(3); Assertions.assertThat(fileNames).contains("META-INF/kmodule.xml", "org/drools/osgi/SomePOJO.class", "org/drools/osgi/some-process.bpmn2"); } private void assertAcceptsStringAsOsgiBundleUrL(String str) { Assert.assertTrue("String '" + str + "' not recognized as OSGi bundle URL!", OsgiKieModule.isOsgiBundleUrl(str)); } private void assertRejectsStringAsOsgiBundleUrRL(String str) { Assert.assertFalse("Invalid string '" + str + "' recognized as OSGi bundle URL!", OsgiKieModule.isOsgiBundleUrl(str)); } private void assertBundleIdCorrectlyParsed(String bundleUrl, String expectedBundleId) { Assert.assertEquals(expectedBundleId, OsgiKieModule.parseBundleId(bundleUrl)); } }
apache-2.0
openweave/openweave-core
third_party/android/platform-libcore/android-platform-libcore/luni/src/main/java/java/awt/font/TextAttribute.java
10893
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @author Ilya S. Okomin * @version $Revision$ */ package java.awt.font; import java.io.InvalidObjectException; import java.text.AttributedCharacterIterator.Attribute; import java.util.HashMap; import java.util.Map; /** * The TextAttribute class defines attribute keys and attribute values * for text rendering. Each TextAttributes should have the following * information: * <ul> * <li>key name;</li> * <li>valid values;</li> * <li>relevant constants;</li> * <li>default action if the attribute is absent;</li> * <li>default action's description.</li> * </ul> */ public final class TextAttribute extends Attribute { /** The Constant serialVersionUID. */ private static final long serialVersionUID = 7744112784117861702L; // set of available text attributes /** The Constant attrMap. */ private static final Map<String, TextAttribute> attrMap = new HashMap<String, TextAttribute>(); /** * Instantiates a new TextAttribute with the specified name. * * @param name the name of TextAttribute. */ protected TextAttribute(String name) { super(name); attrMap.put(name, this); } /** * Resolves the instance being deserialized. * * @return the Object. * * @throws InvalidObjectException the InvalidObjectException. 
*/ @Override protected Object readResolve() throws InvalidObjectException { TextAttribute result = attrMap.get(this.getName()); if (result != null) { return result; } throw new InvalidObjectException("Unknown attribute name"); } /** * The BACKGROUND text attribute. */ public static final TextAttribute BACKGROUND = new TextAttribute("background"); /** The BIDI_EMBEDDING text attribute key. */ public static final TextAttribute BIDI_EMBEDDING = new TextAttribute("bidi_embedding"); /** The CHAR_REPLACEMENT text attribute key. */ public static final TextAttribute CHAR_REPLACEMENT = new TextAttribute("char_replacement"); /** The FAMILY text attribute key. */ public static final TextAttribute FAMILY = new TextAttribute("family"); /** The FONT text attribute key. */ public static final TextAttribute FONT = new TextAttribute("font"); /** The FOREGROUND text attribute key. */ public static final TextAttribute FOREGROUND = new TextAttribute("foreground"); /** The INPUT_METHOD_HIGHLIGHT text attribute key. */ public static final TextAttribute INPUT_METHOD_HIGHLIGHT = new TextAttribute( "input method highlight"); /** The INPUT_METHOD_UNDERLINE text attribute key. */ public static final TextAttribute INPUT_METHOD_UNDERLINE = new TextAttribute( "input method underline"); /** The JUSTIFICATION text attribute key. */ public static final TextAttribute JUSTIFICATION = new TextAttribute("justification"); /** The Constant JUSTIFICATION_indicates the full requested width. */ public static final Float JUSTIFICATION_FULL = new Float(1.0f); /** * The Constant JUSTIFICATION_NONE indicates that line is not * allowed for justification. */ public static final Float JUSTIFICATION_NONE = new Float(0.0f); /** * The KERNING text attribute key. * * @since 1.6 */ public static final TextAttribute KERNING = new TextAttribute("kerning"); /** @since 1.6 */ public static final Integer KERNING_ON = 1; /** * The LIGATURES text attribute key. 
* * @since 1.6 */ public static final TextAttribute LIGATURES = new TextAttribute("ligatures"); /** @since 1.6 */ public static final Integer LIGATURES_ON = 1; /** The NUMERIC_SHAPING text attribute. */ public static final TextAttribute NUMERIC_SHAPING = new TextAttribute("numeric_shaping"); /** The POSTURE text attribute. */ public static final TextAttribute POSTURE = new TextAttribute("posture"); /** The Constant POSTURE_REGULAR indicates regular posture. */ public static final Float POSTURE_REGULAR = new Float(0.0f); /** The Constant POSTURE_OBLIQUE indicates italic posture. */ public static final Float POSTURE_OBLIQUE = new Float(0.20f); /** The RUN_DIRECTION text attribute. */ public static final TextAttribute RUN_DIRECTION = new TextAttribute("run_direction"); /** * The Constant RUN_DIRECTION_LTR indicates left-to-right run * direction. */ public static final Boolean RUN_DIRECTION_LTR = false; /** * The Constant RUN_DIRECTION_RTL indicates right-to-left run * direction. */ public static final Boolean RUN_DIRECTION_RTL = true; /** The SIZE text attribute. */ public static final TextAttribute SIZE = new TextAttribute("size"); /** The STRIKETHROUGH text attribute. */ public static final TextAttribute STRIKETHROUGH = new TextAttribute("strikethrough"); /** The Constant STRIKETHROUGH_ON indicates a single strikethrough. */ public static final Boolean STRIKETHROUGH_ON = true; /** The SUPERSCRIPT text attribute. */ public static final TextAttribute SUPERSCRIPT = new TextAttribute("superscript"); /** The Constant SUPERSCRIPT_SUB indicates a standard subscript. */ public static final Integer SUPERSCRIPT_SUB = -1; /** The Constant SUPERSCRIPT_SUPER indicates a standard superscript. */ public static final Integer SUPERSCRIPT_SUPER = 1; /** The SWAP_COLORS text attribute. */ public static final TextAttribute SWAP_COLORS = new TextAttribute("swap_colors"); /** * The Constant SWAP_COLORS_ON indicates a swap of foreground * and background. 
*/ public static final Boolean SWAP_COLORS_ON = true; /** * The TRACKING text attribute key. * * @since 1.6 */ public static final TextAttribute TRACKING = new TextAttribute("tracking"); /** @since 1.6 */ public static final Float TRACKING_LOOSE = -1f; /** @since 1.6 */ public static final Float TRACKING_TIGHT = -1f; /** The TRANSFORM text attribute. */ public static final TextAttribute TRANSFORM = new TextAttribute("transform"); /** The Constant UNDERLINE text attribute. */ public static final TextAttribute UNDERLINE = new TextAttribute("underline"); /** * The Constant UNDERLINE_ON indicates a standard underline * at the roman baseline for roman text. */ public static final Integer UNDERLINE_ON = 0; /** * The Constant UNDERLINE_LOW_ONE_PIXEL indicates a single * pixel solid low underline. */ public static final Integer UNDERLINE_LOW_ONE_PIXEL = 1; /** * The Constant UNDERLINE_LOW_TWO_PIXEL indicates a double * pixel solid low underline. */ public static final Integer UNDERLINE_LOW_TWO_PIXEL = 2; /** * The Constant UNDERLINE_LOW_DOTTED indicates a * single pixel dotted low underline. */ public static final Integer UNDERLINE_LOW_DOTTED = 3; /** * The Constant UNDERLINE_LOW_GRAY indicates double pixel * gray low underline. */ public static final Integer UNDERLINE_LOW_GRAY = 4; /** * The Constant UNDERLINE_LOW_DASHED indicates single pixel dashed * low underline. */ public static final Integer UNDERLINE_LOW_DASHED = 5; /** The WEIGHT text attribute. */ public static final TextAttribute WEIGHT = new TextAttribute("weight"); /** * The Constant WEIGHT_EXTRA_LIGHT indicates the lightest * predefined weight. */ public static final Float WEIGHT_EXTRA_LIGHT = new Float(0.5f); /** * The Constant WEIGHT_LIGHT indicates the standard light weight. */ public static final Float WEIGHT_LIGHT = new Float(0.75f); /** * The Constant WEIGHT_DEMILIGHT indicates an intermediate weight * between LIGHT and STANDARD. 
*/ public static final Float WEIGHT_DEMILIGHT = new Float(0.875f); /** * The Constant WEIGHT_REGULAR indicates the standart weight. */ public static final Float WEIGHT_REGULAR = new Float(1.0f); /** * The Constant WEIGHT_SEMIBOLD indicates a semi weight * of REGULAR. */ public static final Float WEIGHT_SEMIBOLD = new Float(1.25f); /** The Constant WEIGHT_MEDIUM indicates average weight * between the REGULAR and BOLD. */ public static final Float WEIGHT_MEDIUM = new Float(1.5f); /** The Constant WEIGHT_DEMIBOLD indicates * a lighter weight than BOLD. */ public static final Float WEIGHT_DEMIBOLD = new Float(1.75f); /** The Constant WEIGHT_BOLD indicates the standard bold weight. */ public static final Float WEIGHT_BOLD = new Float(2.0f); /** The Constant WEIGHT_HEAVY indicates a heavier weight than BOLD. */ public static final Float WEIGHT_HEAVY = new Float(2.25f); /** The Constant WEIGHT_EXTRABOLD indicates an extra heavy weight. */ public static final Float WEIGHT_EXTRABOLD = new Float(2.5f); /** * The Constant WEIGHT_ULTRABOLD indicates the heaviest predefined * weight. */ public static final Float WEIGHT_ULTRABOLD = new Float(2.75f); /** The WIDTH text attribute. */ public static final TextAttribute WIDTH = new TextAttribute("width"); /** * The Constant WIDTH_CONDENSED indicates the most condensed * predefined width. */ public static final Float WIDTH_CONDENSED = new Float(0.75f); /** * The Constant WIDTH_SEMI_CONDENSED indicates * a semi condensed width. */ public static final Float WIDTH_SEMI_CONDENSED = new Float(0.875f); /** * The Constant WIDTH_REGULAR indicates the standard width. */ public static final Float WIDTH_REGULAR = new Float(1.0f); /** * The Constant WIDTH_SEMI_EXTENDED indicates semi extended width. */ public static final Float WIDTH_SEMI_EXTENDED = new Float(1.25f); /** * The Constant WIDTH_EXTENDED indicates extended width. */ public static final Float WIDTH_EXTENDED = new Float(1.5f); }
apache-2.0