repo_name
stringlengths
5
108
path
stringlengths
6
333
size
stringlengths
1
6
content
stringlengths
4
977k
license
stringclasses
15 values
AstromechZA/PLYMeshSplitterJava
src/org/uct/cs/simplify/gui/cropper/ConvexHullCalculator.java
3729
package org.uct.cs.simplify.gui.cropper;

import java.awt.geom.Point2D;
import java.util.ArrayList;

/**
 * Computes the convex hull of a 2D point set using the QuickHull algorithm.
 *
 * <p>All methods are static and stateless; the caller's input list is never
 * mutated.
 */
public class ConvexHullCalculator
{
    /**
     * Computes the convex hull of {@code candidates}.
     *
     * @param candidates the input points (not modified)
     * @return the hull vertices in boundary order; if fewer than 3 points are
     *         supplied, a shallow copy of the input is returned unchanged
     */
    public static ArrayList<Point2D> quickHull(ArrayList<Point2D> candidates)
    {
        // Defensive shallow copy: the algorithm removes points as it recurses.
        // (Replaces the previous unchecked '(ArrayList<Point2D>) clone()' cast.)
        ArrayList<Point2D> points = new ArrayList<>(candidates);
        if (candidates.size() < 3) return points;

        ArrayList<Point2D> convexHull = new ArrayList<>();

        // Find the extreme points along the X axis; both are hull vertices.
        // Seed with infinities rather than Integer.MIN/MAX_VALUE so that
        // coordinates outside the int range are still handled correctly.
        int minPoint = -1, maxPoint = -1;
        double minX = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY;
        for (int i = 0; i < points.size(); i++)
        {
            if (points.get(i).getX() < minX)
            {
                minX = points.get(i).getX();
                minPoint = i;
            }
            if (points.get(i).getX() > maxX)
            {
                maxX = points.get(i).getX();
                maxPoint = i;
            }
        }

        Point2D A = points.get(minPoint);
        Point2D B = points.get(maxPoint);
        convexHull.add(A);
        convexHull.add(B);
        points.remove(A);
        points.remove(B);

        // Partition the remaining points by which side of line A->B they lie on.
        // NOTE(review): pointLocation never reports 0, so collinear points all
        // fall into rightSet — same behavior as the original implementation.
        ArrayList<Point2D> leftSet = new ArrayList<>();
        ArrayList<Point2D> rightSet = new ArrayList<>();
        for (Point2D p : points)
        {
            if (pointLocation(A, B, p) == -1)
                leftSet.add(p);
            else
                rightSet.add(p);
        }
        hullSet(A, B, rightSet, convexHull);
        hullSet(B, A, leftSet, convexHull);

        return convexHull;
    }

    /**
     * Side-of-line test: returns 1 if P lies on the counter-clockwise side of
     * the directed line A->B, otherwise -1 (collinear points report -1).
     */
    public static int pointLocation(Point2D A, Point2D B, Point2D P)
    {
        double cp1 = (B.getX() - A.getX()) * (P.getY() - A.getY())
            - (B.getY() - A.getY()) * (P.getX() - A.getX());
        return (cp1 > 0) ? 1 : -1;
    }

    /**
     * Distance proxy of point C from line AB: the absolute cross product.
     * This is proportional (not equal) to the true perpendicular distance,
     * which is sufficient for comparing points against the same segment.
     */
    public static double distance(Point2D A, Point2D B, Point2D C)
    {
        double ABx = B.getX() - A.getX();
        double ABy = B.getY() - A.getY();
        return Math.abs(ABx * (A.getY() - C.getY()) - ABy * (A.getX() - C.getX()));
    }

    /**
     * Recursive QuickHull step: inserts into {@code hull} (just before B)
     * every hull vertex among {@code set} lying outside segment A->B.
     */
    public static void hullSet(Point2D A, Point2D B, ArrayList<Point2D> set, ArrayList<Point2D> hull)
    {
        int insertPosition = hull.indexOf(B);
        if (set.isEmpty()) return;
        if (set.size() == 1)
        {
            hull.add(insertPosition, set.remove(0));
            return;
        }

        // The point furthest from AB must itself be a hull vertex.
        double dist = Double.NEGATIVE_INFINITY;
        int furthestPoint = -1;
        for (int i = 0; i < set.size(); i++)
        {
            double d = distance(A, B, set.get(i));
            if (d > dist)
            {
                dist = d;
                furthestPoint = i;
            }
        }
        Point2D P = set.remove(furthestPoint);
        hull.add(insertPosition, P);

        // Points left of A->P are outside edge AP ...
        ArrayList<Point2D> leftSetAP = new ArrayList<>();
        for (Point2D M : set)
        {
            if (pointLocation(A, P, M) == 1) leftSetAP.add(M);
        }
        // ... and points left of P->B are outside edge PB.
        ArrayList<Point2D> leftSetPB = new ArrayList<>();
        for (Point2D M : set)
        {
            if (pointLocation(P, B, M) == 1) leftSetPB.add(M);
        }
        hullSet(A, P, leftSetAP, hull);
        hullSet(P, B, leftSetPB, hull);
    }
}
apache-2.0
China-ls/wechat4java
src/main/java/weixin/popular/bean/shakearound/user/getshakeinfo/UserGetShakeInfoResultData.java
2164
package weixin.popular.bean.shakearound.user.getshakeinfo;

import com.google.gson.annotations.SerializedName;

/**
 * WeChat Shake-Around ("shakearound") API: device-and-user entry carried in
 * the "get shake info" response payload.
 *
 * @author Moyq5
 * @date 2016-07-30
 */
public class UserGetShakeInfoResultData {

    /** Unique id of the shake-around page. */
    @SerializedName("page_id")
    private Integer pageId;

    /** Beacon/device info: UUID, major, minor and distance. */
    @SerializedName("beacon_info")
    private UserGetShakeInfoResultDataBeaconInfo beaconInfo;

    /** Unique user identifier under the merchant's AppID. */
    @SerializedName("openid")
    private String openId;

    /** Store (POI) id; only present in the JSON when available. */
    @SerializedName("poi_id")
    private Integer poiId;

    /** @return unique id of the shake-around page */
    public Integer getPageId() {
        return pageId;
    }

    /** @param pageId unique id of the shake-around page */
    public void setPageId(Integer pageId) {
        this.pageId = pageId;
    }

    /** @return beacon/device info (UUID, major, minor, distance) */
    public UserGetShakeInfoResultDataBeaconInfo getBeaconInfo() {
        return beaconInfo;
    }

    /** @param beaconInfo beacon/device info (UUID, major, minor, distance) */
    public void setBeaconInfo(UserGetShakeInfoResultDataBeaconInfo beaconInfo) {
        this.beaconInfo = beaconInfo;
    }

    /** @return unique user identifier under the merchant's AppID */
    public String getOpenId() {
        return openId;
    }

    /** @param openId unique user identifier under the merchant's AppID */
    public void setOpenId(String openId) {
        this.openId = openId;
    }

    /** @return store (POI) id, if the API returned one */
    public Integer getPoiId() {
        return poiId;
    }

    /** @param poiId store (POI) id */
    public void setPoiId(Integer poiId) {
        this.poiId = poiId;
    }
}
apache-2.0
SmartInfrastructures/dreamer
apps/icona/src/main/java/org/onosproject/icona/BFSTree.java
2817
package org.onosproject.icona;

import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedList;

import org.onosproject.icona.store.Cluster;
import org.onosproject.icona.store.IconaStoreService;
import org.onosproject.icona.store.InterLink;

/**
 * Breadth-first spanning tree over the inter-cluster topology, rooted at a
 * given cluster. Optionally excludes every inter-link that shares a source or
 * destination switch with a designated "primary" inter-link, and lazily
 * materializes root-to-leaf paths on demand.
 */
public class BFSTree {

    LinkedList<Cluster> clusterQueue = new LinkedList<>();
    HashSet<String> clusterSearched = new HashSet<>();
    HashMap<String, InterLink> upstreamInterLinks = new HashMap<>();
    HashMap<String, InterClusterPath> interClusterPaths = new HashMap<>();
    Cluster rootCluster;
    IconaStoreService iconaStoreService;

    /**
     * Builds the BFS tree immediately.
     *
     * @param rootCluster       cluster the tree is rooted at
     * @param iconaStoreService store used to resolve cluster names
     * @param primaryIL         inter-link whose endpoint switches must be
     *                          avoided; may be null for an unconstrained tree
     */
    public BFSTree(Cluster rootCluster, IconaStoreService iconaStoreService,
                   InterLink primaryIL) {
        this.rootCluster = rootCluster;
        this.iconaStoreService = iconaStoreService;
        calcTree(primaryIL);
    }

    /**
     * Standard BFS from the root: records, for every reachable cluster, the
     * inter-link used to first reach it (its "upstream" link).
     */
    protected final void calcTree(InterLink primaryIL) {
        clusterQueue.add(rootCluster);
        clusterSearched.add(rootCluster.getClusterName());

        while (!clusterQueue.isEmpty()) {
            Cluster current = clusterQueue.poll();
            for (InterLink link : current.getInterLinks()) {
                // Prune links sharing a switch with the primary inter-link.
                if (!isUsable(link, primaryIL)) {
                    continue;
                }
                String neighbor = link.dstClusterName();
                if (clusterSearched.contains(neighbor)) {
                    continue;
                }
                clusterQueue.add(iconaStoreService.getCluster(neighbor));
                clusterSearched.add(neighbor);
                upstreamInterLinks.put(neighbor, link);
            }
        }
    }

    /**
     * A link is usable when no primary inter-link is set, or when it shares
     * neither its source switch nor its destination switch with the primary.
     */
    private static boolean isUsable(InterLink link, InterLink primaryIL) {
        if (primaryIL == null) {
            return true;
        }
        boolean sameSrc = primaryIL.src().deviceId()
                .equals(link.src().deviceId());
        boolean sameDst = primaryIL.dst().deviceId()
                .equals(link.dst().deviceId());
        return !sameSrc && !sameDst;
    }

    /**
     * Returns the path from the root to {@code leafCluster}, or null when the
     * leaf was not reached by the BFS. Paths are built by walking the upstream
     * links back to the root and are cached after the first request.
     */
    public InterClusterPath getPath(Cluster leafCluster) {
        String leafName = leafCluster.getClusterName();
        InterClusterPath path = interClusterPaths.get(leafName);
        if (path != null || !clusterSearched.contains(leafName)) {
            return path;
        }

        path = new InterClusterPath();
        String current = leafName;
        while (!current.equals(rootCluster.getClusterName())) {
            InterLink upstream = upstreamInterLinks.get(current);
            path.addInterlinks(upstream);
            current = upstream.srcClusterName();
        }
        interClusterPaths.put(leafName, path);
        return path;
    }
}
apache-2.0
freeVM/freeVM
enhanced/archive/classlib/java6/modules/text/src/test/java/org/apache/harmony/text/tests/java/text/DecimalFormatTest.java
69330
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.text.tests.java.text; import java.io.ObjectInputStream; import java.math.BigDecimal; import java.math.BigInteger; import java.text.AttributedCharacterIterator; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; import java.text.FieldPosition; import java.text.NumberFormat; import java.text.ParsePosition; import java.util.Currency; import java.util.Locale; import junit.framework.TestCase; import org.apache.harmony.testframework.serialization.SerializationTest; import tests.support.Support_BitSet; import tests.support.Support_DecimalFormat; public class DecimalFormatTest extends TestCase { public void testAttributedCharacterIterator() throws Exception { // Regression for http://issues.apache.org/jira/browse/HARMONY-333 AttributedCharacterIterator iterator = new DecimalFormat().formatToCharacterIterator(new Integer(1)); assertNotNull(iterator); assertFalse("attributes should exist", iterator.getAttributes().isEmpty()); } /* * Test the getter and setter of parseBigDecimal and parseIntegerOnly and * test the default value of them. 
*/ public void test_isParseBigDecimalLjava_lang_Boolean_isParseIntegerOnlyLjava_lang_Boolean() { // parseBigDecimal default to false DecimalFormat form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); assertFalse(form.isParseBigDecimal()); form.setParseBigDecimal(true); assertTrue(form.isParseBigDecimal()); form.setParseBigDecimal(false); assertFalse(form.isParseBigDecimal()); // parseIntegerOnly default to false assertFalse(form.isParseIntegerOnly()); } // Test the type of the returned object public void test_parseLjava_lang_String_Ljava_text_ParsePosition() { DecimalFormat form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); Number number = form.parse("23.1", new ParsePosition(0)); assertTrue(number instanceof Double); // Test parsed object of type double when // parseBigDecimal is set to true form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); number = form.parse("23.1", new ParsePosition(0)); assertTrue(number instanceof Double); form.setParseBigDecimal(true); number = form.parse("23.1", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); assertEquals(new BigDecimal("23.1"), number); // When parseIntegerOnly set to true, all float numbers will be parsed // into Long. // With the exception that, the value is out of the bound of Long or // some special values such as NaN or Infinity. 
form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); form.setParseIntegerOnly(true); number = form.parse("23.1f", new ParsePosition(0)); assertTrue(number instanceof Long); number = form.parse("23.0", new ParsePosition(0)); assertTrue(number instanceof Long); number = form.parse("-0.0", new ParsePosition(0)); assertTrue(number instanceof Long); assertTrue(new Long(0).equals(number)); number = form.parse("-9,223,372,036,854,775,8080.00", new ParsePosition(0)); assertTrue(number instanceof Double); // Even if parseIntegerOnly is set to true, NaN will be parsed to Double form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); form.setParseIntegerOnly(true); DecimalFormatSymbols symbols = new DecimalFormatSymbols(); number = form.parse(symbols.getNaN(), new ParsePosition(0)); assertTrue(number instanceof Double); // Even if parseIntegerOnly is set to true, Infinity will still be // parsed to Double form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); form.setParseIntegerOnly(true); symbols = new DecimalFormatSymbols(); number = form.parse(symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); // ParseBigDecimal take precedence of parseBigInteger form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); form.setParseIntegerOnly(true); form.setParseBigDecimal(true); number = form.parse("23.1f", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); number = form.parse("23.0", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); number = form.parse("-9,223,372,036,854,775,8080.00", new ParsePosition(0)); assertFalse(number instanceof BigInteger); assertTrue(number instanceof BigDecimal); // Test whether the parsed object is of type float. 
(To be specific, // they are of type Double) form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); number = form.parse("23.1f", new ParsePosition(0)); assertTrue(number instanceof Double); form.setParseBigDecimal(true); number = form.parse("23.1f", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); assertEquals(new BigDecimal("23.1"), number); // Integer will be parsed to Long, unless parseBigDecimal is set to true form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); number = form.parse("123", new ParsePosition(0)); assertTrue(number instanceof Long); form.setParseBigDecimal(true); number = form.parse("123", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); assertEquals(new BigDecimal("123"), number); // NaN will be parsed to Double, no matter parseBigDecimal set or not. form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); symbols = new DecimalFormatSymbols(); number = form.parse(symbols.getNaN() + "", new ParsePosition(0)); assertTrue(number instanceof Double); form.setParseBigDecimal(true); number = form.parse(symbols.getNaN() + "", new ParsePosition(0)); assertTrue(number instanceof Double); // Infinity will be parsed to Double, no matter parseBigDecimal set or // not. 
form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); symbols = new DecimalFormatSymbols(); number = form.parse(symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); assertEquals("Infinity", number.toString()); // When set bigDecimal to true, the result of parsing infinity form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); symbols = new DecimalFormatSymbols(); form.setParseBigDecimal(true); number = form.parse(symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); assertEquals("Infinity", number.toString()); // Negative infinity will be parsed to double no matter parseBigDecimal // set or not form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); symbols = new DecimalFormatSymbols(); number = form.parse("-" + symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); assertEquals("-Infinity", number.toString()); // When set bigDecimal to true, the result of parsing minus infinity form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); symbols = new DecimalFormatSymbols(); form.setParseBigDecimal(true); number = form.parse("-" + symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); assertEquals("-Infinity", number.toString()); // -0.0 will be parsed to different type according to the combination of // parseBigDecimal and parseIntegerOnly form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); // parseBigDecimal == true; // parseIntegerOnly == false; form.setParseBigDecimal(true); number = form.parse("-0", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); number = form.parse("-0.0", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); // parseBigDecimal == false; // parseIntegerOnly == true; form.setParseBigDecimal(false); form.setParseIntegerOnly(true); number = form.parse("-0", new ParsePosition(0)); assertTrue(number instanceof Long); number = form.parse("-0.0", new ParsePosition(0)); 
assertTrue(number instanceof Long); // parseBigDecimal == false; // parseIntegerOnly == false; form.setParseBigDecimal(false); form.setParseIntegerOnly(false); number = form.parse("-0", new ParsePosition(0)); assertTrue(number instanceof Double); number = form.parse("-0.0", new ParsePosition(0)); assertTrue(number instanceof Double); // parseBigDecimal == true; // parseIntegerOnly == true; // parseBigDecimal take precedence of parseBigInteger form.setParseBigDecimal(true); form.setParseIntegerOnly(true); number = form.parse("-0", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); number = form.parse("-0.0", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); number = form.parse("12.4", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); // When parseBigDecimal is set to false, no matter how massive the // mantissa part of a number is, the number will be parsed into Double form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); number = form.parse("9,223,372,036,854,775,808.00", new ParsePosition(0)); assertTrue(number instanceof Double); assertEquals("9.223372036854776E18", number.toString()); number = form.parse("-9,223,372,036,854,775,8080.00", new ParsePosition(0)); assertTrue(number instanceof Double); assertEquals("-9.223372036854776E19", number.toString()); // When parseBigDecimal is set to true, if mantissa part of number // exceeds Long.MAX_VALUE, the number will be parsed into BigDecimal form = (DecimalFormat) DecimalFormat.getInstance(Locale.US); form.setParseBigDecimal(true); number = form.parse("9,223,372,036,854,775,808.00", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); assertEquals(9.223372036854776E18, number.doubleValue(), 0); number = form.parse("-9,223,372,036,854,775,8080.00", new ParsePosition(0)); assertTrue(number instanceof BigDecimal); assertEquals(-9.223372036854776E19, number.doubleValue(), 0); // The minimum value of Long will be parsed to Long when parseBigDecimal // is not 
set ParsePosition pos = new ParsePosition(0); DecimalFormat df = new DecimalFormat(); pos = new ParsePosition(0); Number nb = df.parse("" + Long.MIN_VALUE, pos); assertTrue(nb instanceof Long); // The maximum value of Long will be parsed to Long when parseBigDecimal // is set pos = new ParsePosition(0); df = new DecimalFormat(); pos = new ParsePosition(0); nb = df.parse("" + Long.MAX_VALUE, pos); assertTrue(nb instanceof Long); // When parsing invalid string( which is neither consist of digits nor // NaN/Infinity), a null will be returned. pos = new ParsePosition(0); df = new DecimalFormat(); try { nb = df.parse("invalid", pos); assertNull(nb); } catch (NullPointerException e) { fail("Should not throw NPE"); } } public void test_getMaximumFractionDigits() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; // getMaximumFractionDigits of NumberFormat default to 3 // getMaximumFractionDigits of DecimalFormat default to 3 assertEquals(3, nform.getMaximumFractionDigits()); assertEquals(3, form.getMaximumFractionDigits()); // Greater than 340 (critical number used to distinguish // BigInteger and BigDecimal) nform.setMaximumFractionDigits(500); assertEquals(500, nform.getMaximumFractionDigits()); assertEquals(500, form.getMaximumFractionDigits()); form.setMaximumFractionDigits(500); assertEquals(500, nform.getMaximumFractionDigits()); assertEquals(500, form.getMaximumFractionDigits()); form.format(12.3); assertEquals(500, nform.getMaximumFractionDigits()); assertEquals(500, form.getMaximumFractionDigits()); } public void test_getMinimumFractionDigits() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; // getMinimumFractionDigits from NumberFormat (default to 0) // getMinimumFractionDigits from DecimalFormat (default to 0) assertEquals(0, nform.getMinimumFractionDigits()); assertEquals(0, form.getMinimumFractionDigits()); // Greater than 340 (critical number used 
to distinguish // BigInteger and BigDecimal) nform.setMinimumFractionDigits(500); assertEquals(500, nform.getMinimumFractionDigits()); assertEquals(500, form.getMinimumFractionDigits()); form.setMaximumFractionDigits(400); assertEquals(400, nform.getMinimumFractionDigits()); assertEquals(400, form.getMinimumFractionDigits()); } //FIXME This test fails on Harmony ClassLibrary public void test_getMaximumIntegerDigits() { final int maxIntDigit = 309; // When use default locale, in this case zh_CN // the returned instance of NumberFormat is a DecimalFormat DecimalFormat form = new DecimalFormat("00.###E0"); assertEquals(2, form.getMaximumIntegerDigits()); NumberFormat nform = DecimalFormat.getInstance(Locale.US); form = null; if (nform instanceof DecimalFormat) { form = (DecimalFormat) nform; } // Greater than 309 (critical number used to distinguish // BigInteger and BigDecimal) nform.setMaximumIntegerDigits(500); assertEquals(500, nform.getMaximumIntegerDigits()); assertEquals(500, form.getMaximumIntegerDigits()); form = new DecimalFormat("00.###E0"); assertEquals(2, form.getMaximumIntegerDigits()); form.setMaximumIntegerDigits(500); assertEquals(500, nform.getMaximumIntegerDigits()); assertEquals(500, form.getMaximumIntegerDigits()); form.format(12.3); assertEquals(500, nform.getMaximumIntegerDigits()); assertEquals(500, form.getMaximumIntegerDigits()); nform = DecimalFormat.getInstance(Locale.US); form = null; if (nform instanceof DecimalFormat) { form = (DecimalFormat) nform; } // getMaximumIntegerDigits from NumberFormat default to 309 // getMaximumIntegerDigits from DecimalFormat default to 309 // the following 2 assertions will fail on RI implementation, since the // implementation of ICU and RI are not identical. 
RI does not give // DecimalFormat an initial bound about its maximumIntegerDigits // (default to Integer.MAX_VALUE: 2147483647 ) assertEquals(maxIntDigit, nform.getMaximumIntegerDigits()); assertEquals(maxIntDigit, form.getMaximumIntegerDigits()); // regression test for HARMONY-878 assertTrue(new DecimalFormat("0\t0").getMaximumIntegerDigits() > 0); } public void test_getMinimumIntegerDigits() { final int minIntDigit = 1; NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; // getMaximumIntegerDigits from NumberFormat (default to 1) // getMaximumIntegerDigits from DecimalFormat (default to 1) assertEquals(minIntDigit, nform.getMinimumIntegerDigits()); assertEquals(minIntDigit, form.getMinimumIntegerDigits()); // Greater than 309 (critical number used to distinguish // BigInteger and BigDecimal) nform.setMinimumIntegerDigits(500); assertEquals(500, nform.getMinimumIntegerDigits()); assertEquals(500, form.getMinimumIntegerDigits()); form.setMaximumIntegerDigits(400); assertEquals(400, nform.getMinimumIntegerDigits()); assertEquals(400, form.getMinimumIntegerDigits()); } public void test_formatLjava_lang_Obj_Ljava_StringBuffer_Ljava_text_FieldPosition() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; // If Object(including null) is not of type Number, // IllegalArgumentException will be thrown out try { form.format(new Object(), new StringBuffer(), new FieldPosition(0)); fail("Should throw IAE"); } catch (IllegalArgumentException e) { // expected } try { form.format(null, new StringBuffer(), new FieldPosition(0)); fail("Should throw IAE"); } catch (IllegalArgumentException e) { // expected } // When StringBuffer == null || FieldPosition == null // NullPointerException will be thrown out. 
try { form.format(new Double(1.9), null, new FieldPosition(0)); fail("Should throw NPE"); } catch (NullPointerException e) { // expected } try { form.format(new Double(1.3), new StringBuffer(), null); fail("Should throw NPE"); } catch (NullPointerException e) { // expected } try { form.format(new Double(1.4), null, null); fail("Should throw NPE"); } catch (NullPointerException e) { // expected } try { form.format(new Object(), null, null); fail("Should throw IllegalArgumentException"); } catch (IllegalArgumentException e) { // expected } FieldPosition pos; StringBuffer out; DecimalFormat format = (DecimalFormat) NumberFormat .getInstance(Locale.US); // format maxLong pos = new FieldPosition(0); out = format.format(new Long(Long.MAX_VALUE), new StringBuffer(), pos); assertTrue("Wrong result L1: " + out, out.toString().equals( "9,223,372,036,854,775,807")); // format minLong pos = new FieldPosition(0); out = format.format(new Long(Long.MIN_VALUE), new StringBuffer(), pos); assertTrue("Wrong result L2: " + out, out.toString().equals( "-9,223,372,036,854,775,808")); // format maxLong of type BigInteger pos = new FieldPosition(0); out = format.format(new java.math.BigInteger(String .valueOf(Long.MAX_VALUE)), new StringBuffer(), pos); assertTrue("Wrong result BI1: " + out, out.toString().equals( "9,223,372,036,854,775,807")); // format minLong of type BigInteger pos = new FieldPosition(0); out = format.format(new java.math.BigInteger(String .valueOf(Long.MIN_VALUE)), new StringBuffer(), pos); assertTrue("Wrong result BI2: " + out, out.toString().equals( "-9,223,372,036,854,775,808")); // format maxLong + 1 java.math.BigInteger big; pos = new FieldPosition(0); big = new java.math.BigInteger(String.valueOf(Long.MAX_VALUE)) .add(new java.math.BigInteger("1")); out = format.format(big, new StringBuffer(), pos); assertTrue("Wrong result BI3: " + out, out.toString().equals( "9,223,372,036,854,775,808")); // format minLong - 1 pos = new FieldPosition(0); big = new 
java.math.BigInteger(String.valueOf(Long.MIN_VALUE)) .add(new java.math.BigInteger("-1")); out = format.format(big, new StringBuffer(), pos); assertTrue("Wrong result BI4: " + out, out.toString().equals( "-9,223,372,036,854,775,809")); // format big decimal pos = new FieldPosition(0); out = format.format(new java.math.BigDecimal("51.348"), new StringBuffer(), pos); assertTrue("Wrong result BD1: " + out, out.toString().equals("51.348")); // format big decimal pos = new FieldPosition(0); out = format.format(new java.math.BigDecimal("51"), new StringBuffer(), pos); assertTrue("Wrong result BD2: " + out, out.toString().equals("51")); // format big decimal Double.MAX_VALUE * 2 java.math.BigDecimal bigDecimal; pos = new FieldPosition(0); final String doubleMax2 = "359,538,626,972,463,141,629,054,847,463,408," + "713,596,141,135,051,689,993,197,834,953,606,314,521,560,057,077," + "521,179,117,265,533,756,343,080,917,907,028,764,928,468,642,653," + "778,928,365,536,935,093,407,075,033,972,099,821,153,102,564,152," + "490,980,180,778,657,888,151,737,016,910,267,884,609,166,473,806," + "445,896,331,617,118,664,246,696,549,595,652,408,289,446,337,476," + "354,361,838,599,762,500,808,052,368,249,716,736"; bigDecimal = new BigDecimal(Double.MAX_VALUE).add(new BigDecimal( Double.MAX_VALUE)); out = format.format(bigDecimal, new StringBuffer(), pos); assertTrue("Wrong result BDmax2: " + out, out.toString().equals( doubleMax2)); // format big decimal Double.MIN_VALUE + Double.MIN_VALUE // and Double.MIN_VALUE - Double.MIN_VALUE pos = new FieldPosition(0); bigDecimal = new BigDecimal(Double.MIN_VALUE).add(new BigDecimal( Double.MIN_VALUE)); out = format.format(bigDecimal, new StringBuffer(), pos); bigDecimal = new BigDecimal(Float.MAX_VALUE).add(new BigDecimal( Float.MAX_VALUE)); out = format.format(bigDecimal, new StringBuffer(), pos); final String BDFloatMax2 = "680,564,693,277,057,719,623,408,366,969,033,850,880"; assertTrue("Wrong result BDFloatMax2: " + out, 
out.toString().equals( BDFloatMax2)); // format big decimal Float.MIN_VALUE + Float.MIN_VALUE // and Float.MIN_VALUE - Float.MIN_VALUE bigDecimal = new BigDecimal(Float.MIN_VALUE).add(new BigDecimal( Float.MIN_VALUE)); out = format.format(bigDecimal, new StringBuffer(), pos); final String BDFloatMin2 = "0"; bigDecimal = new BigDecimal(Float.MIN_VALUE).subtract(new BigDecimal( Float.MIN_VALUE)); out = format.format(bigDecimal, new StringBuffer(), pos); assertTrue("Wrong result BDFloatMax2: " + out, out.toString().equals( BDFloatMin2)); } public void test_setMaximumFractionDigitsLjava_lang_Integer() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; form.setMaximumFractionDigits(-2); assertEquals(0, form.getMaximumFractionDigits()); form.setMaximumFractionDigits(341); assertEquals(341, form.getMaximumFractionDigits()); } public void test_setMinimumFractionDigitsLjava_lang_Integer() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; form.setMinimumFractionDigits(-3); assertEquals(0, form.getMinimumFractionDigits()); form.setMinimumFractionDigits(310); assertEquals(310, form.getMinimumFractionDigits()); } public void test_setMaximumIntegerDigitsLjava_lang_Integer() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; form.setMaximumIntegerDigits(-3); assertEquals(0, form.getMaximumIntegerDigits()); form.setMaximumIntegerDigits(310); assertEquals(310, form.getMaximumIntegerDigits()); } public void test_setMinimumIntegerDigitsLjava_lang_Integer() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; form.setMinimumIntegerDigits(-3); assertEquals(0, form.getMinimumIntegerDigits()); form.setMinimumIntegerDigits(310); assertEquals(310, form.getMinimumIntegerDigits()); } // When MaxFractionDigits is set first and less than MinFractionDigits, max // will be changed to 
min value public void test_setMinimumFactionDigitsLjava_lang_Integer_setMaximumFractionDigitsLjava_lang_Integer() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; form.setMaximumFractionDigits(100); form.setMinimumFractionDigits(200); assertEquals(200, form.getMaximumFractionDigits()); assertEquals(200, form.getMinimumFractionDigits()); form.setMaximumIntegerDigits(100); form.setMinimumIntegerDigits(200); assertEquals(200, form.getMaximumIntegerDigits()); assertEquals(200, form.getMinimumIntegerDigits()); } // When MinFractionDigits is set first and less than MaxFractionDigits, min // will be changed to max value public void test_setMaximumFactionDigitsLjava_lang_Integer_setMinimumFractionDigitsLjava_lang_Integer() { NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; form.setMinimumFractionDigits(200); form.setMaximumFractionDigits(100); assertEquals(100, form.getMaximumFractionDigits()); assertEquals(100, form.getMinimumFractionDigits()); form.setMinimumIntegerDigits(200); form.setMaximumIntegerDigits(100); assertEquals(100, form.getMaximumIntegerDigits()); assertEquals(100, form.getMinimumIntegerDigits()); } public void test_equalsLjava_lang_Object() { DecimalFormat format = (DecimalFormat) DecimalFormat.getInstance(Locale.US); DecimalFormat cloned = (DecimalFormat) format.clone(); cloned.setDecimalFormatSymbols(new DecimalFormatSymbols(Locale.US)); assertEquals(format, cloned); Currency c = Currency.getInstance(Locale.US); cloned.setCurrency(c); assertEquals(format, cloned); } public void test_setPositivePrefixLjava_lang_String() { DecimalFormat format = new DecimalFormat(); assertEquals("", format.getPositivePrefix()); } public void test_setPositiveSuffixLjava_lang_String() { DecimalFormat format = new DecimalFormat(); assertEquals("", format.getPositiveSuffix()); } public void test_setNegativePrefixLjava_lang_String() { DecimalFormat format = new 
DecimalFormat(); assertEquals("-", format.getNegativePrefix()); } public void test_setNegativeSuffixLjava_lang_String() { DecimalFormat format = new DecimalFormat(); assertEquals("", format.getNegativeSuffix()); } public void test_setGroupingUse() { DecimalFormat format = new DecimalFormat(); StringBuffer buf = new StringBuffer(); format.setGroupingUsed(false); format.format(new Long(1970), buf, new FieldPosition(0)); assertEquals("1970", buf.toString()); assertFalse(format.isGroupingUsed()); } /** * @tests java.text.DecimalFormat#DecimalFormat(java.lang.String) */ public void test_ConstructorLjava_lang_String() { // Test for method java.text.DecimalFormat(java.lang.String) // the constructor form that specifies a pattern is equal to the form // constructed with no pattern and applying that pattern using the // applyPattern call DecimalFormat format = new DecimalFormat("'$'0000.0000"); DecimalFormat format1 = new DecimalFormat(); format1.applyPattern("'$'0000.0000"); assertTrue("Constructed format did not match applied format object", format.equals(format1)); } /** * @tests java.text.DecimalFormat#applyPattern(java.lang.String) */ public void test_applyPatternLjava_lang_String() { DecimalFormat format = new DecimalFormat("#.#"); assertEquals("Wrong pattern 1", "#0.#", format.toPattern()); format = new DecimalFormat("#."); assertEquals("Wrong pattern 2", "#0.", format.toPattern()); format = new DecimalFormat("#"); assertEquals("Wrong pattern 3", "#", format.toPattern()); format = new DecimalFormat(".#"); assertEquals("Wrong pattern 4", "#.0", format.toPattern()); } /** * @tests java.text.DecimalFormat#clone() */ public void test_clone() { DecimalFormat format = (DecimalFormat) DecimalFormat.getInstance(Locale.US); DecimalFormat cloned = (DecimalFormat) format.clone(); assertEquals(cloned.getDecimalFormatSymbols(), format .getDecimalFormatSymbols()); format = new DecimalFormat("'$'0000.0000"); DecimalFormat format1 = (DecimalFormat) (format.clone()); // make sure the 
objects are equal assertTrue("Object's clone isn't equal!", format.equals(format1)); // change the content of the clone and make sure it's not equal anymore // verifies that it's data is now distinct from the original format1.applyPattern("'$'0000.####"); assertTrue("Object's changed clone should not be equal!", !format .equals(format1)); } private void compare(String testName, String format, String expected) { assertTrue(testName + " got: " + format + " expected: " + expected, format.equals(expected)); } private boolean compare(int count, String format, String expected) { boolean result = format.equals(expected); if (!result) System.out.println("Failure test: " + count + " got: " + format + " expected: " + expected); return result; } /** * @tests java.text.DecimalFormat#format(double, java.lang.StringBuffer, * java.text.FieldPosition) */ //FIXME This test fails on Harmony ClassLibrary public void test_formatDLjava_lang_StringBufferLjava_text_FieldPosition() { new Support_DecimalFormat( "test_formatDLjava_lang_StringBufferLjava_text_FieldPosition") .t_format_with_FieldPosition(); int failCount = 0; Support_BitSet failures = new Support_BitSet(); final DecimalFormatSymbols dfs = new DecimalFormatSymbols(Locale.US); DecimalFormat df = new DecimalFormat("00.0#E0", dfs); compare("00.0#E0: 0.0", df.format(0.0), "00.0E0"); compare("00.0#E0: 1.0", df.format(1.0), "10.0E-1"); compare("00.0#E0: 12.0", df.format(12.0), "12.0E0"); compare("00.0#E0: 123.0", df.format(123.0), "12.3E1"); compare("00.0#E0: 1234.0", df.format(1234.0), "12.34E2"); compare("00.0#E0: 12346.0", df.format(12346.0), "12.35E3"); compare("00.0#E0: 99999.0", df.format(99999.0), "10.0E4"); compare("00.0#E0: 1.2", df.format(1.2), "12.0E-1"); compare("00.0#E0: 12.3", df.format(12.3), "12.3E0"); compare("00.0#E0: 123.4", df.format(123.4), "12.34E1"); compare("00.0#E0: 1234.6", df.format(1234.6), "12.35E2"); compare("00.0#E0: 9999.9", df.format(9999.9), "10.0E3"); compare("00.0#E0: 0.1", df.format(0.1), 
"10.0E-2"); compare("00.0#E0: 0.12", df.format(0.12), "12.0E-2"); compare("00.0#E0: 0.123", df.format(0.123), "12.3E-2"); compare("00.0#E0: 0.1234", df.format(0.1234), "12.34E-2"); compare("00.0#E0: 0.12346", df.format(0.12346), "12.35E-2"); compare("00.0#E0: 0.99999", df.format(0.99999), "10.0E-1"); compare("00.0#E0: -0.0", df.format(-0.0), "-00.0E0"); compare("00.0#E0: -1.0", df.format(-1.0), "-10.0E-1"); compare("00.0#E0: -12.0", df.format(-12.0), "-12.0E0"); compare("00.0#E0: -123.0", df.format(-123.0), "-12.3E1"); compare("00.0#E0: -1234.0", df.format(-1234.0), "-12.34E2"); compare("00.0#E0: -12346.0", df.format(-12346.0), "-12.35E3"); compare("00.0#E0: -99999.0", df.format(-99999.0), "-10.0E4"); df = new DecimalFormat("##0.0E0", dfs); compare("##0.0E0: -0.0", df.format(-0.0), "-0.0E0"); compare("##0.0E0: 0.0", df.format(0.0), "0.0E0"); compare("##0.0E0: 1.0", df.format(1.0), "1.0E0"); compare("##0.0E0: 12.0", df.format(12.0), "12E0"); compare("##0.0E0: 123.0", df.format(123.0), "123E0"); compare("##0.0E0: 1234.0", df.format(1234.0), "1.234E3"); compare("##0.0E0: 12346.0", df.format(12346.0), "12.35E3"); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(99999.0), "100E3")) failures.set(failCount); failCount++; compare("##0.0E0: 999999.0", df.format(999999.0), "1.0E6"); df = new DecimalFormat("#00.0##E0", dfs); compare("#00.0##E0: 0.1", df.format(0.1), ".100E0"); compare("#00.0##E0: 0.12", df.format(0.12), ".120E0"); compare("#00.0##E0: 0.123", df.format(0.123), ".123E0"); compare("#00.0##E0: 0.1234", df.format(0.1234), ".1234E0"); compare("#00.0##E0: 0.1234567", df.format(0.1234567), ".123457E0"); compare("#00.0##E0: 0.01", df.format(0.01), "10.0E-3"); compare("#00.0##E0: 0.012", df.format(0.012), "12.0E-3"); compare("#00.0##E0: 0.0123", df.format(0.0123), "12.3E-3"); compare("#00.0##E0: 0.01234", df.format(0.01234), "12.34E-3"); compare("#00.0##E0: 0.01234567", df.format(0.01234567), "12.3457E-3"); compare("#00.0##E0: 0.001", df.format(0.001), 
"1.00E-3"); compare("#00.0##E0: 0.0012", df.format(0.0012), "1.20E-3"); compare("#00.0##E0: 0.00123", df.format(0.00123), "1.23E-3"); compare("#00.0##E0: 0.001234", df.format(0.001234), "1.234E-3"); compare("#00.0##E0: 0.001234567", df.format(0.001234567), "1.23457E-3"); compare("#00.0##E0: 0.0001", df.format(0.0001), "100E-6"); compare("#00.0##E0: 0.00012", df.format(0.00012), "120E-6"); compare("#00.0##E0: 0.000123", df.format(0.000123), "123E-6"); compare("#00.0##E0: 0.0001234", df.format(0.0001234), "123.4E-6"); compare("#00.0##E0: 0.0001234567", df.format(0.0001234567), "123.457E-6"); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(0.0), "0.00E0")) failures.set(failCount); failCount++; compare("#00.0##E0: 1.0", df.format(1.0), "1.00E0"); compare("#00.0##E0: 12.0", df.format(12.0), "12.0E0"); compare("#00.0##E0: 123.0", df.format(123.0), "123E0"); compare("#00.0##E0: 1234.0", df.format(1234.0), "1.234E3"); compare("#00.0##E0: 12345.0", df.format(12345.0), "12.345E3"); compare("#00.0##E0: 123456.0", df.format(123456.0), "123.456E3"); compare("#00.0##E0: 1234567.0", df.format(1234567.0), "1.23457E6"); compare("#00.0##E0: 12345678.0", df.format(12345678.0), "12.3457E6"); compare("#00.0##E0: 99999999.0", df.format(99999999.0), "100E6"); df = new DecimalFormat("#.0E0", dfs); compare("#.0E0: -0.0", df.format(-0.0), "-.0E0"); compare("#.0E0: 0.0", df.format(0.0), ".0E0"); compare("#.0E0: 1.0", df.format(1.0), ".1E1"); compare("#.0E0: 12.0", df.format(12.0), ".12E2"); compare("#.0E0: 123.0", df.format(123.0), ".12E3"); compare("#.0E0: 1234.0", df.format(1234.0), ".12E4"); compare("#.0E0: 9999.0", df.format(9999.0), ".1E5"); df = new DecimalFormat("0.#E0", dfs); compare("0.#E0: -0.0", df.format(-0.0), "-0E0"); compare("0.#E0: 0.0", df.format(0.0), "0E0"); compare("0.#E0: 1.0", df.format(1.0), "1E0"); compare("0.#E0: 12.0", df.format(12.0), "1.2E1"); compare("0.#E0: 123.0", df.format(123.0), "1.2E2"); compare("0.#E0: 1234.0", df.format(1234.0), "1.2E3"); 
compare("0.#E0: 9999.0", df.format(9999.0), "1E4"); df = new DecimalFormat(".0E0", dfs); compare(".0E0: -0.0", df.format(-0.0), "-.0E0"); compare(".0E0: 0.0", df.format(0.0), ".0E0"); compare(".0E0: 1.0", df.format(1.0), ".1E1"); compare(".0E0: 12.0", df.format(12.0), ".1E2"); compare(".0E0: 123.0", df.format(123.0), ".1E3"); compare(".0E0: 1234.0", df.format(1234.0), ".1E4"); compare(".0E0: 9999.0", df.format(9999.0), ".1E5"); df = new DecimalFormat("0.E0", dfs); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(0.0), "0.E0")) failures.set(failCount); failCount++; if (!compare(failCount, df.format(1.0), "1.E0")) failures.set(failCount); failCount++; if (!compare(failCount, df.format(12.0), "1.E1")) failures.set(failCount); failCount++; if (!compare(failCount, df.format(123.0), "1.E2")) failures.set(failCount); failCount++; if (!compare(failCount, df.format(1234.0), "1.E3")) failures.set(failCount); failCount++; if (!compare(failCount, df.format(9999.0), "1.E4")) failures.set(failCount); failCount++; df = new DecimalFormat("##0.00#E0", dfs); compare("##0.00#E0: 0.1", df.format(0.1), ".100E0"); compare("##0.00#E0: 0.1234567", df.format(0.1234567), ".123457E0"); compare("##0.00#E0: 0.9999999", df.format(0.9999999), "1.00E0"); compare("##0.00#E0: 0.01", df.format(0.01), "10.0E-3"); compare("##0.00#E0: 0.01234567", df.format(0.01234567), "12.3457E-3"); compare("##0.00#E0: 0.09999999", df.format(0.09999999), ".100E0"); compare("##0.00#E0: 0.001", df.format(0.001), "1.00E-3"); compare("##0.00#E0: 0.001234567", df.format(0.001234567), "1.23457E-3"); compare("##0.00#E0: 0.009999999", df.format(0.009999999), "10.0E-3"); compare("##0.00#E0: 0.0001", df.format(0.0001), "100E-6"); compare("##0.00#E0: 0.0001234567", df.format(0.0001234567), "123.457E-6"); compare("##0.00#E0: 0.0009999999", df.format(0.0009999999), "1.00E-3"); df = new DecimalFormat("###0.00#E0", dfs); compare("###0.00#E0: 0.1", df.format(0.1), ".100E0"); compare("###0.00#E0: 0.12345678", 
df.format(0.12345678), ".1234568E0"); compare("###0.00#E0: 0.99999999", df.format(0.99999999), "1.00E0"); compare("###0.00#E0: 0.01", df.format(0.01), "100E-4"); compare("###0.00#E0: 0.012345678", df.format(0.012345678), "123.4568E-4"); compare("###0.00#E0: 0.099999999", df.format(0.099999999), ".100E0"); compare("###0.00#E0: 0.001", df.format(0.001), "10.0E-4"); compare("###0.00#E0: 0.0012345678", df.format(0.0012345678), "12.34568E-4"); compare("###0.00#E0: 0.0099999999", df.format(0.0099999999), "100E-4"); compare("###0.00#E0: 0.0001", df.format(0.0001), "1.00E-4"); compare("###0.00#E0: 0.00012345678", df.format(0.00012345678), "1.234568E-4"); compare("###0.00#E0: 0.00099999999", df.format(0.00099999999), "10.0E-4"); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(0.00001), "1000E-8")) failures.set(failCount); failCount++; compare("###0.00#E0: 0.000012345678", df.format(0.000012345678), "1234.568E-8"); compare("###0.00#E0: 0.000099999999", df.format(0.000099999999), "1.00E-4"); df = new DecimalFormat("###0.0#E0", dfs); compare("###0.0#E0: 0.1", df.format(0.1), ".10E0"); compare("###0.0#E0: 0.1234567", df.format(0.1234567), ".123457E0"); compare("###0.0#E0: 0.9999999", df.format(0.9999999), "1.0E0"); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(0.01), "100E-4")) failures.set(failCount); failCount++; compare("###0.0#E0: 0.01234567", df.format(0.01234567), "123.457E-4"); compare("###0.0#E0: 0.09999999", df.format(0.09999999), ".10E0"); compare("###0.0#E0: 0.001", df.format(0.001), "10E-4"); compare("###0.0#E0: 0.001234567", df.format(0.001234567), "12.3457E-4"); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(0.009999999), "100E-4")) failures.set(failCount); failCount++; compare("###0.0#E0: 0.0001", df.format(0.0001), "1.0E-4"); compare("###0.0#E0: 0.0001234567", df.format(0.0001234567), "1.23457E-4"); compare("###0.0#E0: 0.0009999999", df.format(0.0009999999), "10E-4"); // Fails in JDK 1.2.2 if (!compare(failCount, df.format(0.00001), 
"1000E-8")) failures.set(failCount); failCount++; compare("###0.0#E0: 0.00001234567", df.format(0.00001234567), "1234.57E-8"); compare("###0.0#E0: 0.00009999999", df.format(0.00009999999), "1.0E-4"); assertTrue("Failed " + failures + " of " + failCount, failures.length() == 0); String formatString = "##0.#"; df = new DecimalFormat(formatString, dfs); df.setMinimumFractionDigits(30); compare(formatString + ": 0.000000000000000000000000000000", df .format(0.0), "0.000000000000000000000000000000"); compare(formatString + ": -0.000000000000000000000000000000", df .format(-0.0), "-0.000000000000000000000000000000"); compare(formatString + ": 1.000000000000000000000000000000", df .format(1.0), "1.000000000000000000000000000000"); compare(formatString + ": -1.000000000000000000000000000000", df .format(-1.0), "-1.000000000000000000000000000000"); df = new DecimalFormat(formatString); df.setMaximumFractionDigits(30); compare(formatString + ": 0", df.format(0.0), "0"); compare(formatString + ": -0", df.format(-0.0), "-0"); compare(formatString + ": 1", df.format(1.0), "1"); compare(formatString + ": -1", df.format(-1.0), "-1"); } /** * @tests java.text.DecimalFormat#format(long, java.lang.StringBuffer, * java.text.FieldPosition) */ //FIXME This test fails on Harmony ClassLibrary public void test_formatJLjava_lang_StringBufferLjava_text_FieldPosition() { int failCount = 0; Support_BitSet failures = new Support_BitSet(); final DecimalFormatSymbols dfs = new DecimalFormatSymbols(Locale.US); DecimalFormat df = new DecimalFormat("00.0#E0", dfs); assertEquals("00.0#E0: 0", "00.0E0", df.format(0)); assertEquals("00.0#E0: 1", "10.0E-1", df.format(1)); assertEquals("00.0#E0: 12", "12.0E0", df.format(12)); assertEquals("00.0#E0: 123", "12.3E1", df.format(123)); assertEquals("00.0#E0: 1234", "12.34E2", df.format(1234)); assertEquals("00.0#E0: 12346", "12.35E3", df.format(12346)); assertEquals("00.0#E0: 99999", "10.0E4", df.format(99999)); assertEquals("00.0#E0: -1", "-10.0E-1", 
df.format(-1)); assertEquals("00.0#E0: -12", "-12.0E0", df.format(-12)); assertEquals("00.0#E0: -123", "-12.3E1", df.format(-123)); assertEquals("00.0#E0: -1234", "-12.34E2", df.format(-1234)); assertEquals("00.0#E0: -12346", "-12.35E3", df.format(-12346)); assertEquals("00.0#E0: -99999", "-10.0E4", df.format(-99999)); df = new DecimalFormat("##0.0E0", dfs); assertEquals("##0.0E0: 0", "0.0E0", df.format(0)); assertEquals("##0.0E0: 1", "1.0E0", df.format(1)); assertEquals("##0.0E0: 12", "12E0", df.format(12)); assertEquals("##0.0E0: 123", "123E0", df.format(123)); assertEquals("##0.0E0: 1234", "1.234E3", df.format(1234)); assertEquals("##0.0E0: 12346", "12.35E3", df.format(12346)); // Fails in JDK 1.2.2 if (!df.format(99999).equals("100E3")) failures.set(failCount); failCount++; assertEquals("##0.0E0: 999999", "1.0E6", df.format(999999)); df = new DecimalFormat("#00.0##E0", dfs); // Fails in JDK 1.2.2 if (!df.format(0).equals("0.00E0")) failures.set(failCount); failCount++; assertEquals("#00.0##E0: 1", "1.00E0", df.format(1)); assertEquals("#00.0##E0: 12", "12.0E0", df.format(12)); assertEquals("#00.0##E0: 123", "123E0", df.format(123)); assertEquals("#00.0##E0: 1234", "1.234E3", df.format(1234)); assertEquals("#00.0##E0: 12345", "12.345E3", df.format(12345)); assertEquals("#00.0##E0: 123456", "123.456E3", df.format(123456)); assertEquals("#00.0##E0: 1234567", "1.23457E6", df.format(1234567)); assertEquals("#00.0##E0: 12345678", "12.3457E6", df.format(12345678)); assertEquals("#00.0##E0: 99999999", "100E6", df.format(99999999)); df = new DecimalFormat("#.0E0", dfs); assertEquals("#.0E0: 0", ".0E0", df.format(0)); assertEquals("#.0E0: 1", ".1E1", df.format(1)); assertEquals("#.0E0: 12", ".12E2", df.format(12)); assertEquals("#.0E0: 123", ".12E3", df.format(123)); assertEquals("#.0E0: 1234", ".12E4", df.format(1234)); assertEquals("#.0E0: 9999", ".1E5", df.format(9999)); df = new DecimalFormat("0.#E0", dfs); assertEquals("0.#E0: 0", "0E0", df.format(0)); 
assertEquals("0.#E0: 1", "1E0", df.format(1)); assertEquals("0.#E0: 12", "1.2E1", df.format(12)); assertEquals("0.#E0: 123", "1.2E2", df.format(123)); assertEquals("0.#E0: 1234", "1.2E3", df.format(1234)); assertEquals("0.#E0: 9999", "1E4", df.format(9999)); assertTrue("Failed " + failures + " of " + failCount, failures.length() == 0); } /** * @tests java.text.DecimalFormat#formatToCharacterIterator(java.lang.Object) */ //FIXME This test fails on Harmony ClassLibrary public void test_formatToCharacterIteratorLjava_lang_Object() { try { // Regression for HARMONY-466 new DecimalFormat().formatToCharacterIterator(null); fail("NullPointerException expected"); } catch (NullPointerException e) { // expected } new Support_DecimalFormat( "test_formatToCharacterIteratorLjava_lang_Object") .t_formatToCharacterIterator(); } /** * @tests java.text.DecimalFormat#format(double) */ public void test_formatD() { DecimalFormat format = (DecimalFormat) NumberFormat .getInstance(Locale.ENGLISH); format.setGroupingUsed(false); format.setMaximumFractionDigits(400); for (int i = 0; i < 309; i++) { String tval = "1"; for (int j = 0; j < i; j++) tval += "0"; double d = Double.parseDouble(tval); String result = format.format(d); assertEquals(i + ") e:" + tval + " r:" + result, tval, result); } for (int i = 0; i < 322; i++) { String tval = "0."; for (int j = 0; j < i; j++) tval += "0"; tval += "1"; double d = Double.parseDouble(tval); String result = format.format(d); assertEquals(i + ") e:" + tval + " r:" + result, tval, result); } assertEquals("999999999999999", format.format(999999999999999.)); assertEquals("1", "999999999999999.9", format.format(999999999999999.9)); assertEquals("2", "99999999999999.98", format.format(99999999999999.99)); assertEquals("3", "9999999999999.998", format.format(9999999999999.999)); assertEquals("4", "999999999999.9999", format.format(999999999999.9999)); assertEquals("5", "99999999999.99998", format.format(99999999999.99999)); assertEquals("6", 
"9999999999.999998", format.format(9999999999.999999)); assertEquals("7", "999999999.9999999", format.format(999999999.9999999)); assertEquals("8", "99999999.99999999", format.format(99999999.99999999)); assertEquals("9", "9999999.999999998", format.format(9999999.999999999)); assertEquals("10", "99999.99999999999", format .format(99999.99999999999)); assertEquals("11", "9999.999999999998", format .format(9999.999999999999)); assertEquals("12", "999.9999999999999", format .format(999.9999999999999)); assertEquals("13", "99.99999999999999", format .format(99.99999999999999)); assertEquals("14", "9.999999999999998", format .format(9.999999999999999)); assertEquals("15", "0.9999999999999999", format .format(.9999999999999999)); } /** * @tests java.text.DecimalFormat#getDecimalFormatSymbols() */ public void test_getDecimalFormatSymbols() { DecimalFormat df = (DecimalFormat) NumberFormat .getInstance(Locale.ENGLISH); DecimalFormatSymbols dfs = df.getDecimalFormatSymbols(); assertTrue("Identical symbols", dfs != df.getDecimalFormatSymbols()); } /** * @tests java.text.DecimalFormat#getCurrency() */ //FIXME This test fails on Harmony ClassLibrary public void test_getCurrency() { Currency currK = Currency.getInstance("KRW"); Currency currX = Currency.getInstance("XXX"); Currency currE = Currency.getInstance("EUR"); Currency curr01; DecimalFormat df = (DecimalFormat) NumberFormat .getCurrencyInstance(new Locale("ko", "KR")); assertTrue("Test1: Returned incorrect currency", df.getCurrency() == currK); df = (DecimalFormat) NumberFormat.getCurrencyInstance(new Locale("", "KR")); assertTrue("Test2: Returned incorrect currency", df.getCurrency() == currK); df = (DecimalFormat) NumberFormat.getCurrencyInstance(new Locale("ko", "")); assertTrue("Test3: Returned incorrect currency", df.getCurrency() == currX); df = (DecimalFormat) NumberFormat.getCurrencyInstance(new Locale("fr", "FR")); assertTrue("Test4: Returned incorrect currency", df.getCurrency() == currE); // Regression for 
HARMONY-1351 df = (DecimalFormat) NumberFormat.getCurrencyInstance(new Locale("QWERTY")); assertTrue("Test5: Returned incorrect currency", df.getCurrency() == currX); // JDK fails these tests since it doesn't have the PREEURO variant // df = (DecimalFormat)NumberFormat.getCurrencyInstance(new Locale("fr", // "FR","PREEURO")); // assertTrue("Test5: Returned incorrect currency", df.getCurrency() == // currF); } /** * @tests java.text.DecimalFormat#getGroupingSize() */ public void test_getGroupingSize() { DecimalFormat df = new DecimalFormat("###0.##"); assertEquals("Wrong unset size", 0, df.getGroupingSize()); df = new DecimalFormat("#,##0.##"); assertEquals("Wrong set size", 3, df.getGroupingSize()); df = new DecimalFormat("#,###,###0.##"); assertEquals("Wrong multiple set size", 4, df.getGroupingSize()); } /** * @tests java.text.DecimalFormat#getMultiplier() */ public void test_getMultiplier() { final int defaultMultiplier = 1; NumberFormat nform = DecimalFormat.getInstance(Locale.US); DecimalFormat form = (DecimalFormat) nform; assertEquals(defaultMultiplier, form.getMultiplier()); DecimalFormat df = new DecimalFormat("###0.##"); assertEquals("Wrong unset multiplier", 1, df.getMultiplier()); df = new DecimalFormat("###0.##%"); assertEquals("Wrong percent multiplier", 100, df.getMultiplier()); df = new DecimalFormat("###0.##\u2030"); assertEquals("Wrong mille multiplier", 1000, df.getMultiplier()); } /** * @tests java.text.DecimalFormat#isDecimalSeparatorAlwaysShown() */ public void test_isDecimalSeparatorAlwaysShown() { DecimalFormat df = new DecimalFormat("###0.##"); assertTrue("Wrong unset value", !df.isDecimalSeparatorAlwaysShown()); df = new DecimalFormat("###0.00"); assertTrue("Wrong unset2 value", !df.isDecimalSeparatorAlwaysShown()); df = new DecimalFormat("###0."); assertTrue("Wrong set value", df.isDecimalSeparatorAlwaysShown()); } /** * @tests java.text.DecimalFormat#parse(java.lang.String, * java.text.ParsePosition) */ //FIXME This test fails on Harmony 
ClassLibrary public void test_parseLjava_lang_StringLjava_text_ParsePosition() { DecimalFormat format = (DecimalFormat) NumberFormat .getNumberInstance(Locale.ENGLISH); ParsePosition pos = new ParsePosition(0); Number result = format.parse("9223372036854775807", pos); assertTrue("Wrong result type for Long.MAX_VALUE", result.getClass() == Long.class); assertTrue("Wrong result Long.MAX_VALUE", result.longValue() == Long.MAX_VALUE); pos = new ParsePosition(0); result = format.parse("-9223372036854775808", pos); assertTrue("Wrong result type for Long.MIN_VALUE", result.getClass() == Long.class); assertTrue("Wrong result Long.MIN_VALUE: " + result.longValue(), result .longValue() == Long.MIN_VALUE); pos = new ParsePosition(0); result = format.parse("9223372036854775808", pos); assertTrue("Wrong result type for Long.MAX_VALUE+1", result.getClass() == Double.class); assertTrue("Wrong result Long.MAX_VALUE + 1", result.doubleValue() == (double) Long.MAX_VALUE + 1); pos = new ParsePosition(0); result = format.parse("-9223372036854775809", pos); assertTrue("Wrong result type for Long.MIN_VALUE+1", result.getClass() == Double.class); assertTrue("Wrong result Long.MIN_VALUE - 1", result.doubleValue() == (double) Long.MIN_VALUE - 1); pos = new ParsePosition(0); result = format.parse("18446744073709551629", pos); assertTrue("Wrong result type for overflow", result.getClass() == Double.class); assertTrue("Wrong result for overflow", result.doubleValue() == 18446744073709551629d); pos = new ParsePosition(0); result = format.parse("42325917317067571199", pos); assertTrue("Wrong result type for overflow a: " + result, result .getClass() == Double.class); assertTrue("Wrong result for overflow a: " + result, result .doubleValue() == 42325917317067571199d); pos = new ParsePosition(0); result = format.parse("4232591731706757119E1", pos); assertTrue("Wrong result type for overflow b: " + result, result .getClass() == Double.class); assertTrue("Wrong result for overflow b: " + result, 
result .doubleValue() == 42325917317067571190d); pos = new ParsePosition(0); result = format.parse(".42325917317067571199E20", pos); assertTrue("Wrong result type for overflow c: " + result, result .getClass() == Double.class); assertTrue("Wrong result for overflow c: " + result, result .doubleValue() == 42325917317067571199d); pos = new ParsePosition(0); result = format.parse("922337203685477580.9E1", pos); assertTrue("Wrong result type for overflow d: " + result, result .getClass() == Double.class); assertTrue("Wrong result for overflow d: " + result, result .doubleValue() == 9223372036854775809d); pos = new ParsePosition(0); result = format.parse("9.223372036854775809E18", pos); assertTrue("Wrong result type for overflow e: " + result, result .getClass() == Double.class); assertTrue("Wrong result for overflow e: " + result, result .doubleValue() == 9223372036854775809d); // test parse with multipliers format.setMultiplier(100); result = format.parse("9223372036854775807", new ParsePosition(0)); assertTrue("Wrong result type multiplier 100: " + result, result .getClass() == Long.class); assertTrue("Wrong result for multiplier 100: " + result, result .longValue() == 92233720368547758L); format.setMultiplier(1000); result = format.parse("9223372036854775807", new ParsePosition(0)); assertTrue("Wrong result type multiplier 1000: " + result, result .getClass() == Long.class); assertTrue("Wrong result for multiplier 1000: " + result, result .longValue() == 9223372036854776L); format.setMultiplier(10000); result = format.parse("9223372036854775807", new ParsePosition(0)); assertTrue("Wrong result type multiplier 10000: " + result, result .getClass() == Double.class); assertTrue("Wrong result for multiplier 10000: " + result, result .doubleValue() == 922337203685477.5807d); } /** * @tests java.text.DecimalFormat#setDecimalFormatSymbols(java.text.DecimalFormatSymbols) */ public void test_setDecimalFormatSymbolsLjava_text_DecimalFormatSymbols() { DecimalFormat df = new 
DecimalFormat("###0.##"); DecimalFormatSymbols dfs = new DecimalFormatSymbols(); dfs.setDecimalSeparator('@'); df.setDecimalFormatSymbols(dfs); assertTrue("Not set", df.getDecimalFormatSymbols().equals(dfs)); assertEquals("Symbols not used", "1@2", df.format(1.2)); // The returned symbols may be cloned in two spots // 1. When set // 2. When returned DecimalFormat format = new DecimalFormat(); DecimalFormatSymbols symbols = new DecimalFormatSymbols(); format.setDecimalFormatSymbols(symbols); DecimalFormatSymbols symbolsOut = format.getDecimalFormatSymbols(); assertNotSame(symbols, symbolsOut); } /** * @tests java.text.DecimalFormat#setDecimalSeparatorAlwaysShown(boolean) */ public void test_setDecimalSeparatorAlwaysShownZ() { DecimalFormat df = new DecimalFormat("###0.##", new DecimalFormatSymbols(Locale.US)); assertEquals("Wrong default result", "5", df.format(5)); df.setDecimalSeparatorAlwaysShown(true); assertTrue("Not set", df.isDecimalSeparatorAlwaysShown()); assertEquals("Wrong set result", "7.", df.format(7)); } /** * @tests java.text.DecimalFormat#setCurrency(java.util.Currency) */ public void test_setCurrencyLjava_util_Currency() { Locale locale = Locale.CANADA; DecimalFormat df = ((DecimalFormat) NumberFormat .getCurrencyInstance(locale)); try { df.setCurrency(null); fail("Expected NullPointerException"); } catch (NullPointerException e) { } Currency currency = Currency.getInstance("AED"); df.setCurrency(currency); assertTrue("Returned incorrect currency", currency == df.getCurrency()); assertTrue("Returned incorrect currency symbol", currency.getSymbol( locale) .equals(df.getDecimalFormatSymbols().getCurrencySymbol())); assertTrue("Returned incorrect international currency symbol", currency .getCurrencyCode().equals( df.getDecimalFormatSymbols() .getInternationalCurrencySymbol())); } /** * @tests java.text.DecimalFormat#setGroupingSize(int) */ public void test_setGroupingSizeI() { DecimalFormat df = new DecimalFormat("###0.##", new 
DecimalFormatSymbols(Locale.ENGLISH)); df.setGroupingUsed(true); df.setGroupingSize(2); assertEquals("Value not set", 2, df.getGroupingSize()); String result = df.format(123); assertTrue("Invalid format:" + result, result.equals("1,23")); } /** * @tests java.text.DecimalFormat#setMaximumFractionDigits(int) */ public void test_setMaximumFractionDigitsI() { DecimalFormat df = new DecimalFormat("###0.##", new DecimalFormatSymbols(Locale.US)); df.setMaximumFractionDigits(3); assertEquals("Not set", 3, df.getMaximumFractionDigits()); assertEquals("Wrong maximum", "1.235", df.format(1.23456)); df.setMinimumFractionDigits(4); assertEquals("Not changed", 4, df.getMaximumFractionDigits()); assertEquals("Incorrect fraction", "456.0000", df.format(456)); } /** * @tests java.text.DecimalFormat#setMaximumIntegerDigits(int) */ public void test_setMaximumIntegerDigitsI() { DecimalFormat df = new DecimalFormat("###0.##"); df.setMaximumIntegerDigits(2); assertEquals("Not set", 2, df.getMaximumIntegerDigits()); assertEquals("Wrong maximum", "34", df.format(1234)); df.setMinimumIntegerDigits(4); assertEquals("Not changed", 4, df.getMaximumIntegerDigits()); assertEquals("Incorrect integer", "0026", df.format(26)); } /** * @tests java.text.DecimalFormat#setMinimumFractionDigits(int) */ public void test_setMinimumFractionDigitsI() { DecimalFormat df = new DecimalFormat("###0.##", new DecimalFormatSymbols(Locale.US)); df.setMinimumFractionDigits(4); assertEquals("Not set", 4, df.getMinimumFractionDigits()); assertEquals("Wrong minimum", "1.2300", df.format(1.23)); df.setMaximumFractionDigits(2); assertEquals("Not changed", 2, df.getMinimumFractionDigits()); assertEquals("Incorrect fraction", "456.00", df.format(456)); } /** * @tests java.text.DecimalFormat#setMinimumIntegerDigits(int) */ public void test_setMinimumIntegerDigitsI() { DecimalFormat df = new DecimalFormat("###0.##", new DecimalFormatSymbols(Locale.US)); df.setMinimumIntegerDigits(3); assertEquals("Not set", 3, 
df.getMinimumIntegerDigits()); assertEquals("Wrong minimum", "012", df.format(12)); df.setMaximumIntegerDigits(2); assertEquals("Not changed", 2, df.getMinimumIntegerDigits()); assertEquals("Incorrect integer", "00.7", df.format(0.7)); } /** * @tests java.text.DecimalFormat#setMultiplier(int) */ //FIXME This test fails on Harmony ClassLibrary public void test_setMultiplierI() { DecimalFormat df = new DecimalFormat("###0.##"); df.setMultiplier(10); assertEquals("Wrong multiplier", 10, df.getMultiplier()); assertEquals("Wrong format", "50", df.format(5)); assertEquals("Wrong parse", 5, df.parse("50", new ParsePosition(0)) .intValue()); // regression test for HARMONY-879 df.setMultiplier(-1); assertEquals("Wrong multiplier for negative value", -1, df.getMultiplier()); } /** * @tests serialization/deserialization compatibility. */ public void testSerializationSelf() throws Exception { SerializationTest.verifySelf(new DecimalFormat()); } /** * @tests serialization compatibility with RI */ public void test_serializationHarmonyRICompatible() { NumberFormat nf = NumberFormat.getInstance(Locale.FRANCE); DecimalFormat df = null; if (!(nf instanceof DecimalFormat)) { throw new Error("This NumberFormat is not a DecimalFormat"); } df = (DecimalFormat) nf; ObjectInputStream oinput = null; DecimalFormat deserializedDF = null; try { oinput = new ObjectInputStream(this.getClass().getResource( "/serialization/java/text/DecimalFormat.ser").openStream()); deserializedDF = (DecimalFormat) oinput.readObject(); } catch (Exception e) { fail("Error occurs during deserialization"); } finally { try { if (null != oinput) { oinput.close(); } } catch (Exception e) { // ignore } } assertEquals(df.getNegativePrefix(), deserializedDF.getNegativePrefix()); assertEquals(df.getNegativeSuffix(), deserializedDF.getNegativeSuffix()); assertEquals(df.getPositivePrefix(), deserializedDF.getPositivePrefix()); assertEquals(df.getPositiveSuffix(), deserializedDF.getPositiveSuffix()); 
assertEquals(df.getCurrency(), deserializedDF.getCurrency()); assertEquals(df.getDecimalFormatSymbols(), deserializedDF .getDecimalFormatSymbols()); assertEquals(df.getGroupingSize(), df.getGroupingSize()); assertEquals(df.getMaximumFractionDigits(), deserializedDF .getMaximumFractionDigits()); assertEquals(df.getMaximumIntegerDigits(), deserializedDF .getMaximumIntegerDigits()); assertEquals(df.getMinimumFractionDigits(), deserializedDF .getMinimumFractionDigits()); assertEquals(df.getMinimumIntegerDigits(), deserializedDF .getMinimumIntegerDigits()); assertEquals(df.getMultiplier(), deserializedDF.getMultiplier()); // Deliberately omitted this assertion. Since different data resource // will cause the assertion fail. // assertEquals(df, deserializedDF); } /** * Test whether DecimalFormat can parse Positive infinity correctly */ public void testParseInfinityBigDecimalFalse() { // Regression test for HARMONY-106 DecimalFormat format = (DecimalFormat) DecimalFormat.getInstance(); DecimalFormatSymbols symbols = new DecimalFormatSymbols(); Number number = format.parse(symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); assertTrue(Double.isInfinite(number.doubleValue())); } /** * Test whether DecimalFormat can parse Negative infinity correctly */ public void testParseMinusInfinityBigDecimalFalse() { // Regression test for HARMONY-106 DecimalFormat format = (DecimalFormat) DecimalFormat.getInstance(); DecimalFormatSymbols symbols = new DecimalFormatSymbols(); Number number = format.parse("-" + symbols.getInfinity(), new ParsePosition(0)); assertTrue(number instanceof Double); assertTrue(Double.isInfinite(number.doubleValue())); } /** * Test if setDecimalFormatSymbols method wont throw NullPointerException * when it is called with null parameter. 
*/ public void testSetDecimalFormatSymbolsAsNull(){ // Regression for HARMONY-1070 try { DecimalFormat format = (DecimalFormat)DecimalFormat.getInstance(); format.setDecimalFormatSymbols(null); } catch (Exception e) { fail("Unexpected exception caught: " + e); } } }
apache-2.0
spring-projects/spring-framework
spring-test/src/test/java/org/springframework/test/context/junit4/StandardJUnit4FeaturesSpringRunnerTests.java
1377
/*
 * Copyright 2002-2016 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.test.context.junit4;

import org.junit.runner.RunWith;

import org.springframework.test.context.TestExecutionListeners;

/**
 * <p>
 * Simple unit test to verify that {@link SpringRunner} does not
 * hinder correct functionality of standard JUnit 4.4+ testing features.
 * </p>
 * <p>
 * Note that {@link TestExecutionListeners @TestExecutionListeners} is
 * explicitly configured with an empty list, thus disabling all default
 * listeners.
 * </p>
 *
 * @author Sam Brannen
 * @since 2.5
 * @see StandardJUnit4FeaturesTests
 */
@RunWith(SpringRunner.class)
@TestExecutionListeners({})
public class StandardJUnit4FeaturesSpringRunnerTests extends StandardJUnit4FeaturesTests {

	/* All tests are in the parent class... */

}
apache-2.0
GerritCodeReview/gerrit-attic
src/main/java/com/google/gerrit/server/http/GerritConfigProvider.java
4495
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package com.google.gerrit.server.http;

import com.google.gerrit.client.data.ApprovalTypes;
import com.google.gerrit.client.data.GerritConfig;
import com.google.gerrit.client.data.GitwebLink;
import com.google.gerrit.client.reviewdb.Account;
import com.google.gerrit.client.reviewdb.Project;
import com.google.gerrit.server.account.Realm;
import com.google.gerrit.server.config.AuthConfig;
import com.google.gerrit.server.config.CanonicalWebUrl;
import com.google.gerrit.server.config.GerritServerConfig;
import com.google.gerrit.server.config.Nullable;
import com.google.gerrit.server.config.WildProjectName;
import com.google.gerrit.server.contact.ContactStore;
import com.google.gerrit.server.mail.EmailSender;
import com.google.gerrit.server.ssh.SshInfo;
import com.google.inject.Inject;
import com.google.inject.Provider;

import org.spearce.jgit.lib.Config;

import java.net.Inet6Address;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.HashSet;
import java.util.Set;

/**
 * Guice {@link Provider} that assembles the client-visible {@link GerritConfig}
 * snapshot from the injected server configuration, realm, and service beans.
 */
public class GerritConfigProvider implements Provider<GerritConfig> {
  // True when the address is IPv6 AND getHostName() still returns the literal
  // address text (i.e. no reverse-DNS name was resolved) — only then does the
  // host need to be bracketed as "[addr]" for use in host:port strings.
  // Note: the instanceof check short-circuits safely when ip is null.
  private static boolean isIPv6(final InetAddress ip) {
    return ip instanceof Inet6Address
        && ip.getHostName().equals(ip.getHostAddress());
  }

  private final Realm realm;
  private final Config cfg;
  private final String canonicalWebUrl;
  private final AuthConfig authConfig;
  private final Project.NameKey wildProject;
  private final SshInfo sshInfo;
  private final ApprovalTypes approvalTypes;
  // Not final: injected via the optional setter below; stays null when no
  // EmailSender binding exists.
  private EmailSender emailSender;
  private final ContactStore contactStore;

  @Inject
  GerritConfigProvider(final Realm r, @GerritServerConfig final Config gsc,
      @CanonicalWebUrl @Nullable final String cwu, final AuthConfig ac,
      @WildProjectName final Project.NameKey wp, final SshInfo si,
      final ApprovalTypes at, final ContactStore cs) {
    realm = r;
    cfg = gsc;
    canonicalWebUrl = cwu;
    authConfig = ac;
    sshInfo = si;
    wildProject = wp;
    approvalTypes = at;
    contactStore = cs;
  }

  // Optional injection: email support may be absent in some deployments.
  @Inject(optional = true)
  void setEmailSender(final EmailSender d) {
    emailSender = d;
  }

  /**
   * Builds a fresh {@link GerritConfig} from the current server state.
   * Called on every {@link #get()}; the result is not cached here.
   */
  private GerritConfig create() {
    final GerritConfig config = new GerritConfig();
    config.setCanonicalUrl(canonicalWebUrl);
    config.setUseContributorAgreements(cfg.getBoolean("auth",
        "contributoragreements", false));
    config.setGitDaemonUrl(cfg.getString("gerrit", null, "canonicalgiturl"));
    config.setUseRepoDownload(cfg.getBoolean("repo", null,
        "showdownloadcommand", false));
    // Contact info is offered only when a contact store is bound and enabled.
    config.setUseContactInfo(contactStore != null && contactStore.isEnabled());
    config.setAuthType(authConfig.getAuthType());
    config.setWildProject(wildProject);
    config.setApprovalTypes(approvalTypes);

    // Expose only the account fields this realm allows users to edit.
    final Set<Account.FieldName> fields = new HashSet<Account.FieldName>();
    for (final Account.FieldName n : Account.FieldName.values()) {
      if (realm.allowsEdit(n)) {
        fields.add(n);
      }
    }
    // Registering a new email address additionally requires a working sender.
    if (emailSender != null && emailSender.isEnabled()) {
      fields.add(Account.FieldName.REGISTER_NEW_EMAIL);
    }
    config.setEditableAccountFields(fields);

    final String gitwebUrl = cfg.getString("gitweb", null, "url");
    if (gitwebUrl != null) {
      config.setGitwebLink(new GitwebLink(gitwebUrl));
    }

    final InetSocketAddress addr = sshInfo != null ? sshInfo.getAddress() : null;
    if (addr != null) {
      final InetAddress ip = addr.getAddress();
      String host;
      if (ip != null && ip.isAnyLocalAddress()) {
        // Wildcard bind: advertise no host so clients use the host they
        // connected to.
        host = "";
      } else if (isIPv6(ip)) {
        // Literal IPv6 addresses must be bracketed before a ":port" suffix.
        host = "[" + addr.getHostName() + "]";
      } else {
        host = addr.getHostName();
      }
      // Port is appended only when it differs from the SSH default (22).
      if (addr.getPort() != 22) {
        host += ":" + addr.getPort();
      }
      config.setSshdAddress(host);
    }

    return config;
  }

  @Override
  public GerritConfig get() {
    return create();
  }
}
apache-2.0
googleads/googleads-java-lib
modules/dfp_axis/src/main/java/com/google/api/ads/admanager/axis/v202102/ForecastAdjustmentVolumeType.java
3944
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

/**
 * ForecastAdjustmentVolumeType.java
 *
 * This file was auto-generated from WSDL
 * by the Apache Axis 1.4 Mar 02, 2009 (07:08:06 PST) WSDL2Java emitter.
 */

// NOTE(review): generated code — do not hand-edit; regenerate from the WSDL.
// It follows the pre-Java-5 "typesafe enum" pattern Axis emits: a fixed set of
// canonical instances registered in a static table, looked up by string value.
package com.google.api.ads.admanager.axis.v202102;

public class ForecastAdjustmentVolumeType implements java.io.Serializable {
    // The wire value of this enum constant (e.g. "DAILY_VOLUME").
    private java.lang.String _value_;
    // Registry mapping wire value -> canonical instance; populated as the
    // static constants below are constructed during class initialization.
    // (Raw HashMap and constructor self-registration are Axis-generated style.)
    private static java.util.HashMap _table_ = new java.util.HashMap();

    // Constructor
    // Protected so only the canonical constants (and generated subclasses,
    // if any) can create instances; each instance registers itself.
    protected ForecastAdjustmentVolumeType(java.lang.String value) {
        _value_ = value;
        _table_.put(_value_,this);
    }

    public static final java.lang.String _UNKNOWN = "UNKNOWN";
    public static final java.lang.String _DAILY_VOLUME = "DAILY_VOLUME";
    public static final java.lang.String _TOTAL_VOLUME = "TOTAL_VOLUME";
    public static final java.lang.String _HISTORICAL_BASIS_VOLUME = "HISTORICAL_BASIS_VOLUME";
    public static final ForecastAdjustmentVolumeType UNKNOWN = new ForecastAdjustmentVolumeType(_UNKNOWN);
    public static final ForecastAdjustmentVolumeType DAILY_VOLUME = new ForecastAdjustmentVolumeType(_DAILY_VOLUME);
    public static final ForecastAdjustmentVolumeType TOTAL_VOLUME = new ForecastAdjustmentVolumeType(_TOTAL_VOLUME);
    public static final ForecastAdjustmentVolumeType HISTORICAL_BASIS_VOLUME = new ForecastAdjustmentVolumeType(_HISTORICAL_BASIS_VOLUME);

    // Returns the wire value this constant serializes to.
    public java.lang.String getValue() { return _value_;}

    // Looks up the canonical instance for a wire value; throws (with no
    // message — generated behavior) when the value is unknown.
    public static ForecastAdjustmentVolumeType fromValue(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        ForecastAdjustmentVolumeType enumeration = (ForecastAdjustmentVolumeType)
            _table_.get(value);
        if (enumeration==null) throw new java.lang.IllegalArgumentException();
        return enumeration;
    }

    public static ForecastAdjustmentVolumeType fromString(java.lang.String value)
          throws java.lang.IllegalArgumentException {
        return fromValue(value);
    }

    // Identity equality is sufficient because instances are canonicalized
    // via the _table_ registry and readResolve() below.
    public boolean equals(java.lang.Object obj) {return (obj == this);}
    public int hashCode() { return toString().hashCode();}
    public java.lang.String toString() { return _value_;}

    // Deserialization hook: replace the freshly-read object with the
    // canonical constant so '==' comparisons keep working.
    public java.lang.Object readResolve() throws java.io.ObjectStreamException { return fromValue(_value_);}

    public static org.apache.axis.encoding.Serializer getSerializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumSerializer(
            _javaType, _xmlType);
    }

    public static org.apache.axis.encoding.Deserializer getDeserializer(
           java.lang.String mechType,
           java.lang.Class _javaType,
           javax.xml.namespace.QName _xmlType) {
        return
          new org.apache.axis.encoding.ser.EnumDeserializer(
            _javaType, _xmlType);
    }

    // Type metadata
    private static org.apache.axis.description.TypeDesc typeDesc =
        new org.apache.axis.description.TypeDesc(ForecastAdjustmentVolumeType.class);

    static {
        typeDesc.setXmlType(new javax.xml.namespace.QName("https://www.google.com/apis/ads/publisher/v202102", "ForecastAdjustmentVolumeType"));
    }

    /**
     * Return type metadata object
     */
    public static org.apache.axis.description.TypeDesc getTypeDesc() {
        return typeDesc;
    }

}
apache-2.0
i2geo/i2gCurriki
plugins/lucene/src/main/java/org/curriki/xwiki/plugin/lucene/IdentifierListAnalyzer.java
1071
package org.curriki.xwiki.plugin.lucene; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.Token; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.CharTokenizer; import java.io.Reader; import java.io.IOException; import java.io.StringReader; /** An analyzer that splits along a whitespace or a comma. */ class IdentifierListAnalyzer extends Analyzer { public IdentifierListAnalyzer() { } @Override public TokenStream reusableTokenStream(String fieldName, Reader reader) throws IOException { TokenStream tok = (TokenStream) getPreviousTokenStream(); if(tok==null) { tok = tokenStream(fieldName,reader); setPreviousTokenStream(tok); } return tok; } public TokenStream tokenStream(String fieldName, Reader reader) { return new CharTokenizer(reader) { protected boolean isTokenChar(char c) { return !( Character.isWhitespace(c) || c == ','); } }; } }
apache-2.0
zutherb/memory-leak-detection
shop/ui/src/main/java/com/comsysto/shop/ui/panel/product/TopSellerRecommendationPanel.java
2672
package com.comsysto.shop.ui.panel.product;

import com.comsysto.shop.ui.event.AjaxEvent;
import com.comsysto.shop.repository.product.model.ProductType;
import com.comsysto.shop.service.product.model.ProductInfo;
import com.comsysto.shop.service.recommendation.api.RecommendationService;
import com.comsysto.shop.ui.event.basket.AddToBasketEvent;
import com.comsysto.shop.ui.event.basket.RemoveFromBasketEvent;
import com.comsysto.shop.ui.panel.base.AbstractPizzaShopBasePanel;
import org.apache.wicket.Component;
import org.apache.wicket.event.IEvent;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.LoadableDetachableModel;
import org.apache.wicket.model.util.ListModel;
import org.apache.wicket.spring.injection.annot.SpringBean;

import java.util.ArrayList;
import java.util.List;

/**
 * Wicket panel showing the single top-seller recommendation for a product
 * type. The recommendation is loaded lazily per request and the panel hides
 * itself when no recommendation is available.
 *
 * @author zutherb
 */
public class TopSellerRecommendationPanel extends AbstractPizzaShopBasePanel {

    @SpringBean(name = "recommendationService")
    private RecommendationService recommendationService;

    // Product type to fetch recommendations for (supplied by the caller).
    private IModel<ProductType> productTypeModel;
    // Lazily-loaded top recommendation; null object means "nothing to show".
    private IModel<ProductInfo> pizzaInfoModel;

    public TopSellerRecommendationPanel(String id, IModel<ProductType> productTypeModel) {
        super(id);
        this.productTypeModel = productTypeModel;
        // Must be created before recommendationItemPanel(), which reads it.
        pizzaInfoModel = productInfoModel();
        add(recommendationItemPanel());
        // Markup id + placeholder tag allow AJAX repaint even while invisible.
        setOutputMarkupId(true);
        setOutputMarkupPlaceholderTag(true);
    }

    // Builds the child panel rendering the recommended item.
    private Component recommendationItemPanel() {
        ArrayList<String> tagListe = new ArrayList<>(2);
        IModel<List<String>> tagsModel = new ListModel<>(tagListe);
        RecommendationItemPanel recommendationItemPanel = new RecommendationItemPanel("recPizzaItem", pizzaInfoModel, tagsModel);
        recommendationItemPanel.setOutputMarkupId(true);
        return recommendationItemPanel;
    }

    @Override
    protected void onConfigure() {
        super.onConfigure();
        // Hide the whole panel when no top-seller was found.
        setVisible(pizzaInfoModel.getObject() != null);
    }

    // Loads the single best top-seller for the current product type on first
    // access per request cycle; detached automatically by Wicket afterwards.
    private IModel<ProductInfo> productInfoModel() {
        return new LoadableDetachableModel<ProductInfo>() {
            @Override
            protected ProductInfo load() {
                List<ProductInfo> products = recommendationService.getTopsellerRecommendations(productTypeModel.getObject(), 1);
                return products.size() > 0 ? products.get(0) : null;
            }
        };
    }

    @Override
    public void onEvent(IEvent<?> event) {
        // Repaint this panel whenever the basket changes, since basket
        // contents can influence the recommendation display.
        if (event.getPayload() instanceof AddToBasketEvent || event.getPayload() instanceof RemoveFromBasketEvent) {
            ((AjaxEvent) event.getPayload()).getTarget().add(this);
        }
    }
}
apache-2.0
WASdev/sample.mono-to-ms.pbw-monolith
PlantsByWebSphere/src/main/java/com/ibm/websphere/samples/pbw/war/Populate.java
13552
//
// COPYRIGHT LICENSE: This information contains sample code provided in source code form. You may copy,
// modify, and distribute these sample programs in any form without payment to IBM for the purposes of
// developing, using, marketing or distributing application programs conforming to the application
// programming interface for the operating platform for which the sample code is written.
// Notwithstanding anything to the contrary, IBM PROVIDES THE SAMPLE SOURCE CODE ON AN "AS IS" BASIS
// AND IBM DISCLAIMS ALL WARRANTIES, EXPRESS OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, ANY IMPLIED
// WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY, FITNESS FOR A PARTICULAR PURPOSE,
// TITLE, AND ANY WARRANTY OR CONDITION OF NON-INFRINGEMENT. IBM SHALL NOT BE LIABLE FOR ANY DIRECT,
// INDIRECT, INCIDENTAL, SPECIAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR OPERATION OF THE
// SAMPLE SOURCE CODE. IBM HAS NO OBLIGATION TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS
// OR MODIFICATIONS TO THE SAMPLE SOURCE CODE.
//
// (C) COPYRIGHT International Business Machines Corp., 2004,2011
// All Rights Reserved * Licensed Materials - Property of IBM
//
package com.ibm.websphere.samples.pbw.war;

import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URL;
import java.util.Arrays;
import java.util.Vector;

import com.ibm.websphere.samples.pbw.ejb.BackOrderMgr;
import com.ibm.websphere.samples.pbw.ejb.CatalogMgr;
import com.ibm.websphere.samples.pbw.ejb.CustomerMgr;
import com.ibm.websphere.samples.pbw.ejb.ResetDBBean;
import com.ibm.websphere.samples.pbw.ejb.ShoppingCartBean;
import com.ibm.websphere.samples.pbw.ejb.SuppliersBean;
import com.ibm.websphere.samples.pbw.jpa.Inventory;
import com.ibm.websphere.samples.pbw.utils.Util;

/**
 * A basic POJO class for resetting the database.
 *
 * Wipes all tables via {@link ResetDBBean} and repopulates INVENTORY,
 * CUSTOMER, ORDER, BACKORDER and SUPPLIER from pipe-delimited property files
 * read through {@link Util#getProperties(String)}. Each section swallows its
 * own exceptions (logging via Util.debug) so a failure in one table does not
 * abort the rest of the reload.
 */
public class Populate {
	private ResetDBBean resetDB;
	private CatalogMgr catalog;
	private CustomerMgr login;
	// NOTE(review): cart is never assigned by either constructor, so the ORDER
	// section of doPopulate() will NPE (silently, inside its catch block)
	// unless a caller sets it by some other means — confirm intended wiring.
	private ShoppingCartBean cart;
	private BackOrderMgr backOrderStock;
	private SuppliersBean suppliers;

	/**
	 *
	 */
	public Populate() {}

	public Populate(ResetDBBean resetDB, CatalogMgr c, CustomerMgr l, BackOrderMgr b, SuppliersBean s) {
		this.resetDB = resetDB;
		this.catalog = c;
		this.login = l;
		this.backOrderStock = b;
		this.suppliers = s;
	}

	/**
	 * Loads an image from the webapp's bundled resources and stores its raw
	 * bytes on the given catalog item.
	 *
	 * Fix: the original opened a FileInputStream/DataInputStream pair and
	 * never closed either, leaking a file handle per image; both are now
	 * closed via try-with-resources.
	 *
	 * @param itemID   inventory item to attach the image to
	 * @param fileName image file name under resources/images/
	 * @param catalog  catalog manager that persists the image bytes
	 * @throws FileNotFoundException if the resolved image file does not exist
	 * @throws IOException           if reading the image fails
	 */
	public static void addImage(String itemID, String fileName, CatalogMgr catalog) throws FileNotFoundException, IOException {
		URL url = Thread.currentThread().getContextClassLoader().getResource("resources/images/" + fileName);
		Util.debug("URL: " + url);
		fileName = url.getPath();
		Util.debug("Fully-qualified Filename: " + fileName);
		File imgFile = new File(fileName);
		// Open the input file as a stream of bytes (closed automatically).
		try (DataInputStream dis = new DataInputStream(new FileInputStream(imgFile))) {
			// NOTE(review): available() equals the remaining file size only for
			// local files; adequate here since the resource is on local disk.
			int dataSize = dis.available();
			byte[] data = new byte[dataSize];
			dis.readFully(data);
			catalog.setItemImageBytes(itemID, data);
		}
	}

	/**
	 * Clears the database and repopulates every table from the bundled
	 * property files. Errors in any one section are logged and skipped.
	 */
	public void doPopulate() {
		try {
			resetDB.deleteAll();
		} catch (Exception e) {
			Util.debug("Populate:doPopulate() - Exception deleting data in database: " + e);
			e.printStackTrace();
		}

		/**
		 * Populate INVENTORY table with text
		 */
		Util.debug("Populating INVENTORY table with text...");
		try {
			String[] values = Util.getProperties("inventory");
			for (int index = 0; index < values.length; index++) {
				Util.debug("Found INVENTORY property values: " + values[index]);
				String[] fields = Util.readTokens(values[index], "|");
				String id = fields[0];
				String name = fields[1];
				String heading = fields[2];
				String descr = fields[3];
				String pkginfo = fields[4];
				String image = fields[5];
				// parseXxx replaces the deprecated boxing constructors
				// (new Float/Integer/Boolean) with identical parse behavior.
				float price = Float.parseFloat(fields[6]);
				float cost = Float.parseFloat(fields[7]);
				int quantity = Integer.parseInt(fields[8]);
				int category = Integer.parseInt(fields[9]);
				String notes = fields[10];
				boolean isPublic = Boolean.parseBoolean(fields[11]);
				Util.debug("Populating INVENTORY with following values: ");
				Util.debug(fields[0]);
				Util.debug(fields[1]);
				Util.debug(fields[2]);
				Util.debug(fields[3]);
				Util.debug(fields[4]);
				Util.debug(fields[5]);
				Util.debug(fields[6]);
				Util.debug(fields[7]);
				Util.debug(fields[8]);
				Util.debug(fields[9]);
				Util.debug(fields[10]);
				Util.debug(fields[11]);
				Inventory storeItem = new Inventory(id, name, heading, descr, pkginfo, image, price, cost, quantity, category, notes, isPublic);
				catalog.addItem(storeItem);
				addImage(id, image, catalog);
			}
			Util.debug("INVENTORY table populated with text...");
		} catch (Exception e) {
			Util.debug("Unable to populate INVENTORY table with text data: " + e);
		}

		/**
		 * Populate CUSTOMER table with text
		 */
		Util.debug("Populating CUSTOMER table with default values...");
		try {
			String[] values = Util.getProperties("customer");
			Util.debug("Found CUSTOMER properties: " + values[0]);
			for (int index = 0; index < values.length; index++) {
				String[] fields = Util.readTokens(values[index], "|");
				String customerID = fields[0];
				String password = fields[1];
				String firstName = fields[2];
				String lastName = fields[3];
				String addr1 = fields[4];
				String addr2 = fields[5];
				String addrCity = fields[6];
				String addrState = fields[7];
				String addrZip = fields[8];
				String phone = fields[9];
				Util.debug("Populating CUSTOMER with following values: ");
				Util.debug(fields[0]);
				Util.debug(fields[1]);
				Util.debug(fields[2]);
				Util.debug(fields[3]);
				Util.debug(fields[4]);
				Util.debug(fields[5]);
				Util.debug(fields[6]);
				Util.debug(fields[7]);
				Util.debug(fields[8]);
				Util.debug(fields[9]);
				login.createCustomer(customerID, password, firstName, lastName, addr1, addr2, addrCity, addrState, addrZip, phone);
			}
		} catch (Exception e) {
			Util.debug("Unable to populate CUSTOMER table with text data: " + e);
		}

		/**
		 * Populate ORDER table with text
		 */
		Util.debug("Populating ORDER table with default values...");
		try {
			String[] values = Util.getProperties("order");
			Util.debug("Found ORDER properties: " + values[0]);
			if (values[0] != null && values.length > 0) {
				for (int index = 0; index < values.length; index++) {
					String[] fields = Util.readTokens(values[index], "|");
					if (fields != null && fields.length >= 21) {
						String customerID = fields[0];
						String billName = fields[1];
						String billAddr1 = fields[2];
						String billAddr2 = fields[3];
						String billCity = fields[4];
						String billState = fields[5];
						String billZip = fields[6];
						String billPhone = fields[7];
						String shipName = fields[8];
						String shipAddr1 = fields[9];
						String shipAddr2 = fields[10];
						String shipCity = fields[11];
						String shipState = fields[12];
						String shipZip = fields[13];
						String shipPhone = fields[14];
						int shippingMethod = Integer.parseInt(fields[15]);
						String creditCard = fields[16];
						String ccNum = fields[17];
						String ccExpireMonth = fields[18];
						String ccExpireYear = fields[19];
						String cardHolder = fields[20];
						Vector<Inventory> items = new Vector<Inventory>();
						Util.debug("Populating ORDER with following values: ");
						Util.debug(fields[0]);
						Util.debug(fields[1]);
						Util.debug(fields[2]);
						Util.debug(fields[3]);
						Util.debug(fields[4]);
						Util.debug(fields[5]);
						Util.debug(fields[6]);
						Util.debug(fields[7]);
						Util.debug(fields[8]);
						Util.debug(fields[9]);
						Util.debug(fields[10]);
						Util.debug(fields[11]);
						Util.debug(fields[12]);
						Util.debug(fields[13]);
						Util.debug(fields[14]);
						Util.debug(fields[15]);
						Util.debug(fields[16]);
						Util.debug(fields[17]);
						Util.debug(fields[18]);
						Util.debug(fields[19]);
						Util.debug(fields[20]);
						cart.createOrder(customerID, billName, billAddr1, billAddr2, billCity, billState, billZip, billPhone, shipName, shipAddr1, shipAddr2, shipCity, shipState, shipZip, shipPhone, creditCard, ccNum, ccExpireMonth, ccExpireYear, cardHolder, shippingMethod, items);
					} else {
						Util.debug("Property does not contain enough fields: " + values[index]);
						// Fix: the original concatenated the array reference
						// itself, logging an identity hash instead of contents.
						Util.debug("Fields found were: " + Arrays.toString(fields));
					}
				}
			}
		} catch (Exception e) {
			Util.debug("Unable to populate ORDERITEM table with text data: " + e);
			e.printStackTrace();
		}

		/**
		 * Populate BACKORDER table with text
		 */
		Util.debug("Populating BACKORDER table with default values...");
		try {
			String[] values = Util.getProperties("backorder");
			Util.debug("Found BACKORDER properties: " + values[0]);
			// Inserting backorders
			for (int index = 0; index < values.length; index++) {
				String[] fields = Util.readTokens(values[index], "|");
				String inventoryID = fields[0];
				int amountToOrder = Integer.parseInt(fields[1]);
				int maximumItems = Integer.parseInt(fields[2]);
				Util.debug("Populating BACKORDER with following values: ");
				Util.debug(inventoryID);
				Util.debug("amountToOrder -> " + amountToOrder);
				Util.debug("maximumItems -> " + maximumItems);
				backOrderStock.createBackOrder(inventoryID, amountToOrder, maximumItems);
			}
		} catch (Exception e) {
			Util.debug("Unable to populate BACKORDER table with text data: " + e);
		}

		/**
		 * Populate SUPPLIER table with text
		 */
		Util.debug("Populating SUPPLIER table with default values...");
		try {
			String[] values = Util.getProperties("supplier");
			Util.debug("Found SUPPLIER properties: " + values[0]);
			// Inserting Suppliers
			for (int index = 0; index < values.length; index++) {
				String[] fields = Util.readTokens(values[index], "|");
				String supplierID = fields[0];
				String name = fields[1];
				String address = fields[2];
				String city = fields[3];
				String state = fields[4];
				String zip = fields[5];
				String phone = fields[6];
				String url = fields[7];
				Util.debug("Populating SUPPLIER with following values: ");
				Util.debug(fields[0]);
				Util.debug(fields[1]);
				Util.debug(fields[2]);
				Util.debug(fields[3]);
				Util.debug(fields[4]);
				Util.debug(fields[5]);
				Util.debug(fields[6]);
				Util.debug(fields[7]);
				suppliers.createSupplier(supplierID, name, address, city, state, zip, phone, url);
			}
		} catch (Exception e) {
			Util.debug("Unable to populate SUPPLIER table with text data: " + e);
		}
	}
}
apache-2.0
McLeodMoores/starling
projects/analytics/src/test/java/com/opengamma/analytics/financial/provider/description/interestrate/MulticurveProviderDiscountTest.java
3006
/**
 * Copyright (C) 2013 - present by OpenGamma Inc. and the OpenGamma group of companies
 *
 * Please see distribution for license.
 */
package com.opengamma.analytics.financial.provider.description.interestrate;

import static com.opengamma.analytics.financial.instrument.TestInstrumentDefinitionsAndDerivatives.IBOR_INDEX_1;
import static com.opengamma.analytics.financial.instrument.TestInstrumentDefinitionsAndDerivatives.INDEX_ON;

import java.util.List;

import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;

import com.opengamma.analytics.financial.model.interestrate.curve.YieldAndDiscountCurve;
import com.opengamma.util.money.Currency;
import com.opengamma.util.test.TestGroup;

/**
 * Tests the curve-registration rules of {@link MulticurveProviderDiscount}:
 * re-registering the SAME curve instance under an existing key succeeds,
 * while registering a DIFFERENT curve under an existing key throws
 * IllegalArgumentException. Covered for discounting (by currency), forward
 * ibor (by ibor index) and forward overnight (by ON index) curves.
 */
@Test(groups = TestGroup.UNIT)
public class MulticurveProviderDiscountTest {

  // Fresh provider per test method (see setup()).
  private MulticurveProviderDiscount _provider;

  @BeforeMethod
  public void setup() {
    _provider = new MulticurveProviderDiscount();
  }

  // Two distinct curve objects under the same currency must be rejected.
  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testAddingDifferentDiscountingCurveWithSameCurrencyFails() {
    _provider.setCurve(Currency.USD, mockCurve("test"));
    _provider.setCurve(Currency.USD, mockCurve("test2"));
  }

  // Setting the identical curve instance twice is an allowed no-op.
  @Test
  public void testAddingSameDiscountingCurveWithSameCurrencySucceeds() {
    final YieldAndDiscountCurve test = mockCurve("test");
    _provider.setCurve(Currency.USD, test);
    _provider.setCurve(Currency.USD, test);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testAddingDifferentForwardIborCurveWithSameIndexFails() {
    _provider.setCurve(IBOR_INDEX_1, mockCurve("test"));
    _provider.setCurve(IBOR_INDEX_1, mockCurve("test2"));
  }

  @Test
  public void testAddingSameForwardIborCurveWithSameIndexSucceeds() {
    final YieldAndDiscountCurve test = mockCurve("test");
    _provider.setCurve(IBOR_INDEX_1, test);
    _provider.setCurve(IBOR_INDEX_1, test);
  }

  @Test(expectedExceptions = IllegalArgumentException.class)
  public void testAddingDifferentForwardONCurveWithSameIndexFails() {
    _provider.setCurve(INDEX_ON, mockCurve("test"));
    _provider.setCurve(INDEX_ON, mockCurve("test2"));
  }

  @Test
  public void testAddingSameForwardONCurveWithSameIndexSucceeds() {
    final YieldAndDiscountCurve test = mockCurve("test");
    _provider.setCurve(INDEX_ON, test);
    _provider.setCurve(INDEX_ON, test);
  }

  // Minimal stub curve: all abstract methods return zero/empty/null; only
  // the name (and object identity) matters for these registration tests.
  private YieldAndDiscountCurve mockCurve(final String name) {
    return new YieldAndDiscountCurve(name) {

      @Override
      public double getForwardRate(final double t) {
        return 0;
      }

      @Override
      public double[] getInterestRateParameterSensitivity(final double time) {
        return new double[0];
      }

      @Override
      public int getNumberOfParameters() {
        return 0;
      }

      @Override
      public List<String> getUnderlyingCurvesNames() {
        return null;
      }

      @Override
      public double getInterestRate(final Double x) {
        return 0;
      }
    };
  }
}
apache-2.0
rsudev/c-geo-opensource
main/src/cgeo/geocaching/SearchActivity.java
15961
package cgeo.geocaching;

import cgeo.geocaching.activity.AbstractActionBarActivity;
import cgeo.geocaching.address.AddressListActivity;
import cgeo.geocaching.connector.ConnectorFactory;
import cgeo.geocaching.connector.IConnector;
import cgeo.geocaching.connector.capability.ISearchByGeocode;
import cgeo.geocaching.connector.trackable.TrackableBrand;
import cgeo.geocaching.connector.trackable.TrackableTrackingCode;
import cgeo.geocaching.databinding.SearchActivityBinding;
import cgeo.geocaching.filters.core.GeocacheFilterContext;
import cgeo.geocaching.filters.gui.GeocacheFilterActivity;
import cgeo.geocaching.location.Geopoint;
import cgeo.geocaching.location.GeopointFormatter;
import cgeo.geocaching.search.AutoCompleteAdapter;
import cgeo.geocaching.sensors.Sensors;
import cgeo.geocaching.settings.Settings;
import cgeo.geocaching.storage.DataStore;
import cgeo.geocaching.ui.TextParam;
import cgeo.geocaching.ui.dialog.CoordinatesInputDialog;
import cgeo.geocaching.ui.dialog.SimpleDialog;
import cgeo.geocaching.utils.ClipboardUtils;
import cgeo.geocaching.utils.EditUtils;
import cgeo.geocaching.utils.functions.Func1;

import android.app.Activity;
import android.app.SearchManager;
import android.content.Intent;
import android.content.res.Configuration;
import android.os.Bundle;
import android.text.Editable;
import android.text.InputFilter;
import android.text.TextWatcher;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.AutoCompleteTextView;
import android.widget.Button;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;

import java.util.Locale;

import org.apache.commons.lang3.StringUtils;

/**
 * Search entry screen. Serves two roles: (1) an invisible dispatcher for incoming
 * search intents (search-suggestion clicks, toolbar search, Google Now queries) that
 * immediately forwards to the matching detail/list activity and finishes itself, and
 * (2) the visible search form with geocode / keyword / address / owner / coordinate /
 * trackable / filter inputs.
 */
public class SearchActivity extends AbstractActionBarActivity implements CoordinatesInputDialog.CoordinateUpdate {

    // View binding for the search form; only initialized when the activity is
    // actually shown (i.e. none of the instant-dispatch intent branches matched).
    private SearchActivityBinding binding;

    // Intent action sent by Google Now / Google Assistant voice searches.
    private static final String GOOGLE_NOW_SEARCH_ACTION = "com.google.android.gms.actions.SEARCH_ACTION";

    @Override
    public final void onCreate(final Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        final Intent intent = getIntent();

        // search suggestion for a cache: forward straight to the cache detail page
        if (Intents.ACTION_GEOCACHE.equals(intent.getAction())) {
            CacheDetailActivity.startActivity(this, intent.getStringExtra(SearchManager.QUERY));
            finish();
            return;
        }

        // search suggestion for a trackable: forward straight to the trackable page
        if (Intents.ACTION_TRACKABLE.equals(intent.getAction())) {
            TrackableActivity.startActivity(this, null, intent.getStringExtra(SearchManager.QUERY), null, null, TrackableBrand.UNKNOWN.getId());
            finish();
            return;
        }

        // search query, from search toolbar or from google now; try an instant search
        // and report success/failure back to the caller via the activity result
        if (Intent.ACTION_SEARCH.equals(intent.getAction()) || GOOGLE_NOW_SEARCH_ACTION.equals(intent.getAction())) {
            hideKeyboard();
            final String query = intent.getStringExtra(SearchManager.QUERY);
            final boolean keywordSearch = intent.getBooleanExtra(Intents.EXTRA_KEYWORD_SEARCH, true);

            if (instantSearch(query, keywordSearch)) {
                setResult(RESULT_OK);
            } else {
                // send intent back so query string is known
                setResult(RESULT_CANCELED, intent);
            }
            finish();
            return;
        }

        // no dispatch branch matched: show the interactive search form
        setTheme();
        binding = SearchActivityBinding.inflate(getLayoutInflater());
        setContentView(binding.getRoot());

        // set title in code, as the activity needs a hard coded title due to the intent filters
        setTitle(res.getString(R.string.search));

        init();
    }

    @Override
    public final void onConfigurationChanged(@NonNull final Configuration newConfig) {
        super.onConfigurationChanged(newConfig);
        init();
    }

    @Override
    public final void onResume() {
        super.onResume();
        init();
    }

    /**
     * Performs a search for query either as geocode, trackable code or keyword
     *
     * <p>Probe order: scanned cache URL, pure geocode, trackable URL, trackable
     * tracking-code URL, then (optionally) plain keyword search. The first match
     * wins and starts the corresponding activity.</p>
     *
     * @param nonTrimmedQuery
     *            String to search for
     * @param keywordSearch
     *            Set to true if keyword search should be performed if query isn't GC or TB
     * @return true if a search was performed, else false
     */
    private boolean instantSearch(final String nonTrimmedQuery, final boolean keywordSearch) {
        final String query = StringUtils.trim(nonTrimmedQuery);

        // first check if this was a scanned URL
        String geocode = ConnectorFactory.getGeocodeFromURL(query);

        // otherwise see if this is a pure geocode
        if (StringUtils.isEmpty(geocode)) {
            geocode = StringUtils.upperCase(StringUtils.trim(query));
        }

        final IConnector connector = ConnectorFactory.getConnector(geocode);
        if (connector instanceof ISearchByGeocode && geocode != null) {
            CacheDetailActivity.startActivity(this, geocode.toUpperCase(Locale.US));
            return true;
        }

        // Check if the query is a TB code
        TrackableBrand trackableBrand = ConnectorFactory.getTrackableConnector(geocode).getBrand();

        // check if the query contains a TB code
        if (trackableBrand == TrackableBrand.UNKNOWN) {
            final String tbCode = ConnectorFactory.getTrackableFromURL(query);
            if (StringUtils.isNotBlank(tbCode)) {
                trackableBrand = ConnectorFactory.getTrackableConnector(tbCode).getBrand();
                geocode = tbCode;
            }
        }

        // check if the query contains a TB tracking code
        if (trackableBrand == TrackableBrand.UNKNOWN) {
            final TrackableTrackingCode tbTrackingCode = ConnectorFactory.getTrackableTrackingCodeFromURL(query);
            if (!tbTrackingCode.isEmpty()) {
                trackableBrand = tbTrackingCode.brand;
                geocode = tbTrackingCode.trackingCode;
            }
        }

        if (trackableBrand != TrackableBrand.UNKNOWN && geocode != null) {
            final Intent trackablesIntent = new Intent(this, TrackableActivity.class);
            trackablesIntent.putExtra(Intents.EXTRA_GEOCODE, geocode.toUpperCase(Locale.US));
            trackablesIntent.putExtra(Intents.EXTRA_BRAND, trackableBrand.getId());
            if (keywordSearch) { // keyword fallback, if desired by caller
                trackablesIntent.putExtra(Intents.EXTRA_KEYWORD, query.trim());
            }
            startActivity(trackablesIntent);
            return true;
        }

        if (keywordSearch) { // keyword fallback, if desired by caller
            CacheListActivity.startActivityKeyword(this, query.trim());
            return true;
        }

        return false;
    }

    /** Wires up all buttons, auto-complete suggestion sources and input filters of the form. */
    private void init() {
        binding.buttonLatitude.setOnClickListener(v -> updateCoordinates());
        binding.buttonLongitude.setOnClickListener(v -> updateCoordinates());

        binding.searchCoordinates.setOnClickListener(arg0 -> findByCoordsFn());

        setSearchAction(binding.address, binding.searchAddress, this::findByAddressFn, null);
        setSearchAction(binding.geocode, binding.displayGeocode, this::findByGeocodeFn, DataStore::getSuggestionsGeocode);
        setSearchAction(binding.keyword, binding.searchKeyword, this::findByKeywordFn, DataStore::getSuggestionsKeyword);
        setSearchAction(binding.owner, binding.searchOwner, this::findByOwnerFn, DataStore::getSuggestionsOwnerName);
        setSearchAction(null, binding.searchFilter, this::findByFilterFn, null);
        setSearchAction(binding.trackable, binding.displayTrackable, this::findTrackableFn, DataStore::getSuggestionsTrackableCode);

        // geocodes and trackable codes are case-insensitive; force upper case while typing
        binding.geocode.setFilters(new InputFilter[] { new InputFilter.AllCaps() });
        binding.trackable.setFilters(new InputFilter[] { new InputFilter.AllCaps() });

        binding.searchFilterInfo.setOnClickListener(v -> SimpleDialog.of(this).setMessage(TextParam.id(R.string.search_filter_info_message).setMarkdown(true)).show());

        handlePotentialClipboardGeocode();
    }

    /**
     * Detect geocodes in clipboard
     *
     * Needs to run async as clipboard access is blocked if activity is not yet created.
     */
    private void handlePotentialClipboardGeocode() {
        binding.geocodeInputLayout.postDelayed(() -> {
            final String potentialGeocode = ClipboardUtils.getText();

            if (ConnectorFactory.getConnector(potentialGeocode) instanceof ISearchByGeocode) {
                // pre-fill the geocode field and tell the user where the value came from
                binding.geocode.setText(potentialGeocode);
                binding.geocodeInputLayout.setHelperText(getString(R.string.search_geocode_from_clipboard));

                // clear hint if text input get changed; listener removes itself after first edit
                binding.geocode.addTextChangedListener(new TextWatcher() {
                    @Override
                    public void beforeTextChanged(final CharSequence s, final int start, final int count, final int after) {
                        // nothing
                    }

                    @Override
                    public void onTextChanged(final CharSequence s, final int start, final int before, final int count) {
                        // nothing
                    }

                    @Override
                    public void afterTextChanged(final Editable s) {
                        binding.geocodeInputLayout.setHelperText(null);
                        binding.geocode.removeTextChangedListener(this);
                    }
                });
            }
        }, 500);
    }

    /**
     * Binds one search row: fires {@code runnable} on the button click and on the
     * edit-text's IME action, and optionally attaches an auto-complete suggestion source.
     *
     * @param editText           input field, may be null for button-only rows (e.g. filter)
     * @param button             action button, never null
     * @param runnable           search action to execute
     * @param suggestionFunction supplier of auto-complete suggestions, may be null
     */
    private static void setSearchAction(final AutoCompleteTextView editText, final Button button, @NonNull final Runnable runnable, @Nullable final Func1<String, String[]> suggestionFunction) {
        if (editText != null) {
            EditUtils.setActionListener(editText, runnable);
        }
        button.setOnClickListener(arg0 -> runnable.run());
        if (suggestionFunction != null) {
            editText.setAdapter(new AutoCompleteAdapter(editText.getContext(), android.R.layout.simple_dropdown_item_1line, suggestionFunction));
        }
    }

    /** Opens the coordinate input dialog; the result arrives via {@link #updateCoordinates(Geopoint)}. */
    private void updateCoordinates() {
        final CoordinatesInputDialog coordsDialog = CoordinatesInputDialog.getInstance(null, null);
        coordsDialog.setCancelable(true);
        coordsDialog.show(getSupportFragmentManager(), "wpedit_dialog");
    }

    /** Callback from the coordinate dialog: mirrors the chosen point into the two buttons. */
    @Override
    public void updateCoordinates(final Geopoint gp) {
        binding.buttonLatitude.setText(gp.format(GeopointFormatter.Format.LAT_DECMINUTE));
        binding.buttonLongitude.setText(gp.format(GeopointFormatter.Format.LON_DECMINUTE));
    }

    @Override
    public boolean supportsNullCoordinates() {
        return false;
    }

    /** Starts a cache-list search around the entered coordinates, defaulting to the current position. */
    private void findByCoordsFn() {
        String[] latlonText = getCoordText();
        if (StringUtils.isEmpty(latlonText[0]) || StringUtils.isEmpty(latlonText[1])) {
            // no coordinates entered yet: fall back to the device's current position
            final Geopoint gp = Sensors.getInstance().currentGeo().getCoords();
            updateCoordinates(gp);
            latlonText = getCoordText();
        }

        try {
            CacheListActivity.startActivityCoordinates(this, new Geopoint(latlonText[0], latlonText[1]), null);
        } catch (final Geopoint.ParseException e) {
            showToast(res.getString(e.resource));
        }
    }

    /** Returns the trimmed { latitude, longitude } texts currently shown on the buttons. */
    private String[] getCoordText() {
        return new String[] { StringUtils.trim(binding.buttonLatitude.getText().toString()), StringUtils.trim(binding.buttonLongitude.getText().toString()) };
    }

    private void findByKeywordFn() {
        // find caches by keyword
        final String keyText = StringUtils.trim(binding.keyword.getText().toString());

        if (StringUtils.isBlank(keyText)) {
            SimpleDialog.of(this).setTitle(R.string.warn_search_help_title).setMessage(R.string.warn_search_help_keyword).show();
            return;
        }

        CacheListActivity.startActivityKeyword(this, keyText);
    }

    /** Hands the entered address over to the address list screen. */
    private void findByAddressFn() {
        final String addText = StringUtils.trim(binding.address.getText().toString());

        if (StringUtils.isBlank(addText)) {
            SimpleDialog.of(this).setTitle(R.string.warn_search_help_title).setMessage(R.string.warn_search_help_address).show();
            return;
        }

        final Intent addressesIntent = new Intent(this, AddressListActivity.class);
        addressesIntent.putExtra(Intents.EXTRA_KEYWORD, addText);
        startActivity(addressesIntent);
    }

    private void findByOwnerFn() {
        findByOwnerFn(binding.owner.getText().toString());
    }

    /** Starts an owner search; also used by the "my caches" menu item with the configured user name. */
    private void findByOwnerFn(final String userName) {
        final String usernameText = StringUtils.trimToEmpty(userName);

        if (StringUtils.isBlank(usernameText)) {
            SimpleDialog.of(this).setTitle(R.string.warn_search_help_title).setMessage(R.string.warn_search_help_user).show();
            return;
        }

        CacheListActivity.startActivityOwner(this, usernameText);
    }

    /** Opens the filter selection screen; result handled in {@link #onActivityResult}. */
    private void findByFilterFn() {
        GeocacheFilterActivity.selectFilter(this, new GeocacheFilterContext(GeocacheFilterContext.FilterType.LIVE), null, false);
    }

    @Override
    protected void onActivityResult(final int requestCode, final int resultCode, @Nullable final Intent data) {
        if (requestCode == GeocacheFilterActivity.REQUEST_SELECT_FILTER && resultCode == Activity.RESULT_OK) {
            CacheListActivity.startActivityFilter(this);
        } else {
            super.onActivityResult(requestCode, resultCode, data);
        }
    }

    /** Opens the cache detail page for the entered geocode, rejecting blank input or a bare "GC" prefix. */
    private void findByGeocodeFn() {
        final String geocodeText = StringUtils.trimToEmpty(binding.geocode.getText().toString());

        if (StringUtils.isBlank(geocodeText) || geocodeText.equalsIgnoreCase("GC")) {
            SimpleDialog.of(this).setTitle(R.string.warn_search_help_title).setMessage(R.string.warn_search_help_gccode).show();
            return;
        }

        if (ConnectorFactory.anyConnectorActive()) {
            CacheDetailActivity.startActivity(this, geocodeText.toUpperCase(Locale.US));
        } else {
            showToast(getString(R.string.warn_no_connector));
        }
    }

    /** Opens the trackable page for the entered code, rejecting blank input or a bare "TB" prefix. */
    private void findTrackableFn() {
        final String trackableText = StringUtils.trimToEmpty(binding.trackable.getText().toString());

        if (StringUtils.isBlank(trackableText) || trackableText.equalsIgnoreCase("TB")) {
            SimpleDialog.of(this).setTitle(R.string.warn_search_help_title).setMessage(R.string.warn_search_help_tb).show();
            return;
        }

        // check temporarily disabled due to #7617
        // if (ConnectorFactory.anyTrackableConnectorActive()) {
        final Intent trackablesIntent = new Intent(this, TrackableActivity.class);
        trackablesIntent.putExtra(Intents.EXTRA_GEOCODE, trackableText.toUpperCase(Locale.US));
        startActivity(trackablesIntent);
        /*
        } else {
            showToast(getString(R.string.warn_no_connector));
        }
        */
    }

    @Override
    public final boolean onCreateOptionsMenu(final Menu menu) {
        getMenuInflater().inflate(R.menu.search_activity_options, menu);
        return true;
    }

    @Override
    public final boolean onOptionsItemSelected(final MenuItem item) {
        if (item.getItemId() == R.id.menu_search_own_caches) {
            findByOwnerFn(Settings.getUserName());
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Launches this activity for a scanned code; keyword fallback is disabled so the
     * caller can react to an unrecognized scan via the CANCELED activity result.
     *
     * @param scan         the scanned text to search for
     * @param fromActivity calling activity, receives the result
     */
    public static void startActivityScan(final String scan, final Activity fromActivity) {
        final Intent searchIntent = new Intent(fromActivity, SearchActivity.class);
        searchIntent.setAction(Intent.ACTION_SEARCH).
                putExtra(SearchManager.QUERY, scan).
                putExtra(Intents.EXTRA_KEYWORD_SEARCH, false);
        fromActivity.startActivityForResult(searchIntent, Intents.SEARCH_REQUEST_CODE);
    }
}
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-waf/src/main/java/com/amazonaws/services/waf/model/waf/transform/UpdateRuleRequestMarshaller.java
2597
/*
 * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.waf.model.waf.transform;

import java.util.List;
import javax.annotation.Generated;

import com.amazonaws.SdkClientException;
import com.amazonaws.services.waf.model.*;

import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;

/**
 * UpdateRuleRequestMarshaller
 *
 * <p>NOTE(review): this class is code-generated ({@code @Generated}); do not edit the
 * marshalling logic by hand — regenerate from the service model instead. It is a
 * stateless singleton that maps the three UpdateRuleRequest members onto their
 * payload field names.</p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class UpdateRuleRequestMarshaller {

    // Binding of the RuleId member to the "RuleId" payload field.
    private static final MarshallingInfo<String> RULEID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("RuleId").build();
    // Binding of the ChangeToken member to the "ChangeToken" payload field.
    private static final MarshallingInfo<String> CHANGETOKEN_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
            .marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ChangeToken").build();
    // Binding of the Updates list member to the "Updates" payload field.
    private static final MarshallingInfo<List> UPDATES_BINDING = MarshallingInfo.builder(MarshallingType.LIST).marshallLocation(MarshallLocation.PAYLOAD)
            .marshallLocationName("Updates").build();

    // Stateless, so a single shared instance suffices.
    private static final UpdateRuleRequestMarshaller instance = new UpdateRuleRequestMarshaller();

    public static UpdateRuleRequestMarshaller getInstance() {
        return instance;
    }

    /**
     * Marshall the given parameter object.
     */
    public void marshall(UpdateRuleRequest updateRuleRequest, ProtocolMarshaller protocolMarshaller) {

        if (updateRuleRequest == null) {
            throw new SdkClientException("Invalid argument passed to marshall(...)");
        }

        try {
            protocolMarshaller.marshall(updateRuleRequest.getRuleId(), RULEID_BINDING);
            protocolMarshaller.marshall(updateRuleRequest.getChangeToken(), CHANGETOKEN_BINDING);
            protocolMarshaller.marshall(updateRuleRequest.getUpdates(), UPDATES_BINDING);
        } catch (Exception e) {
            // wrap any marshalling failure in the SDK's client-side exception type, preserving the cause
            throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
        }
    }

}
apache-2.0
apereo/cas
support/cas-server-support-saml-idp/src/test/java/org/apereo/cas/support/saml/services/MetadataEntityAttributesAttributeReleasePolicyTests.java
3076
package org.apereo.cas.support.saml.services;

import org.apereo.cas.authentication.CoreAuthenticationTestUtils;
import org.apereo.cas.services.RegisteredServiceAttributeReleasePolicyContext;
import org.apereo.cas.support.saml.BaseSamlIdPConfigurationTests;
import org.apereo.cas.support.saml.SamlIdPTestUtils;
import org.apereo.cas.util.CollectionUtils;
import org.apereo.cas.util.serialization.JacksonObjectMapperFactory;

import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.val;
import org.apache.commons.io.FileUtils;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.springframework.test.context.TestPropertySource;

import java.io.File;
import java.io.IOException;
import java.util.UUID;

import static org.junit.jupiter.api.Assertions.*;

/**
 * This is {@link MetadataEntityAttributesAttributeReleasePolicyTests}.
 *
 * <p>Covers two behaviors of {@code MetadataEntityAttributesAttributeReleasePolicy}:
 * lossless JSON round-tripping of a configured policy, and the policy releasing no
 * attributes when its entity-attribute predicate does not match the service metadata.</p>
 *
 * @author Misagh Moayyed
 * @since 6.5.0
 */
@Tag("SAML2")
@TestPropertySource(properties = {
    "cas.authn.saml-idp.core.entity-id=https://cas.example.org/idp",
    "cas.authn.saml-idp.metadata.file-system.location=${#systemProperties['java.io.tmpdir']}/idp-metadata2"
})
public class MetadataEntityAttributesAttributeReleasePolicyTests extends BaseSamlIdPConfigurationTests {
    // Scratch file for the serialization round-trip; lives in the system temp directory.
    private static final File JSON_FILE = new File(FileUtils.getTempDirectoryPath(), "MetadataEntityAttributesAttributeReleasePolicyTests.json");

    // Default typing must be enabled so the concrete policy subtype survives the round-trip.
    private static final ObjectMapper MAPPER = JacksonObjectMapperFactory.builder()
        .defaultTypingEnabled(true).build().toObjectMapper();

    /** Serialize a fully-populated policy to JSON and verify it deserializes back equal. */
    @Test
    public void verifySerializationToJson() throws IOException {
        val filter = new MetadataEntityAttributesAttributeReleasePolicy();
        filter.setEntityAttribute("entity-attribute");
        filter.setEntityAttributeFormat("entity-format");
        filter.setEntityAttributeValues(CollectionUtils.wrapSet("one", "two"));
        MAPPER.writeValue(JSON_FILE, filter);
        val strategyRead = MAPPER.readValue(JSON_FILE, MetadataEntityAttributesAttributeReleasePolicy.class);
        assertEquals(filter, strategyRead);
        assertNotNull(strategyRead.toString());
    }

    /** A policy whose entity-attribute values do not match the metadata must release nothing. */
    @Test
    public void verifyPredicateFails() {
        val filter = new MetadataEntityAttributesAttributeReleasePolicy();
        filter.setEntityAttribute("entity-attribute");
        filter.setEntityAttributeFormat("entity-format");
        filter.setEntityAttributeValues(CollectionUtils.wrapSet("one", "two"));

        val registeredService = SamlIdPTestUtils.getSamlRegisteredService();
        registeredService.setAttributeReleasePolicy(filter);

        val context = RegisteredServiceAttributeReleasePolicyContext.builder()
            .registeredService(registeredService)
            .service(CoreAuthenticationTestUtils.getService())
            .principal(CoreAuthenticationTestUtils.getPrincipal("casuser",
                CollectionUtils.wrap("givenName", UUID.randomUUID().toString())))
            .build();
        val attributes = filter.getAttributes(context);
        assertTrue(attributes.isEmpty());
    }
}
apache-2.0
prasanthj/hyperloglog
benchmarks/src/main/java/com/github/prasanthj/hyperloglog/HyperLogLogAdd.java
3826
/*
 * Copyright 2014 Prasanth Jayachandran
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.prasanthj.hyperloglog;

import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.TimeUnit;

import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OperationsPerInvocation;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import org.openjdk.jmh.profile.LinuxPerfAsmProfiler;
import org.openjdk.jmh.profile.LinuxPerfNormProfiler;
import org.openjdk.jmh.profile.LinuxPerfProfiler;
import org.openjdk.jmh.runner.Runner;
import org.openjdk.jmh.runner.RunnerException;
import org.openjdk.jmh.runner.options.Options;
import org.openjdk.jmh.runner.options.OptionsBuilder;

import com.github.prasanthj.hll.HyperLogLog;

/**
 * JMH benchmark comparing {@code add()} throughput of this project's HyperLogLog
 * against Hive's implementation, both configured with 10 register-index bits and
 * sparse encoding. Average time is reported per single add (100 adds per
 * invocation, matching {@code @OperationsPerInvocation}).
 */
@State(Scope.Benchmark)
@Warmup(iterations = 10, time = 1)
@Measurement(iterations = 10, time = 1)
@Fork(1)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.NANOSECONDS)
public class HyperLogLogAdd {

  // 100 pseudo-random hashcodes shared by both benchmarks; fixed seed keeps the
  // workload identical across runs and between the two implementations.
  private static List<Long> hashcodes;

  static {
    hashcodes = new ArrayList<>();
    Random random = new Random(123);
    for (int i = 0; i < 100; i++) {
      hashcodes.add(random.nextLong());
    }
  }

  /** Measures this project's HLL; the builder cost is intentionally included per invocation. */
  @Benchmark
  @OperationsPerInvocation(100)
  public void testHLLAdd(Blackhole blackhole) {
    final HyperLogLog hll = HyperLogLog
        .builder()
        .setNumRegisterIndexBits(10)
        .setEncoding(HyperLogLog.EncodingType.SPARSE)
        .build();
    for (long hashcode : hashcodes) {
      hll.add(hashcode);
    }
    // consume the sketch so the JIT cannot eliminate the adds as dead code
    blackhole.consume(hll);
  }

  /** Same workload against Hive's HyperLogLog for an apples-to-apples comparison. */
  @Benchmark
  @OperationsPerInvocation(100)
  public void testHLLAddHive(Blackhole blackhole) {
    final org.apache.hadoop.hive.common.ndv.hll.HyperLogLog hiveHll = org.apache.hadoop.hive.common.ndv.hll
        .HyperLogLog
        .builder()
        .setNumRegisterIndexBits(10)
        .setEncoding(org.apache.hadoop.hive.common.ndv.hll.HyperLogLog.EncodingType.SPARSE)
        .build();
    for (long hashcode : hashcodes) {
      hiveHll.add(hashcode);
    }
    blackhole.consume(hiveHll);
  }

  /*
   * ============================== HOW TO RUN THIS TEST: ====================================
   *
   * You can run this test:
   *
   * a) Via the command line:
   *    $ mvn clean install
   *    $ java -jar target/benchmarks.jar HyperLogLogAdd -prof perf     -f 1 (Linux)
   *    $ java -jar target/benchmarks.jar HyperLogLogAdd -prof perfnorm -f 3 (Linux)
   *    $ java -jar target/benchmarks.jar HyperLogLogAdd -prof perfasm  -f 1 (Linux)
   *    $ java -jar target/benchmarks.jar HyperLogLogAdd -prof perf -jvmArgsAppend "-XX:AllocatePrefetchStyle=2"
   */
  /** Programmatic entry point: runs both benchmarks with the Linux perf profilers attached. */
  public static void main(String[] args) throws RunnerException {
    Options opt = new OptionsBuilder()
        .include(HyperLogLogAdd.class.getSimpleName())
        .addProfiler(LinuxPerfProfiler.class)
        .addProfiler(LinuxPerfNormProfiler.class)
        .addProfiler(LinuxPerfAsmProfiler.class)
        .build();
    new Runner(opt).run();
  }
}
apache-2.0
EvilMcJerkface/crate
benchmarks/src/test/java/io/crate/execution/engine/collect/collectors/LuceneBatchIteratorBenchmark.java
3593
/*
 * Licensed to Crate under one or more contributor license agreements.
 * See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership. Crate licenses this file
 * to you under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may
 * obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
 * implied. See the License for the specific language governing
 * permissions and limitations under the License.
 *
 * However, if you have executed another commercial license agreement
 * with Crate these terms will supersede the license and you may use the
 * software solely pursuant to the terms of the relevant commercial
 * agreement.
 */
package io.crate.execution.engine.collect.collectors;

import io.crate.expression.reference.doc.lucene.CollectorContext;
import io.crate.expression.reference.doc.lucene.IntegerColumnReference;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.infra.Blackhole;

import java.util.Collections;
import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * JMH benchmark that measures full consumption of a {@code LuceneBatchIterator}
 * over a 10M-document in-memory index with a single numeric doc-values column.
 */
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MILLISECONDS)
@State(Scope.Benchmark)
public class LuceneBatchIteratorBenchmark {

    private CollectorContext collectorContext;
    private IndexSearcher indexSearcher;
    private List<IntegerColumnReference> columnRefs;

    /**
     * Builds the index fixture once per benchmark run: 10M docs, each carrying a
     * NumericDocValuesField "x" equal to its insertion index, force-merged to a
     * single segment so iteration cost is not dominated by segment switching.
     */
    @Setup
    public void createLuceneBatchIterator() throws Exception {
        IndexWriter iw = new IndexWriter(
            new ByteBuffersDirectory(), new IndexWriterConfig(new StandardAnalyzer()));
        String columnName = "x";
        for (int i = 0; i < 10_000_000; i++) {
            Document doc = new Document();
            doc.add(new NumericDocValuesField(columnName, i));
            iw.addDocument(doc);
        }
        iw.commit();
        iw.forceMerge(1, true);
        indexSearcher = new IndexSearcher(DirectoryReader.open(iw));
        IntegerColumnReference columnReference = new IntegerColumnReference(columnName);
        columnRefs = Collections.singletonList(columnReference);

        collectorContext = new CollectorContext();
    }

    /** Iterates the whole match-all result set, consuming column "x" of every row. */
    @Benchmark
    public void measureConsumeLuceneBatchIterator(Blackhole blackhole) throws Exception {
        LuceneBatchIterator it = new LuceneBatchIterator(
            indexSearcher,
            new MatchAllDocsQuery(),
            null,
            false,
            collectorContext,
            columnRefs,
            columnRefs
        );
        while (it.moveNext()) {
            // consume each value so the loop cannot be dead-code eliminated
            blackhole.consume(it.currentElement().get(0));
        }
    }
}
apache-2.0
darlyhellen/oto
shtr/src/main/java/cn/com/bean/DetailsGoodsShow.java
769
package cn.com.bean; public class DetailsGoodsShow { private Integer id; private String name; private String url; private Integer commodityid; public Integer getId() { return id; } public void setId(Integer id) { this.id = id; } public String getName() { return name; } public void setName(String name) { this.name = name == null ? null : name.trim(); } public String getUrl() { return url; } public void setUrl(String url) { this.url = url == null ? null : url.trim(); } public Integer getCommodityid() { return commodityid; } public void setCommodityid(Integer commodityid) { this.commodityid = commodityid; } }
apache-2.0
Netflix/astyanax
astyanax-cassandra/src/main/java/com/netflix/astyanax/util/ColumnarRecordWriter.java
4387
/*******************************************************************************
 * Copyright 2011 Netflix
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package com.netflix.astyanax.util;

import java.nio.ByteBuffer;
import java.util.Iterator;
import java.util.List;

import com.netflix.astyanax.shaded.org.apache.cassandra.utils.Pair;

import com.netflix.astyanax.ColumnListMutation;
import com.netflix.astyanax.Keyspace;
import com.netflix.astyanax.MutationBatch;
import com.netflix.astyanax.SerializerPackage;
import com.netflix.astyanax.connectionpool.exceptions.ConnectionException;
import com.netflix.astyanax.model.ColumnFamily;
import com.netflix.astyanax.serializers.ByteBufferSerializer;
import com.netflix.astyanax.serializers.SerializerPackageImpl;
import com.netflix.astyanax.serializers.UnknownComparatorException;

/**
 * Writer rows where the first pair is the key and subsequent pairs are columns.
 *
 * @author elandau
 *
 */
public class ColumnarRecordWriter implements RecordWriter {
    private Keyspace keyspace;
    private SerializerPackage serializers;
    private ColumnFamily<ByteBuffer, ByteBuffer> cf;
    // Number of rows to buffer before executing the mutation batch; 1 = write-through.
    private int batchSize = 1;
    private MutationBatch mutation;

    /**
     * Creates a writer that discovers the column family's serializers from the
     * cluster schema, falling back to the default serializer package when the
     * schema cannot be read or contains an unknown comparator.
     *
     * @param keyspace keyspace to write into
     * @param cfName   name of the target column family
     */
    public ColumnarRecordWriter(Keyspace keyspace, String cfName) {
        this.keyspace = keyspace;
        this.cf = new ColumnFamily<ByteBuffer, ByteBuffer>(cfName,
                ByteBufferSerializer.get(), ByteBufferSerializer.get());
        try {
            this.serializers = keyspace.getSerializerPackage(cfName, true);
        } catch (ConnectionException e) {
            this.serializers = SerializerPackageImpl.DEFAULT_SERIALIZER_PACKAGE;
        } catch (UnknownComparatorException e) {
            // BUG FIX: this branch previously left 'serializers' null, which
            // guaranteed a NullPointerException on the first write(). Fall back
            // to the defaults, consistent with the ConnectionException path.
            this.serializers = SerializerPackageImpl.DEFAULT_SERIALIZER_PACKAGE;
        }
    }

    /**
     * Creates a writer using an explicitly provided serializer package instead
     * of discovering one from the schema.
     *
     * @param keyspace    keyspace to write into
     * @param cfName      name of the target column family
     * @param serializers serializers for keys, column names and values
     */
    public ColumnarRecordWriter(Keyspace keyspace, String cfName, SerializerPackage serializers) {
        this.keyspace = keyspace;
        this.serializers = serializers;
        this.cf = new ColumnFamily<ByteBuffer, ByteBuffer>(cfName,
                ByteBufferSerializer.get(), ByteBufferSerializer.get());
    }

    /**
     * Sets how many rows are buffered before each batch execution.
     *
     * @param size batch size in rows; must be &gt;= 1
     * @return this, for chaining
     */
    public ColumnarRecordWriter setBatchSize(int size) {
        this.batchSize = size;
        return this;
    }

    /** Prepares a fresh mutation batch. Must be called before the first {@link #write}. */
    @Override
    public void start() throws ConnectionException {
        this.mutation = keyspace.prepareMutationBatch();
    }

    /**
     * Buffers one row. The first pair supplies the row key (its {@code right}
     * value); every following pair is a column name/value. Executes the pending
     * batch once {@code batchSize} rows have accumulated.
     *
     * @param record key pair followed by column pairs; records with no columns are ignored
     * @throws RuntimeException if serialization or batch execution fails
     */
    @Override
    public void write(List<Pair<String, String>> record) {
        // need at least a key and one column to produce a mutation
        if (record.size() <= 1)
            return;

        // Key is first field
        Iterator<Pair<String, String>> iter = record.iterator();
        ByteBuffer rowKey = this.serializers.keyAsByteBuffer(iter.next().right);

        // Build row mutation for all columns
        ColumnListMutation<ByteBuffer> rowMutation = mutation.withRow(cf, rowKey);
        while (iter.hasNext()) {
            Pair<String, String> pair = iter.next();
            try {
                rowMutation.putColumn(
                        this.serializers.columnAsByteBuffer(pair.left),
                        this.serializers.valueAsByteBuffer(pair.left, pair.right), null);
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
        }

        // Execute the batch once it has reached the configured size
        if (batchSize == mutation.getRowCount()) {
            try {
                mutation.execute();
            } catch (ConnectionException e) {
                mutation.discardMutations();
                throw new RuntimeException(e);
            }
        }
    }

    /**
     * Flushes any remaining buffered rows. Best-effort: a failure here discards
     * the pending mutations silently rather than throwing.
     */
    @Override
    public void shutdown() {
        if (mutation.getRowCount() > 0) {
            try {
                mutation.execute();
            } catch (ConnectionException e) {
                // deliberate best-effort flush on shutdown; drop what could not be written
                mutation.discardMutations();
            }
        }
    }
}
apache-2.0
rhauch/debezium-proto
debezium/src/test/java/org/debezium/assertions/BatchAssert.java
950
/*
 * Copyright 2014 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0
 */
package org.debezium.assertions;

import org.debezium.message.Batch;
import org.debezium.model.Identifier;
import org.fest.assertions.GenericAssert;
import org.fest.assertions.IntAssert;

import static org.fest.assertions.Assertions.assertThat;

/**
 * A specialization of {@link GenericAssert} for Fest utilities.
 *
 * @author Randall Hauch
 */
public class BatchAssert extends GenericAssert<BatchAssert, Batch<? extends Identifier>> {

    /**
     * Creates a new {@link BatchAssert}.
     *
     * @param actual the target to verify.
     */
    public BatchAssert(Batch<? extends Identifier> actual) {
        super(BatchAssert.class, actual);
    }

    /**
     * Asserts the batch is non-null and returns an {@link IntAssert} over its
     * patch count for further chained assertions, e.g.
     * {@code assertThat(batch).hasSize().isEqualTo(3)}.
     *
     * <p>NOTE(review): despite the name, this method does not itself assert a
     * size — it only hands back the patch-count assertion.</p>
     *
     * @return an {@link IntAssert} on {@code actual.patchCount()}
     */
    public IntAssert hasSize() {
        isNotNull();
        return assertThat(actual.patchCount());
    }
}
apache-2.0
vpavic/spring-session
spring-session-jdbc/src/main/java/org/springframework/session/jdbc/JdbcIndexedSessionRepository.java
31700
/* * Copyright 2014-2020 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.session.jdbc; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.time.Duration; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.function.Supplier; import java.util.stream.Collectors; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.core.convert.ConversionService; import org.springframework.core.convert.TypeDescriptor; import org.springframework.core.convert.support.GenericConversionService; import org.springframework.core.serializer.support.DeserializingConverter; import org.springframework.core.serializer.support.SerializingConverter; import org.springframework.dao.DataAccessException; import org.springframework.dao.DataIntegrityViolationException; import org.springframework.dao.DuplicateKeyException; import org.springframework.jdbc.core.BatchPreparedStatementSetter; import org.springframework.jdbc.core.JdbcOperations; import org.springframework.jdbc.core.ResultSetExtractor; import org.springframework.jdbc.support.lob.DefaultLobHandler; import org.springframework.jdbc.support.lob.LobCreator; import org.springframework.jdbc.support.lob.LobHandler; import 
org.springframework.session.DelegatingIndexResolver; import org.springframework.session.FindByIndexNameSessionRepository; import org.springframework.session.FlushMode; import org.springframework.session.IndexResolver; import org.springframework.session.MapSession; import org.springframework.session.PrincipalNameIndexResolver; import org.springframework.session.SaveMode; import org.springframework.session.Session; import org.springframework.transaction.support.TransactionOperations; import org.springframework.util.Assert; import org.springframework.util.StringUtils; /** * A {@link org.springframework.session.SessionRepository} implementation that uses * Spring's {@link JdbcOperations} to store sessions in a relational database. This * implementation does not support publishing of session events. * <p> * An example of how to create a new instance can be seen below: * * <pre class="code"> * JdbcTemplate jdbcTemplate = new JdbcTemplate(); * * // ... configure jdbcTemplate ... * * TransactionTemplate transactionTemplate = new TransactionTemplate(); * * // ... configure transactionTemplate ... * * JdbcIndexedSessionRepository sessionRepository = * new JdbcIndexedSessionRepository(jdbcTemplate, transactionTemplate); * </pre> * * For additional information on how to create and configure {@code JdbcTemplate} and * {@code TransactionTemplate}, refer to the <a href= * "https://docs.spring.io/spring/docs/current/spring-framework-reference/html/spring-data-tier.html"> * Spring Framework Reference Documentation</a>. * <p> * By default, this implementation uses <code>SPRING_SESSION</code> and * <code>SPRING_SESSION_ATTRIBUTES</code> tables to store sessions. Note that the table * name can be customized using the {@link #setTableName(String)} method. In that case the * table used to store attributes will be named using the provided table name, suffixed * with <code>_ATTRIBUTES</code>. 
* * Depending on your database, the table definition can be described as below: * * <pre class="code"> * CREATE TABLE SPRING_SESSION ( * PRIMARY_ID CHAR(36) NOT NULL, * SESSION_ID CHAR(36) NOT NULL, * CREATION_TIME BIGINT NOT NULL, * LAST_ACCESS_TIME BIGINT NOT NULL, * MAX_INACTIVE_INTERVAL INT NOT NULL, * EXPIRY_TIME BIGINT NOT NULL, * PRINCIPAL_NAME VARCHAR(100), * CONSTRAINT SPRING_SESSION_PK PRIMARY KEY (PRIMARY_ID) * ); * * CREATE UNIQUE INDEX SPRING_SESSION_IX1 ON SPRING_SESSION (SESSION_ID); * CREATE INDEX SPRING_SESSION_IX2 ON SPRING_SESSION (EXPIRY_TIME); * CREATE INDEX SPRING_SESSION_IX3 ON SPRING_SESSION (PRINCIPAL_NAME); * * CREATE TABLE SPRING_SESSION_ATTRIBUTES ( * SESSION_PRIMARY_ID CHAR(36) NOT NULL, * ATTRIBUTE_NAME VARCHAR(200) NOT NULL, * ATTRIBUTE_BYTES BYTEA NOT NULL, * CONSTRAINT SPRING_SESSION_ATTRIBUTES_PK PRIMARY KEY (SESSION_PRIMARY_ID, ATTRIBUTE_NAME), * CONSTRAINT SPRING_SESSION_ATTRIBUTES_FK FOREIGN KEY (SESSION_PRIMARY_ID) REFERENCES SPRING_SESSION(PRIMARY_ID) ON DELETE CASCADE * ); * * CREATE INDEX SPRING_SESSION_ATTRIBUTES_IX1 ON SPRING_SESSION_ATTRIBUTES (SESSION_PRIMARY_ID); * </pre> * * Due to the differences between the various database vendors, especially when it comes * to storing binary data, make sure to use SQL script specific to your database. Scripts * for most major database vendors are packaged as * <code>org/springframework/session/jdbc/schema-*.sql</code>, where <code>*</code> is the * target database type. * * @author Vedran Pavic * @author Craig Andrews * @since 2.2.0 */ public class JdbcIndexedSessionRepository implements FindByIndexNameSessionRepository<JdbcIndexedSessionRepository.JdbcSession> { /** * The default name of database table used by Spring Session to store sessions. 
*/ public static final String DEFAULT_TABLE_NAME = "SPRING_SESSION"; private static final String SPRING_SECURITY_CONTEXT = "SPRING_SECURITY_CONTEXT"; // @formatter:off private static final String CREATE_SESSION_QUERY = "" + "INSERT INTO %TABLE_NAME% (PRIMARY_ID, SESSION_ID, CREATION_TIME, LAST_ACCESS_TIME, MAX_INACTIVE_INTERVAL, EXPIRY_TIME, PRINCIPAL_NAME) " + "VALUES (?, ?, ?, ?, ?, ?, ?)"; // @formatter:on // @formatter:off private static final String CREATE_SESSION_ATTRIBUTE_QUERY = "" + "INSERT INTO %TABLE_NAME%_ATTRIBUTES (SESSION_PRIMARY_ID, ATTRIBUTE_NAME, ATTRIBUTE_BYTES) " + "VALUES (?, ?, ?)"; // @formatter:on // @formatter:off private static final String GET_SESSION_QUERY = "" + "SELECT S.PRIMARY_ID, S.SESSION_ID, S.CREATION_TIME, S.LAST_ACCESS_TIME, S.MAX_INACTIVE_INTERVAL, SA.ATTRIBUTE_NAME, SA.ATTRIBUTE_BYTES " + "FROM %TABLE_NAME% S " + "LEFT JOIN %TABLE_NAME%_ATTRIBUTES SA ON S.PRIMARY_ID = SA.SESSION_PRIMARY_ID " + "WHERE S.SESSION_ID = ?"; // @formatter:on // @formatter:off private static final String UPDATE_SESSION_QUERY = "" + "UPDATE %TABLE_NAME% " + "SET SESSION_ID = ?, LAST_ACCESS_TIME = ?, MAX_INACTIVE_INTERVAL = ?, EXPIRY_TIME = ?, PRINCIPAL_NAME = ? " + "WHERE PRIMARY_ID = ?"; // @formatter:on // @formatter:off private static final String UPDATE_SESSION_ATTRIBUTE_QUERY = "" + "UPDATE %TABLE_NAME%_ATTRIBUTES " + "SET ATTRIBUTE_BYTES = ? " + "WHERE SESSION_PRIMARY_ID = ? " + "AND ATTRIBUTE_NAME = ?"; // @formatter:on // @formatter:off private static final String DELETE_SESSION_ATTRIBUTE_QUERY = "" + "DELETE FROM %TABLE_NAME%_ATTRIBUTES " + "WHERE SESSION_PRIMARY_ID = ? " + "AND ATTRIBUTE_NAME = ?"; // @formatter:on // @formatter:off private static final String DELETE_SESSION_QUERY = "" + "DELETE FROM %TABLE_NAME% " + "WHERE SESSION_ID = ? 
" + "AND MAX_INACTIVE_INTERVAL >= 0"; // @formatter:on // @formatter:off private static final String LIST_SESSIONS_BY_PRINCIPAL_NAME_QUERY = "" + "SELECT S.PRIMARY_ID, S.SESSION_ID, S.CREATION_TIME, S.LAST_ACCESS_TIME, S.MAX_INACTIVE_INTERVAL, SA.ATTRIBUTE_NAME, SA.ATTRIBUTE_BYTES " + "FROM %TABLE_NAME% S " + "LEFT JOIN %TABLE_NAME%_ATTRIBUTES SA ON S.PRIMARY_ID = SA.SESSION_PRIMARY_ID " + "WHERE S.PRINCIPAL_NAME = ?"; // @formatter:on // @formatter:off private static final String DELETE_SESSIONS_BY_EXPIRY_TIME_QUERY = "" + "DELETE FROM %TABLE_NAME% " + "WHERE EXPIRY_TIME < ?"; // @formatter:on private static final Log logger = LogFactory.getLog(JdbcIndexedSessionRepository.class); private final JdbcOperations jdbcOperations; private final TransactionOperations transactionOperations; private final ResultSetExtractor<List<JdbcSession>> extractor = new SessionResultSetExtractor(); /** * The name of database table used by Spring Session to store sessions. */ private String tableName = DEFAULT_TABLE_NAME; private String createSessionQuery; private String createSessionAttributeQuery; private String getSessionQuery; private String updateSessionQuery; private String updateSessionAttributeQuery; private String deleteSessionAttributeQuery; private String deleteSessionQuery; private String listSessionsByPrincipalNameQuery; private String deleteSessionsByExpiryTimeQuery; /** * If non-null, this value is used to override the default value for * {@link JdbcSession#setMaxInactiveInterval(Duration)}. 
*/ private Integer defaultMaxInactiveInterval; private IndexResolver<Session> indexResolver = new DelegatingIndexResolver<>(new PrincipalNameIndexResolver<>()); private ConversionService conversionService = createDefaultConversionService(); private LobHandler lobHandler = new DefaultLobHandler(); private FlushMode flushMode = FlushMode.ON_SAVE; private SaveMode saveMode = SaveMode.ON_SET_ATTRIBUTE; /** * Create a new {@link JdbcIndexedSessionRepository} instance which uses the provided * {@link JdbcOperations} and {@link TransactionOperations} to manage sessions. * @param jdbcOperations the {@link JdbcOperations} to use * @param transactionOperations the {@link TransactionOperations} to use */ public JdbcIndexedSessionRepository(JdbcOperations jdbcOperations, TransactionOperations transactionOperations) { Assert.notNull(jdbcOperations, "jdbcOperations must not be null"); Assert.notNull(transactionOperations, "transactionOperations must not be null"); this.jdbcOperations = jdbcOperations; this.transactionOperations = transactionOperations; prepareQueries(); } /** * Set the name of database table used to store sessions. * @param tableName the database table name */ public void setTableName(String tableName) { Assert.hasText(tableName, "Table name must not be empty"); this.tableName = tableName.trim(); prepareQueries(); } /** * Set the custom SQL query used to create the session. * @param createSessionQuery the SQL query string */ public void setCreateSessionQuery(String createSessionQuery) { Assert.hasText(createSessionQuery, "Query must not be empty"); this.createSessionQuery = getQuery(createSessionQuery); } /** * Set the custom SQL query used to create the session attribute. 
* @param createSessionAttributeQuery the SQL query string */ public void setCreateSessionAttributeQuery(String createSessionAttributeQuery) { Assert.hasText(createSessionAttributeQuery, "Query must not be empty"); this.createSessionAttributeQuery = getQuery(createSessionAttributeQuery); } /** * Set the custom SQL query used to retrieve the session. * @param getSessionQuery the SQL query string */ public void setGetSessionQuery(String getSessionQuery) { Assert.hasText(getSessionQuery, "Query must not be empty"); this.getSessionQuery = getQuery(getSessionQuery); } /** * Set the custom SQL query used to update the session. * @param updateSessionQuery the SQL query string */ public void setUpdateSessionQuery(String updateSessionQuery) { Assert.hasText(updateSessionQuery, "Query must not be empty"); this.updateSessionQuery = getQuery(updateSessionQuery); } /** * Set the custom SQL query used to update the session attribute. * @param updateSessionAttributeQuery the SQL query string */ public void setUpdateSessionAttributeQuery(String updateSessionAttributeQuery) { Assert.hasText(updateSessionAttributeQuery, "Query must not be empty"); this.updateSessionAttributeQuery = getQuery(updateSessionAttributeQuery); } /** * Set the custom SQL query used to delete the session attribute. * @param deleteSessionAttributeQuery the SQL query string */ public void setDeleteSessionAttributeQuery(String deleteSessionAttributeQuery) { Assert.hasText(deleteSessionAttributeQuery, "Query must not be empty"); this.deleteSessionAttributeQuery = getQuery(deleteSessionAttributeQuery); } /** * Set the custom SQL query used to delete the session. * @param deleteSessionQuery the SQL query string */ public void setDeleteSessionQuery(String deleteSessionQuery) { Assert.hasText(deleteSessionQuery, "Query must not be empty"); this.deleteSessionQuery = getQuery(deleteSessionQuery); } /** * Set the custom SQL query used to retrieve the sessions by principal name. 
* @param listSessionsByPrincipalNameQuery the SQL query string */ public void setListSessionsByPrincipalNameQuery(String listSessionsByPrincipalNameQuery) { Assert.hasText(listSessionsByPrincipalNameQuery, "Query must not be empty"); this.listSessionsByPrincipalNameQuery = getQuery(listSessionsByPrincipalNameQuery); } /** * Set the custom SQL query used to delete the sessions by last access time. * @param deleteSessionsByExpiryTimeQuery the SQL query string */ public void setDeleteSessionsByExpiryTimeQuery(String deleteSessionsByExpiryTimeQuery) { Assert.hasText(deleteSessionsByExpiryTimeQuery, "Query must not be empty"); this.deleteSessionsByExpiryTimeQuery = getQuery(deleteSessionsByExpiryTimeQuery); } /** * Set the maximum inactive interval in seconds between requests before newly created * sessions will be invalidated. A negative time indicates that the session will never * timeout. The default is 1800 (30 minutes). * @param defaultMaxInactiveInterval the maximum inactive interval in seconds */ public void setDefaultMaxInactiveInterval(Integer defaultMaxInactiveInterval) { this.defaultMaxInactiveInterval = defaultMaxInactiveInterval; } /** * Set the {@link IndexResolver} to use. * @param indexResolver the index resolver */ public void setIndexResolver(IndexResolver<Session> indexResolver) { Assert.notNull(indexResolver, "indexResolver cannot be null"); this.indexResolver = indexResolver; } public void setLobHandler(LobHandler lobHandler) { Assert.notNull(lobHandler, "LobHandler must not be null"); this.lobHandler = lobHandler; } /** * Sets the {@link ConversionService} to use. * @param conversionService the converter to set */ public void setConversionService(ConversionService conversionService) { Assert.notNull(conversionService, "conversionService must not be null"); this.conversionService = conversionService; } /** * Set the flush mode. Default is {@link FlushMode#ON_SAVE}. 
* @param flushMode the flush mode */ public void setFlushMode(FlushMode flushMode) { Assert.notNull(flushMode, "flushMode must not be null"); this.flushMode = flushMode; } /** * Set the save mode. * @param saveMode the save mode */ public void setSaveMode(SaveMode saveMode) { Assert.notNull(saveMode, "saveMode must not be null"); this.saveMode = saveMode; } @Override public JdbcSession createSession() { MapSession delegate = new MapSession(); if (this.defaultMaxInactiveInterval != null) { delegate.setMaxInactiveInterval(Duration.ofSeconds(this.defaultMaxInactiveInterval)); } JdbcSession session = new JdbcSession(delegate, UUID.randomUUID().toString(), true); session.flushIfRequired(); return session; } @Override public void save(final JdbcSession session) { session.save(); } @Override public JdbcSession findById(final String id) { final JdbcSession session = this.transactionOperations.execute((status) -> { List<JdbcSession> sessions = JdbcIndexedSessionRepository.this.jdbcOperations.query( JdbcIndexedSessionRepository.this.getSessionQuery, (ps) -> ps.setString(1, id), JdbcIndexedSessionRepository.this.extractor); if (sessions.isEmpty()) { return null; } return sessions.get(0); }); if (session != null) { if (session.isExpired()) { deleteById(id); } else { return session; } } return null; } @Override public void deleteById(final String id) { this.transactionOperations.executeWithoutResult((status) -> JdbcIndexedSessionRepository.this.jdbcOperations .update(JdbcIndexedSessionRepository.this.deleteSessionQuery, id)); } @Override public Map<String, JdbcSession> findByIndexNameAndIndexValue(String indexName, final String indexValue) { if (!PRINCIPAL_NAME_INDEX_NAME.equals(indexName)) { return Collections.emptyMap(); } List<JdbcSession> sessions = this.transactionOperations .execute((status) -> JdbcIndexedSessionRepository.this.jdbcOperations.query( JdbcIndexedSessionRepository.this.listSessionsByPrincipalNameQuery, (ps) -> ps.setString(1, indexValue), 
JdbcIndexedSessionRepository.this.extractor)); Map<String, JdbcSession> sessionMap = new HashMap<>(sessions.size()); for (JdbcSession session : sessions) { sessionMap.put(session.getId(), session); } return sessionMap; } private void insertSessionAttributes(JdbcSession session, List<String> attributeNames) { Assert.notEmpty(attributeNames, "attributeNames must not be null or empty"); try (LobCreator lobCreator = this.lobHandler.getLobCreator()) { if (attributeNames.size() > 1) { try { this.jdbcOperations.batchUpdate(this.createSessionAttributeQuery, new BatchPreparedStatementSetter() { @Override public void setValues(PreparedStatement ps, int i) throws SQLException { String attributeName = attributeNames.get(i); ps.setString(1, session.primaryKey); ps.setString(2, attributeName); lobCreator.setBlobAsBytes(ps, 3, serialize(session.getAttribute(attributeName))); } @Override public int getBatchSize() { return attributeNames.size(); } }); } catch (DuplicateKeyException ex) { throw ex; } catch (DataIntegrityViolationException ex) { // parent record not found - we are ignoring this error because we // assume that a concurrent request has removed the session } } else { try { this.jdbcOperations.update(this.createSessionAttributeQuery, (ps) -> { String attributeName = attributeNames.get(0); ps.setString(1, session.primaryKey); ps.setString(2, attributeName); lobCreator.setBlobAsBytes(ps, 3, serialize(session.getAttribute(attributeName))); }); } catch (DuplicateKeyException ex) { throw ex; } catch (DataIntegrityViolationException ex) { // parent record not found - we are ignoring this error because we // assume that a concurrent request has removed the session } } } } private void updateSessionAttributes(JdbcSession session, List<String> attributeNames) { Assert.notEmpty(attributeNames, "attributeNames must not be null or empty"); try (LobCreator lobCreator = this.lobHandler.getLobCreator()) { if (attributeNames.size() > 1) { 
this.jdbcOperations.batchUpdate(this.updateSessionAttributeQuery, new BatchPreparedStatementSetter() { @Override public void setValues(PreparedStatement ps, int i) throws SQLException { String attributeName = attributeNames.get(i); lobCreator.setBlobAsBytes(ps, 1, serialize(session.getAttribute(attributeName))); ps.setString(2, session.primaryKey); ps.setString(3, attributeName); } @Override public int getBatchSize() { return attributeNames.size(); } }); } else { this.jdbcOperations.update(this.updateSessionAttributeQuery, (ps) -> { String attributeName = attributeNames.get(0); lobCreator.setBlobAsBytes(ps, 1, serialize(session.getAttribute(attributeName))); ps.setString(2, session.primaryKey); ps.setString(3, attributeName); }); } } } private void deleteSessionAttributes(JdbcSession session, List<String> attributeNames) { Assert.notEmpty(attributeNames, "attributeNames must not be null or empty"); if (attributeNames.size() > 1) { this.jdbcOperations.batchUpdate(this.deleteSessionAttributeQuery, new BatchPreparedStatementSetter() { @Override public void setValues(PreparedStatement ps, int i) throws SQLException { String attributeName = attributeNames.get(i); ps.setString(1, session.primaryKey); ps.setString(2, attributeName); } @Override public int getBatchSize() { return attributeNames.size(); } }); } else { this.jdbcOperations.update(this.deleteSessionAttributeQuery, (ps) -> { String attributeName = attributeNames.get(0); ps.setString(1, session.primaryKey); ps.setString(2, attributeName); }); } } public void cleanUpExpiredSessions() { Integer deletedCount = this.transactionOperations .execute((status) -> JdbcIndexedSessionRepository.this.jdbcOperations.update( JdbcIndexedSessionRepository.this.deleteSessionsByExpiryTimeQuery, System.currentTimeMillis())); if (logger.isDebugEnabled()) { logger.debug("Cleaned up " + deletedCount + " expired sessions"); } } private static GenericConversionService createDefaultConversionService() { GenericConversionService converter 
= new GenericConversionService(); converter.addConverter(Object.class, byte[].class, new SerializingConverter()); converter.addConverter(byte[].class, Object.class, new DeserializingConverter()); return converter; } private String getQuery(String base) { return StringUtils.replace(base, "%TABLE_NAME%", this.tableName); } private void prepareQueries() { this.createSessionQuery = getQuery(CREATE_SESSION_QUERY); this.createSessionAttributeQuery = getQuery(CREATE_SESSION_ATTRIBUTE_QUERY); this.getSessionQuery = getQuery(GET_SESSION_QUERY); this.updateSessionQuery = getQuery(UPDATE_SESSION_QUERY); this.updateSessionAttributeQuery = getQuery(UPDATE_SESSION_ATTRIBUTE_QUERY); this.deleteSessionAttributeQuery = getQuery(DELETE_SESSION_ATTRIBUTE_QUERY); this.deleteSessionQuery = getQuery(DELETE_SESSION_QUERY); this.listSessionsByPrincipalNameQuery = getQuery(LIST_SESSIONS_BY_PRINCIPAL_NAME_QUERY); this.deleteSessionsByExpiryTimeQuery = getQuery(DELETE_SESSIONS_BY_EXPIRY_TIME_QUERY); } private LobHandler getLobHandler() { return this.lobHandler; } private byte[] serialize(Object object) { return (byte[]) this.conversionService.convert(object, TypeDescriptor.valueOf(Object.class), TypeDescriptor.valueOf(byte[].class)); } private Object deserialize(byte[] bytes) { return this.conversionService.convert(bytes, TypeDescriptor.valueOf(byte[].class), TypeDescriptor.valueOf(Object.class)); } private enum DeltaValue { ADDED, UPDATED, REMOVED } private static <T> Supplier<T> value(T value) { return (value != null) ? () -> value : null; } private static <T> Supplier<T> lazily(Supplier<T> supplier) { Supplier<T> lazySupplier = new Supplier<T>() { private T value; @Override public T get() { if (this.value == null) { this.value = supplier.get(); } return this.value; } }; return (supplier != null) ? lazySupplier : null; } /** * The {@link Session} to use for {@link JdbcIndexedSessionRepository}. 
* * @author Vedran Pavic */ final class JdbcSession implements Session { private final Session delegate; private final String primaryKey; private boolean isNew; private boolean changed; private Map<String, DeltaValue> delta = new HashMap<>(); JdbcSession(MapSession delegate, String primaryKey, boolean isNew) { this.delegate = delegate; this.primaryKey = primaryKey; this.isNew = isNew; if (this.isNew || (JdbcIndexedSessionRepository.this.saveMode == SaveMode.ALWAYS)) { getAttributeNames().forEach((attributeName) -> this.delta.put(attributeName, DeltaValue.UPDATED)); } } boolean isNew() { return this.isNew; } boolean isChanged() { return this.changed; } Map<String, DeltaValue> getDelta() { return this.delta; } void clearChangeFlags() { this.isNew = false; this.changed = false; this.delta.clear(); } Instant getExpiryTime() { if (getMaxInactiveInterval().isNegative()) { return Instant.ofEpochMilli(Long.MAX_VALUE); } return getLastAccessedTime().plus(getMaxInactiveInterval()); } @Override public String getId() { return this.delegate.getId(); } @Override public String changeSessionId() { this.changed = true; return this.delegate.changeSessionId(); } @Override public <T> T getAttribute(String attributeName) { Supplier<T> supplier = this.delegate.getAttribute(attributeName); if (supplier == null) { return null; } T attributeValue = supplier.get(); if (attributeValue != null && JdbcIndexedSessionRepository.this.saveMode.equals(SaveMode.ON_GET_ATTRIBUTE)) { this.delta.put(attributeName, DeltaValue.UPDATED); } return attributeValue; } @Override public Set<String> getAttributeNames() { return this.delegate.getAttributeNames(); } @Override public void setAttribute(String attributeName, Object attributeValue) { boolean attributeExists = (this.delegate.getAttribute(attributeName) != null); boolean attributeRemoved = (attributeValue == null); if (!attributeExists && attributeRemoved) { return; } if (attributeExists) { if (attributeRemoved) { this.delta.merge(attributeName, 
DeltaValue.REMOVED, (oldDeltaValue, deltaValue) -> (oldDeltaValue == DeltaValue.ADDED) ? null : deltaValue); } else { this.delta.merge(attributeName, DeltaValue.UPDATED, (oldDeltaValue, deltaValue) -> (oldDeltaValue == DeltaValue.ADDED) ? oldDeltaValue : deltaValue); } } else { this.delta.merge(attributeName, DeltaValue.ADDED, (oldDeltaValue, deltaValue) -> (oldDeltaValue == DeltaValue.ADDED) ? oldDeltaValue : DeltaValue.UPDATED); } this.delegate.setAttribute(attributeName, value(attributeValue)); if (PRINCIPAL_NAME_INDEX_NAME.equals(attributeName) || SPRING_SECURITY_CONTEXT.equals(attributeName)) { this.changed = true; } flushIfRequired(); } @Override public void removeAttribute(String attributeName) { setAttribute(attributeName, null); } @Override public Instant getCreationTime() { return this.delegate.getCreationTime(); } @Override public void setLastAccessedTime(Instant lastAccessedTime) { this.delegate.setLastAccessedTime(lastAccessedTime); this.changed = true; flushIfRequired(); } @Override public Instant getLastAccessedTime() { return this.delegate.getLastAccessedTime(); } @Override public void setMaxInactiveInterval(Duration interval) { this.delegate.setMaxInactiveInterval(interval); this.changed = true; flushIfRequired(); } @Override public Duration getMaxInactiveInterval() { return this.delegate.getMaxInactiveInterval(); } @Override public boolean isExpired() { return this.delegate.isExpired(); } private void flushIfRequired() { if (JdbcIndexedSessionRepository.this.flushMode == FlushMode.IMMEDIATE) { save(); } } private void save() { if (this.isNew) { JdbcIndexedSessionRepository.this.transactionOperations.executeWithoutResult((status) -> { Map<String, String> indexes = JdbcIndexedSessionRepository.this.indexResolver .resolveIndexesFor(JdbcSession.this); JdbcIndexedSessionRepository.this.jdbcOperations .update(JdbcIndexedSessionRepository.this.createSessionQuery, (ps) -> { ps.setString(1, JdbcSession.this.primaryKey); ps.setString(2, getId()); 
ps.setLong(3, getCreationTime().toEpochMilli()); ps.setLong(4, getLastAccessedTime().toEpochMilli()); ps.setInt(5, (int) getMaxInactiveInterval().getSeconds()); ps.setLong(6, getExpiryTime().toEpochMilli()); ps.setString(7, indexes.get(PRINCIPAL_NAME_INDEX_NAME)); }); Set<String> attributeNames = getAttributeNames(); if (!attributeNames.isEmpty()) { insertSessionAttributes(JdbcSession.this, new ArrayList<>(attributeNames)); } }); } else { JdbcIndexedSessionRepository.this.transactionOperations.executeWithoutResult((status) -> { if (JdbcSession.this.changed) { Map<String, String> indexes = JdbcIndexedSessionRepository.this.indexResolver .resolveIndexesFor(JdbcSession.this); JdbcIndexedSessionRepository.this.jdbcOperations .update(JdbcIndexedSessionRepository.this.updateSessionQuery, (ps) -> { ps.setString(1, getId()); ps.setLong(2, getLastAccessedTime().toEpochMilli()); ps.setInt(3, (int) getMaxInactiveInterval().getSeconds()); ps.setLong(4, getExpiryTime().toEpochMilli()); ps.setString(5, indexes.get(PRINCIPAL_NAME_INDEX_NAME)); ps.setString(6, JdbcSession.this.primaryKey); }); } List<String> addedAttributeNames = JdbcSession.this.delta.entrySet().stream() .filter((entry) -> entry.getValue() == DeltaValue.ADDED).map(Map.Entry::getKey) .collect(Collectors.toList()); if (!addedAttributeNames.isEmpty()) { insertSessionAttributes(JdbcSession.this, addedAttributeNames); } List<String> updatedAttributeNames = JdbcSession.this.delta.entrySet().stream() .filter((entry) -> entry.getValue() == DeltaValue.UPDATED).map(Map.Entry::getKey) .collect(Collectors.toList()); if (!updatedAttributeNames.isEmpty()) { updateSessionAttributes(JdbcSession.this, updatedAttributeNames); } List<String> removedAttributeNames = JdbcSession.this.delta.entrySet().stream() .filter((entry) -> entry.getValue() == DeltaValue.REMOVED).map(Map.Entry::getKey) .collect(Collectors.toList()); if (!removedAttributeNames.isEmpty()) { deleteSessionAttributes(JdbcSession.this, removedAttributeNames); } }); } 
clearChangeFlags(); } } private class SessionResultSetExtractor implements ResultSetExtractor<List<JdbcSession>> { @Override public List<JdbcSession> extractData(ResultSet rs) throws SQLException, DataAccessException { List<JdbcSession> sessions = new ArrayList<>(); while (rs.next()) { String id = rs.getString("SESSION_ID"); JdbcSession session; if (sessions.size() > 0 && getLast(sessions).getId().equals(id)) { session = getLast(sessions); } else { MapSession delegate = new MapSession(id); String primaryKey = rs.getString("PRIMARY_ID"); delegate.setCreationTime(Instant.ofEpochMilli(rs.getLong("CREATION_TIME"))); delegate.setLastAccessedTime(Instant.ofEpochMilli(rs.getLong("LAST_ACCESS_TIME"))); delegate.setMaxInactiveInterval(Duration.ofSeconds(rs.getInt("MAX_INACTIVE_INTERVAL"))); session = new JdbcSession(delegate, primaryKey, false); } String attributeName = rs.getString("ATTRIBUTE_NAME"); if (attributeName != null) { byte[] bytes = getLobHandler().getBlobAsBytes(rs, "ATTRIBUTE_BYTES"); session.delegate.setAttribute(attributeName, lazily(() -> deserialize(bytes))); } sessions.add(session); } return sessions; } private JdbcSession getLast(List<JdbcSession> sessions) { return sessions.get(sessions.size() - 1); } } }
apache-2.0
artemprokopov/aprokopov
chapter_002/src/main/java/ru/job4j/accapplications/Tracker.java
3744
package ru.job4j.accapplications; import java.util.ArrayList; /** * Основной класс системы заявок Tracker, работает с массивом заявок, осуществляет добавление удаление заявок, * и прочие функции. * @author Artem Prokopov * @since 27.04.2017 * @version 1.0 */ public class Tracker { /** * */ public static final Item[] NULL_ITEM_ARRAY = {}; /** * Массив заявок. */ private ArrayList<Item> items = new ArrayList<>(); /** * Добавляет заявку в массив items. * @param item добавляемая заявка * @return в случае успеха операции возвращает true, иначе false. */ public boolean add(Item item) { for (int i = 0; i < items.size(); ++i) { if (items.get(i).equals(item)) { return false; } } items.add(item); return true; } /** * Обновляет заявку в массиве items. * @param item обновляемая заявка * @param newItem новая заявка * @return в случае успеха операции возвращает true, иначе false. */ public boolean update(Item item, Item newItem) { for (int i = 0; i < items.size(); i++) { if (item.equals(items.get(i))) { items.set(i, newItem); return true; } } return false; } /** * Удаляет указанную заявку. * @param item удаляемая заявка * @return в случае успеха операции возвращает true, иначе false. */ public boolean delete(Item item) { boolean result = false; items.remove(item); result = true; return result; } /** * Возвращает все существующие заявки. * @return items массив заявок. */ public Item[] findAll() { if (items.isEmpty()) { return Tracker.NULL_ITEM_ARRAY; } return items.toArray(new Item[items.size()]); } /** * Ищет заявку по совпадению поля @name в заявках массива items. * @param key ключевое слово для поиска * @return возвращает массив Item[] содержащий найденные заявкиб если заявок не найдено возвращает NULL_ITEM_ARRAY. 
*/ public Item[] findByName(String key) { Item[] result = Tracker.NULL_ITEM_ARRAY; for (int i = 0; i < items.size(); i++) { Item count = items.get(i); if (count.getName().equals(key)) { Item[] temp = new Item[result.length + 1]; System.arraycopy(result, 0, temp, 0, result.length); temp[temp.length - 1] = count; result = temp; } } return result; } /** * Производит поиск заявки в массиве заявок items по полю id. * @param id значение поля id * @return найденую заявку, в противном случае возвращает EMPTY_ITEM. */ public Item findById(String id) { Item result = Item.EMPTY_ITEM; for (int i = 0; i < items.size(); i++) { if (items.get(i).getId().equals(id)) { result = items.get(i); break; } } return result; } }
apache-2.0
trejkaz/derby
java/engine/org/apache/derby/impl/load/ImportResultSetMetaData.java
5161
/* Derby - Class org.apache.derby.impl.load.ImportResultSetMetaData Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derby.impl.load; import java.sql.SQLException; import java.util.HashMap; import org.apache.derby.vti.VTIMetaDataTemplate; import org.apache.derby.iapi.reference.Limits; class ImportResultSetMetaData extends VTIMetaDataTemplate { private final int numberOfColumns; private final String[] columnNames; private final int[] columnWidths; // types of the table columns that the data is imported. private final int[] tableColumnTypes ; private final String[] columnTypeNames; private final HashMap udtClasses; public ImportResultSetMetaData(int numberOfColumns, String[] columnNames, int[] columnWidths, int[] tableColumnTypes, String[] columnTypeNames, HashMap udtClasses ) { this.numberOfColumns = numberOfColumns; this.columnNames = columnNames; this.columnWidths = columnWidths; this.tableColumnTypes = tableColumnTypes; this.columnTypeNames = columnTypeNames; this.udtClasses = udtClasses; } public int getColumnCount() { return numberOfColumns; } public String getColumnName(int column) { return columnNames[column-1]; } public int getColumnType(int column) { /* By default all the data in the import file is assumed * to be in varchar format. 
Appropriate casting is applied * while executing the select on the import VTI. Using this * approach import vti does not have to do the data conversion, * casting will do that. * * But for some types like binary types there is no casting * support from varchar or the data in the file is hex format, * so data needs to be converted to binary format first. And * incase of blobs/clobs stored in an exteranl file memory usage * will be less if data is supplied as stream, instead of * materializing the column data as one string. For these * types import vti result set will return resultset column * type is same as the column type of the import table. Data * for the blob, clob or binary type columns is returned by * the getXXX() calls used by the VTI Resultset to read the * data for that particular type. For example, Blob data * is read using getBlob() method, which will return a * Blob object that contains the data in the import file * for a column. */ int colType; switch (tableColumnTypes[column -1]) { case java.sql.Types.BLOB: // blob colType = java.sql.Types.BLOB; break; case java.sql.Types.CLOB: // clob colType = java.sql.Types.CLOB; break; case java.sql.Types.LONGVARBINARY: // LONG VARCHAR FOR BIT DATA colType = java.sql.Types.LONGVARBINARY; break; case java.sql.Types.VARBINARY: // VARCHAR FOR BIT DATA colType = java.sql.Types.VARBINARY; break; case java.sql.Types.BINARY: // CHAR FOR BIT DATA colType = java.sql.Types.BINARY; break; case java.sql.Types.JAVA_OBJECT: // User-defined type colType = java.sql.Types.JAVA_OBJECT; break; default: // all other data in the import file is // assumed to be in varchar format. 
colType = java.sql.Types.VARCHAR; } return colType; } public int isNullable(int column) { return columnNullableUnknown; } public int getColumnDisplaySize(int column) { if (columnWidths == null) return Limits.DB2_VARCHAR_MAXWIDTH; else return columnWidths[column-1]; } public String getColumnTypeName(int column) throws SQLException { return columnTypeNames[ column - 1 ]; } /** * Get the class bound to a UDT column. */ Class getUDTClass( int column ) throws SQLException { String columnName = getColumnName( column ); return (Class) udtClasses.get( getColumnName( column ) ); } }
apache-2.0
arnost-starosta/midpoint
tools/schrodinger/src/main/java/com/evolveum/midpoint/schrodinger/component/resource/ResourceAccountsTab.java
3478
package com.evolveum.midpoint.schrodinger.component.resource; import com.codeborne.selenide.Condition; import com.codeborne.selenide.ElementsCollection; import com.codeborne.selenide.SelenideElement; import com.evolveum.midpoint.schrodinger.MidPoint; import com.evolveum.midpoint.schrodinger.component.Component; import com.evolveum.midpoint.schrodinger.util.Schrodinger; import org.openqa.selenium.By; import static com.codeborne.selenide.Selenide.$; import static com.codeborne.selenide.Selenide.$$; /** * Created by matus on 5/22/2018. */ public class ResourceAccountsTab<T> extends Component<T> { public ResourceAccountsTab(T parent, SelenideElement parentElement) { super(parent, parentElement); } public ResourceTaskQuickAccessDropDown<ResourceAccountsTab<T>> importTask() { $(Schrodinger.byElementAttributeValue("label", "data-s-id", "label", "Import")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S).click(); SelenideElement dropDownElement = $(Schrodinger.byElementAttributeValue("ul", "role", "menu")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S); return new ResourceTaskQuickAccessDropDown<>(this, dropDownElement); } public ResourceTaskQuickAccessDropDown<ResourceAccountsTab<T>> reconciliationTask() { $(Schrodinger.byElementAttributeValue("label", "data-s-id", "label", "Reconciliation")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S).click(); SelenideElement dropDownElement = $(Schrodinger.byElementAttributeValue("ul", "role", "menu")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S); return new ResourceTaskQuickAccessDropDown<>(this, dropDownElement); } public ResourceTaskQuickAccessDropDown<ResourceAccountsTab<T>> liveSyncTask() { $(Schrodinger.byElementValue("label", "data-s-id", "label", "Live Sync")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S).click(); ElementsCollection dropDownElement = $$(By.cssSelector(".dropdown-menu.pull-right")); SelenideElement concretElement = null; for (SelenideElement 
element : dropDownElement) { if (element.isDisplayed()) { concretElement = element; break; } } return new ResourceTaskQuickAccessDropDown<>(this, concretElement); } public ResourceAccountsTab<T> clickSearchInRepository() { $(Schrodinger.byDataId("a", "repositorySearch")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S).click(); $(Schrodinger.byDataId("a", "repositorySearch")) .waitUntil(Condition.enabled, MidPoint.TIMEOUT_DEFAULT_2_S); return this; } public ResourceAccountsTab<T> clickSearchInResource() { $(Schrodinger.byDataId("a", "resourceSearch")) .waitUntil(Condition.visible, MidPoint.TIMEOUT_DEFAULT_2_S).click(); $(Schrodinger.byDataId("a", "resourceSearch")) .waitUntil(Condition.enabled, MidPoint.TIMEOUT_DEFAULT_2_S); return this; } public ResourceShadowTable<ResourceAccountsTab<T>> table() { SelenideElement element = $(By.cssSelector(".box.boxed-table.object-shadow-box")) .waitUntil(Condition.appears, MidPoint.TIMEOUT_DEFAULT_2_S); return new ResourceShadowTable<>(this, element); } }
apache-2.0
ksoichiro/gh-pages-android-playground
app/src/main/java/com/github/ksoichiro/gh_pages_android_playground/MainActivity.java
362
package com.github.ksoichiro.gh_pages_android_playground;

import android.os.Bundle;
import android.support.v7.app.AppCompatActivity;

/**
 * Single entry-point activity of the playground app.
 * Does nothing beyond inflating the {@code activity_main} layout.
 */
public class MainActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // Let the framework restore any saved state first, then attach the layout.
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }
}
apache-2.0
sfat/spring-cloud-netflix
spring-cloud-netflix-zuul/src/test/java/org/springframework/cloud/netflix/zuul/ContextPathZuulProxyApplicationTests.java
4398
/*
 * Copyright 2013-2019 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.cloud.netflix.zuul;

import com.netflix.zuul.context.RequestContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.boot.web.server.LocalServerPort;
import org.springframework.cloud.netflix.zuul.filters.ZuulProperties.ZuulRoute;
import org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator;
import org.springframework.cloud.netflix.zuul.test.NoSecurityConfiguration;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpMethod;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Integration tests verifying that the Zuul proxy works when the embedded
 * server runs under a non-root servlet context path ({@code /app}): routes
 * must be resolved and forwarded relative to that context path.
 */
@RunWith(SpringJUnit4ClassRunner.class)
@SpringBootTest(
        classes = ContextPathZuulProxyApplicationTests.ContextPathZuulProxyApplication.class,
        webEnvironment = WebEnvironment.RANDOM_PORT,
        value = { "server.servlet.contextPath: /app" })
@DirtiesContext
public class ContextPathZuulProxyApplicationTests {

    @LocalServerPort
    private int port;

    @Autowired
    private TestRestTemplate testRestTemplate;

    @Autowired
    private DiscoveryClientRouteLocator routes;

    @Autowired
    private RoutesEndpoint endpoint;

    @Before
    public void setTestRequestContext() {
        // Each test starts from a fresh Zuul RequestContext so filters cannot
        // observe state leaked from a previous test.
        RequestContext context = new RequestContext();
        RequestContext.testSetCurrentContext(context);
    }

    @After
    public void clear() {
        RequestContext.getCurrentContext().clear();
    }

    @Test
    public void getOnSelfViaSimpleHostRoutingFilter() {
        // Route /self/** back to this app's own /local endpoint (under /app).
        this.routes.addRoute("/self/**",
                "http://localhost:" + this.port + "/app/local");
        this.endpoint.reset();
        ResponseEntity<String> result = testRestTemplate.exchange(
                "http://localhost:" + this.port + "/app/self/1", HttpMethod.GET,
                new HttpEntity<>((Void) null), String.class);
        assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
        assertThat(result.getBody()).isEqualTo("Gotten 1!");
    }

    @Test
    public void stripPrefixFalseAppendsPath() {
        // stripPrefix=false: the matched prefix stays on the forwarded path.
        this.routes.addRoute(new ZuulRoute("strip", "/strip/**", "strip",
                "http://localhost:" + this.port + "/app/local", false, false, null));
        this.endpoint.reset();
        ResponseEntity<String> result = testRestTemplate.exchange(
                "http://localhost:" + this.port + "/app/strip", HttpMethod.GET,
                new HttpEntity<>((Void) null), String.class);
        assertThat(result.getStatusCode()).isEqualTo(HttpStatus.OK);
        // Prefix not stripped so it goes to /local/strip
        assertThat(result.getBody()).isEqualTo("Gotten strip!");
    }

    // Don't use @SpringBootApplication because we don't want to component scan
    @Configuration
    @EnableAutoConfiguration
    @RestController
    @EnableZuulProxy
    @Import(NoSecurityConfiguration.class)
    static class ContextPathZuulProxyApplication {

        // Target endpoint the proxy routes forward to; echoes the path variable.
        @RequestMapping(value = "/local/{id}", method = RequestMethod.GET)
        public String get(@PathVariable String id) {
            return "Gotten " + id + "!";
        }
    }
}
apache-2.0
bunnyblue/realm-java
realm/src/androidTest/java/io/realm/RealmObjectTest.java
22117
/*
 * Copyright 2014 Realm Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.realm;

import android.test.AndroidTestCase;

import java.util.Calendar;
import java.util.Date;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;

import io.realm.entities.AllTypes;
import io.realm.entities.CyclicType;
import io.realm.entities.Dog;
import io.realm.entities.Thread;
import io.realm.internal.Row;

/**
 * Instrumentation tests for {@link RealmObject} behavior: row access, string
 * encoding, object removal, thread confinement, equals/hashCode/toString,
 * date precision, validity checks and float/double edge values.
 *
 * Each test runs against a freshly deleted Realm created in {@link #setUp()}.
 */
public class RealmObjectTest extends AndroidTestCase {

    private Realm testRealm;
    private RealmConfiguration realmConfig;

    private static final int TEST_SIZE = 5;
    // Flags selecting which end of the result set removeOneByOne() deletes from.
    private static final boolean REMOVE_FIRST = true;
    private static final boolean REMOVE_LAST = false;

    @Override
    protected void setUp() throws Exception {
        // Start every test from an empty Realm file.
        realmConfig = new RealmConfiguration.Builder(getContext()).build();
        Realm.deleteRealm(realmConfig);
        testRealm = Realm.getInstance(realmConfig);
    }

    @Override
    protected void tearDown() throws Exception {
        testRealm.close();
    }

    // Row realmGetRow()
    public void testRealmGetRowReturnsValidRow() {
        testRealm.beginTransaction();
        RealmObject realmObject = testRealm.createObject(AllTypes.class);
        Row row = realmObject.row;
        testRealm.commitTransaction();

        assertNotNull("RealmObject.realmGetRow returns zero ", row);
        // AllTypes declares 9 persisted columns.
        assertEquals(9, row.getColumnCount());
    }

    public void testStringEncoding() {
        // Mixed-script strings exercise UTF-8 round-tripping through the core.
        String[] strings = {"ABCD", "ÆØÅ", "Ö∫Ë", "ΠΑΟΚ", "Здравей"};

        testRealm.beginTransaction();
        testRealm.clear(AllTypes.class);

        for (String str : strings) {
            AllTypes obj1 = testRealm.createObject(AllTypes.class);
            obj1.setColumnString(str);
        }
        testRealm.commitTransaction();

        RealmResults<AllTypes> objects = testRealm.allObjects(AllTypes.class);
        assertEquals(strings.length, objects.size());
        int i = 0;
        for (AllTypes obj : objects) {
            String s = obj.getColumnString();
            assertEquals(strings[i], s);
            i++;
        }
    }

    // removing original object and see if has been removed
    public void testRemoveFromRealm() {
        testRealm = Realm.getInstance(getContext());
        testRealm.beginTransaction();
        Dog rex = testRealm.createObject(Dog.class);
        rex.setName("Rex");
        Dog fido = testRealm.createObject(Dog.class);
        fido.setName("Fido");
        testRealm.commitTransaction();

        RealmResults<Dog> allDogsBefore = testRealm.where(Dog.class).equalTo("name", "Rex").findAll();
        assertEquals(1, allDogsBefore.size());

        testRealm.beginTransaction();
        rex.removeFromRealm();
        testRealm.commitTransaction();

        RealmResults<Dog> allDogsAfter = testRealm.where(Dog.class).equalTo("name", "Rex").findAll();
        assertEquals(0, allDogsAfter.size());

        // fido was not removed and must still be accessible.
        fido.getName();
        try {
            // rex is deleted; any accessor must throw.
            rex.getName();
            testRealm.close();
            fail();
        } catch (IllegalStateException ignored) {}

        // deleting rex twice should fail
        testRealm.beginTransaction();
        try {
            rex.removeFromRealm();
            testRealm.close();
            fail();
        } catch (IllegalStateException ignored) {}
        testRealm.commitTransaction();
        testRealm.close();
    }

    // query for an object, remove it and see it has been removed from realm
    public void testRemoveResultFromRealm() {
        testRealm = Realm.getInstance(getContext());
        testRealm.beginTransaction();
        testRealm.clear(Dog.class);
        Dog dogToAdd = testRealm.createObject(Dog.class);
        dogToAdd.setName("Rex");
        testRealm.commitTransaction();

        assertEquals(1, testRealm.allObjects(Dog.class).size());

        Dog dogToRemove = testRealm.where(Dog.class).findFirst();
        assertNotNull(dogToRemove);
        testRealm.beginTransaction();
        dogToRemove.removeFromRealm();
        testRealm.commitTransaction();

        assertEquals(0, testRealm.allObjects(Dog.class).size());
        try {
            // Both handles point at the same (now deleted) underlying row.
            dogToAdd.getName();
            testRealm.close();
            fail();
        } catch (IllegalStateException ignored) {}

        try {
            dogToRemove.getName();
            testRealm.close();
            fail();
        } catch (IllegalStateException ignored) {}
        testRealm.close();
    }

    // Helper: removes TEST_SIZE dogs one at a time from the first or last
    // position and verifies the remaining result set after each removal.
    public void removeOneByOne(boolean atFirst) {
        Set<Long> ages = new HashSet<Long>();
        testRealm.beginTransaction();
        testRealm.clear(Dog.class);
        for (int i = 0; i < TEST_SIZE; i++) {
            Dog dog = testRealm.createObject(Dog.class);
            dog.setAge(i);
            ages.add((long) i);
        }
        testRealm.commitTransaction();

        assertEquals(TEST_SIZE, testRealm.allObjects(Dog.class).size());

        RealmResults<Dog> dogs = testRealm.allObjects(Dog.class);
        for (int i = 0; i < TEST_SIZE; i++) {
            testRealm.beginTransaction();
            Dog dogToRemove;
            if (atFirst) {
                dogToRemove = dogs.first();
            } else {
                dogToRemove = dogs.last();
            }
            ages.remove(Long.valueOf(dogToRemove.getAge()));
            dogToRemove.removeFromRealm();

            // object is no longer valid
            try {
                dogToRemove.getAge();
                fail();
            } catch (IllegalStateException ignored) {}
            testRealm.commitTransaction();

            // and removed from realm and remaining objects are placed correctly
            RealmResults<Dog> remainingDogs = testRealm.allObjects(Dog.class);
            assertEquals(TEST_SIZE - i - 1, remainingDogs.size());
            for (Dog dog : remainingDogs) {
                assertTrue(ages.contains(Long.valueOf(dog.getAge())));
            }
        }
    }

    public void testRemoveFromRealmAtPosition() {
        removeOneByOne(REMOVE_FIRST);
        removeOneByOne(REMOVE_LAST);
    }

    // Helper: accesses a Realm object from a different thread and reports
    // whether the expected IllegalStateException was thrown.
    public boolean methodWrongThread(final boolean callGetter) throws ExecutionException, InterruptedException {
        testRealm = Realm.getInstance(getContext());
        testRealm.beginTransaction();
        testRealm.createObject(AllTypes.class);
        testRealm.commitTransaction();
        final AllTypes allTypes = testRealm.where(AllTypes.class).findFirst();
        ExecutorService executorService = Executors.newSingleThreadExecutor();
        Future<Boolean> future = executorService.submit(new Callable<Boolean>() {
            @Override
            public Boolean call() throws Exception {
                try {
                    if (callGetter) {
                        allTypes.getColumnFloat();
                    } else {
                        allTypes.setColumnFloat(1.0f);
                    }
                    return false;
                } catch (IllegalStateException ignored) {
                    return true;
                }
            }
        });
        Boolean result = future.get();
        testRealm.close();
        return result;
    }

    public void testGetSetWrongThread() throws ExecutionException, InterruptedException {
        assertTrue(methodWrongThread(true));
        assertTrue(methodWrongThread(false));
    }

    public void testEqualsSameRealmObject() {
        testRealm.beginTransaction();
        CyclicType ct = testRealm.createObject(CyclicType.class);
        ct.setName("Foo");
        testRealm.commitTransaction();

        // Two query results backed by the same row must compare equal.
        CyclicType ct1 = testRealm.where(CyclicType.class).findFirst();
        CyclicType ct2 = testRealm.where(CyclicType.class).findFirst();
        assertTrue(ct1.equals(ct2));
        assertTrue(ct2.equals(ct1));
    }

    public void testEqualsDifferentRealmObjects() {
        testRealm.beginTransaction();
        CyclicType objA = testRealm.createObject(CyclicType.class);
        objA.setName("Foo");
        CyclicType objB = testRealm.createObject(CyclicType.class);
        objB.setName("Bar");
        testRealm.commitTransaction();

        assertFalse(objA.equals(objB));
        assertFalse(objB.equals(objA));
    }

    public void testEqualsAfterModification() {
        testRealm.beginTransaction();
        CyclicType ct = testRealm.createObject(CyclicType.class);
        ct.setName("Foo");
        testRealm.commitTransaction();

        CyclicType ct1 = testRealm.where(CyclicType.class).findFirst();
        CyclicType ct2 = testRealm.where(CyclicType.class).findFirst();

        // Mutating through one handle must not break equality of either handle.
        testRealm.beginTransaction();
        ct1.setName("Baz");
        testRealm.commitTransaction();

        assertTrue(ct1.equals(ct1));
        assertTrue(ct2.equals(ct2));
    }

    public void testEqualsStandAlone() {
        testRealm.beginTransaction();
        CyclicType ct1 = testRealm.createObject(CyclicType.class);
        ct1.setName("Foo");
        testRealm.commitTransaction();

        // An unmanaged (standalone) object never equals a managed one.
        CyclicType ct2 = new CyclicType();
        ct2.setName("Bar");

        assertFalse(ct1.equals(ct2));
        assertFalse(ct2.equals(ct1));
    }

    public void testCyclicEquals() {
        testRealm.beginTransaction();
        CyclicType foo = createCyclicData();
        testRealm.commitTransaction();

        assertEquals(foo, testRealm.where(CyclicType.class).equalTo("name", "Foo").findFirst());
    }

    public void testCyclicToString() {
        testRealm.beginTransaction();
        CyclicType foo = createCyclicData();
        testRealm.commitTransaction();

        // toString() must terminate despite the foo <-> bar reference cycle.
        String expected = "CyclicType = [{name:Foo},{object:CyclicType},{objects:RealmList<CyclicType>[0]}]";
        assertEquals(expected, foo.toString());
    }

    public void testCyclicHashCode() {
        testRealm.beginTransaction();
        CyclicType foo = createCyclicData();
        testRealm.commitTransaction();

        // hashCode() must terminate on cyclic data and stay stable.
        assertEquals(1344723738, foo.hashCode());
    }

    // Creates two objects referencing each other and returns the first one.
    private CyclicType createCyclicData() {
        CyclicType foo = testRealm.createObject(CyclicType.class);
        foo.setName("Foo");
        CyclicType bar = testRealm.createObject(CyclicType.class);
        bar.setName("Bar");

        // Setup cycle on normal object references
        foo.setObject(bar);
        bar.setObject(foo);
        return foo;
    }

    public void testDateType() {
        long testDatesValid[] = {-1000, 0, 1000};
        long testDatesLoosePrecision[] = {Long.MIN_VALUE, 1, 1001, Long.MAX_VALUE};

        // test valid dates
        testRealm.beginTransaction();
        for (long value : testDatesValid) {
            AllTypes allTypes = testRealm.createObject(AllTypes.class);
            allTypes.setColumnDate(new Date(value));
        }
        testRealm.commitTransaction();

        int i = 0;
        for (AllTypes allTypes : testRealm.allObjects(AllTypes.class)) {
            assertEquals("Item " + i, new Date(testDatesValid[i]), allTypes.getColumnDate());
            i++;
        }

        // test valid dates but with precision lost
        testRealm.beginTransaction();
        testRealm.clear(AllTypes.class);
        for (long value : testDatesLoosePrecision) {
            AllTypes allTypes = testRealm.createObject(AllTypes.class);
            allTypes.setColumnDate(new Date(value));
        }
        testRealm.commitTransaction();

        i = 0;
        for (AllTypes allTypes : testRealm.allObjects(AllTypes.class)) {
            // Sub-second precision is dropped, so the stored date differs...
            assertFalse("Item " + i, new Date(testDatesLoosePrecision[i]) == allTypes.getColumnDate());
            // ...but matches the input truncated to whole seconds.
            assertEquals("Item " + i, new Date(1000 * (testDatesLoosePrecision[i] / 1000)), allTypes.getColumnDate());
            i++;
        }
    }

    // Builds a Date at midnight-ish of the given calendar day (millis zeroed).
    private Date newDate(int year, int month, int dayOfMonth) {
        Calendar cal = Calendar.getInstance();
        cal.set(Calendar.YEAR, year);
        cal.set(Calendar.MONTH, month);
        cal.set(Calendar.DAY_OF_MONTH, dayOfMonth);
        cal.set(Calendar.HOUR, 0);
        cal.set(Calendar.MINUTE, 0);
        cal.set(Calendar.MILLISECOND, 0);
        return cal.getTime();
    }

    // NOTE(review): currently unused by any test in this file — presumably
    // kept for future date-boundary tests; verify before removing.
    private void addDate(int year, int month, int dayOfMonth) {
        Date date = newDate(year, month, dayOfMonth);

        testRealm.beginTransaction();
        testRealm.clear(AllTypes.class);
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnDate(date);
        testRealm.commitTransaction();

        AllTypes object = testRealm.allObjects(AllTypes.class).first();

        // Realm does not support millisec precision
        assertEquals(1000 * (date.getTime() / 1000), 1000 * (object.getColumnDate().getTime() / 1000));
    }

    public void testWriteMustThrowOutOfTransaction() {
        testRealm.beginTransaction();
        Dog dog = testRealm.createObject(Dog.class);
        testRealm.commitTransaction();

        try {
            // Setter outside a transaction must be rejected.
            dog.setName("Rex");
            fail();
        } catch (IllegalStateException ignored) {
            // Don't fail
        } catch (Exception ignored) {
            fail();
        }
    }

    public void testSetNullLink() {
        testRealm.beginTransaction();
        CyclicType objA = testRealm.createObject(CyclicType.class);
        objA.setName("Foo");
        CyclicType objB = testRealm.createObject(CyclicType.class);
        objB.setName("Bar");

        objA.setObject(objB);

        assertNotNull(objA.getObject());

        try {
            // Clearing a link with null is legal and must not throw.
            objA.setObject(null);
        } catch (NullPointerException nullPointer) {
            fail();
        }
        testRealm.commitTransaction();
        assertNull(objA.getObject());
    }

    public void testThreadModelClass() {
        // The model class' name (Thread) clashed with a common Java class.
        // The annotation process must be able to handle that.
        testRealm.beginTransaction();
        Thread thread = testRealm.createObject(Thread.class);
        testRealm.commitTransaction();
    }

    public void testIsValidUnManagedObject() {
        AllTypes allTypes = new AllTypes();
        assertFalse(allTypes.isValid());
    }

    public void testIsValidClosedRealm() {
        RealmConfiguration otherConfig = new RealmConfiguration.Builder(getContext()).name("other-realm").build();
        Realm.deleteRealm(otherConfig);
        Realm testRealm = Realm.getInstance(otherConfig);
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        assertTrue(allTypes.isValid());
        testRealm.commitTransaction();
        testRealm.close();
        // Closing the Realm invalidates all of its objects.
        assertFalse(allTypes.isValid());
    }

    public void testIsValidDeletedObject() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        assertTrue(allTypes.isValid());
        testRealm.clear(AllTypes.class);
        testRealm.commitTransaction();
        assertFalse(allTypes.isValid());
    }

    public void testIsValidManagedObject() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        assertTrue(allTypes.isValid());
        testRealm.commitTransaction();
        assertTrue(allTypes.isValid());
    }

    public void testAccessObjectRemovalThrows() throws InterruptedException {
        testRealm.beginTransaction();
        AllTypes obj = testRealm.createObject(AllTypes.class);
        testRealm.commitTransaction();

        final CountDownLatch objectDeletedInBackground = new CountDownLatch(1);
        new java.lang.Thread(new Runnable() {
            @Override
            public void run() {
                // Delete the object from a second Realm instance on another thread.
                Realm realm = Realm.getInstance(realmConfig);
                realm.beginTransaction();
                realm.clear(AllTypes.class);
                realm.commitTransaction();
                realm.close();
                objectDeletedInBackground.countDown();
            }
        }).start();
        objectDeletedInBackground.await(2, TimeUnit.SECONDS);
        testRealm.refresh(); // Move to version where underlying object is deleted.

        try {
            obj.getColumnLong();
            fail();
        } catch (IllegalStateException ignored) {
        }
    }

    public void testIsValid() {
        testRealm.beginTransaction();
        Dog dog = testRealm.createObject(Dog.class);
        dog.setName("Fido");
        testRealm.commitTransaction();

        assertTrue(dog.isValid());

        testRealm.beginTransaction();
        dog.removeFromRealm();
        testRealm.commitTransaction();

        assertFalse(dog.isValid());
    }

    // Test NaN value on float and double columns
    public void testFloatDoubleNaN() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnFloat(Float.NaN);
        allTypes.setColumnDouble(Double.NaN);
        testRealm.commitTransaction();
        assertEquals(Float.NaN, testRealm.where(AllTypes.class).findFirst().getColumnFloat());
        assertEquals(Double.NaN, testRealm.where(AllTypes.class).findFirst().getColumnDouble());
        // NaN != NaN !!!
        assertEquals(0, testRealm.where(AllTypes.class).equalTo("columnFloat", Float.NaN).count());
        assertEquals(0, testRealm.where(AllTypes.class).equalTo("columnDouble", Double.NaN).count());
    }

    // Test max value on float and double columns
    public void testFloatDoubleMaxValue() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnFloat(Float.MAX_VALUE);
        allTypes.setColumnDouble(Double.MAX_VALUE);
        testRealm.commitTransaction();
        assertEquals(Float.MAX_VALUE, testRealm.where(AllTypes.class).findFirst().getColumnFloat());
        assertEquals(Double.MAX_VALUE, testRealm.where(AllTypes.class).findFirst().getColumnDouble());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnFloat", Float.MAX_VALUE).count());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnDouble", Double.MAX_VALUE).count());
    }

    // Test min normal value on float and double columns
    public void testFloatDoubleMinNormal() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnFloat(Float.MIN_NORMAL);
        allTypes.setColumnDouble(Double.MIN_NORMAL);
        testRealm.commitTransaction();
        assertEquals(Float.MIN_NORMAL, testRealm.where(AllTypes.class).findFirst().getColumnFloat());
        assertEquals(Double.MIN_NORMAL, testRealm.where(AllTypes.class).findFirst().getColumnDouble());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnFloat", Float.MIN_NORMAL).count());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnDouble", Double.MIN_NORMAL).count());
    }

    // Test min value on float and double columns
    public void testFloatDoubleMinValue() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnFloat(Float.MIN_VALUE);
        allTypes.setColumnDouble(Double.MIN_VALUE);
        testRealm.commitTransaction();
        assertEquals(Float.MIN_VALUE, testRealm.where(AllTypes.class).findFirst().getColumnFloat());
        assertEquals(Double.MIN_VALUE, testRealm.where(AllTypes.class).findFirst().getColumnDouble());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnFloat", Float.MIN_VALUE).count());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnDouble", Double.MIN_VALUE).count());
    }

    // Test negative infinity value on float and double columns
    public void testFloatDoubleNegativeInfinity() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnFloat(Float.NEGATIVE_INFINITY);
        allTypes.setColumnDouble(Double.NEGATIVE_INFINITY);
        testRealm.commitTransaction();
        assertEquals(Float.NEGATIVE_INFINITY, testRealm.where(AllTypes.class).findFirst().getColumnFloat());
        assertEquals(Double.NEGATIVE_INFINITY, testRealm.where(AllTypes.class).findFirst().getColumnDouble());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnFloat", Float.NEGATIVE_INFINITY).count());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnDouble", Double.NEGATIVE_INFINITY).count());
    }

    // Test positive infinity value on float and double columns
    public void testFloatPositiveInfinity() {
        testRealm.beginTransaction();
        AllTypes allTypes = testRealm.createObject(AllTypes.class);
        allTypes.setColumnFloat(Float.POSITIVE_INFINITY);
        allTypes.setColumnDouble(Double.POSITIVE_INFINITY);
        testRealm.commitTransaction();
        assertEquals(Float.POSITIVE_INFINITY, testRealm.where(AllTypes.class).findFirst().getColumnFloat());
        assertEquals(Double.POSITIVE_INFINITY, testRealm.where(AllTypes.class).findFirst().getColumnDouble());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnFloat", Float.POSITIVE_INFINITY).count());
        assertEquals(1, testRealm.where(AllTypes.class).equalTo("columnDouble", Double.POSITIVE_INFINITY).count());
    }
}
apache-2.0
svenruppert/20151117_jughh_di-frameworks-hidden-pearls
modules/dagger/src/main/java/org/rapidpm/event/jughh/dagger/step06/business/SubService_A_Module.java
566
package org.rapidpm.event.jughh.dagger.step06.business;

import dagger.Module;
import dagger.Provides;
import org.rapidpm.event.jughh.dagger.business.subservice.SubService;
import org.rapidpm.event.jughh.dagger.business.subservice.impl_a.SubServiceA;

import javax.inject.Named;

/**
 * Dagger module that binds the {@link SubServiceA} implementation to the
 * {@link SubService} interface under the {@code @Named("A")} qualifier.
 * Dependencies of {@link SubServiceA} come from {@link SubSubServiceModule}.
 *
 * Created by Sven Ruppert on 19.11.2014.
 */
@Module(library = true, includes = SubSubServiceModule.class, complete = true)
public class SubService_A_Module {

    /**
     * Exposes the concrete implementation as the qualified interface type.
     */
    @Provides
    @Named("A")
    SubService provideSubServiceA(SubServiceA implementation) {
        return implementation;
    }
}
apache-2.0
lgobinath/siddhi
modules/siddhi-extensions/event-table/src/main/java/org/wso2/siddhi/extension/table/cache/CacheManager.java
1068
/*
 * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
 *
 * WSO2 Inc. licenses this file to you under the Apache License,
 * Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.wso2.siddhi.extension.table.cache;

import org.wso2.siddhi.core.event.stream.StreamEvent;

/**
 * Contract for cache implementations backing an event table. Implementations
 * are notified of table operations so they can keep their eviction/bookkeeping
 * state in sync with the table contents.
 *
 * Note: interface members are implicitly {@code public}; the redundant
 * modifiers present previously have been removed (JLS §9.4).
 */
public interface CacheManager {

    /** Records that {@code item} was inserted into the table/cache. */
    void add(StreamEvent item);

    /** Records that {@code item} was removed from the table/cache. */
    void delete(StreamEvent item);

    /** Records a read access of {@code item} (e.g. for LRU/LFU accounting). */
    void read(StreamEvent item);

    /** Records that {@code item} was updated in place. */
    void update(StreamEvent item);

    /** Drops all cached state. */
    void invalidateCache();

    /** Returns {@code true} if {@code item} is currently held in the cache. */
    boolean isContains(StreamEvent item);
}
apache-2.0
jxdong1013/archivems
android/archive_android/archiveapp/src/main/java/com/jxd/archiveapp/GsonResponseHandler.java
1682
package com.jxd.archiveapp;

import android.content.Context;
import android.os.Handler;
import android.os.Message;
// NOTE(review): the two imports below appear unused in this file — verify
// before removing (only this file is in view here).
import android.preference.PreferenceActivity;
import android.widget.TextClock;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.loopj.android.http.BaseJsonHttpResponseHandler;

import org.apache.http.Header;

/**
 * Async-HTTP response handler that parses the raw JSON body into an instance
 * of {@code T} via Gson and relays the outcome to the supplied {@link Handler}
 * as a {@link Message} tagged {@link #SUCCESS} or {@link #FAILTURE}.
 *
 * Created by Administrator on 2016/1/27.
 */
public class GsonResponseHandler<T> extends BaseJsonHttpResponseHandler<T> {

    // NOTE(review): field name looks like a typo for "gson"; it is
    // package-private, so renaming could affect same-package code — confirm.
    Gson gsom;
    // Target class used by Gson to deserialize the response body.
    Class<T> tclass;
    // NOTE(review): stored but never read in this class — confirm if needed.
    Context context;
    // Receives SUCCESS/FAILTURE messages with the parsed result or Throwable.
    Handler handler;

    // NOTE(review): message codes; "FAILTURE" is a misspelling of "FAILURE"
    // and both constants should ideally be "public static final" — renaming
    // or adding final is interface-visible, so left unchanged here.
    public static int SUCCESS = 100;
    public static int FAILTURE = 101;

    public GsonResponseHandler(Context context, Handler handler, Class<T> tclass) {
        super();
        this.context = context;
        this.handler = handler;
        gsom = new GsonBuilder().create();
        this.tclass = tclass;
    }

    //    public GsonResponseHandler(String encoding) {
    //        super(encoding);
    //
    //        gsom = new GsonBuilder().create();
    //    }

    @Override
    public void onSuccess(int statusCode, Header[] headers, String rawJsonResponse, T response) {
        // Forward the already-parsed payload to the UI thread via the handler.
        Message msg = handler.obtainMessage(SUCCESS);
        msg.obj = response;
        handler.sendMessage(msg);
    }

    @Override
    public void onFailure(int statusCode, Header[] headers, Throwable throwable, String rawJsonData, T errorResponse) {
        // Forward the failure cause; msg.obj carries the Throwable here.
        Message msg = handler.obtainMessage(FAILTURE);
        msg.obj = throwable;
        handler.sendMessage(msg);
    }

    @Override
    protected T parseResponse(String rawJsonData, boolean isFailure) throws Throwable {
        // Called by the base class on both success and failure paths.
        return gsom.fromJson(rawJsonData, tclass);
    }
}
apache-2.0
aitusoftware/transport
src/test/java/com/aitusoftware/transport/reader/CopyingRecordHandlerTest.java
2245
/* * Copyright 2017 - 2018 Aitu Software Limited. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.aitusoftware.transport.reader; import com.aitusoftware.transport.Fixtures; import com.aitusoftware.transport.buffer.PageCache; import org.junit.Before; import org.junit.Test; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import static org.hamcrest.CoreMatchers.is; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertThat; public class CopyingRecordHandlerTest { private static final byte[] PAYLOAD = "Something profound".getBytes(StandardCharsets.UTF_8); private CopyingRecordHandler handler; private PageCache pageCache; @Before public void setUp() throws Exception { pageCache = PageCache.create(Fixtures.tempDirectory(), 4096); handler = new CopyingRecordHandler(pageCache); } @Test public void shouldCopyRecord() { handler.onRecord(ByteBuffer.wrap(PAYLOAD), 0, 0); final ValidatingRecordHandler validator = new ValidatingRecordHandler(); new StreamingReader(pageCache, validator, false, Fixtures.testIdler()).process(); assertThat(validator.messageCount, is(1)); } private static final class ValidatingRecordHandler implements RecordHandler { private int messageCount; @Override public void onRecord(final ByteBuffer data, final int pageNumber, final int position) { assertThat(data.remaining(), is(PAYLOAD.length)); final byte[] received = new byte[PAYLOAD.length]; data.get(received); assertArrayEquals(PAYLOAD, received); 
messageCount++; } } }
apache-2.0
charithe/functional-java-plus
src/main/java/com/github/charithe/fjp/Try.java
7762
/*
 * Copyright 2015 Charith Ellawala
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.github.charithe.fjp;

import fj.data.Either;

import java.util.Optional;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;

/**
 * Emulates the Scala Try interface. A {@code Try} wraps either a success value of
 * type {@code T} or a failure {@link Throwable}, backed by an {@link Either} with
 * the failure on the left and the success on the right.
 *
 * @param <T> Type of success value
 * @see <a href="http://www.scala-lang.org/files/archive/nightly/docs/library/index.html#scala.util.Try">Scala Try</a>
 */
public class Try<T> {

    // Left = failure, Right = success.
    private final Either<Throwable, T> tryState;

    // private constructor to prevent instantiation from outside classes;
    // use fromSuccess/fromFailure/doTry instead
    private Try(Either<Throwable, T> tryState) {
        this.tryState = tryState;
    }

    /**
     * Returns true if this instance holds a success value
     * @return boolean
     */
    public boolean isSuccess() {
        return tryState.isRight();
    }

    /**
     * Returns true if this instance holds a failure value
     * @return boolean
     */
    public boolean isFailure() {
        return tryState.isLeft();
    }

    /**
     * Returns the success value if this is a success. Otherwise, throws an exception.
     * @return Success value if present
     * @throws com.github.charithe.fjp.Try.CallingSuccessOnFailureValue if this is a failure
     */
    public T success() {
        if (tryState.isRight()) {
            return tryState.right().value();
        } else {
            throw new CallingSuccessOnFailureValue();
        }
    }

    /**
     * Returns the failure value if this is a failure. Otherwise, throws an exception.
     * @return Failure value if present
     * @throws com.github.charithe.fjp.Try.CallingFailureOnSuccessValue if this is a success
     */
    public Throwable failure() {
        if (tryState.isLeft()) {
            return tryState.left().value();
        } else {
            throw new CallingFailureOnSuccessValue();
        }
    }

    /**
     * Perform a flatmap over this instance
     * @param fn flatmap function
     * @param <A> Return type of flatmap function
     * @return If this is a success, result of applying the flatmap function. Otherwise, the failure value
     */
    public <A> Try<A> flatMap(Function<T, Try<A>> fn) {
        if (isFailure()) {
            // Propagate the failure untouched; fn is never invoked.
            return fromFailure(failure());
        } else {
            return fn.apply(success());
        }
    }

    /**
     * Perform a map over this instance
     * @param fn map function
     * @param <A> Return type of map function
     * @return If this is a success, result of applying the map function. Otherwise, the failure value
     */
    public <A> Try<A> map(Function<T, A> fn) {
        return this.flatMap((T t) -> fromSuccess(fn.apply(t)));
    }

    /**
     * Return the success value if this is a success or throw the failure value if this is a failure
     * @return success value if this is a success instance
     * @throws Throwable if this is a failure instance
     */
    public T get() throws Throwable {
        if (isFailure()) {
            throw failure();
        } else {
            return success();
        }
    }

    /**
     * Return the success value or the supplied default if this is a failure instance
     * @param defaultVal Default value to return in case of failure
     * @return success value or the supplied default
     */
    public T getOrElse(T defaultVal) {
        if (isFailure()) {
            return defaultVal;
        } else {
            return success();
        }
    }

    /**
     * Return this instance if it is a success instance. Otherwise, return the supplied default.
     * @param defaultVal Default value to return in case this is a failure
     * @return this instance or the supplied default
     */
    public Try<T> orElse(Try<T> defaultVal) {
        if (isFailure()) {
            return defaultVal;
        } else {
            return this;
        }
    }

    /**
     * Convert this to an {@link Optional}
     * @return Optional containing the success value or Optional.empty in case this is a failure.
     */
    public Optional<T> toOptional() {
        if (isFailure()) {
            return Optional.empty();
        } else {
            return Optional.of(success());
        }
    }

    /**
     * Apply the predicate to the success value to determine whether this should really be a failure
     * @param predicate Predicate to apply
     * @return this object if it is a failure. If this is a success, return success if the predicate
     *         is satisfied or return failure otherwise.
     */
    public Try<T> filter(Predicate<T> predicate) {
        return flatMap((T t) -> {
            if (predicate.test(t)) {
                return this;
            } else {
                return fromFailure(new UnmatchedFilterPredicate());
            }
        });
    }

    /**
     * Apply the supplied function to the success value if this is a success
     * @param consumer Function to apply
     */
    public void foreach(Consumer<T> consumer) {
        // map is used purely for its conditional application; the null return is discarded.
        map((T t) -> {
            consumer.accept(t);
            return null;
        });
    }

    /**
     * Create a success instance of Try
     * @param successVal success value to wrap inside the Try
     * @param <A> Type of the success value
     * @return Try object that returns true on calls to isSuccess()
     */
    public static <A> Try<A> fromSuccess(A successVal) {
        // FIX: was a raw-type 'new Try(...)', which erased the type argument and
        // produced an unchecked conversion; construct with the proper type argument.
        return new Try<A>(Either.right(successVal));
    }

    /**
     * Create a failure instance of Try
     * @param failureVal failure value to wrap inside the Try
     * @param <A> Type of the success value if this were a success
     * @return Try object that return true on calls to isFailure()
     */
    public static <A> Try<A> fromFailure(Throwable failureVal) {
        // FIX: was a raw-type 'new Try(...)' — see fromSuccess.
        return new Try<A>(Either.left(failureVal));
    }

    /**
     * Create a Try object from a function that could potentially throw an exception and fail.
     * If the function is successful, the return value will be a Try containing the success.
     * If an exception is thrown, the return value will be a Try object containing the failure.
     *
     * @param fn Function that could potentially throw an exception
     * @param <A> Type of the return value from the function
     * @return Try object containing the result of the function or the exception thrown
     */
    public static <A> Try<A> doTry(FunctionThrowingException<A> fn) {
        try {
            return fromSuccess(fn.apply());
        } catch (LambdaException e) {
            // apply() wraps any Throwable from doApply() in a LambdaException;
            // unwrap to surface the original cause as the failure value.
            return fromFailure(e.getCause());
        }
    }

    /**
     * A supplier-like function that may throw any {@link Throwable}. The default
     * {@link #apply()} adapts it to an unchecked call by wrapping thrown values
     * in {@link LambdaException}.
     *
     * @param <A> result type
     */
    @FunctionalInterface
    public interface FunctionThrowingException<A> {
        A doApply() throws Throwable;

        default A apply() {
            try {
                return doApply();
            } catch (Throwable t) {
                throw new LambdaException(t);
            }
        }
    }

    /** Failure marker used by {@link #filter(Predicate)} when the predicate rejects the value. */
    public static final class UnmatchedFilterPredicate extends Exception {
    }

    /** Thrown when {@link #failure()} is called on a success instance. */
    public static final class CallingFailureOnSuccessValue extends RuntimeException {
    }

    /** Thrown when {@link #success()} is called on a failure instance. */
    public static final class CallingSuccessOnFailureValue extends RuntimeException {
    }

    /** Unchecked carrier that smuggles a checked Throwable out of a lambda body. */
    public static final class LambdaException extends RuntimeException {
        public LambdaException(Throwable cause) {
            super(cause);
        }
    }
}
apache-2.0
qudh1/focusture
SpringCloud/eurekaclient/src/main/java/cn/hybris/eurekaclient/welcome/WelcomeController.java
581
package cn.hybris.eurekaclient.welcome;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST controller exposing a single greeting endpoint. The response includes the
 * configured server port so that, behind a load balancer, the caller can tell
 * which instance answered.
 *
 * @author qudh1
 */
@RestController
public class WelcomeController {

    /** Port this instance listens on, injected from the {@code server.port} property. */
    @Value("${server.port}")
    private String serverPort;

    /**
     * Greets the caller by name and reports the serving instance's port.
     *
     * @param name value of the required {@code name} request parameter
     * @return greeting string identifying this instance's port
     */
    @GetMapping(value = "/welcome")
    public String welcome(@RequestParam String name) {
        final StringBuilder reply = new StringBuilder("Hello ");
        reply.append(name).append(", I am from port ").append(serverPort);
        return reply.toString();
    }
}
apache-2.0
andreiHi/hincuA
chapter_007/src/main/java/ru/job4j/multithreading/jmm/CounterThread.java
578
package ru.job4j.multithreading.jmm;

/**
 * A thread that repeatedly calls {@link Counter#increment()}.
 * <p>
 * NOTE(review): the original (Russian) javadoc claimed the increment is invoked
 * 1000 times, but the loop below runs 100 iterations — one of the two is wrong;
 * confirm the intended count.
 *
 * @author Hincu Andrei (andreih1981@gmail.com) by 07.11.17;
 * @version $Id$
 * @since 0.1
 */
public class CounterThread extends Thread {
    // Shared counter this thread increments.
    private Counter counter;
    // Inner worker thread; note this class ALSO extends Thread but only this
    // wrapped thread is ever started here.
    private Thread thread;

    CounterThread(Counter counter) {
        this.counter = counter;
        // NOTE(review): starting a thread from the constructor leaks 'this' before
        // construction completes ("this-escape"); callers appear to rely on the
        // auto-start behavior, so it is documented rather than changed.
        this.thread = new Thread(this);
        this.thread.start();
    }

    @Override
    public void run() {
        // 100 iterations — see class-level note about the 1000-vs-100 discrepancy.
        for (int i = 0; i < 100; i++) {
            counter.increment();
        }
    }
}
apache-2.0
cjug/jigsaw-commons-lang3
common-lang-jigsaw/src/main/java/org/apache/commons/lang3/LocaleUtils.java
13949
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.lang3;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;

/**
 * <p>Operations to assist when working with a {@link Locale}.</p>
 *
 * <p>This class tries to handle {@code null} input gracefully.
 * An exception will not be thrown for a {@code null} input.
 * Each method documents its behaviour in more detail.</p>
 *
 * @since 2.2
 * @version $Id: LocaleUtils.java 1565235 2014-02-06 13:31:43Z sebb $
 */
public class LocaleUtils {

    /** Concurrent map of language locales by country, lazily filled per country code. */
    private static final ConcurrentMap<String, List<Locale>> cLanguagesByCountry =
        new ConcurrentHashMap<String, List<Locale>>();

    /** Concurrent map of country locales by language, lazily filled per language code. */
    private static final ConcurrentMap<String, List<Locale>> cCountriesByLanguage =
        new ConcurrentHashMap<String, List<Locale>>();

    /**
     * <p>{@code LocaleUtils} instances should NOT be constructed in standard programming.
     * Instead, the class should be used as {@code LocaleUtils.toLocale("en_GB");}.</p>
     *
     * <p>This constructor is public to permit tools that require a JavaBean instance
     * to operate.</p>
     */
    public LocaleUtils() {
        super();
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Converts a String to a Locale.</p>
     *
     * <p>This method takes the string format of a locale and creates the
     * locale object from it.</p>
     *
     * <pre>
     *   LocaleUtils.toLocale("")           = new Locale("", "")
     *   LocaleUtils.toLocale("en")         = new Locale("en", "")
     *   LocaleUtils.toLocale("en_GB")      = new Locale("en", "GB")
     *   LocaleUtils.toLocale("en_GB_xxx")  = new Locale("en", "GB", "xxx")   (#)
     * </pre>
     *
     * <p>(#) The behaviour of the JDK variant constructor changed between JDK1.3 and JDK1.4.
     * In JDK1.3, the constructor upper cases the variant, in JDK1.4, it doesn't.
     * Thus, the result from getVariant() may vary depending on your JDK.</p>
     *
     * <p>This method validates the input strictly.
     * The language code must be lowercase.
     * The country code must be uppercase.
     * The separator must be an underscore.
     * The length must be correct.
     * </p>
     *
     * @param str  the locale String to convert, null returns null
     * @return a Locale, null if null input
     * @throws IllegalArgumentException if the string is an invalid format
     * @see Locale#forLanguageTag(String)
     */
    public static Locale toLocale(final String str) {
        if (str == null) {
            return null;
        }
        if (str.isEmpty()) { // LANG-941 - JDK 8 introduced an empty locale where all fields are blank
            return new Locale("", "");
        }
        if (str.contains("#")) { // LANG-879 - Cannot handle Java 7 script & extensions
            throw new IllegalArgumentException("Invalid locale format: " + str);
        }
        final int len = str.length();
        if (len < 2) {
            throw new IllegalArgumentException("Invalid locale format: " + str);
        }
        final char ch0 = str.charAt(0);
        // Leading underscore means "no language, country (and maybe variant) only",
        // e.g. "_GB" or "_GB_xxx".
        if (ch0 == '_') {
            if (len < 3) {
                throw new IllegalArgumentException("Invalid locale format: " + str);
            }
            final char ch1 = str.charAt(1);
            final char ch2 = str.charAt(2);
            if (!Character.isUpperCase(ch1) || !Character.isUpperCase(ch2)) {
                throw new IllegalArgumentException("Invalid locale format: " + str);
            }
            if (len == 3) {
                return new Locale("", str.substring(1, 3));
            }
            if (len < 5) {
                throw new IllegalArgumentException("Invalid locale format: " + str);
            }
            if (str.charAt(3) != '_') {
                throw new IllegalArgumentException("Invalid locale format: " + str);
            }
            return new Locale("", str.substring(1, 3), str.substring(4));
        }

        // Split with limit -1 so trailing empty segments are preserved
        // (e.g. "en__xxx" yields ["en", "", "xxx"]).
        String[] split = str.split("_", -1);
        int occurrences = split.length -1;
        switch (occurrences) {
            case 0:
                // Language only: 2 or 3 lowercase letters.
                if (StringUtils.isAllLowerCase(str) && (len == 2 || len == 3)) {
                    return new Locale(str);
                } else {
                    throw new IllegalArgumentException("Invalid locale format: " + str);
                }

            case 1:
                // Language + country.
                if (StringUtils.isAllLowerCase(split[0]) &&
                        (split[0].length() == 2 || split[0].length() == 3) &&
                         split[1].length() == 2 && StringUtils.isAllUpperCase(split[1])) {
                    return new Locale(split[0], split[1]);
                } else {
                    throw new IllegalArgumentException("Invalid locale format: " + str);
                }

            case 2:
                // Language + optional country + variant; country may be empty ("en__xxx").
                if (StringUtils.isAllLowerCase(split[0]) &&
                    (split[0].length() == 2 || split[0].length() == 3) &&
                    (split[1].length() == 0 || (split[1].length() == 2 && StringUtils.isAllUpperCase(split[1]))) &&
                     split[2].length() > 0) {
                    return new Locale(split[0], split[1], split[2]);
                }

                //$FALL-THROUGH$
            default:
                throw new IllegalArgumentException("Invalid locale format: " + str);
        }
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Obtains the list of locales to search through when performing
     * a locale search.</p>
     *
     * <pre>
     * localeLookupList(Locale("fr","CA","xxx"))
     *   = [Locale("fr","CA","xxx"), Locale("fr","CA"), Locale("fr")]
     * </pre>
     *
     * @param locale  the locale to start from
     * @return the unmodifiable list of Locale objects, 0 being locale, not null
     */
    public static List<Locale> localeLookupList(final Locale locale) {
        // Same locale doubles as the default, so it is never appended twice.
        return localeLookupList(locale, locale);
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Obtains the list of locales to search through when performing
     * a locale search.</p>
     *
     * <pre>
     * localeLookupList(Locale("fr", "CA", "xxx"), Locale("en"))
     *   = [Locale("fr","CA","xxx"), Locale("fr","CA"), Locale("fr"), Locale("en"]
     * </pre>
     *
     * <p>The result list begins with the most specific locale, then the
     * next more general and so on, finishing with the default locale.
     * The list will never contain the same locale twice.</p>
     *
     * @param locale  the locale to start from, null returns empty list
     * @param defaultLocale  the default locale to use if no other is found
     * @return the unmodifiable list of Locale objects, 0 being locale, not null
     */
    public static List<Locale> localeLookupList(final Locale locale, final Locale defaultLocale) {
        final List<Locale> list = new ArrayList<Locale>(4);
        if (locale != null) {
            list.add(locale);
            // Strip variant, then country, producing progressively more general locales.
            if (locale.getVariant().length() > 0) {
                list.add(new Locale(locale.getLanguage(), locale.getCountry()));
            }
            if (locale.getCountry().length() > 0) {
                list.add(new Locale(locale.getLanguage(), ""));
            }
            if (list.contains(defaultLocale) == false) {
                list.add(defaultLocale);
            }
        }
        return Collections.unmodifiableList(list);
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Obtains an unmodifiable list of installed locales.</p>
     *
     * <p>This method is a wrapper around {@link Locale#getAvailableLocales()}.
     * It is more efficient, as the JDK method must create a new array each
     * time it is called.</p>
     *
     * @return the unmodifiable list of available locales
     */
    public static List<Locale> availableLocaleList() {
        return SyncAvoid.AVAILABLE_LOCALE_LIST;
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Obtains an unmodifiable set of installed locales.</p>
     *
     * <p>This method is a wrapper around {@link Locale#getAvailableLocales()}.
     * It is more efficient, as the JDK method must create a new array each
     * time it is called.</p>
     *
     * @return the unmodifiable set of available locales
     */
    public static Set<Locale> availableLocaleSet() {
        return SyncAvoid.AVAILABLE_LOCALE_SET;
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Checks if the locale specified is in the list of available locales.</p>
     *
     * @param locale the Locale object to check if it is available
     * @return true if the locale is a known locale
     */
    public static boolean isAvailableLocale(final Locale locale) {
        return availableLocaleList().contains(locale);
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Obtains the list of languages supported for a given country.</p>
     *
     * <p>This method takes a country code and searches to find the
     * languages available for that country. Variant locales are removed.</p>
     *
     * @param countryCode  the 2 letter country code, null returns empty
     * @return an unmodifiable List of Locale objects, not null
     */
    public static List<Locale> languagesByCountry(final String countryCode) {
        if (countryCode == null) {
            return Collections.emptyList();
        }
        List<Locale> langs = cLanguagesByCountry.get(countryCode);
        if (langs == null) {
            langs = new ArrayList<Locale>();
            final List<Locale> locales = availableLocaleList();
            for (int i = 0; i < locales.size(); i++) {
                final Locale locale = locales.get(i);
                if (countryCode.equals(locale.getCountry()) &&
                        locale.getVariant().isEmpty()) {
                    langs.add(locale);
                }
            }
            langs = Collections.unmodifiableList(langs);
            // putIfAbsent + re-get: if another thread won the race, return its list
            // so all callers observe the same cached instance.
            cLanguagesByCountry.putIfAbsent(countryCode, langs);
            langs = cLanguagesByCountry.get(countryCode);
        }
        return langs;
    }

    //-----------------------------------------------------------------------
    /**
     * <p>Obtains the list of countries supported for a given language.</p>
     *
     * <p>This method takes a language code and searches to find the
     * countries available for that language. Variant locales are removed.</p>
     *
     * @param languageCode  the 2 letter language code, null returns empty
     * @return an unmodifiable List of Locale objects, not null
     */
    public static List<Locale> countriesByLanguage(final String languageCode) {
        if (languageCode == null) {
            return Collections.emptyList();
        }
        List<Locale> countries = cCountriesByLanguage.get(languageCode);
        if (countries == null) {
            countries = new ArrayList<Locale>();
            final List<Locale> locales = availableLocaleList();
            for (int i = 0; i < locales.size(); i++) {
                final Locale locale = locales.get(i);
                if (languageCode.equals(locale.getLanguage()) &&
                        locale.getCountry().length() != 0 &&
                        locale.getVariant().isEmpty()) {
                    countries.add(locale);
                }
            }
            countries = Collections.unmodifiableList(countries);
            // Same race-tolerant caching idiom as languagesByCountry.
            cCountriesByLanguage.putIfAbsent(languageCode, countries);
            countries = cCountriesByLanguage.get(languageCode);
        }
        return countries;
    }

    //-----------------------------------------------------------------------
    // class to avoid synchronization (Init on demand holder idiom: the JVM
    // guarantees the static initializer runs at most once, on first access)
    static class SyncAvoid {
        /** Unmodifiable list of available locales. */
        private static final List<Locale> AVAILABLE_LOCALE_LIST;
        /** Unmodifiable set of available locales. */
        private static final Set<Locale> AVAILABLE_LOCALE_SET;

        static {
            final List<Locale> list = new ArrayList<Locale>(Arrays.asList(Locale.getAvailableLocales()));  // extra safe
            AVAILABLE_LOCALE_LIST = Collections.unmodifiableList(list);
            AVAILABLE_LOCALE_SET = Collections.unmodifiableSet(new HashSet<Locale>(list));
        }
    }
}
apache-2.0
jmarranz/jeplayer
src/main/java/jepl/impl/jta/JEPLJTAConnectionXAPoolImpl.java
5774
/*
   Copyright 2011 Jose Maria Arranz Santamaria

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*/
package jepl.impl.jta;

import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.sql.Connection;
import java.sql.SQLException;
import javax.transaction.RollbackException;
import javax.transaction.SystemException;
import javax.transaction.Transaction;
import javax.transaction.TransactionManager;
import javax.transaction.xa.XAResource;
import jepl.JEPLException;
import jepl.impl.JEPLDALImpl;
import jepl.impl.JEPLPreparedStatementImpl;

/**
 * JTA connection wrapper that works around an XAPool/JOTM issue where a
 * StandardXAConnectionHandle is used outside an EJB server: before preparing a
 * statement, the current transaction is manually attached to the handle via
 * reflection (see {@link #fixXAPool()}).
 *
 * <p>All XAPool access is done reflectively to avoid a compile-time dependency
 * on JOTM/XAPool.</p>
 *
 * @author jmarranz
 */
public class JEPLJTAConnectionXAPoolImpl extends JEPLJTAConnectionImpl
{
    // Reflective handles into org.enhydra.jdbc.standard classes, resolved once
    // in the static initializer.
    static Class standardXAConnectionHandleClass;
    static Class standardXAConnection;
    static Field field_tx;
    static Field field_transactionManager;
    static Field field_xacon;
    static Field field_xacon_thisAutoCommit;
    static Method method_xacon_getXAResource;
    static Method method_getAutoCommit;
    static Method method_setAutoCommit;

    static
    {
        initReflection();
    }

    /** True when the wrapped connection is an XAPool StandardXAConnectionHandle. */
    protected boolean isStandardXAConnectionHandle;

    public JEPLJTAConnectionXAPoolImpl(JEPLJTADataSourceDefaultImpl ds,Connection con)
    {
        super(ds,con);
        // Just in case, although it should be guaranteed (translated from original comment)
        if (standardXAConnectionHandleClass != null)
            // FIX: the original check was reversed —
            // con.getClass().isAssignableFrom(standardXAConnectionHandleClass) only
            // matched when con's class was the handle class itself (or a supertype),
            // silently missing subclasses of StandardXAConnectionHandle. The intended
            // test is "is con an instance of the handle class":
            this.isStandardXAConnectionHandle = standardXAConnectionHandleClass.isInstance(con);
    }

    /**
     * Resolves the XAPool fields/methods needed by {@link #fixXAPool()} once.
     * Throws JEPLException wrapping any reflection failure (e.g. XAPool absent).
     */
    @SuppressWarnings("unchecked")
    private static void initReflection()
    {
        try
        {
            standardXAConnectionHandleClass = Class.forName("org.enhydra.jdbc.standard.StandardXAConnectionHandle");
            standardXAConnection = Class.forName("org.enhydra.jdbc.standard.StandardXAConnection");
            field_tx = standardXAConnectionHandleClass.getField("tx");
            field_transactionManager = standardXAConnectionHandleClass.getField("transactionManager");
            field_xacon = standardXAConnectionHandleClass.getField("xacon");
            field_xacon_thisAutoCommit = standardXAConnection.getField("thisAutoCommit");
            method_xacon_getXAResource = standardXAConnection.getMethod("getXAResource",(Class<Object>[])null);
            method_getAutoCommit = standardXAConnectionHandleClass.getMethod("getAutoCommit",(Class<?>[])null);
            method_setAutoCommit = standardXAConnectionHandleClass.getMethod("setAutoCommit",new Class[]{boolean.class});
        }
        catch(Exception ex)
        {
            throw new JEPLException(ex);
        }
    }

    @Override
    public JEPLPreparedStatementImpl prepareJEPLStatement(JEPLDALImpl dal,String sql,int autoGeneratedKeys) throws SQLException
    {
        // Attach the current JTA transaction before the statement is prepared;
        // only needed for XAPool handles.
        if (isStandardXAConnectionHandle)
            fixXAPool();
        return super.prepareJEPLStatement(dal,sql,autoGeneratedKeys);
    }

    /**
     * Reflectively replicates what StandardXAConnectionHandle does inside an EJB
     * server: if the handle has no transaction attached yet, look up the active
     * JTA transaction, attach it, disable auto-commit and enlist the XAResource.
     *
     * References:
     * http://stackoverflow.com/questions/6927561/standardxaconnectionhandlepreparestatement-should-not-be-used-outside-an-ejbser
     * http://www.java2s.com/Open-Source/Java-Document/Database-JDBC-Connection-Pool/xapool/org/enhydra/jdbc/standard/StandardXAConnectionHandle.java.htm
     * http://websvn.ow2.org/filedetails.php?repname=xapool&amp;path=%2Ftrunk%2Fxapool%2Fsrc%2Forg%2Fenhydra%2Fjdbc%2Fstandard%2FStandardXAConnectionHandle.java
     *
     * @throws SQLException if obtaining the underlying connection fails
     */
    public void fixXAPool() throws SQLException
    {
        Connection con = getConnection();

        // Using reflection avoids compile-time dependencies on JOTM
        // (equivalent direct code shown in the trailing comments).
        try
        {
            // StandardXAConnectionHandle jotmCon = (StandardXAConnectionHandle)con;
            if (field_tx.get(con) == null) // jotmCon.tx == null
            {
                TransactionManager txnMgr = (TransactionManager)field_transactionManager.get(con); // TransactionManager txnMgr = jotmCon.transactionManager;
                Transaction tx = txnMgr.getTransaction();
                if (tx != null)
                {
                    field_tx.set(con,tx); // jotmCon.tx = tx;
                    boolean autoCommit = (Boolean)method_getAutoCommit.invoke(con, (Object[])null); // boolean autoCommit = jotmCon.getAutoCommit();
                    Object xacon = field_xacon.get(con); // StandardXAConnection xacon = jotmCon.xacon;
                    field_xacon_thisAutoCommit.set(xacon,autoCommit); // xacon.thisAutoCommit = autoCommit;
                    if (autoCommit)
                        method_setAutoCommit.invoke(con,false); // jotmCon.setAutoCommit(false);
                    tx.enlistResource((XAResource)method_xacon_getXAResource.invoke(xacon,(Object[])null)); // tx.enlistResource(xacon.getXAResource());
                }
            }
        }
        catch (RollbackException ex1)
        {
            throw new JEPLException(ex1);
        }
        catch (IllegalStateException ex1)
        {
            throw new JEPLException(ex1);
        }
        catch (SystemException ex1)
        {
            throw new JEPLException(ex1);
        }
        catch (IllegalAccessException ex1)
        {
            throw new JEPLException(ex1);
        }
        catch (InvocationTargetException ex1)
        {
            throw new JEPLException(ex1);
        }
    }
}
apache-2.0
jivesoftware/upena
upena-deployable/src/main/java/com/jivesoftware/os/upena/deployable/region/JVMPluginRegion.java
4087
package com.jivesoftware.os.upena.deployable.region;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import com.jivesoftware.os.mlogger.core.MetricLogger;
import com.jivesoftware.os.mlogger.core.MetricLoggerFactory;
import com.jivesoftware.os.upena.deployable.JDIAPI;
import com.jivesoftware.os.upena.deployable.JDIAPI.ThreadDumpLineType;
import com.jivesoftware.os.upena.deployable.region.JVMPluginRegion.JVMPluginRegionInput;
import com.jivesoftware.os.upena.deployable.soy.SoyRenderer;
import com.jivesoftware.os.upena.service.UpenaStore;
import com.jivesoftware.os.upena.shared.Host;
import com.jivesoftware.os.upena.shared.Instance;
import com.jivesoftware.os.upena.shared.InstanceKey;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.shiro.SecurityUtils;

// soy.page.jvmPluginRegion
/**
 * UI page region that attaches to a remote JVM's debug port (via JDIAPI) and
 * renders either a memory histogram or thread dumps through a Soy template.
 */
public class JVMPluginRegion implements PageRegion<JVMPluginRegionInput> {

    private static final MetricLogger log = MetricLoggerFactory.getLogger();

    // Soy template name to render and its renderer.
    private final String template;
    private final SoyRenderer renderer;
    // Store used to resolve instanceKey -> host/debug-port.
    private final UpenaStore upenaStore;
    // Bridge to the remote JVM's debug interface.
    private final JDIAPI jvm;

    public JVMPluginRegion(String template,
        SoyRenderer renderer,
        UpenaStore upenaStore,
        JDIAPI jvm) {
        this.template = template;
        this.renderer = renderer;
        this.upenaStore = upenaStore;
        this.jvm = jvm;
    }

    @Override
    public String getRootPath() {
        return "/ui/jvm";
    }

    /**
     * Form input for this page. NOTE(review): host/port are intentionally
     * non-final — render() overwrites them when instanceKey is supplied.
     */
    public static class JVMPluginRegionInput implements PluginInput {

        String host;
        String port;
        final String instanceKey;
        // Which view to produce: "memoryHisto" or "threadDump".
        final String action;

        public JVMPluginRegionInput(String host, String port, String instanceKey, String action) {
            this.host = host;
            this.port = port;
            this.instanceKey = instanceKey;
            this.action = action;
        }

        @Override
        public String name() {
            return "JVM";
        }
    }

    @Override
    public String render(String user, JVMPluginRegionInput input) throws Exception {
        // Requires the "debug" permission (Shiro) before touching a remote JVM.
        SecurityUtils.getSubject().checkPermission("debug");

        // If an instance key was supplied, resolve its host and debug port from
        // the store, overriding whatever host/port came in the form.
        if (input.instanceKey != null && !input.instanceKey.isEmpty()) {
            Instance instance = upenaStore.instances.get(new InstanceKey(input.instanceKey));
            Host host = upenaStore.hosts.get(instance.hostKey);
            input.port = String.valueOf(instance.ports.get("debug").port);
            input.host = host.name;
        }

        Map<String, Object> data = Maps.newHashMap();
        data.put("host", input.host);
        data.put("port", input.port);

        try {
            if (input.action.equals("memoryHisto")) {
                // Each histogram line is passed to the template as {line: ...};
                // the callback returns true to keep streaming.
                List<Map<String, String>> lines = new ArrayList<>();
                jvm.memoryHisto(input.host, Integer.parseInt(input.port), (String name) -> {
                    lines.add(ImmutableMap.of("line", name));
                    return true;
                });
                data.put("instanceCounts", lines);
            }

            if (input.action.equals("threadDump")) {
                // Lines are grouped per thread; an 'eod' marker closes the current
                // group and starts the next one.
                List<List<Map<String, String>>> threadDumps = new ArrayList<>();
                List<Map<String, String>> threadDump = new ArrayList<>();
                jvm.threadDump(input.host, Integer.parseInt(input.port), (ThreadDumpLineType type, String value) -> {
                    if (type == ThreadDumpLineType.eod) {
                        threadDumps.add(new ArrayList<>(threadDump));
                        threadDump.clear();
                    } else {
                        threadDump.add(ImmutableMap.of("type", type.toString(), "value", value));
                    }
                    return true;
                });
                data.put("threadDumps", threadDumps);
            }
        } catch (Exception e) {
            // Best-effort: render the page (without data) even if the remote JVM
            // could not be reached or parsing failed.
            log.error("Unable to retrieve data", e);
        }

        return renderer.render(template, data);
    }

    @Override
    public String getTitle() {
        return "JVM";
    }
}
apache-2.0
mmmsplay10/QuizUpWinner
quizup/com/fasterxml/jackson/databind/jsontype/impl/AsPropertyTypeDeserializer.java
5580
package com.fasterxml.jackson.databind.jsontype.impl;

import com.fasterxml.jackson.annotation.JsonTypeInfo.As;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.JsonToken;
import com.fasterxml.jackson.core.util.JsonParserSequence;
import com.fasterxml.jackson.databind.BeanProperty;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JavaType;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.jsontype.TypeDeserializer;
import com.fasterxml.jackson.databind.jsontype.TypeIdResolver;
import com.fasterxml.jackson.databind.util.TokenBuffer;

/**
 * Type deserializer for JsonTypeInfo.As.PROPERTY inclusion: the type id is a
 * regular JSON property inside the object; properties seen before the type id
 * are buffered in a TokenBuffer and replayed for the value deserializer.
 *
 * NOTE(review): this source is decompiled output (JD-Core); parameter/local
 * names (paramJsonParser, localJsonToken1, ...) are decompiler-generated.
 */
public class AsPropertyTypeDeserializer extends AsArrayTypeDeserializer
{
  private static final long serialVersionUID = 1L;

  public AsPropertyTypeDeserializer(JavaType paramJavaType, TypeIdResolver paramTypeIdResolver, String paramString, boolean paramBoolean, Class<?> paramClass)
  {
    super(paramJavaType, paramTypeIdResolver, paramString, paramBoolean, paramClass);
  }

  public AsPropertyTypeDeserializer(AsPropertyTypeDeserializer paramAsPropertyTypeDeserializer, BeanProperty paramBeanProperty)
  {
    super(paramAsPropertyTypeDeserializer, paramBeanProperty);
  }

  /**
   * Called once the type-id property's value is current: resolves the concrete
   * deserializer for that id, replays any buffered properties ahead of the
   * remaining input, and delegates deserialization.
   */
  protected final Object _deserializeTypedForId(JsonParser paramJsonParser, DeserializationContext paramDeserializationContext, TokenBuffer paramTokenBuffer)
  {
    String str = paramJsonParser.getText();
    JsonDeserializer localJsonDeserializer = _findDeserializer(paramDeserializationContext, str);
    if (this._typeIdVisible)
    {
      // Type id should remain visible to the value deserializer: write the
      // property name/value back into the buffer so it is replayed too.
      if (paramTokenBuffer == null)
        paramTokenBuffer = new TokenBuffer(null);
      paramTokenBuffer.writeFieldName(paramJsonParser.getCurrentName());
      paramTokenBuffer.writeString(str);
    }
    if (paramTokenBuffer != null)
      // Flattened sequence: buffered tokens first, then the live parser.
      paramJsonParser = JsonParserSequence.createFlattened(paramTokenBuffer.asParser(paramJsonParser), paramJsonParser);
    paramJsonParser.nextToken();
    return localJsonDeserializer.deserialize(paramJsonParser, paramDeserializationContext);
  }

  /**
   * Fallback when no type-id property was found: try the configured default
   * implementation, then "natural" types, else report the missing property.
   */
  protected Object _deserializeTypedUsingDefaultImpl(JsonParser paramJsonParser, DeserializationContext paramDeserializationContext, TokenBuffer paramTokenBuffer)
  {
    JsonDeserializer localJsonDeserializer = _findDefaultImplDeserializer(paramDeserializationContext);
    if (localJsonDeserializer != null)
    {
      if (paramTokenBuffer != null)
      {
        // Close the buffered object and replay it in full for the default impl.
        paramTokenBuffer.writeEndObject();
        JsonParser localJsonParser = paramTokenBuffer.asParser(paramJsonParser);
        paramJsonParser = localJsonParser;
        localJsonParser.nextToken();
      }
      return localJsonDeserializer.deserialize(paramJsonParser, paramDeserializationContext);
    }
    // Scalars (String/number/boolean) may map directly to the base type.
    Object localObject = TypeDeserializer.deserializeIfNatural(paramJsonParser, paramDeserializationContext, this._baseType);
    if (localObject != null)
      return localObject;
    if (paramJsonParser.getCurrentToken() == JsonToken.START_ARRAY)
      return super.deserializeTypedFromAny(paramJsonParser, paramDeserializationContext);
    throw paramDeserializationContext.wrongTokenException(paramJsonParser, JsonToken.FIELD_NAME, "missing property '" + this._typePropertyName + "' that is to contain type id (for class " + baseTypeName() + ")");
  }

  public Object deserializeTypedFromAny(JsonParser paramJsonParser, DeserializationContext paramDeserializationContext)
  {
    if (paramJsonParser.getCurrentToken() == JsonToken.START_ARRAY)
      return super.deserializeTypedFromArray(paramJsonParser, paramDeserializationContext);
    return deserializeTypedFromObject(paramJsonParser, paramDeserializationContext);
  }

  /**
   * Main entry: scans the object's properties for the type-id property,
   * buffering everything seen before it; if it never appears, falls back to
   * the default-implementation path with whatever was buffered.
   */
  public Object deserializeTypedFromObject(JsonParser paramJsonParser, DeserializationContext paramDeserializationContext)
  {
    JsonToken localJsonToken1 = paramJsonParser.getCurrentToken();
    JsonToken localJsonToken2 = localJsonToken1;
    if (localJsonToken1 == JsonToken.START_OBJECT)
    {
      // Positioned at object start: advance to the first field name (or END_OBJECT).
      localJsonToken2 = paramJsonParser.nextToken();
    }
    else
    {
      if (localJsonToken2 == JsonToken.START_ARRAY)
        return _deserializeTypedUsingDefaultImpl(paramJsonParser, paramDeserializationContext, null);
      if (localJsonToken2 != JsonToken.FIELD_NAME)
        return _deserializeTypedUsingDefaultImpl(paramJsonParser, paramDeserializationContext, null);
    }
    TokenBuffer localTokenBuffer = null;
    while (localJsonToken2 == JsonToken.FIELD_NAME)
    {
      String str = paramJsonParser.getCurrentName();
      paramJsonParser.nextToken();
      if (this._typePropertyName.equals(str))
        // Found the type id; everything buffered so far is replayed downstream.
        return _deserializeTypedForId(paramJsonParser, paramDeserializationContext, localTokenBuffer);
      if (localTokenBuffer == null)
        localTokenBuffer = new TokenBuffer(null);
      // Not the type id: copy name + complete value into the buffer.
      localTokenBuffer.writeFieldName(str);
      localTokenBuffer.copyCurrentStructure(paramJsonParser);
      localJsonToken2 = paramJsonParser.nextToken();
    }
    return _deserializeTypedUsingDefaultImpl(paramJsonParser, paramDeserializationContext, localTokenBuffer);
  }

  public TypeDeserializer forProperty(BeanProperty paramBeanProperty)
  {
    // Contextualization: reuse this instance when the property is unchanged.
    if (paramBeanProperty == this._property)
      return this;
    return new AsPropertyTypeDeserializer(this, paramBeanProperty);
  }

  public JsonTypeInfo.As getTypeInclusion()
  {
    return JsonTypeInfo.As.PROPERTY;
  }
}

/* Location:           /Users/vikas/Documents/Mhacks_Real_app/classes-dex2jar.jar
 * Qualified Name:     com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer
 * JD-Core Version:    0.6.2
 */
apache-2.0
CarlosIribarren/Ejemplos-Examples
Android/AndroidVozHizkuntza/src/com/example/androidvozhizkuntza/MainActivity.java
5609
package com.example.androidvozhizkuntza;

import java.util.ArrayList;
import java.util.Locale;
import java.util.Vector;

import android.os.Bundle;
import android.app.Activity;
import android.content.Intent;
import android.content.res.Resources;
import android.speech.RecognizerIntent;
import android.util.DisplayMetrics;
import android.view.Menu;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.TextView;
import android.widget.Toast;

/**
 * Activity that launches Android's speech recognizer from a button press and
 * reacts to the recognized phrase. If the recognized text contains the
 * "laguntza" help string resource, the app locale is switched to English and
 * the activity is restarted; otherwise the raw recognition results are shown
 * in a TextView. A contact-dialing feature is present but commented out.
 */
public class MainActivity extends Activity {

    // Request code used to correlate the recognizer result in onActivityResult.
    private static final int VOICE_RECOGNITION_REQUEST_CODE = 1;

    private Button bt_start;        // button that starts voice recognition
    private TextView text_testua;   // shows recognition output / status messages
    private Vector<String> nombres;   // contact names (only used by the disabled getNameNumber feature)
    private Vector<String> telefonos; // contact phone numbers, parallel to 'nombres'

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        // Wire up the views declared in the layout XML.
        bt_start = (Button) findViewById(R.id.button1);
        text_testua = (TextView) findViewById(R.id.testua);

        bt_start.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                // Launch the speech recognizer.
                startVoiceRecognitionActivity();
            }
        });

        // Would collect all contact names and phone numbers into the
        // 'nombres' and 'telefonos' vectors (feature currently disabled).
        //getNameNumber();
    }

    /**
     * Fires the platform speech-recognition activity; the result is delivered
     * to {@link #onActivityResult} under VOICE_RECOGNITION_REQUEST_CODE.
     */
    private void startVoiceRecognitionActivity() {
        // Intent that asks the platform to perform speech recognition.
        Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
        // Free-form language model (general dictation rather than web search).
        intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
                RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
        // Prompt shown in the recognizer dialog.
        intent.putExtra(RecognizerIntent.EXTRA_PROMPT, "Diga, Llamar a ...");
        // Start the activity and wait for its result.
        startActivityForResult(intent, VOICE_RECOGNITION_REQUEST_CODE);
    }

    /**
     * Handles the speech-recognition result.
     *
     * If recognition succeeded, the candidate phrases are displayed and, when
     * the help string resource appears among them, the configuration locale is
     * forced to English and the activity is restarted so the new language
     * takes effect. On any other outcome a failure message is shown.
     */
    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // Only act on a successful result from our own recognition request.
        if (requestCode == VOICE_RECOGNITION_REQUEST_CODE && resultCode == RESULT_OK) {
            // The recognizer returns a list of candidate transcriptions;
            // only position 0 (the best match) is split into words here.
            ArrayList<String> matches = data.getStringArrayListExtra(
                    RecognizerIntent.EXTRA_RESULTS);

            // Split the best transcription into words.
            // NOTE(review): 'palabras' is only used by the commented-out
            // dialing feature below.
            String[] palabras = matches.get(0).toString().split(" ");

            text_testua.setText(matches.toString());
            //Toast.makeText(this,matches.toString() , Toast.LENGTH_LONG).show();

            String laguntza_katea = getString(R.string.laguntza);

            // indexOf compares whole list entries, so this matches only when a
            // candidate phrase equals the help string exactly — TODO confirm
            // that per-word matching is not intended here.
            if (matches.indexOf(laguntza_katea) != -1) {
                // Force the app configuration locale to English and restart
                // the activity so the resources are reloaded.
                Resources res = this.getResources();
                DisplayMetrics dm = res.getDisplayMetrics();
                android.content.res.Configuration conf = res.getConfiguration();
                conf.locale = new Locale("en");
                res.updateConfiguration(conf, dm);
                finish();
                startActivity(getIntent());
                Toast.makeText(this, "Hizkuntza Inglesa jarri da!!!", Toast.LENGTH_LONG).show();
                //text_testua.setText("Hizkuntza Inglesa jarri da!!!");
            }

            /*
            //Si la primera palabra es LLAMAR
            if(palabras[0].contains("hola")) {
                //Toast.makeText(this,"AYUDA CONSEGUIDOOOOOOO" , Toast.LENGTH_LONG).show();
                text_testua.setText("AYUDA CONSEGUIDOOOOOOO");
                for(int a=0;a<nombres.size();a++) {
                    //Busco el nombre que es la tercera posicion (LLAMAR A LORENA)
                    if(nombres.get(a).equals(palabras[2])) {
                        //Si la encuentra recojo el numero telf en el otro
                        //vector que coincidira con la posicion ya que
                        //los hemos rellenado a la vez.
                        Intent callIntent = new Intent(Intent.ACTION_CALL);
                        callIntent.setData(Uri.parse("tel:"+telefonos.get(a)));
                        //Realizo la llamada
                        startActivity(callIntent);
                        break;
                    }
                }
            }
            */
        } else {
            // Recognition failed or was cancelled.
            text_testua.setText("no se a encontrado nadaaaaa");
        }
    }

    /*
    //Con el getNameNumber lo que hago es recoger los nombres
    //de la SIM en un vector
    //Y los numeros de telefonos en otro vector, eso sí tienen que coincidir
    //las posiciones de uno y de otro, por eso los relleno a la vez.
    private void getNameNumber(){
        nombres = new Vector<String>();
        telefonos = new Vector<String>();
        Uri uri = ContactsContract.CommonDataKinds.Phone.CONTENT_URI;
        ContentResolver cr = getContentResolver();
        Cursor cur = cr.query(ContactsContract.Contacts.CONTENT_URI, null, null, null, null);
        String[] projection = new String[] {
            ContactsContract.CommonDataKinds.Phone.DISPLAY_NAME,
            ContactsContract.CommonDataKinds.Phone.NUMBER };
        Cursor names = getContentResolver().query( uri, projection, null, null, null);
        int indexName = names.getColumnIndex( ContactsContract.CommonDataKinds.Phone.DISPLAY_NAME);
        int indexNumber = names.getColumnIndex( ContactsContract.CommonDataKinds.Phone.NUMBER);
        names.moveToFirst();
        do {
            //Aquí relleno los dos
            String name = names.getString(indexName);
            nombres.add(name);
            String number = names.getString(indexNumber);
            telefonos.add(number);
        } while (names.moveToNext());
    }
    */
}
apache-2.0
GerritCodeReview/gerrit
java/com/google/gerrit/server/api/config/ConfigModule.java
1013
// Copyright (C) 2015 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.api.config; import com.google.gerrit.extensions.api.config.Config; import com.google.gerrit.extensions.api.config.Server; import com.google.gerrit.extensions.config.FactoryModule; public class ConfigModule extends FactoryModule { @Override protected void configure() { bind(Config.class).to(ConfigImpl.class); bind(Server.class).to(ServerImpl.class); } }
apache-2.0
emolsson/cassandra
src/java/org/apache/cassandra/io/sstable/format/big/BigTableWriter.java
19002
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.cassandra.io.sstable.format.big;

import java.io.*;
import java.util.Map;

import org.apache.cassandra.db.*;
import org.apache.cassandra.db.lifecycle.LifecycleTransaction;
import org.apache.cassandra.io.sstable.*;
import org.apache.cassandra.io.sstable.format.SSTableReader;
import org.apache.cassandra.io.sstable.format.SSTableWriter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.DatabaseDescriptor;
import org.apache.cassandra.db.rows.*;
import org.apache.cassandra.io.FSWriteError;
import org.apache.cassandra.io.compress.CompressedSequentialWriter;
import org.apache.cassandra.io.sstable.metadata.MetadataCollector;
import org.apache.cassandra.io.sstable.metadata.MetadataComponent;
import org.apache.cassandra.io.sstable.metadata.MetadataType;
import org.apache.cassandra.io.sstable.metadata.StatsMetadata;
import org.apache.cassandra.io.util.*;
import org.apache.cassandra.utils.ByteBufferUtil;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.FilterFactory;
import org.apache.cassandra.utils.IFilter;
import org.apache.cassandra.utils.concurrent.Transactional;
import org.apache.cassandra.utils.SyncUtil;

/**
 * SSTableWriter for the "big" table format: writes the data file and, through
 * the nested {@link IndexWriter}, the primary index, bloom filter and index
 * summary. Commit/abort/cleanup is driven through the Transactional proxy
 * machinery inherited from SSTableWriter.
 */
public class BigTableWriter extends SSTableWriter
{
    private static final Logger logger = LoggerFactory.getLogger(BigTableWriter.class);

    private final IndexWriter iwriter;            // writes primary index + bloom filter + summary
    private final SegmentedFile.Builder dbuilder; // builds the segmented view over the data file
    private final SequentialWriter dataFile;      // the sstable data file being written
    private DecoratedKey lastWrittenKey;          // last partition key appended (also tracked as 'last')
    private FileMark dataMark;                    // mark used by mark()/resetAndTruncate()

    public BigTableWriter(Descriptor descriptor,
                          Long keyCount,
                          Long repairedAt,
                          CFMetaData metadata,
                          MetadataCollector metadataCollector,
                          SerializationHeader header,
                          LifecycleTransaction txn)
    {
        super(descriptor, keyCount, repairedAt, metadata, metadataCollector, header);
        txn.trackNew(this); // must track before any files are created

        if (compression)
        {
            dataFile = SequentialWriter.open(getFilename(),
                                             descriptor.filenameFor(Component.COMPRESSION_INFO),
                                             metadata.compressionParameters(),
                                             metadataCollector);
            dbuilder = SegmentedFile.getCompressedBuilder((CompressedSequentialWriter) dataFile);
        }
        else
        {
            dataFile = SequentialWriter.open(new File(getFilename()), new File(descriptor.filenameFor(Component.CRC)));
            dbuilder = SegmentedFile.getBuilder(DatabaseDescriptor.getDiskAccessMode(), false);
        }
        iwriter = new IndexWriter(keyCount, dataFile);

        // txnLogs will delete if safe to do so (early readers)
        iwriter.indexFile.deleteFile(false);
        dataFile.deleteFile(false);
    }

    /** Remembers the current data and index file positions for a later resetAndTruncate(). */
    public void mark()
    {
        dataMark = dataFile.mark();
        iwriter.mark();
    }

    /** Rewinds data and index files to the positions recorded by the last mark(). */
    public void resetAndTruncate()
    {
        dataFile.resetAndTruncate(dataMark);
        iwriter.resetAndTruncate();
    }

    /**
     * Perform sanity checks on @param decoratedKey and @return the position in the data file before any data is written
     */
    private long beforeAppend(DecoratedKey decoratedKey)
    {
        assert decoratedKey != null : "Keys must not be null"; // empty keys ARE allowed b/c of indexed column values
        //if (lastWrittenKey != null && lastWrittenKey.compareTo(decoratedKey) >= 0)
        //    throw new RuntimeException("Last written key " + lastWrittenKey + " >= current key " + decoratedKey + " writing into " + getFilename());
        return (lastWrittenKey == null) ? 0 : dataFile.getFilePointer();
    }

    /**
     * Records bookkeeping after a partition has been written: updates key
     * stats, first/last tracking, the primary index entry, and a potential
     * segment boundary at the partition's end offset.
     */
    private void afterAppend(DecoratedKey decoratedKey, long dataEnd, RowIndexEntry index) throws IOException
    {
        metadataCollector.addKey(decoratedKey.getKey());
        lastWrittenKey = decoratedKey;
        last = lastWrittenKey;
        if (first == null)
            first = lastWrittenKey;

        if (logger.isTraceEnabled())
            logger.trace("wrote {} at {}", decoratedKey, dataEnd);
        iwriter.append(decoratedKey, index, dataEnd);
        dbuilder.addPotentialBoundary(dataEnd);
    }

    /**
     * Appends partition data to this writer.
     *
     * @param iterator the partition to write
     * @return the created index entry if something was written, that is if {@code iterator}
     * wasn't empty, {@code null} otherwise.
     *
     * @throws FSWriteError if a write to the dataFile fails
     */
    public RowIndexEntry append(UnfilteredRowIterator iterator)
    {
        DecoratedKey key = iterator.partitionKey();

        // Keys are serialized with an unsigned-short length prefix; oversized
        // keys are skipped (with an error log) rather than corrupting the file.
        if (key.getKey().remaining() > FBUtilities.MAX_UNSIGNED_SHORT)
        {
            logger.error("Key size {} exceeds maximum of {}, skipping row", key.getKey().remaining(), FBUtilities.MAX_UNSIGNED_SHORT);
            return null;
        }

        if (iterator.isEmpty())
            return null;

        long startPosition = beforeAppend(key);

        // StatsCollector wraps the iterator so that metadata statistics are
        // gathered as a side effect of the serialization pass.
        try (StatsCollector withStats = new StatsCollector(iterator, metadataCollector))
        {
            ColumnIndex index = ColumnIndex.writeAndBuildIndex(withStats, dataFile, header, descriptor.version);

            RowIndexEntry entry = RowIndexEntry.create(startPosition, iterator.partitionLevelDeletion(), index);

            long endPosition = dataFile.getFilePointer();
            long rowSize = endPosition - startPosition;
            maybeLogLargePartitionWarning(key, rowSize);
            metadataCollector.addPartitionSizeInBytes(rowSize);
            afterAppend(key, endPosition, entry);
            return entry;
        }
        catch (IOException e)
        {
            throw new FSWriteError(e, dataFile.getPath());
        }
    }

    /** Logs a warning when a partition exceeds the configured large-partition threshold. */
    private void maybeLogLargePartitionWarning(DecoratedKey key, long rowSize)
    {
        if (rowSize > DatabaseDescriptor.getCompactionLargePartitionWarningThreshold())
        {
            String keyString = metadata.getKeyValidator().getString(key.getKey());
            logger.warn("Compacting large partition {}/{}:{} ({} bytes)", metadata.ksName, metadata.cfName, keyString, rowSize);
        }
    }

    /**
     * Iterator decorator that feeds clustering values, deletion times and
     * per-partition cell counts into a MetadataCollector while the partition
     * is being serialized.
     */
    private static class StatsCollector extends AlteringUnfilteredRowIterator
    {
        private final MetadataCollector collector;
        private int cellCount; // cells seen in this partition; flushed to the collector on close()

        StatsCollector(UnfilteredRowIterator iter, MetadataCollector collector)
        {
            super(iter);
            this.collector = collector;
            collector.update(iter.partitionLevelDeletion());
        }

        @Override
        protected Row computeNextStatic(Row row)
        {
            if (!row.isEmpty())
                cellCount += Rows.collectStats(row, collector);
            return row;
        }

        @Override
        protected Row computeNext(Row row)
        {
            collector.updateClusteringValues(row.clustering());
            cellCount += Rows.collectStats(row, collector);
            return row;
        }

        @Override
        protected RangeTombstoneMarker computeNext(RangeTombstoneMarker marker)
        {
            collector.updateClusteringValues(marker.clustering());
            if (marker.isBoundary())
            {
                // A boundary marker carries both an end and a start deletion time.
                RangeTombstoneBoundaryMarker bm = (RangeTombstoneBoundaryMarker) marker;
                collector.update(bm.endDeletionTime());
                collector.update(bm.startDeletionTime());
            }
            else
            {
                collector.update(((RangeTombstoneBoundMarker) marker).deletionTime());
            }
            return marker;
        }

        @Override
        public void close()
        {
            collector.addCellPerPartitionCount(cellCount);
            super.close();
        }
    }

    /**
     * Opens a reader over the portion of this (still in-progress) sstable that
     * is already safely flushed, or returns null if nothing is readable yet.
     */
    @SuppressWarnings("resource")
    public SSTableReader openEarly()
    {
        // find the max (exclusive) readable key
        IndexSummaryBuilder.ReadableBoundary boundary = iwriter.getMaxReadable();
        if (boundary == null)
            return null;

        StatsMetadata stats = statsMetadata();
        assert boundary.indexLength > 0 && boundary.dataLength > 0;
        // open the reader early
        IndexSummary indexSummary = iwriter.summary.build(metadata.partitioner, boundary);
        SegmentedFile ifile = iwriter.builder.buildIndex(descriptor, indexSummary, boundary);
        SegmentedFile dfile = dbuilder.buildData(descriptor, stats, boundary);
        SSTableReader sstable = SSTableReader.internalOpen(descriptor,
                                                           components, metadata,
                                                           ifile, dfile, indexSummary,
                                                           iwriter.bf.sharedCopy(), maxDataAge, stats, SSTableReader.OpenReason.EARLY, header);

        // now it's open, find the ACTUAL last readable key (i.e. for which the data file has also been flushed)
        sstable.first = getMinimalKey(first);
        sstable.last = getMinimalKey(boundary.lastKey);
        return sstable;
    }

    /** Syncs both files to disk, then opens the completed sstable with EARLY open reason. */
    public SSTableReader openFinalEarly()
    {
        // we must ensure the data is completely flushed to disk
        dataFile.sync();
        iwriter.indexFile.sync();

        return openFinal(descriptor, SSTableReader.OpenReason.EARLY);
    }

    @SuppressWarnings("resource")
    private SSTableReader openFinal(Descriptor desc, SSTableReader.OpenReason openReason)
    {
        if (maxDataAge < 0)
            maxDataAge = System.currentTimeMillis();

        StatsMetadata stats = statsMetadata();
        // finalize in-memory state for the reader
        IndexSummary indexSummary = iwriter.summary.build(this.metadata.partitioner);
        SegmentedFile ifile = iwriter.builder.buildIndex(desc, indexSummary);
        SegmentedFile dfile = dbuilder.buildData(desc, stats);
        SSTableReader sstable = SSTableReader.internalOpen(desc,
                                                           components,
                                                           this.metadata,
                                                           ifile,
                                                           dfile,
                                                           indexSummary,
                                                           iwriter.bf.sharedCopy(),
                                                           maxDataAge,
                                                           stats,
                                                           openReason,
                                                           header);
        sstable.first = getMinimalKey(first);
        sstable.last = getMinimalKey(last);
        return sstable;
    }

    protected SSTableWriter.TransactionalProxy txnProxy()
    {
        return new TransactionalProxy();
    }

    /**
     * Transactional lifecycle for the whole writer. NOTE: the exact ordering
     * of prepare/commit/cleanup steps below (index before data on prepare,
     * data before index on commit, etc.) is deliberate — do not reorder.
     */
    class TransactionalProxy extends SSTableWriter.TransactionalProxy
    {
        // finalise our state on disk, including renaming
        protected void doPrepare()
        {
            Map<MetadataType, MetadataComponent> metadataComponents = finalizeMetadata();

            iwriter.prepareToCommit();

            // write sstable statistics
            dataFile.setDescriptor(descriptor).prepareToCommit();
            writeMetadata(descriptor, metadataComponents);

            // save the table of components
            SSTable.appendTOC(descriptor, components);

            if (openResult)
                finalReader = openFinal(descriptor, SSTableReader.OpenReason.NORMAL);
        }

        protected Throwable doCommit(Throwable accumulate)
        {
            accumulate = dataFile.commit(accumulate);
            accumulate = iwriter.commit(accumulate);
            return accumulate;
        }

        @Override
        protected Throwable doPreCleanup(Throwable accumulate)
        {
            accumulate = dbuilder.close(accumulate);
            return accumulate;
        }

        protected Throwable doAbort(Throwable accumulate)
        {
            accumulate = iwriter.abort(accumulate);
            accumulate = dataFile.abort(accumulate);
            return accumulate;
        }
    }

    /** Serializes the collected metadata components into the STATS file. */
    private static void writeMetadata(Descriptor desc, Map<MetadataType, MetadataComponent> components)
    {
        File file = new File(desc.filenameFor(Component.STATS));
        try (SequentialWriter out = SequentialWriter.open(file))
        {
            desc.getMetadataSerializer().serialize(components, out.stream);
            out.setDescriptor(desc).finish();
        }
        catch (IOException e)
        {
            throw new FSWriteError(e, file.getPath());
        }
    }

    public long getFilePointer()
    {
        return dataFile.getFilePointer();
    }

    public long getOnDiskFilePointer()
    {
        return dataFile.getOnDiskFilePointer();
    }

    /**
     * Encapsulates writing the index and filter for an SSTable. The state of this object is not valid until it has been closed.
     */
    class IndexWriter extends AbstractTransactional implements Transactional
    {
        private final SequentialWriter indexFile; // primary index component
        public final SegmentedFile.Builder builder;
        public final IndexSummaryBuilder summary;
        public final IFilter bf;                  // bloom filter over partition keys
        private FileMark mark;

        IndexWriter(long keyCount, final SequentialWriter dataFile)
        {
            indexFile = SequentialWriter.open(new File(descriptor.filenameFor(Component.PRIMARY_INDEX)));
            builder = SegmentedFile.getBuilder(DatabaseDescriptor.getIndexAccessMode(), false);
            summary = new IndexSummaryBuilder(keyCount, metadata.getMinIndexInterval(), Downsampling.BASE_SAMPLING_LEVEL);
            bf = FilterFactory.getFilter(keyCount, metadata.getBloomFilterFpChance(), true, descriptor.version.hasOldBfHashOrder());
            // register listeners to be alerted when the data files are flushed
            indexFile.setPostFlushListener(new Runnable()
            {
                public void run()
                {
                    summary.markIndexSynced(indexFile.getLastFlushOffset());
                }
            });
            dataFile.setPostFlushListener(new Runnable()
            {
                public void run()
                {
                    summary.markDataSynced(dataFile.getLastFlushOffset());
                }
            });
        }

        // finds the last (-offset) decorated key that can be guaranteed to occur fully in the flushed portion of the index file
        IndexSummaryBuilder.ReadableBoundary getMaxReadable()
        {
            return summary.getLastReadableBoundary();
        }

        /** Adds a key to the bloom filter and writes its primary-index entry. */
        public void append(DecoratedKey key, RowIndexEntry indexEntry, long dataEnd) throws IOException
        {
            bf.add(key);
            long indexStart = indexFile.getFilePointer();
            try
            {
                ByteBufferUtil.writeWithShortLength(key.getKey(), indexFile.stream);
                rowIndexEntrySerializer.serialize(indexEntry, indexFile.stream);
            }
            catch (IOException e)
            {
                throw new FSWriteError(e, indexFile.getPath());
            }
            long indexEnd = indexFile.getFilePointer();

            if (logger.isTraceEnabled())
                logger.trace("wrote index entry: {} at {}", indexEntry, indexStart);

            summary.maybeAddEntry(key, indexStart, indexEnd, dataEnd);
            builder.addPotentialBoundary(indexStart);
        }

        /**
         * Closes the index and bloomfilter, making the public state of this writer valid for consumption.
         */
        void flushBf()
        {
            if (components.contains(Component.FILTER))
            {
                String path = descriptor.filenameFor(Component.FILTER);
                try (FileOutputStream fos = new FileOutputStream(path);
                     DataOutputStreamPlus stream = new BufferedDataOutputStreamPlus(fos))
                {
                    // bloom filter
                    FilterFactory.serialize(bf, stream);
                    stream.flush();
                    SyncUtil.sync(fos);
                }
                catch (IOException e)
                {
                    throw new FSWriteError(e, path);
                }
            }
        }

        public void mark()
        {
            mark = indexFile.mark();
        }

        public void resetAndTruncate()
        {
            // we can't un-set the bloom filter addition, but extra keys in there are harmless.
            // we can't reset dbuilder either, but that is the last thing called in afterappend so
            // we assume that if that worked then we won't be trying to reset.
            indexFile.resetAndTruncate(mark);
        }

        protected void doPrepare()
        {
            flushBf();

            // truncate index file
            long position = iwriter.indexFile.getFilePointer();
            iwriter.indexFile.setDescriptor(descriptor).prepareToCommit();
            FileUtils.truncate(iwriter.indexFile.getPath(), position);

            // save summary
            summary.prepareToCommit();
            try (IndexSummary summary = iwriter.summary.build(getPartitioner()))
            {
                SSTableReader.saveSummary(descriptor, first, last, iwriter.builder, dbuilder, summary);
            }
        }

        protected Throwable doCommit(Throwable accumulate)
        {
            return indexFile.commit(accumulate);
        }

        protected Throwable doAbort(Throwable accumulate)
        {
            return indexFile.abort(accumulate);
        }

        @Override
        protected Throwable doPreCleanup(Throwable accumulate)
        {
            accumulate = summary.close(accumulate);
            accumulate = bf.close(accumulate);
            accumulate = builder.close(accumulate);
            return accumulate;
        }
    }
}
apache-2.0
garethahealy/jon-plugins
gah-alert-definitions/src/main/java/com/garethahealy/jon/plugins/server/gah/alert/defintions/templates/cxf/CXFEndpointNotRunningTemplate.java
1121
/*
 * #%L
 * GarethHealy :: JBoss ON :: Plugins :: GAH Alert Definitions
 * %%
 * Copyright (C) 2013 - 2018 Gareth Healy
 * %%
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * #L%
 */
package com.garethahealy.jon.plugins.server.gah.alert.defintions.templates.cxf;

import com.garethahealy.jon.plugins.server.gah.alert.defintions.templates.base.NotRunningTemplate;

/**
 * Alert-definition template that fires when a CXF endpoint is not running.
 *
 * Configures the base {@link NotRunningTemplate} with — presumably —
 * (plugin, resource type, template name, description); confirm the argument
 * order against the NotRunningTemplate constructor.
 */
public class CXFEndpointNotRunningTemplate extends NotRunningTemplate {

    public CXFEndpointNotRunningTemplate() {
        super("CXF", "CXF Endpoint", "CXFEndpointNotRunning", "A cxf endpoint is down, not up, disabled or unknown");
    }
}
apache-2.0
shisoft/LinkedIn-J
core/src/main/java/com/google/code/linkedinapi/schema/Application.java
1610
/*
 * Copyright 2010-2011 Nabeel Mukhtar
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.google.code.linkedinapi.schema;

/**
 * Schema entity carrying a single {@code name} element.
 *
 * <p>Java class for anonymous complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType>
 *   &lt;complexContent>
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
 *       &lt;sequence>
 *         &lt;element ref="{}name"/>
 *       &lt;/sequence>
 *     &lt;/restriction>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 *
 *
 */
public interface Application
    extends SchemaEntity
{

    /**
     * Gets the value of the name property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    String getName();

    /**
     * Sets the value of the name property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    void setName(String value);

}
apache-2.0
philburk/jsyn
src/main/java/com/jsyn/unitgen/LineOut.java
1630
/* * Copyright 2009 Phil Burk, Mobileer Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.jsyn.unitgen; import com.jsyn.ports.UnitInputPort; /** * Input audio is sent to the external audio output device. * * @author Phil Burk (C) 2009 Mobileer Inc */ public class LineOut extends UnitGenerator implements UnitSink { public UnitInputPort input; public LineOut() { addPort(input = new UnitInputPort(2, "Input")); } @Override public void generate(int start, int limit) { double[] inputs0 = input.getValues(0); double[] inputs1 = input.getValues(1); double[] buffer0 = synthesisEngine.getOutputBuffer(0); double[] buffer1 = synthesisEngine.getOutputBuffer(1); for (int i = start; i < limit; i++) { buffer0[i] += inputs0[i]; buffer1[i] += inputs1[i]; } } /** * This unit won't do anything unless you start() it. */ @Override public boolean isStartRequired() { return true; } @Override public UnitInputPort getInput() { return input; } }
apache-2.0
kreon/jnode
jnode-httpd-module/src/org/jnode/httpd/routes/get/LinksRoute.java
3687
/*
 * Licensed to the jNode FTN Platform Develpoment Team (jNode Team)
 * under one or more contributor license agreements.
 * See the NOTICE file distributed with this work for
 * additional information regarding copyright ownership.
 * The jNode Team licenses this file to you under the
 * Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.jnode.httpd.routes.get;

import java.util.Collections;
import java.util.Comparator;
import java.util.List;

import jnode.dto.Link;
import jnode.ftn.types.FtnAddress;
import jnode.orm.ORMManager;

import org.jnode.httpd.util.HTML;
import org.jnode.httpd.util.JSONUtil;

import spark.Request;
import spark.Response;
import spark.Route;

/**
 * HTTP GET route for /secure/links.html.
 *
 * Without an {@code id} query parameter it renders the full links table
 * (sorted by FTN address, points last). With {@code id} it returns the single
 * link as JSON, optionally wrapped in a JSONP callback named by {@code cb}.
 */
public class LinksRoute extends Route {

    // Cached page template, loaded once on first construction.
    private static String links = null;

    public LinksRoute() {
        super("/secure/links.html");
        if (links == null) {
            links = HTML.getContents("/parts/links.html");
        }
    }

    /**
     * Handles the request.
     *
     * @param req  incoming request; reads query params {@code id} and {@code cb}
     * @param resp response; content type is set to text/javascript for JSON(P)
     * @return JSON(P) string, the rendered HTML page, or {@code null} when
     *         {@code id} is present but not a valid long
     */
    @Override
    public Object handle(Request req, Response resp) {
        StringBuilder sb = new StringBuilder();
        String id = req.queryParams("id");
        if (id != null) {
            try {
                // Parse first so a bad id cannot leave a half-built JSONP prefix.
                Long lid = Long.valueOf(id);
                String cb = req.queryParams("cb");
                if (cb != null) {
                    sb.append(cb).append('(');
                }
                Link l = ORMManager.get(Link.class).getById(lid);
                sb.append(JSONUtil.value(l));
                if (cb != null) {
                    sb.append(')');
                }
                resp.type("text/javascript");
                return sb.toString();
            } catch (NumberFormatException e) {
                // Deliberate best-effort: a non-numeric id falls through to
                // the final "return null" below, as before.
            }
        } else {
            // Renamed from 'links' to avoid shadowing the static template field.
            List<Link> all = ORMManager.get(Link.class).getAll();
            sortLinks(all);
            for (Link object : all) {
                sb.append(String
                        .format("<tr><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td>%s</td><td><a href=\"#edit\" class=\"css-link-1\" onclick=\"edit(%d)\">Изменить</a>&nbsp;<a href=\"#options\" class=\"css-link-1\" onclick=\"options(%d)\">Настройки</a>&nbsp;<a href=\"#\" class=\"css-link-1\" onclick=\"del(%d)\">Удалить</a></td></tr>",
                                object.getLinkName(), object.getLinkAddress(),
                                object.getProtocolAddress(),
                                object.getProtocolPassword(),
                                object.getPaketPassword(), object.getId(),
                                object.getId(), object.getId()));
            }
            return HTML.start(true)
                    .append(String.format(LinksRoute.links, sb.toString()))
                    .footer().get();
        }
        return null;
    }

    /**
     * Sorts links in place by FTN address: point addresses sort after node
     * addresses and compare by point number among themselves; node addresses
     * compare by zone, then net, then node.
     */
    private void sortLinks(List<Link> links) {
        Collections.sort(links, new Comparator<Link>() {

            @Override
            public int compare(Link o1, Link o2) {
                FtnAddress a1 = new FtnAddress(o1.getLinkAddress());
                FtnAddress a2 = new FtnAddress(o2.getLinkAddress());
                boolean p1 = a1.getPoint() > 0;
                boolean p2 = a2.getPoint() > 0;
                // Points sort after non-points; two points compare by point number.
                // Integer.compare avoids the overflow risk of raw subtraction
                // while preserving the original ordering.
                if (p1 && p2) {
                    return Integer.compare(a1.getPoint(), a2.getPoint());
                }
                if (p1) {
                    return 1;
                }
                if (p2) {
                    return -1;
                }
                if (a1.getZone() != a2.getZone()) {
                    return Integer.compare(a1.getZone(), a2.getZone());
                }
                if (a1.getNet() != a2.getNet()) {
                    return Integer.compare(a1.getNet(), a2.getNet());
                }
                return Integer.compare(a1.getNode(), a2.getNode());
            }
        });
    }
}
apache-2.0
torrances/swtk-commons
commons-dict-wordnet-indexbyname/src/main/java/org/swtk/commons/dict/wordnet/indexbyname/instance/h/a/e/WordnetNounIndexNameInstanceHAE.java
9293
package org.swtk.commons.dict.wordnet.indexbyname.instance.h.a.e;

import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;

import org.swtk.common.dict.dto.wordnet.IndexNoun;

import com.trimc.blogger.commons.utils.GsonUtils;

/**
 * Generated, read-only WordNet noun index shard for terms starting with "hae".
 * Each entry maps a lower-case term to its {@link IndexNoun} records (term,
 * synset count, upper type, synset ids), loaded once from inline JSON at class
 * initialization. Do not edit the data lines by hand.
 */
public final class WordnetNounIndexNameInstanceHAE {

	// Sorted term -> index records; TreeMap keeps terms() in alphabetical order.
	private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>();

	static {
		add("{\"term\":\"haecceity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05930646\"]}");
		add("{\"term\":\"haeckel\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"11041663\"]}");
		add("{\"term\":\"haem\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15049564\"]}");
		add("{\"term\":\"haemagglutination\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13513339\"]}");
		add("{\"term\":\"haemal arch\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"05595464\"]}");
		add("{\"term\":\"haemangioma\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14271057\"]}");
		add("{\"term\":\"haemanthus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12440840\"]}");
		add("{\"term\":\"haemanthus coccineus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12441227\"]}");
		add("{\"term\":\"haematemesis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00120030\"]}");
		add("{\"term\":\"haematinic\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03519105\"]}");
		add("{\"term\":\"haematite\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14929871\"]}");
		add("{\"term\":\"haematobia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02202004\"]}");
		add("{\"term\":\"haematobia irritans\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02202154\"]}");
		add("{\"term\":\"haematocele\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14340648\"]}");
		add("{\"term\":\"haematochezia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13513490\"]}");
		add("{\"term\":\"haematocoele\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14340648\"]}");
		add("{\"term\":\"haematocolpometra\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14340860\"]}");
		add("{\"term\":\"haematocolpos\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14340989\"]}");
		add("{\"term\":\"haematocrit\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"03519273\", \"13843978\"]}");
		add("{\"term\":\"haematocytopenia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13997079\"]}");
		add("{\"term\":\"haematocyturia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14289902\"]}");
		add("{\"term\":\"haematogenesis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13513656\"]}");
		add("{\"term\":\"haematohiston\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15050509\"]}");
		add("{\"term\":\"haematoidin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14780548\"]}");
		add("{\"term\":\"haematologist\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10190353\"]}");
		add("{\"term\":\"haematology\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"06060665\"]}");
		add("{\"term\":\"haematolysis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13514416\"]}");
		add("{\"term\":\"haematoma\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14342141\"]}");
		add("{\"term\":\"haematopodidae\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02039518\"]}");
		add("{\"term\":\"haematopoiesis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13513656\"]}");
		add("{\"term\":\"haematopus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02039636\"]}");
		add("{\"term\":\"haematoxylon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12517240\"]}");
		add("{\"term\":\"haematoxylum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12517240\"]}");
		add("{\"term\":\"haematoxylum campechianum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12517454\"]}");
		add("{\"term\":\"haematuria\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14337747\"]}");
		add("{\"term\":\"haemitin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15049564\"]}");
		add("{\"term\":\"haemodialysis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00651786\"]}");
		add("{\"term\":\"haemodoraceae\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12332411\"]}");
		add("{\"term\":\"haemodorum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"12332793\"]}");
		add("{\"term\":\"haemogenesis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13513656\"]}");
		add("{\"term\":\"haemoglobin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15048807\"]}");
		add("{\"term\":\"haemoglobinemia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14215385\"]}");
		add("{\"term\":\"haemoglobinopathy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14215516\"]}");
		add("{\"term\":\"haemoglobinuria\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14337910\"]}");
		add("{\"term\":\"haemolysin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14930412\"]}");
		add("{\"term\":\"haemolysis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13514416\"]}");
		add("{\"term\":\"haemolytic anaemia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14188722\"]}");
		add("{\"term\":\"haemophile\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10190661\"]}");
		add("{\"term\":\"haemophilia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14193819\"]}");
		add("{\"term\":\"haemophilia a\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14194254\"]}");
		add("{\"term\":\"haemophilia b\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14194469\"]}");
		add("{\"term\":\"haemophiliac\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"10190661\"]}");
		add("{\"term\":\"haemopis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01941259\"]}");
		add("{\"term\":\"haemopoiesis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"13513656\"]}");
		add("{\"term\":\"haemoproteid\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01427423\"]}");
		add("{\"term\":\"haemoproteidae\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01427248\"]}");
		add("{\"term\":\"haemoprotein\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14913035\"]}");
		add("{\"term\":\"haemoproteus\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01427579\"]}");
		add("{\"term\":\"haemoptysis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14215681\"]}");
		add("{\"term\":\"haemorrhage\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14311737\"]}");
		add("{\"term\":\"haemorrhagic fever\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14200905\"]}");
		add("{\"term\":\"haemorrhagic stroke\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14312063\"]}");
		add("{\"term\":\"haemorrhoid\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14350534\"]}");
		add("{\"term\":\"haemorrhoidectomy\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00677936\"]}");
		add("{\"term\":\"haemosiderin\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"15051530\"]}");
		add("{\"term\":\"haemosiderosis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14338024\"]}");
		add("{\"term\":\"haemosporidia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01426398\"]}");
		add("{\"term\":\"haemosporidian\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"01426612\"]}");
		add("{\"term\":\"haemostasia\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00678077\"]}");
		add("{\"term\":\"haemostasis\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"00678077\"]}");
		add("{\"term\":\"haemostat\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"03519595\"]}");
		add("{\"term\":\"haemothorax\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"14344240\"]}");
		add("{\"term\":\"haemulidae\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02590738\"]}");
		add("{\"term\":\"haemulon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02591210\"]}");
		add("{\"term\":\"haemulon album\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02591424\"]}");
		add("{\"term\":\"haemulon aurolineatum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02591692\"]}");
		add("{\"term\":\"haemulon macrostomum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02591575\"]}");
		add("{\"term\":\"haemulon malanurum\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02591826\"]}");
		add("{\"term\":\"haemulon parra\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"02591946\"]}");
	}

	// Parses one JSON record and appends it to the term's record list.
	// A term may appear more than once, hence the Collection value.
	private static void add(final String JSON) {
		IndexNoun indexNoun = GsonUtils.toObject(JSON, IndexNoun.class);
		Collection<IndexNoun> list = (map.containsKey(indexNoun.getTerm())) ? map.get(indexNoun.getTerm()) : new ArrayList<IndexNoun>();
		list.add(indexNoun);
		map.put(indexNoun.getTerm(), list);
	}

	// Returns the records for TERM, or null if the term is not in this shard.
	public static Collection<IndexNoun> get(final String TERM) {
		return map.get(TERM);
	}

	// True if TERM is present in this shard.
	public static boolean has(final String TERM) {
		return map.containsKey(TERM);
	}

	// All terms in this shard, in sorted order (TreeMap key set).
	public static Collection<String> terms() {
		return map.keySet();
	}
}
apache-2.0
perezd/bazel
src/main/java/com/google/devtools/build/lib/rules/java/AndroidLintActionBuilder.java
6610
// Copyright 2020 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;

import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.nio.charset.StandardCharsets.UTF_8;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.ExecutionRequirements;
import com.google.devtools.build.lib.actions.ParamFileInfo;
import com.google.devtools.build.lib.actions.ParameterFile.ParameterFileType;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.CustomCommandLine;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.packages.TargetUtils;
import com.google.devtools.build.lib.util.StringCanonicalizer;
import javax.annotation.Nullable;

/** Helper to create Android Lint actions. */
class AndroidLintActionBuilder {
  // Static helper only; never instantiated.
  private AndroidLintActionBuilder() {}

  private static final String MNEMONIC = "AndroidLint";

  // Flags are always passed via a params file (UTF-8, unquoted).
  private static final ParamFileInfo PARAM_FILE_INFO =
      ParamFileInfo.builder(ParameterFileType.UNQUOTED)
          .setCharset(UTF_8)
          .setUseAlways(true) // needed to support workers
          .build();

  /**
   * Creates and registers Android Lint action if needed and returns action's output if created.
   *
   * <p>Returns null (and registers nothing) when lint is disabled, the target has no sources,
   * the target is neverlink, the android-compatibility gate excludes it, or the toolchain lacks
   * an android_lint tool (the last case also reports a rule error).
   *
   * @param ruleContext rule being analyzed; used for errors, artifacts and action registration
   * @param config Java configuration controlling whether/when lint runs
   * @param attributes compile-time attributes (sources, classpath, plugins)
   * @param bootClassPathInfo boot classpath plus optional auxiliary entries
   * @param common supplies javacopts for the target
   * @param semantics hook for a custom progress message
   * @param outputs javac outputs; genSource() (if present) is linted too
   */
  @Nullable
  static Artifact create(
      RuleContext ruleContext,
      JavaConfiguration config,
      JavaTargetAttributes attributes,
      BootClassPathInfo bootClassPathInfo,
      JavaCommon common,
      JavaSemantics semantics,
      JavaCompileOutputs<Artifact> outputs) {
    if (!config.runAndroidLint()
        || !attributes.hasSources()
        || JavaCommon.isNeverLink(ruleContext)) {
      // Note Javac doesn't run when depending on neverlink library, so we also skip Android Lint.
      return null;
    }
    if (config.limitAndroidLintToAndroidCompatible()
        && !JavaCommon.getConstraints(ruleContext).contains("android")) {
      return null;
    }
    JavaToolchainProvider toolchain = JavaToolchainProvider.from(ruleContext);
    AndroidLintTool androidLint = toolchain.getAndroidLint();
    if (androidLint == null) {
      ruleContext.ruleError(
          "android_lint_wrapper not set in java_toolchain: " + toolchain.getToolchainLabel());
      return null;
    }

    // Lint generated sources as well as the declared source jars.
    ImmutableList<Artifact> allSrcJars = attributes.getSourceJars();
    if (outputs.genSource() != null) {
      allSrcJars =
          ImmutableList.<Artifact>builder().addAll(allSrcJars).add(outputs.genSource()).build();
    }

    // Auxiliary boot entries (when present) are prepended to the classpath.
    NestedSet<Artifact> classpath = attributes.getCompileTimeClassPath();
    if (!bootClassPathInfo.auxiliary().isEmpty()) {
      classpath =
          NestedSetBuilder.<Artifact>naiveLinkOrder()
              .addTransitive(bootClassPathInfo.auxiliary())
              .addTransitive(classpath)
              .build();
    }

    // Assemble the lint command line (written to the params file above).
    CustomCommandLine.Builder cmd = CustomCommandLine.builder();
    cmd.addExecPaths("--sources", attributes.getSourceFiles())
        .addExecPaths("--source_jars", allSrcJars)
        .addExecPaths("--bootclasspath", bootClassPathInfo.bootclasspath())
        .addExecPaths("--classpath", classpath)
        .addExecPaths("--plugins", attributes.plugins().plugins().processorClasspath())
        .addLabel("--target_label", ruleContext.getLabel());
    ImmutableList<String> javacopts =
        common.getJavacOpts().stream().map(StringCanonicalizer::intern).collect(toImmutableList());
    if (!javacopts.isEmpty()) {
      cmd.addAll("--javacopts", javacopts);
      // terminate --javacopts with `--` to support javac flags that start with `--`
      cmd.add("--");
    }
    cmd.add("--lintopts");
    cmd.addAll(androidLint.options());

    // Per-package configuration may add javacopts and extra inputs.
    SpawnAction.Builder spawnAction = new SpawnAction.Builder();
    for (JavaPackageConfigurationProvider provider : androidLint.packageConfiguration()) {
      if (provider.matches(ruleContext.getLabel())) {
        cmd.addAll(provider.javacopts());
        spawnAction.addTransitiveInputs(provider.data());
      }
    }

    Artifact result =
        ruleContext.getPackageRelativeArtifact(
            ruleContext.getLabel().getName() + "_android_lint_output.xml",
            ruleContext.getBinOrGenfilesDirectory());
    cmd.addExecPath("--xml", result);

    NestedSetBuilder<Artifact> toolInputs = NestedSetBuilder.stableOrder();
    // The tool contributes its own executable arguments and inputs.
    androidLint.tool().buildCommandLine(spawnAction.executableArguments(), toolchain, toolInputs);
    semantics.setLintProgressMessage(spawnAction);
    ruleContext.registerAction(
        spawnAction
            .addCommandLine(cmd.build(), PARAM_FILE_INFO)
            .addInputs(attributes.getSourceFiles())
            .addInputs(allSrcJars)
            .addTransitiveInputs(bootClassPathInfo.bootclasspath())
            .addTransitiveInputs(classpath)
            .addTransitiveInputs(attributes.plugins().plugins().processorClasspath())
            .addTransitiveInputs(attributes.plugins().plugins().data())
            .addTransitiveTools(toolInputs.build())
            .addOutput(result)
            .setMnemonic(MNEMONIC)
            .setExecutionInfo(getExecutionInfo(ruleContext))
            .build(ruleContext));
    return result;
  }

  /**
   * Advertises worker support added in b/191156225.
   *
   * <p>Merges SUPPORTS_WORKERS=1 with configuration- and tag-derived execution info.
   */
  private static ImmutableMap<String, String> getExecutionInfo(RuleContext ruleContext) {
    ImmutableMap<String, String> executionInfo =
        ImmutableMap.of(ExecutionRequirements.SUPPORTS_WORKERS, "1");
    return ImmutableMap.<String, String>builder()
        .putAll(ruleContext.getConfiguration().modifiedExecutionInfo(executionInfo, MNEMONIC))
        .putAll(
            TargetUtils.getExecutionInfo(
                ruleContext.getRule(), ruleContext.isAllowTagsPropagation()))
        .build();
  }
}
apache-2.0
jonefeewang/armeria
core/src/main/java/com/linecorp/armeria/server/logging/structured/StructuredLoggingService.java
3752
/* * Copyright 2016 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.server.logging.structured; import static java.util.Objects.requireNonNull; import com.linecorp.armeria.common.Request; import com.linecorp.armeria.common.Response; import com.linecorp.armeria.common.logging.RequestLog; import com.linecorp.armeria.common.logging.RequestLogAvailability; import com.linecorp.armeria.server.Server; import com.linecorp.armeria.server.ServerListenerAdapter; import com.linecorp.armeria.server.Service; import com.linecorp.armeria.server.ServiceConfig; import com.linecorp.armeria.server.ServiceRequestContext; import com.linecorp.armeria.server.SimpleDecoratingService; /** * A decorating service which provides support of structured and optionally externalized request/response * content logging. * * @param <I> the {@link Request} type * @param <O> the {@link Response} type * @param <L> the type of the structured log representation */ public abstract class StructuredLoggingService<I extends Request, O extends Response, L> extends SimpleDecoratingService<I, O> { private final StructuredLogBuilder<L> logBuilder; private Server associatedServer; /** * Creates a new {@link StructuredLoggingService}. 
* * @param delegate the {@link Service} being decorated * @param logBuilder an instance of {@link StructuredLogBuilder} which is used to construct an entry of * structured log */ protected StructuredLoggingService(Service<? super I, ? extends O> delegate, StructuredLogBuilder<L> logBuilder) { super(delegate); this.logBuilder = requireNonNull(logBuilder, "logBuilder"); } @Override public void serviceAdded(ServiceConfig cfg) throws Exception { super.serviceAdded(cfg); if (associatedServer != null) { if (associatedServer != cfg.server()) { throw new IllegalStateException("cannot be added to more than one server"); } else { return; } } associatedServer = cfg.server(); associatedServer.addListener(new ServerListenerAdapter() { @Override public void serverStopped(Server server) throws Exception { close(); } }); } @Override public O serve(ServiceRequestContext ctx, I req) throws Exception { ctx.log().addListener(log -> { L structuredLog = logBuilder.build(log); if (structuredLog != null) { writeLog(log, structuredLog); } }, RequestLogAvailability.COMPLETE); return delegate().serve(ctx, req); } /** * Writes given {@code structuredLog} to the underlying system. * @param log the {@link RequestLog} which is a source of constructed {@code structuredLog} * @param structuredLog the content of a structuredLog */ protected abstract void writeLog(RequestLog log, L structuredLog); /** * Cleanup resources which were opened for logging. */ protected void close() { // noop by default } }
apache-2.0
ludoch/benchmarkjavawebapp
benchmark-java-webapp/src/main/java/com/google/appengine/benchmark/webapp/otherservlets/Servlet12.java
2873
package com.google.appengine.benchmark.webapp.otherservlets; import java.io.IOException; import java.io.PrintWriter; import javax.annotation.security.DenyAll; import javax.annotation.security.PermitAll; import javax.annotation.security.RolesAllowed; import javax.servlet.ServletException; import javax.servlet.annotation.WebServlet; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; /** * * @author ludo */ @WebServlet(name = "Servlet12", urlPatterns = {"/Servlet12"}) public class Servlet12 extends HttpServlet { /** * Processes requests for both HTTP <code>GET</code> and <code>POST</code> * methods. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ protected void processRequest(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { response.setContentType("text/html;charset=UTF-8"); try (PrintWriter out = response.getWriter()) { /* TODO output your page here. You may use following sample code. */ out.println("<!DOCTYPE html>"); out.println("<html>"); out.println("<head>"); out.println("<title>Servlet Servlet12</title>"); out.println("</head>"); out.println("<body>"); out.println("<h1>Servlet Servlet12 at " + request.getContextPath() + "</h1>"); out.println("</body>"); out.println("</html>"); } } /** * Handles the HTTP <code>GET</code> method. * * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override @PermitAll protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } /** * Handles the HTTP <code>POST</code> method. 
* * @param request servlet request * @param response servlet response * @throws ServletException if a servlet-specific error occurs * @throws IOException if an I/O error occurs */ @Override @RolesAllowed("javaee") protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { processRequest(request, response); } @Override @DenyAll protected void doTrace(HttpServletRequest req, HttpServletResponse res) throws IOException, ServletException { } /** * Returns a short description of the servlet. * * @return a String containing servlet description */ @Override public String getServletInfo() { return "Short description"; }// </editor-fold> }
apache-2.0
lhartikk/JunitQuest
src/main/java/org/tsers/junitquest/CallParam.java
442
package org.tsers.junitquest; import org.tsers.junitquest.instance.Instance; public class CallParam { private final Instance instance; private final int position; public CallParam(Instance instance, int position) { this.instance = instance; this.position = position; } public int getPosition() { return position; } public Instance getInstance() { return instance; } }
apache-2.0
oehme/analysing-gradle-performance
my-app/src/main/java/org/gradle/test/performance/mediummonolithicjavaproject/p119/Production2392.java
1891
package org.gradle.test.performance.mediummonolithicjavaproject.p119; public class Production2392 { private String property0; public String getProperty0() { return property0; } public void setProperty0(String value) { property0 = value; } private String property1; public String getProperty1() { return property1; } public void setProperty1(String value) { property1 = value; } private String property2; public String getProperty2() { return property2; } public void setProperty2(String value) { property2 = value; } private String property3; public String getProperty3() { return property3; } public void setProperty3(String value) { property3 = value; } private String property4; public String getProperty4() { return property4; } public void setProperty4(String value) { property4 = value; } private String property5; public String getProperty5() { return property5; } public void setProperty5(String value) { property5 = value; } private String property6; public String getProperty6() { return property6; } public void setProperty6(String value) { property6 = value; } private String property7; public String getProperty7() { return property7; } public void setProperty7(String value) { property7 = value; } private String property8; public String getProperty8() { return property8; } public void setProperty8(String value) { property8 = value; } private String property9; public String getProperty9() { return property9; } public void setProperty9(String value) { property9 = value; } }
apache-2.0
aws/aws-sdk-java
aws-java-sdk-servicecatalog/src/main/java/com/amazonaws/services/servicecatalog/model/CreateProvisioningArtifactResult.java
13287
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.servicecatalog.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/servicecatalog-2015-12-10/CreateProvisioningArtifact" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class CreateProvisioningArtifactResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * Information about the provisioning artifact. * </p> */ private ProvisioningArtifactDetail provisioningArtifactDetail; /** * <p> * Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ]. * </p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. * </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack ARN. * </p> */ private java.util.Map<String, String> info; /** * <p> * The status of the current request. * </p> */ private String status; /** * <p> * Information about the provisioning artifact. 
* </p> * * @param provisioningArtifactDetail * Information about the provisioning artifact. */ public void setProvisioningArtifactDetail(ProvisioningArtifactDetail provisioningArtifactDetail) { this.provisioningArtifactDetail = provisioningArtifactDetail; } /** * <p> * Information about the provisioning artifact. * </p> * * @return Information about the provisioning artifact. */ public ProvisioningArtifactDetail getProvisioningArtifactDetail() { return this.provisioningArtifactDetail; } /** * <p> * Information about the provisioning artifact. * </p> * * @param provisioningArtifactDetail * Information about the provisioning artifact. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateProvisioningArtifactResult withProvisioningArtifactDetail(ProvisioningArtifactDetail provisioningArtifactDetail) { setProvisioningArtifactDetail(provisioningArtifactDetail); return this; } /** * <p> * Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ]. * </p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. * </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack ARN. * </p> * * @return Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ].</p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. 
* </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack * ARN. */ public java.util.Map<String, String> getInfo() { return info; } /** * <p> * Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ]. * </p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. * </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack ARN. * </p> * * @param info * Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ].</p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. * </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack * ARN. */ public void setInfo(java.util.Map<String, String> info) { this.info = info; } /** * <p> * Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ]. * </p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. 
* </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack ARN. * </p> * * @param info * Specify the template source with one of the following options, but not both. Keys accepted: [ * <code>LoadTemplateFromURL</code>, <code>ImportFromPhysicalId</code> ].</p> * <p> * The URL of the CloudFormation template in Amazon S3, in JSON format. * </p> * <p> * <code>LoadTemplateFromURL</code> * </p> * <p> * Use the URL of the CloudFormation template in Amazon S3 in JSON format. * </p> * <p> * <code>ImportFromPhysicalId</code> * </p> * <p> * Use the physical id of the resource that contains the template; currently supports CloudFormation stack * ARN. * @return Returns a reference to this object so that method calls can be chained together. */ public CreateProvisioningArtifactResult withInfo(java.util.Map<String, String> info) { setInfo(info); return this; } /** * Add a single Info entry * * @see CreateProvisioningArtifactResult#withInfo * @returns a reference to this object so that method calls can be chained together. */ public CreateProvisioningArtifactResult addInfoEntry(String key, String value) { if (null == this.info) { this.info = new java.util.HashMap<String, String>(); } if (this.info.containsKey(key)) throw new IllegalArgumentException("Duplicated keys (" + key.toString() + ") are provided."); this.info.put(key, value); return this; } /** * Removes all the entries added into Info. * * @return Returns a reference to this object so that method calls can be chained together. */ public CreateProvisioningArtifactResult clearInfoEntries() { this.info = null; return this; } /** * <p> * The status of the current request. * </p> * * @param status * The status of the current request. * @see Status */ public void setStatus(String status) { this.status = status; } /** * <p> * The status of the current request. * </p> * * @return The status of the current request. 
* @see Status */ public String getStatus() { return this.status; } /** * <p> * The status of the current request. * </p> * * @param status * The status of the current request. * @return Returns a reference to this object so that method calls can be chained together. * @see Status */ public CreateProvisioningArtifactResult withStatus(String status) { setStatus(status); return this; } /** * <p> * The status of the current request. * </p> * * @param status * The status of the current request. * @see Status */ public void setStatus(Status status) { withStatus(status); } /** * <p> * The status of the current request. * </p> * * @param status * The status of the current request. * @return Returns a reference to this object so that method calls can be chained together. * @see Status */ public CreateProvisioningArtifactResult withStatus(Status status) { this.status = status.toString(); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getProvisioningArtifactDetail() != null) sb.append("ProvisioningArtifactDetail: ").append(getProvisioningArtifactDetail()).append(","); if (getInfo() != null) sb.append("Info: ").append(getInfo()).append(","); if (getStatus() != null) sb.append("Status: ").append(getStatus()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof CreateProvisioningArtifactResult == false) return false; CreateProvisioningArtifactResult other = (CreateProvisioningArtifactResult) obj; if (other.getProvisioningArtifactDetail() == null ^ this.getProvisioningArtifactDetail() == null) return false; if (other.getProvisioningArtifactDetail() != null && other.getProvisioningArtifactDetail().equals(this.getProvisioningArtifactDetail()) == false) return false; if (other.getInfo() == null ^ this.getInfo() == null) return false; if (other.getInfo() != null && other.getInfo().equals(this.getInfo()) == false) return false; if (other.getStatus() == null ^ this.getStatus() == null) return false; if (other.getStatus() != null && other.getStatus().equals(this.getStatus()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getProvisioningArtifactDetail() == null) ? 0 : getProvisioningArtifactDetail().hashCode()); hashCode = prime * hashCode + ((getInfo() == null) ? 0 : getInfo().hashCode()); hashCode = prime * hashCode + ((getStatus() == null) ? 
0 : getStatus().hashCode()); return hashCode; } @Override public CreateProvisioningArtifactResult clone() { try { return (CreateProvisioningArtifactResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
apache-2.0
variac/bazel
src/test/java/com/google/devtools/build/lib/rules/config/ConfigFeatureFlagTest.java
13129
// Copyright 2017 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.config;

import static com.google.common.truth.Truth.assertThat;

import com.google.common.base.Predicates;
import com.google.common.collect.Iterables;
import com.google.common.testing.EqualsTester;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.rules.SkylarkRuleContext;
import com.google.devtools.build.lib.skylark.util.SkylarkTestCase;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;

/**
 * Tests for the config_feature_flag rule.
 *
 * <p>Covers {@link ConfigFeatureFlagProvider} value propagation and validation, Skylark access to
 * the provider, attribute validation errors, and the feature-control policy mechanism.
 */
@RunWith(JUnit4.class)
public final class ConfigFeatureFlagTest extends SkylarkTestCase {

  // config_feature_flag requires dynamic configurations; enable them for every test.
  @Before
  public void useDynamicConfigurations() throws Exception {
    useConfiguration("--experimental_dynamic_configs=on");
  }

  // Registers the test-only feature_flag_setter rule on top of the standard rule set.
  @Override
  protected ConfiguredRuleClassProvider getRuleClassProvider() {
    ConfiguredRuleClassProvider.Builder builder =
        new ConfiguredRuleClassProvider.Builder().addRuleDefinition(new FeatureFlagSetterRule());
    TestRuleClassProvider.addStandardRules(builder);
    return builder.build();
  }

  @Test
  public void configFeatureFlagProvider_fromTargetReturnsNullIfTargetDoesNotExportProvider()
      throws Exception {
    // A feature_flag_setter is not itself a flag, so it does not export the provider.
    scratch.file(
        "test/BUILD",
        "feature_flag_setter(",
        " name = 'top',",
        " flag_values = {",
        " },",
        ")");
    assertThat(ConfigFeatureFlagProvider.fromTarget(getConfiguredTarget("//test:top"))).isNull();
  }

  @Test
  public void configFeatureFlagProvider_containsValueFromConfiguration() throws Exception {
    // The setter overrides the flag's default; the exported provider must see 'configured'.
    scratch.file(
        "test/BUILD",
        "feature_flag_setter(",
        " name = 'top',",
        " exports_flag = ':flag',",
        " flag_values = {",
        " ':flag': 'configured',",
        " },",
        ")",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    assertThat(ConfigFeatureFlagProvider.fromTarget(getConfiguredTarget("//test:top")).getValue())
        .isEqualTo("configured");
  }

  @Test
  public void configFeatureFlagProvider_valueIsAccessibleFromSkylark() throws Exception {
    // A minimal Skylark rule used only to depend on the flag and read its provider.
    scratch.file(
        "test/wrapper.bzl",
        "def _flag_reading_wrapper_impl(ctx):",
        " pass",
        "flag_reading_wrapper = rule(",
        " implementation = _flag_reading_wrapper_impl,",
        " attrs = {'flag': attr.label()},",
        ")");
    scratch.file(
        "test/BUILD",
        "load(':wrapper.bzl', 'flag_reading_wrapper')",
        "feature_flag_setter(",
        " name = 'top',",
        " deps = [':wrapper'],",
        " flag_values = {",
        " ':flag': 'configured',",
        " },",
        ")",
        "flag_reading_wrapper(",
        " name = 'wrapper',",
        " flag = ':flag',",
        ")",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    ConfiguredTarget top = getConfiguredTarget("//test:top");
    // The wrapper must be fetched as a prerequisite of 'top' so it carries the
    // flag value set by the feature_flag_setter's configuration.
    ConfiguredTarget wrapper =
        (ConfiguredTarget) Iterables.getOnlyElement(getPrerequisites(top, "deps"));
    SkylarkRuleContext ctx =
        new SkylarkRuleContext(getRuleContextForSkylark(wrapper), null, getSkylarkSemantics());
    update("ruleContext", ctx);
    update("config_common", new ConfigSkylarkCommon());
    String value = (String) eval("ruleContext.attr.flag[config_common.FeatureFlagInfo].value");
    assertThat(value).isEqualTo("configured");
  }

  @Test
  public void configFeatureFlagProvider_validatesValuesUsingAllowedValuesAttribute()
      throws Exception {
    scratch.file(
        "test/BUILD",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    ConfigFeatureFlagProvider provider =
        ConfigFeatureFlagProvider.fromTarget(getConfiguredTarget("//test:flag"));
    // Membership is exact: case and whitespace differences are rejected.
    assertThat(provider.isValidValue("default")).isTrue();
    assertThat(provider.isValidValue("configured")).isTrue();
    assertThat(provider.isValidValue("other")).isTrue();
    assertThat(provider.isValidValue("absent")).isFalse();
    assertThat(provider.isValidValue("conFigured")).isFalse();
    assertThat(provider.isValidValue(" other")).isFalse();
  }

  @Test
  public void configFeatureFlagProvider_valueValidationIsPossibleFromSkylark() throws Exception {
    scratch.file(
        "test/wrapper.bzl",
        "def _flag_reading_wrapper_impl(ctx):",
        " pass",
        "flag_reading_wrapper = rule(",
        " implementation = _flag_reading_wrapper_impl,",
        " attrs = {'flag': attr.label()},",
        ")");
    scratch.file(
        "test/BUILD",
        "load(':wrapper.bzl', 'flag_reading_wrapper')",
        "flag_reading_wrapper(",
        " name = 'wrapper',",
        " flag = ':flag',",
        ")",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    SkylarkRuleContext ctx = createRuleContext("//test:wrapper");
    update("ruleContext", ctx);
    update("config_common", new ConfigSkylarkCommon());
    // Same membership checks as the Java-level test above, but via the Skylark API.
    String provider = "ruleContext.attr.flag[config_common.FeatureFlagInfo]";
    Boolean isDefaultValid = (Boolean) eval(provider + ".is_valid_value('default')");
    Boolean isConfiguredValid = (Boolean) eval(provider + ".is_valid_value('configured')");
    Boolean isOtherValid = (Boolean) eval(provider + ".is_valid_value('other')");
    Boolean isAbsentValid = (Boolean) eval(provider + ".is_valid_value('absent')");
    Boolean isIncorrectCapitalizationValid =
        (Boolean) eval(provider + ".is_valid_value('conFigured')");
    Boolean isIncorrectSpacingValid = (Boolean) eval(provider + ".is_valid_value(' other')");
    assertThat(isDefaultValid).isTrue();
    assertThat(isConfiguredValid).isTrue();
    assertThat(isOtherValid).isTrue();
    assertThat(isAbsentValid).isFalse();
    assertThat(isIncorrectCapitalizationValid).isFalse();
    assertThat(isIncorrectSpacingValid).isFalse();
  }

  @Test
  public void configFeatureFlagProvider_usesDefaultValueIfConfigurationDoesntSetValue()
      throws Exception {
    // Only ':other' is set by the setter; ':flag' must fall back to its default_value.
    scratch.file(
        "test/BUILD",
        "feature_flag_setter(",
        " name = 'top',",
        " exports_flag = ':flag',",
        " flag_values = {",
        " ':other': 'configured',",
        " },",
        ")",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['other', 'default', 'configured'],",
        " default_value = 'default',",
        ")",
        "config_feature_flag(",
        " name = 'other',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    assertThat(ConfigFeatureFlagProvider.fromTarget(getConfiguredTarget("//test:top")).getValue())
        .isEqualTo("default");
  }

  @Test
  public void allowedValuesAttribute_cannotBeEmpty() throws Exception {
    reporter.removeHandler(failFastHandler); // expecting an error
    scratch.file(
        "test/BUILD",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = [],",
        " default_value = 'default',",
        ")");
    assertThat(getConfiguredTarget("//test:flag")).isNull();
    assertContainsEvent(
        "in allowed_values attribute of config_feature_flag rule //test:flag: "
            + "attribute must be non empty");
  }

  @Test
  public void allowedValuesAttribute_cannotContainDuplicates() throws Exception {
    reporter.removeHandler(failFastHandler); // expecting an error
    scratch.file(
        "test/BUILD",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['double', 'double', 'toil', 'trouble'],",
        " default_value = 'trouble',",
        ")");
    assertThat(getConfiguredTarget("//test:flag")).isNull();
    assertContainsEvent(
        "in allowed_values attribute of config_feature_flag rule //test:flag: "
            + "cannot contain duplicates, but contained multiple of [\"double\"]");
  }

  @Test
  public void defaultValueAttribute_mustBeMemberOfAllowedValues() throws Exception {
    reporter.removeHandler(failFastHandler); // expecting an error
    scratch.file(
        "test/BUILD",
        "feature_flag_setter(",
        " name = 'top',",
        " exports_flag = ':flag',",
        " flag_values = {",
        " ':flag': 'legal',",
        " },",
        ")",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['legal', 'eagle'],",
        " default_value = 'beagle',",
        ")");
    assertThat(getConfiguredTarget("//test:top")).isNull();
    assertContainsEvent(
        "in default_value attribute of config_feature_flag rule //test:flag: "
            + "must be one of [\"eagle\", \"legal\"], but was \"beagle\"");
  }

  @Test
  public void configurationValue_mustBeMemberOfAllowedValues() throws Exception {
    reporter.removeHandler(failFastHandler); // expecting an error
    scratch.file(
        "test/BUILD",
        "feature_flag_setter(",
        " name = 'top',",
        " exports_flag = ':flag',",
        " flag_values = {",
        " ':flag': 'invalid',",
        " },",
        ")",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    assertThat(getConfiguredTarget("//test:top")).isNull();
    // TODO(mstaib): when configurationError is implemented, switch to testing for that
    assertContainsEvent(
        "in config_feature_flag rule //test:flag: "
            + "value must be one of [\"configured\", \"default\", \"other\"], but was \"invalid\"");
  }

  @Test
  public void policy_mustContainRulesPackage() throws Exception {
    reporter.removeHandler(failFastHandler); // expecting an error
    // The policy group only admits //some/other, so //test may not use the rule.
    scratch.file(
        "policy/BUILD",
        "package_group(name = 'feature_flag_users', packages = ['//some/other'])");
    scratch.file(
        "test/BUILD",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    useConfiguration(
        "--experimental_dynamic_configs=on",
        "--feature_control_policy=config_feature_flag=//policy:feature_flag_users");
    assertThat(getConfiguredTarget("//test:flag")).isNull();
    assertContainsEvent(
        "in config_feature_flag rule //test:flag: the config_feature_flag rule is not available in "
            + "package 'test' according to policy '//policy:feature_flag_users'");
  }

  @Test
  public void policy_doesNotBlockRuleIfInPackageGroup() throws Exception {
    // //test is explicitly admitted by the policy, so analysis must succeed cleanly.
    scratch.file(
        "policy/BUILD",
        "package_group(name = 'feature_flag_users', packages = ['//test'])");
    scratch.file(
        "test/BUILD",
        "config_feature_flag(",
        " name = 'flag',",
        " allowed_values = ['default', 'configured', 'other'],",
        " default_value = 'default',",
        ")");
    useConfiguration(
        "--experimental_dynamic_configs=on",
        "--feature_control_policy=config_feature_flag=//policy:feature_flag_users");
    assertThat(getConfiguredTarget("//test:flag")).isNotNull();
    assertNoEvents();
  }

  @Test
  public void equalsTester() {
    new EqualsTester()
        .addEqualityGroup(
            // Basic case.
            ConfigFeatureFlagProvider.create("flag1", Predicates.<String>alwaysTrue()))
        .addEqualityGroup(
            // Will be distinct from the first group because CFFP instances are all distinct.
            ConfigFeatureFlagProvider.create("flag1", Predicates.<String>alwaysTrue()))
        .addEqualityGroup(
            // Change the value, still distinct from the above.
            ConfigFeatureFlagProvider.create("flag2", Predicates.<String>alwaysTrue()))
        .testEquals();
  }
}
apache-2.0
GlennioTech/MetadataEditor
app/src/main/java/varunest/com/metadataeditor/ColorUtil.java
2768
package varunest.com.metadataeditor; import android.graphics.Color; import android.support.annotation.ColorInt; import android.support.annotation.FloatRange; /** * @author Karim Abou Zeid (kabouzeid) */ public final class ColorUtil { public static int stripAlpha(@ColorInt int color) { return 0xff000000 | color; } @ColorInt public static int shiftColor(@ColorInt int color, @FloatRange(from = 0.0f, to = 2.0f) float by) { if (by == 1f) return color; int alpha = Color.alpha(color); float[] hsv = new float[3]; Color.colorToHSV(color, hsv); hsv[2] *= by; // value component return (alpha << 24) + (0x00ffffff & Color.HSVToColor(hsv)); } @ColorInt public static int darkenColor(@ColorInt int color) { return shiftColor(color, 0.9f); } @ColorInt public static int lightenColor(@ColorInt int color) { return shiftColor(color, 1.1f); } public static boolean isColorLight(@ColorInt int color) { final double darkness = 1 - (0.299 * Color.red(color) + 0.587 * Color.green(color) + 0.114 * Color.blue(color)) / 255; return darkness < 0.4; } @ColorInt public static int invertColor(@ColorInt int color) { final int r = 255 - Color.red(color); final int g = 255 - Color.green(color); final int b = 255 - Color.blue(color); return Color.argb(Color.alpha(color), r, g, b); } @ColorInt public static int adjustAlpha(@ColorInt int color, @FloatRange(from = 0.0, to = 1.0) float factor) { int alpha = Math.round(Color.alpha(color) * factor); int red = Color.red(color); int green = Color.green(color); int blue = Color.blue(color); return Color.argb(alpha, red, green, blue); } @ColorInt public static int withAlpha(@ColorInt int baseColor, @FloatRange(from = 0.0, to = 1.0) float alpha) { int a = Math.min(255, Math.max(0, (int) (alpha * 255))) << 24; int rgb = 0x00ffffff & baseColor; return a + rgb; } /** * Taken from CollapsingToolbarLayout's CollapsingTextHelper class. 
*/ public static int blendColors(int color1, int color2, @FloatRange(from = 0.0, to = 1.0) float ratio) { final float inverseRatio = 1f - ratio; float a = (Color.alpha(color1) * inverseRatio) + (Color.alpha(color2) * ratio); float r = (Color.red(color1) * inverseRatio) + (Color.red(color2) * ratio); float g = (Color.green(color1) * inverseRatio) + (Color.green(color2) * ratio); float b = (Color.blue(color1) * inverseRatio) + (Color.blue(color2) * ratio); return Color.argb((int) a, (int) r, (int) g, (int) b); } private ColorUtil() { } }
apache-2.0
IAMTJW/Tomcat-8.5.20
tomcat-8.5.20/java/javax/persistence/SynchronizationType.java
926
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package javax.persistence; public enum SynchronizationType { SYNCHRONIZED, UNSYNCHRONIZED }
apache-2.0
b2ihealthcare/snow-owl
commons/com.b2international.index/src/com/b2international/index/es/client/tcp/IndicesTcpClient.java
4754
/*
 * Copyright 2018-2021 B2i Healthcare Pte Ltd, http://b2i.sg
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.b2international.index.es.client.tcp;

import java.io.IOException;

import org.elasticsearch.action.admin.indices.delete.DeleteIndexRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshRequest;
import org.elasticsearch.action.admin.indices.refresh.RefreshResponse;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsRequest;
import org.elasticsearch.action.admin.indices.settings.get.GetSettingsResponse;
import org.elasticsearch.action.admin.indices.settings.put.UpdateSettingsRequest;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.IndicesAdminClient;
import org.elasticsearch.client.indices.CreateIndexRequest;
import org.elasticsearch.client.indices.CreateIndexResponse;
import org.elasticsearch.client.indices.GetMappingsRequest;
import org.elasticsearch.client.indices.GetMappingsResponse;
import org.elasticsearch.client.indices.PutMappingRequest;
import org.elasticsearch.cluster.metadata.MappingMetadata;

import com.b2international.index.es.client.IndicesClient;
import com.b2international.index.mapping.DocumentMapping;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterators;

/**
 * {@link IndicesClient} implementation backed by a transport (TCP)
 * {@link IndicesAdminClient}. High-level-client request/response types are
 * translated to their transport-level counterparts where the two APIs differ;
 * requests with identical types on both sides are delegated directly.
 *
 * @since 6.11
 */
public final class IndicesTcpClient implements IndicesClient {

	// Underlying transport-client admin API; all calls are delegated to it.
	private final IndicesAdminClient client;

	/**
	 * Creates a new client wrapping the given transport-level admin client.
	 * @param client the admin client to delegate to (assumed non-null — TODO confirm caller guarantees this)
	 */
	public IndicesTcpClient(IndicesAdminClient client) {
		this.client = client;
	}

	/**
	 * Returns {@code true} if all of the given indices exist.
	 */
	@Override
	public boolean exists(String...indices) throws IOException {
		return EsTcpClient.execute(client.prepareExists(indices).execute()).isExists();
	}

	/**
	 * Creates a new index, translating the high-level request into a
	 * transport-level one and the transport response back again.
	 */
	@Override
	public CreateIndexResponse create(CreateIndexRequest req) throws IOException {
		// Convert mappings using the generic "_doc" mapping type; include index settings
		var tcpReq = new org.elasticsearch.action.admin.indices.create.CreateIndexRequest(req.index())
				.mapping(DocumentMapping._DOC, req.mappings().utf8ToString(), req.mappingsXContentType())
				.settings(req.settings());
		var tcpResp = EsTcpClient.execute(client.create(tcpReq));
		// Convert acknowledgment flags and index name back to a client response
		return new CreateIndexResponse(tcpResp.isAcknowledged(), tcpResp.isShardsAcknowledged(), tcpResp.index());
	}

	/**
	 * Deletes the indices named in the request. Request/response types match
	 * on both APIs, so this is a direct delegation.
	 */
	@Override
	public AcknowledgedResponse delete(DeleteIndexRequest req) throws IOException {
		return EsTcpClient.execute(client.delete(req));
	}

	/**
	 * Refreshes the indices named in the request (direct delegation).
	 */
	@Override
	public RefreshResponse refresh(RefreshRequest req) throws IOException {
		return EsTcpClient.execute(client.refresh(req));
	}

	/**
	 * Fetches index mappings, translating between the high-level and
	 * transport-level request/response shapes.
	 */
	@Override
	public GetMappingsResponse getMapping(GetMappingsRequest req) throws IOException {
		// Propagate index name(s) to the non-client get mapping request
		var tcpReq = new org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest()
				.indices(req.indices());
		var tcpResp = EsTcpClient.execute(client.getMappings(tcpReq));
		// Unpack inner Map containing the mapping for the single/only document type
		// (getOnlyElement throws if an index unexpectedly has multiple mapping types)
		final ImmutableMap.Builder<String, MappingMetadata> mappings = ImmutableMap.builder();
		tcpResp.mappings().forEach(cursor -> {
			mappings.put(cursor.key, Iterators.getOnlyElement(cursor.value.valuesIt()));
		});
		return new GetMappingsResponse(mappings.build());
	}

	/**
	 * Updates index mappings, re-adding the "_doc" type name required by the
	 * transport-level API.
	 */
	@Override
	public AcknowledgedResponse updateMapping(PutMappingRequest req) throws IOException {
		/*
		 * Propagate index name(s) and request source; add "_doc" as the mapping type name, which is not
		 * contained in the original request, but is required here.
		 */
		var tcpReq = new org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest(req.indices())
				.source(req.source(), req.xContentType())
				.type(DocumentMapping._DOC);
		var tcpResp = EsTcpClient.execute(client.putMapping(tcpReq));
		// Response can be used directly
		return tcpResp;
	}

	/**
	 * Reads index settings (direct delegation).
	 */
	@Override
	public GetSettingsResponse settings(GetSettingsRequest req) throws IOException {
		return EsTcpClient.execute(client.getSettings(req));
	}

	/**
	 * Updates index settings (direct delegation).
	 */
	@Override
	public AcknowledgedResponse updateSettings(UpdateSettingsRequest req) throws IOException {
		return EsTcpClient.execute(client.updateSettings(req));
	}
}
apache-2.0
lime-company/lime-security-powerauth-push
powerauth-push-server/src/main/java/io/getlime/push/errorhandling/exceptions/FcmInitializationFailedException.java
1409
/* * Copyright 2018 Wultra s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.getlime.push.errorhandling.exceptions; /** * Exception for case when FCM initialization fails. * * @author Roman Strobl, roman.strobl@wultra.com */ public class FcmInitializationFailedException extends PushServerException { /** * Constructor with message. * @param message Message. */ public FcmInitializationFailedException(String message) { super(message); } /** * Constructor with message and cause. * @param message Message. * @param cause Cause. */ public FcmInitializationFailedException(String message, Throwable cause) { super(message, cause); } /** * Constructor with cause. * @param cause Cause. */ public FcmInitializationFailedException(Throwable cause) { super(cause); } }
apache-2.0
unic/neba
core/src/main/java/io/neba/core/resourcemodels/package-info.java
1112
/*
  Copyright 2013 the original author or authors.

  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
  You may obtain a copy of the License at

  http://www.apache.org/licenses/LICENSE-2.0

  Unless required by applicable law or agreed to in writing, software
  distributed under the License is distributed on an "AS IS" BASIS,
  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  See the License for the specific language governing permissions and
  limitations under the License.
*/

/**
 * The sub packages contain the implementations of the {@link io.neba.api.annotations.ResourceModel}
 * lifecycle: registration (detection and lookup), metadata (created at registration time and runtime),
 * adaptation ({@link org.apache.sling.api.adapter.AdapterFactory mainly adapter factory support}) for adapting
 * from resources to the resource models, mapping (for injecting properties of resources into the resource models) and
 * caching of adapted resource models.
 */
package io.neba.core.resourcemodels;
apache-2.0
patrickfav/tuwien
bakk/Ticketline/src/ticketline/dao/hibernate/HibernateInterceptor.java
3706
/*
 * PROJECT: TLJava
 * $Id: HibernateInterceptor.java,v 1.5 2006/05/22 11:59:40 0425918 Exp $
 */
package ticketline.dao.hibernate;

import java.io.Serializable;
import java.util.Iterator;

import org.hibernate.CallbackException;
import org.hibernate.EntityMode;
import org.hibernate.Interceptor;
import org.hibernate.Transaction;
import org.hibernate.type.Type;

import ticketline.db.Entity;

/**
 * Hibernate {@link Interceptor} that notifies {@link Entity} instances of
 * persistence lifecycle events: {@code onLoad} and {@code onSave} forward to
 * the entity, and {@link #isUnsaved(Object)} reports the entity's saved state.
 * All other callbacks are deliberate no-ops that defer to Hibernate's default
 * behavior (raw types and parameter lists must match the implemented
 * Interceptor interface version and are therefore left unchanged).
 *
 * @author manuel
 */
public class HibernateInterceptor implements Interceptor, Serializable {

    private static final long serialVersionUID = -191599284735375777L;

    public void onCollectionRecreate(Object arg0, Serializable arg1) throws CallbackException {
        // do nothing
    }

    public void onCollectionRemove(Object arg0, Serializable arg1) throws CallbackException {
        // do nothing
    }

    public void onCollectionUpdate(Object arg0, Serializable arg1) throws CallbackException {
        // do nothing
    }

    /** Returns the SQL unchanged; no statement rewriting is performed. */
    public String onPrepareStatement(String arg0) {
        return arg0;
    }

    public void afterTransactionBegin(Transaction arg0) {
        // do nothing
    }

    public void afterTransactionCompletion(Transaction arg0) {
        // do nothing
    }

    public void beforeTransactionCompletion(Transaction arg0) {
        // do nothing
    }

    /** Returns {@code null} so Hibernate uses its default entity resolution. */
    public Object getEntity(String arg0, Serializable arg1) throws CallbackException {
        return null;
    }

    /** Returns {@code null} so Hibernate uses its default entity naming. */
    public String getEntityName(Object arg0) throws CallbackException {
        return null;
    }

    /** Returns {@code null} so Hibernate instantiates entities itself. */
    public Object instantiate(String arg0, EntityMode arg1, Serializable arg2) throws CallbackException {
        return null;
    }

    /**
     * Returns {@code null} (unknown) so Hibernate falls back to its own
     * transient-state detection.
     * NOTE(review): the saved-state logic lives in {@link #isUnsaved(Object)};
     * confirm which of the two methods the configured Hibernate version
     * actually invokes before consolidating them.
     */
    public Boolean isTransient(Object arg0) {
        return null;
    }

    /**
     * Notifies the entity that it has been loaded from the database.
     *
     * @see Interceptor#onLoad(Object, Serializable, Object[], String[], Type[])
     */
    public boolean onLoad(Object arg0, Serializable arg1, Object[] arg2,
            String[] arg3, Type[] arg4) throws CallbackException {
        if (arg0 instanceof Entity) ((Entity) arg0).onLoad();
        return false; // state not modified
    }

    /**
     * @see Interceptor#onFlushDirty(Object, Serializable, Object[], Object[],
     *      String[], Type[])
     */
    public boolean onFlushDirty(Object arg0, Serializable arg1, Object[] arg2,
            Object[] arg3, String[] arg4, Type[] arg5) throws CallbackException {
        return false; // state not modified
    }

    /**
     * Notifies the entity that it is about to be saved.
     *
     * @see Interceptor#onSave(Object, Serializable, Object[], String[], Type[])
     */
    public boolean onSave(Object arg0, Serializable arg1, Object[] arg2,
            String[] arg3, Type[] arg4) throws CallbackException {
        if (arg0 instanceof Entity) ((Entity) arg0).onSave();
        return false; // state not modified
    }

    /**
     * @see Interceptor#onDelete(Object, Serializable, Object[], String[],
     *      Type[])
     */
    public void onDelete(Object arg0, Serializable arg1, Object[] arg2,
            String[] arg3, Type[] arg4) throws CallbackException {
        // do nothing
    }

    /**
     * @see Interceptor#preFlush(Iterator)
     */
    public void preFlush(Iterator arg0) throws CallbackException {
        // do nothing
    }

    /**
     * @see Interceptor#postFlush(Iterator)
     */
    public void postFlush(Iterator arg0) throws CallbackException {
        // do nothing
    }

    /**
     * Reports whether the given object has not yet been saved.
     *
     * @param arg0 candidate object; only {@link Entity} instances are inspected
     * @return {@code Boolean.TRUE} if the entity is unsaved, {@code Boolean.FALSE}
     *         if saved, or {@code null} for non-Entity objects (unknown)
     */
    public Boolean isUnsaved(Object arg0) {
        if (arg0 instanceof Entity) {
            // Boolean.valueOf reuses the cached TRUE/FALSE instances; the
            // Boolean(boolean) constructor allocates needlessly and is deprecated.
            return Boolean.valueOf(!((Entity) arg0).isSaved());
        }
        return null;
    }

    /**
     * @see Interceptor#findDirty(Object, Serializable, Object[], Object[],
     *      String[], Type[])
     */
    public int[] findDirty(Object arg0, Serializable arg1, Object[] arg2,
            Object[] arg3, String[] arg4, Type[] arg5) {
        return null; // null => let Hibernate perform default dirty checking
    }

    /**
     * Returns {@code null} so Hibernate instantiates the class itself.
     *
     * @param arg0 entity class
     * @param arg1 identifier
     * @return always {@code null}
     * @throws CallbackException never thrown by this implementation
     */
    public Object instantiate(Class arg0, Serializable arg1) throws CallbackException {
        return null;
    }
}
apache-2.0
googleads/googleads-java-lib
modules/adwords_appengine/src/main/java/com/google/api/ads/adwords/jaxws/v201809/cm/ClientTermsErrorReason.java
1599
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.api.ads.adwords.jaxws.v201809.cm; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for ClientTermsError.Reason. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="ClientTermsError.Reason"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="INCOMPLETE_SIGNUP_CURRENT_ADWORDS_TNC_NOT_AGREED"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "ClientTermsError.Reason") @XmlEnum public enum ClientTermsErrorReason { /** * * Customer has not agreed to the latest AdWords Terms & Conditions * * */ INCOMPLETE_SIGNUP_CURRENT_ADWORDS_TNC_NOT_AGREED; public String value() { return name(); } public static ClientTermsErrorReason fromValue(String v) { return valueOf(v); } }
apache-2.0
qafedev/qafe-platform
qafe-core/src/test/java/test/com/qualogy/qafe/core/application/ApplicationContextLoaderTest.java
10998
/** * Copyright 2008-2017 Qualogy Solutions B.V. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package test.com.qualogy.qafe.core.application; import java.io.File; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import junit.framework.TestCase; import org.apache.commons.lang.ClassUtils; import org.apache.commons.lang.StringUtils; import com.qualogy.qafe.bind.business.action.BusinessAction; import com.qualogy.qafe.bind.core.application.ApplicationContext; import com.qualogy.qafe.bind.domain.ApplicationMapping; import com.qualogy.qafe.bind.orm.jibx.BindException; import com.qualogy.qafe.bind.presentation.component.Window; import com.qualogy.qafe.core.application.ApplicationCluster; import com.qualogy.qafe.core.application.ApplicationContextLoader; import com.qualogy.qafe.core.application.LoadFailedException; import com.qualogy.qafe.core.application.NotLoadedException; public class ApplicationContextLoaderTest extends TestCase { public final static String PACKAGE = "test.com.qualogy.genesis.core.application."; public final static String CLASS_NAME_INITIALIZER_A = PACKAGE + "InitializerA"; public final static String CLASS_NAME_INITIALIZER_B = PACKAGE + "InitializerB"; public final static String FILE_NAME_FRAMEWORK_FILE_1 = "samples/readertest/1.xml"; public final static String FILE_NAME_FRAMEWORK_FILE_2 = "samples/readertest/2.xml"; public final static String[] DEFAULT_FILE_PATHS = 
new String[]{FILE_NAME_FRAMEWORK_FILE_1, FILE_NAME_FRAMEWORK_FILE_2}; private String getSamplesDir(){ String pckName = ClassUtils.getPackageName(this.getClass()); return StringUtils.replace(pckName, ".", "/") + "/"; } public void testLoadHappyDay(){ ApplicationContext context = loadAppHappyDay(); ApplicationMapping default_gf = context.getApplicationMapping(); assertNotNull(default_gf.getPresentationTier().getView()); assertNull(default_gf.getIntegrationTier()); } private ApplicationContext loadAppHappyDay(){ ApplicationContextLoader.load(getSamplesDir() + "application-config-happy-day.xml"); ApplicationContext context = null; //expecting only the app happy day in the cluster for (Iterator<ApplicationContext> iter = ApplicationCluster.getInstance().iterator(); iter.hasNext();) { context = (ApplicationContext) iter.next(); } return context; } public void testLoadWithMessages(){ ApplicationContextLoader.load(getSamplesDir() + "application-config-with-messages.xml"); } // public void testLoadWithEmptyMapping(){ // try{ // //has root pointing to non-exsting dir // ApplicationContextLoader.load(getSamplesDir() + "application-config-empty-mapping.xml"); // fail("expecting an exception"); // }catch(LoadFailedException e){ // } // // } public void testLoadWithNoMapping(){ ApplicationContextLoader.load(getSamplesDir() + "application-config-no-mapping.xml"); } public void testNonExistingAppConfigLocation(){ try{ ApplicationContextLoader.load(getSamplesDir() + "jaja"); fail("expecting an exception"); }catch(LoadFailedException e){ } } public void testLoadTransactionTest(){ Map<String, String> BA_IDS = new HashMap<String, String>(); BA_IDS.put("BA_MANAGED", "10"); BA_IDS.put("NO_WILDCARD", "1"); BA_IDS.put("WILDCARD2WILDCARD", "2"); BA_IDS.put("WILDCARD3", "3" ); BA_IDS.put("4WILDCARD", "4" ); ApplicationContextLoader.load(getSamplesDir() + "application-config-transactiontest.xml"); for (Iterator<ApplicationContext> iter = ApplicationCluster.getInstance().iterator(); 
iter.hasNext();) { ApplicationContext context = (ApplicationContext) iter.next(); List<BusinessAction> businessActions = context.getApplicationMapping().getBusinessTier().getBusinessActions(); for (Iterator<BusinessAction> iterator = businessActions.iterator(); iterator.hasNext();) { BusinessAction ba = (BusinessAction) iterator.next(); String timeout = (String) BA_IDS.get(ba.getId()); if(timeout!=null) assertEquals("id["+ba.getId()+"]", Integer.parseInt(timeout), ba.getTransactionBehaviour().getTimeout()); else assertNull(ba.getTransactionBehaviour()); } } } /** * this test tests if the reader can leve a file out, which is in the assigned root directory * */ // public void testLoadWithMappingTwoSameFiles(){ // //has root pointing to non-exsting dir // //TODO: nice exception handling on id bug // try{ // ApplicationContextLoader.load(getSamplesDir() + "application-two-configs.xml"); // fail("expecting to throw on duplicate"); // }catch(Exception e){ // // } // } public void testLoadWithoutAppsDefined(){ /* <application name="app-4a" root=""/> <application name="app-4b"/> <application name="app-6a" root="samples/contextnoapps"> <application-mapping resource="1.xml"/> </application> <application name="app-6b" root="samples/contextnoapps" /> <application name="app-6c" root=""> <application-mapping resource="samples/contextnoapps/1.xml"/> </application> <application name="app-6d" root=""> <application-mapping resource="samples/contextnoapps"/> </application> <application name="app-6e" root="/workspace_qpd/genesis-core/test/samples/contextnoapps"> <application-mapping resource="1.xml"/> </application> <application name="app-6f" root=""> <application-mapping resource="/workspace_qpd/genesis-core/test/samples/contextnoapps/1.xml"/> </application> <application name="app-6g" root="/workspace_qpd/genesis-core/test/samples/contextnoapps" /> */ ApplicationContextLoader.load(getSamplesDir() + "application-config-no-apps-defined.xml"); 
assertNotNull(ApplicationCluster.getInstance().iterator()); Map<String, String> expectedDirs = new HashMap<String, String>(); expectedDirs.put("app-4a", new File(getSamplesDir()).getAbsolutePath()); expectedDirs.put("app-4b", new File(getSamplesDir()).getAbsolutePath()); expectedDirs.put("app-6b", new File("samples/contextnoapps").getAbsolutePath()); // expectedDirs.put("app-6c", new File(getSamplesDir()).getAbsolutePath()); // expectedDirs.put("app-6d", new File(getSamplesDir()).getAbsolutePath()); // expectedDirs.put("app-6g", new File("samples/contextnoapps").getAbsolutePath()); expectedDirs.put("app-7", new File(getSamplesDir()).getAbsolutePath()); for (Iterator<ApplicationContext> iter = ApplicationCluster.getInstance().iterator(); iter.hasNext();) { ApplicationContext context = (ApplicationContext) iter.next(); String name = context.getName(); if(!expectedDirs.containsKey(name)) continue; assertEquals(name, expectedDirs.get(name), new File(context.getRoot()).getAbsolutePath()); ApplicationMapping gf = context.getApplicationMapping(); assertNotNull(gf); assertNotNull(gf.getPresentationTier()); assertNotNull(gf.getPresentationTier().getView()); assertNotNull(gf.getPresentationTier().getView().getWindows()); assertNotNull(gf.getPresentationTier().getView().getWindows().get(0)); assertEquals(((Window)gf.getPresentationTier().getView().getWindows().get(0)).getDisplayname(), "succes"); } } public void testDuplicateKey(){ String fileName = "application-config-duplicate-key.xml"; try{ ApplicationContextLoader.load(getPath(fileName)); }catch(BindException e){ } } private String getPath(String fileName){ File dir = new File(getSamplesDir()); String path = dir.getPath() + File.separator; return path + fileName; } public void testLoadWithoutRoot(){ File dir = new File(getSamplesDir()); ApplicationContextLoader.load(getPath("application-config-no-root.xml")); assertNotNull(ApplicationCluster.getInstance().iterator()); for (Iterator<ApplicationContext> iter = 
ApplicationCluster.getInstance().iterator(); iter.hasNext();) { ApplicationContext context = (ApplicationContext) iter.next(); if(!context.getId().startsWith("app-no-root")) continue; String expected = dir.getAbsolutePath() + File.separator; assertEquals(expected, context.getRoot()); } } // public void testLoadNoMappingLocationSet(){ // try{ // ApplicationContextLoader.load(getPath("application-config-no-mappinglocation.xml")); // fail("expecting loadfailedexception"); // }catch(LoadFailedException e){ // // } // // } public void testReLoadOneContextHappyDay(){ ApplicationContext context = loadAppHappyDay(); context = ApplicationContextLoader.reload(context.getId()); ApplicationMapping default_gf = context.getApplicationMapping(); assertNotNull(default_gf.getPresentationTier().getView()); assertNull(default_gf.getIntegrationTier()); } public void testReLoadAllHappyDay(){ ApplicationContextLoader.load(getSamplesDir() + "application-config-no-apps-defined.xml"); Set<String> ids = new HashSet<String>(); for (Iterator<ApplicationContext> iter = ApplicationCluster.getInstance().iterator(); iter.hasNext();) { ApplicationContext context = (ApplicationContext) iter.next(); ids.add(context.getId().stringValueOf()); } ApplicationContextLoader.reload(); for (Iterator<ApplicationContext> iter = ApplicationCluster.getInstance().iterator(); iter.hasNext();) { ApplicationContext context = (ApplicationContext) iter.next(); assertTrue(ids.contains(context.getId().stringValueOf())); ids.remove(context.getId().stringValueOf()); } assertEquals(0, ids.size()); ApplicationContext contextHappyDay = loadAppHappyDay(); ApplicationContextLoader.reload(); ApplicationCluster.getInstance().get(contextHappyDay.getId()); ApplicationMapping default_gf = contextHappyDay.getApplicationMapping(); assertNotNull(default_gf.getPresentationTier().getView()); assertNull(default_gf.getIntegrationTier()); } public void testUnLoadForIdHappyDay(){ ApplicationContext context = loadAppHappyDay(); 
ApplicationContextLoader.unload(context.getId()); try{ ApplicationCluster.getInstance().get(context.getId()); fail("expected the context to be unloaded"); }catch(NotLoadedException e){ } } public void testUnLoadAllHappyDay(){ ApplicationContext context = loadAppHappyDay(); ApplicationContextLoader.unload(); try{ ApplicationCluster.getInstance().get(context.getId()); fail("expected the context to be unloaded"); }catch(NotLoadedException e){ } } protected void setUp() throws Exception { super.setUp(); ApplicationContextLoader.unload(); } }
apache-2.0
dwaynehoy/OpenDialer
app/src/androidTest/java/com/squizbit/opendialer/mocks/ValidatorContentProvider.java
1072
package com.squizbit.opendialer.mocks; import android.annotation.SuppressLint; import android.content.ContentValues; import android.net.Uri; import android.test.mock.MockContentProvider; @SuppressLint("Registered") public class ValidatorContentProvider extends MockContentProvider { public ValidatorContentProvider(Uri expectedUri){ mExpectedUri = expectedUri; } ContentValues mContentValues; Uri mExpectedUri; @Override public Uri insert(Uri uri, ContentValues values) { if(!uri.equals(mExpectedUri)){ throw new RuntimeException("Uri was different than expected"); } mContentValues = values; return uri; } @Override public int update(Uri uri, ContentValues values, String selection, String[] selectionArgs) { if(!uri.equals(mExpectedUri)){ throw new RuntimeException("Uri was different than expected"); } mContentValues = values; return 1; } public ContentValues getContentValues() { return mContentValues; } }
apache-2.0
osoctz/usdp
usdp-util/src/main/java/cn/com/git/usdp/util/ReflectUtils.java
1121
package cn.com.git.usdp.util; import java.lang.reflect.ParameterizedType; import java.lang.reflect.Type; /** * 反射工具类 * Created by tangzan on 2016/4/1. */ public class ReflectUtils { /** * 得到指定类型的指定位置的泛型实参 * * @param clazz * @param index * @param <T> * @return */ @SuppressWarnings("unchecked") public static <T> Class<T> findParameterizedType(Class<?> clazz, int index) { Type parameterizedType = clazz.getGenericSuperclass(); //CGLUB subclass target object(泛型在父类上) if (!(parameterizedType instanceof ParameterizedType)) { parameterizedType = clazz.getSuperclass().getGenericSuperclass(); } if (!(parameterizedType instanceof ParameterizedType)) { return null; } Type[] actualTypeArguments = ((ParameterizedType) parameterizedType).getActualTypeArguments(); if (actualTypeArguments == null || actualTypeArguments.length == 0) { return null; } return (Class<T>) actualTypeArguments[index]; } }
apache-2.0
quann169/MotownBlueCurrent
identification-authorization/app/src/test/java/io/motown/identificationauthorization/app/AuthorizationEventListenerTest.java
5716
/**
 * Copyright (C) 2013 Motown.IO (info@motown.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.motown.identificationauthorization.app;

import io.motown.domain.api.chargingstation.AuthorizationRequestedEvent;
import io.motown.domain.api.chargingstation.CorrelationToken;
import io.motown.domain.api.chargingstation.DenyAuthorizationCommand;
import io.motown.domain.api.chargingstation.GrantAuthorizationCommand;
import io.motown.domain.api.security.IdentityContext;
import io.motown.domain.api.security.NullUserIdentity;
import io.motown.domain.api.security.TypeBasedAddOnIdentity;
import org.axonframework.commandhandling.CommandMessage;
import org.axonframework.commandhandling.GenericCommandMessage;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatcher;

import java.util.Collections;

import static io.motown.domain.api.chargingstation.test.ChargingStationTestUtils.*;
import static org.axonframework.commandhandling.GenericCommandMessage.asCommandMessage;
import static org.mockito.Matchers.argThat;
import static org.mockito.Mockito.*;

/**
 * Unit tests for {@code AuthorizationEventListener}: each test fires an
 * {@link AuthorizationRequestedEvent} at the listener and verifies that the
 * identifying token is checked against the mocked
 * {@link IdentificationAuthorizationService} and that the matching
 * grant/deny command is dispatched through the mocked
 * {@link AuthorizationCommandGateway}.
 */
public class AuthorizationEventListenerTest {

    // Unit under test; wired with mocked collaborators in setUp().
    private AuthorizationEventListener eventListener;

    // Mock: decides whether an identifying token is valid.
    private IdentificationAuthorizationService service;

    // Mock: receives the resulting Grant/DenyAuthorizationCommand.
    private AuthorizationCommandGateway gateway;

    private static final String ADD_ON_TYPE = "IDENTIFICATION-AUTHORIZATION";

    // Identity context the listener is expected to propagate into the outgoing command.
    private IdentityContext identityContext;

    /**
     * Builds the listener with a mocked authorization service (valid/invalid token
     * responses pre-programmed) and a mocked command gateway.
     */
    @Before
    public void setUp() {
        eventListener = new AuthorizationEventListener();

        service = mock(IdentificationAuthorizationService.class);
        when(service.isValid(INVALID_IDENTIFYING_TOKEN)).thenReturn(false);
        when(service.isValid(IDENTIFYING_TOKEN)).thenReturn(true);
        eventListener.setIdentificationAuthorizationService(service);

        gateway = mock(AuthorizationCommandGateway.class);
        eventListener.setCommandGateway(gateway);

        eventListener.setAddOnIdentity("1");

        identityContext = new IdentityContext(new TypeBasedAddOnIdentity(ADD_ON_TYPE, "1"), new NullUserIdentity());
    }

    /**
     * A valid token must produce a GrantAuthorizationCommand carrying the
     * correlation token in its metadata.
     */
    @Test
    public void testValidIdentification() {
        CorrelationToken token = new CorrelationToken();

        eventListener.onEvent(new AuthorizationRequestedEvent(CHARGING_STATION_ID, IDENTIFYING_TOKEN, identityContext), token);

        verify(service).isValid(IDENTIFYING_TOKEN);

        final CommandMessage command = asCommandMessage(
                new GrantAuthorizationCommand(CHARGING_STATION_ID, IDENTIFYING_TOKEN, identityContext)).andMetaData(Collections.singletonMap(CorrelationToken.KEY, token));

        // because GenericCommandMessage doesn't implement 'equals' method we have to provide a ArgumentMatcher to validate the argument
        verify(gateway).send(argThat(new ArgumentMatcher<CommandMessage>() {
            @Override
            public boolean matches(Object o) {
                if (!(o instanceof GenericCommandMessage)) {
                    return false;
                }
                GenericCommandMessage arg = (GenericCommandMessage) o;
                // Compare metadata and payload instead of whole-message equality.
                return command.getMetaData().equals((arg).getMetaData()) && command.getPayload().equals((arg).getPayload());
            }
        }));
    }

    /**
     * An invalid token must produce a DenyAuthorizationCommand carrying the
     * correlation token in its metadata.
     */
    @Test
    public void testInvalidIdentification() {
        CorrelationToken token = new CorrelationToken();

        eventListener.onEvent(new AuthorizationRequestedEvent(CHARGING_STATION_ID, INVALID_IDENTIFYING_TOKEN, identityContext), token);

        verify(service).isValid(INVALID_IDENTIFYING_TOKEN);

        final CommandMessage command = asCommandMessage(
                new DenyAuthorizationCommand(CHARGING_STATION_ID, INVALID_IDENTIFYING_TOKEN, identityContext)).andMetaData(Collections.singletonMap(CorrelationToken.KEY, token));

        // because GenericCommandMessage doesn't implement 'equals' method we have to provide a ArgumentMatcher to validate the argument
        verify(gateway).send(argThat(new ArgumentMatcher<CommandMessage>() {
            @Override
            public boolean matches(Object o) {
                if (!(o instanceof GenericCommandMessage)) {
                    return false;
                }
                GenericCommandMessage arg = (GenericCommandMessage) o;
                return command.getMetaData().equals((arg).getMetaData()) && command.getPayload().equals((arg).getPayload());
            }
        }));
    }

    /**
     * With no correlation token the command is still sent, but with empty
     * metadata (only the metadata size is asserted here).
     */
    @Test
    public void testNullCorrelationId() {
        eventListener.onEvent(new AuthorizationRequestedEvent(CHARGING_STATION_ID, IDENTIFYING_TOKEN, identityContext), null);

        verify(service).isValid(IDENTIFYING_TOKEN);

        // because GenericCommandMessage doesn't implement 'equals' method we have to provide a ArgumentMatcher to validate the argument
        verify(gateway).send(argThat(new ArgumentMatcher<CommandMessage>() {
            @Override
            public boolean matches(Object o) {
                if (!(o instanceof GenericCommandMessage)) {
                    return false;
                }
                // just verify the meta data size
                return ((GenericCommandMessage) o).getMetaData().size() == 0;
            }
        }));
    }
}
apache-2.0
consulo/consulo-java
java-impl/src/main/java/com/intellij/codeInsight/generation/GenerateFieldOrPropertyHandler.java
5604
/*
 * Copyright 2000-2009 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.codeInsight.generation;

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

import javax.annotation.Nonnull;

import com.intellij.openapi.project.Project;
import com.intellij.psi.JavaPsiFacade;
import com.intellij.psi.PsiAnnotation;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElementFactory;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiMember;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiModifierList;
import com.intellij.psi.PsiType;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.util.PropertyMemberType;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.containers.ContainerUtil;

/**
 * Code-generation handler that creates a field for a named attribute plus its
 * getter/setter pair, and attaches the supplied annotations to the field, the
 * getter or the setter depending on the requested {@link PropertyMemberType}.
 *
 * @author Gregory.Shrago
 */
public class GenerateFieldOrPropertyHandler extends GenerateMembersHandlerBase {
    // Logical attribute name; converted to a field name via code-style settings when needed.
    private final String myAttributeName;
    // Declared type of the generated field.
    private final PsiType myType;
    // Which member (field/getter/setter) should receive the annotations.
    private final PropertyMemberType myMemberType;
    // Annotations to copy onto the target member (may be empty).
    private final PsiAnnotation[] myAnnotations;

    public GenerateFieldOrPropertyHandler(String attributeName, PsiType type, final PropertyMemberType memberType, final PsiAnnotation... annotations) {
        super("");
        myAttributeName = attributeName;
        myType = type;
        myMemberType = memberType;
        myAnnotations = annotations;
    }

    /** No member chooser is shown: this handler always generates from its constructor arguments. */
    @Override
    protected ClassMember[] chooseOriginalMembers(PsiClass aClass, Project project) {
        return ClassMember.EMPTY_ARRAY;
    }

    /**
     * Builds the field prototype and its getter/setter prototypes, then applies
     * {@link #myAnnotations} to the member selected by {@link #myMemberType}:
     * either the new field, or the matching simple getter/setter among the
     * generated prototypes, or (as a fallback) a pre-existing member of the class.
     *
     * @return the field's generation info followed by the getter/setter infos;
     *         empty list if PSI manipulation failed (asserts in debug builds)
     */
    @Override
    @Nonnull
    public List<? extends GenerationInfo> generateMemberPrototypes(PsiClass aClass, ClassMember[] members) throws IncorrectOperationException {
        PsiElementFactory psiElementFactory = JavaPsiFacade.getInstance(aClass.getProject()).getElementFactory();
        try {
            String fieldName = getFieldName(aClass);
            PsiField psiField = psiElementFactory.createField(fieldName, myType);
            GenerationInfo[] infos = new GenerateGetterAndSetterHandler().generateMemberPrototypes(aClass, new PsiFieldMember(psiField));
            if(myAnnotations.length > 0) {
                PsiMember targetMember = null;
                if(myMemberType == PropertyMemberType.FIELD) {
                    targetMember = psiField;
                }
                else {
                    // Find the just-generated getter or setter that matches the requested member type.
                    for(GenerationInfo info : infos) {
                        PsiMember member = info.getPsiMember();
                        if(!(member instanceof PsiMethod)) {
                            continue;
                        }
                        if(myMemberType == PropertyMemberType.GETTER && PropertyUtil.isSimplePropertyGetter((PsiMethod) member) || myMemberType == PropertyMemberType.SETTER && PropertyUtil
                                .isSimplePropertySetter((PsiMethod) member)) {
                            targetMember = member;
                            break;
                        }
                    }
                    // Fall back to a member that already exists on the class.
                    if(targetMember == null) {
                        targetMember = findExistingMember(aClass, myMemberType);
                    }
                }
                PsiModifierList modifierList = targetMember != null ? targetMember.getModifierList() : null;
                if(modifierList != null) {
                    for(PsiAnnotation annotation : myAnnotations) {
                        // Replace an existing annotation of the same FQN, otherwise prepend the new one.
                        PsiAnnotation existing = modifierList.findAnnotation(annotation.getQualifiedName());
                        if(existing != null) {
                            existing.replace(annotation);
                        }
                        else {
                            modifierList.addAfter(annotation, null);
                        }
                    }
                }
            }
            return ContainerUtil.concat(Collections.singletonList(new PsiGenerationInfo<PsiField>(psiField)), Arrays.asList(infos));
        }
        catch(IncorrectOperationException e) {
            assert false : e;
            return Collections.emptyList();
        }
    }

    /**
     * Locates a member of {@code aClass} matching the given member type: the
     * same-named field for FIELD, or an accessor found by signature for GETTER.
     *
     * <p>NOTE(review): only the GETTER branch is handled for methods — a SETTER
     * request falls through and returns {@code null}. Possibly intentional
     * (the inner check does consult {@code myMemberType}); confirm before relying
     * on this for setters.
     *
     * @return the existing member, or {@code null} if none was found
     */
    @javax.annotation.Nullable
    public PsiMember findExistingMember(@Nonnull PsiClass aClass, @Nonnull PropertyMemberType memberType) {
        if(memberType == PropertyMemberType.FIELD) {
            return aClass.findFieldByName(getFieldName(aClass), false);
        }
        else if(memberType == PropertyMemberType.GETTER) {
            try {
                PsiElementFactory psiElementFactory = JavaPsiFacade.getInstance(aClass.getProject()).getElementFactory();
                PsiField field = psiElementFactory.createField(myAttributeName, myType);
                PsiMethod[] templates = GetterSetterPrototypeProvider.generateGetterSetters(field, myMemberType == PropertyMemberType.GETTER);
                for(PsiMethod template : templates) {
                    // Search the whole hierarchy (deep = true) for a matching accessor.
                    PsiMethod existingMethod = aClass.findMethodBySignature(template, true);
                    if(existingMethod != null) {
                        return existingMethod;
                    }
                }
            }
            catch(IncorrectOperationException e) {
                assert false : e;
            }
        }
        return null;
    }

    /**
     * Returns the attribute name as-is when generating a raw field, otherwise the
     * code-style-adjusted field name derived from the property name.
     */
    private String getFieldName(PsiClass aClass) {
        return myMemberType == PropertyMemberType.FIELD ? myAttributeName : JavaCodeStyleManager.getInstance(aClass.getProject()).propertyNameToVariableName(myAttributeName, VariableKind.FIELD);
    }

    /** Not supported: this handler never enumerates existing members. */
    @Override
    protected ClassMember[] getAllOriginalMembers(PsiClass aClass) {
        throw new UnsupportedOperationException();
    }

    /** Not supported: generation happens in bulk via the array overload. */
    @Override
    protected GenerationInfo[] generateMemberPrototypes(PsiClass aClass, ClassMember originalMember) throws IncorrectOperationException {
        throw new UnsupportedOperationException();
    }
}
apache-2.0
eFaps/eFapsApp-Sales
src/main/efaps/ESJP/org/efaps/esjp/sales/document/Template.java
282
package org.efaps.esjp.sales.document;

import org.efaps.admin.program.esjp.EFapsApplication;
import org.efaps.admin.program.esjp.EFapsUUID;

/**
 * ESJP entry point for sales "Template" documents. The implementation lives in
 * {@code Template_Base}; this subclass only carries the eFaps annotations
 * identifying the class (UUID) and its owning application.
 * (The empty-subclass-over-_Base layout follows the eFaps ESJP convention
 * inferred from the annotations and naming — confirm against eFaps docs.)
 */
@EFapsUUID("633b8be0-63ac-4759-b746-8ace8f142c80")
@EFapsApplication("eFapsApp-Sales")
public class Template extends Template_Base {
}
apache-2.0
Yannic/closure-compiler
src/com/google/javascript/jscomp/IncrementalScopeCreator.java
13788
/* * Copyright 2017 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkState; import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.HashMultimap; import com.google.common.collect.ImmutableList; import com.google.common.collect.Multimap; import com.google.javascript.jscomp.SyntacticScopeCreator.ScopeScanner; import com.google.javascript.rhino.Node; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * A reusable scope creator which invalidates scopes based on reported AST changes to SCRIPT and * FUNCTION codes (aka "change scopes"). This class stores an instance of itself on the compiler * object which is accessible via the "getInstance" static method. To ensure that consumers see a * consistent state, they must call "freeze"/"thaw" before and after use (typically for the duration * of a NodeTraveral). * * <p>This class delegates to the SyntacticScopeCreator and requires a consistent definition of * global Scope (the global scope root must include both externs and code). 
*/ class IncrementalScopeCreator implements ScopeCreator { private final AbstractCompiler compiler; // TODO(johnlenz): This leaks scope object for scopes removed from the AST. // Soon we will track removed function nodes use that to remove scopes. private final Map<Node, PersistentScope> scopesByScopeRoot = new HashMap<>(); private final SyntacticScopeCreator delegate; private final PersistentScopeFactory factory = new PersistentScopeFactory(); private boolean frozen; private IncrementalScopeCreator(AbstractCompiler compiler) { this.compiler = compiler; this.delegate = createInternalScopeCreator(compiler); } // Get an instance of the ScopeCreator public static IncrementalScopeCreator getInstance(AbstractCompiler compiler) { IncrementalScopeCreator creator = compiler.getScopeCreator(); if (creator == null) { creator = new IncrementalScopeCreator(compiler); compiler.putScopeCreator(creator); } return creator; } public IncrementalScopeCreator freeze() { checkState(!this.frozen, "inconsistent freeze state: already frozen"); frozen = true; invalidateChangedScopes(); return this; } public IncrementalScopeCreator thaw() { checkState(this.frozen, "inconsistent freeze state: already thaw'd"); frozen = false; return this; } private void invalidateChangedScopes() { List<Node> changedRoots = compiler.getChangedScopeNodesForPass("Scopes"); List<Node> scripts = new ArrayList<>(); if (changedRoots != null) { for (Node root : changedRoots) { if (root.isScript()) { scripts.add(root); } else { checkState(!root.isRoot()); invalidateRoot(root); } } invalidateScripts(scripts); } } private void invalidateScripts(List<Node> invalidatedScripts) { if (!invalidatedScripts.isEmpty()) { Node root = compiler.getRoot(); PersistentGlobalScope scope = (PersistentGlobalScope) scopesByScopeRoot.get(root); if (scope != null) { scope.invalidate(invalidatedScripts); } } } private void invalidateRoot(Node n) { PersistentLocalScope scope = (PersistentLocalScope) scopesByScopeRoot.get(n); if (scope != 
null) { scope.invalidate(); } } @Override public Scope createScope(Node n, AbstractScope<?, ?> parent) { checkState(parent == null || parent instanceof PersistentScope); checkState(parent == null || ((PersistentScope) parent).isValid(), "parent is not valid"); checkState(frozen, "freeze() must be called before retrieving scopes"); checkArgument(parent != null || n == compiler.getRoot(), "the shared persistent scope must always be root at the tip of the AST"); PersistentScope scope = scopesByScopeRoot.get(n); if (scope == null) { scope = (PersistentScope) delegate.createScope(n, parent); scopesByScopeRoot.put(n, scope); } else { scope.refresh(compiler, (PersistentScope) parent); } checkState(scope.isValid(), "scope is not valid"); return scope; } @Override public boolean hasBlockScope() { return delegate.hasBlockScope(); } /** * A subclass of the traditional Scope class that knows about its children, * and has methods for updating the scope heirarchy. */ private abstract static class PersistentScope extends Scope { boolean valid = true; // starts as valid PersistentScope parent; int depth; PersistentScope(PersistentScope parent, Node rootNode) { super(rootNode); checkChildScope(parent); this.parent = parent; this.depth = parent.depth + 1; } PersistentScope(Node rootNode) { super(rootNode); checkArgument(rootNode.isRoot()); // Note: this is a stronger check than checkRootScope() this.parent = null; this.depth = 0; } static PersistentScope create(PersistentScope parent, Node rootNode) { if (parent == null) { checkArgument(rootNode.isRoot() && rootNode.getParent() == null, rootNode); return new PersistentGlobalScope(rootNode); } else { return new PersistentLocalScope(parent, rootNode); } } public boolean isValid() { return valid; } @Override public PersistentScope getParent() { checkState(parent == null || parent.valid, "parent scope is not valid"); // The node traversal should ask for scopes in order, so parents should always be valid. 
return parent; } @Override public int getDepth() { return depth; } abstract void refresh(AbstractCompiler compiler, PersistentScope newParent); abstract void addChildScope(PersistentLocalScope scope); } private static class PersistentGlobalScope extends PersistentScope { Multimap<Node, PersistentLocalScope> validChildren = ArrayListMultimap.create(); Set<Node> scriptsToUpdate = new HashSet<>(); Multimap<Node, Var> scriptToVarMap = ArrayListMultimap.create(); Multimap<Node, Node> scriptDeclarationsPairs = HashMultimap.create(); PersistentScopeFactory factory = new PersistentScopeFactory(); protected PersistentGlobalScope(Node rootNode) { super(rootNode); checkArgument(rootNode.isRoot() && rootNode.getParent() == null); } @Override void addChildScope(PersistentLocalScope scope) { // Only track child scopes that should be // invalidated when a "change scope" is changed, // not scopes that are themselves change scope roots. if (!NodeUtil.isChangeScopeRoot(scope.getRootNode())) { Node script = getContainingScript(scope.getRootNode()); checkState(script.isScript()); validChildren.put(script, scope); } } public void invalidate(List<Node> invalidatedScripts) { valid = false; for (Node script : invalidatedScripts) { checkState(script.isScript()); // invalidate any generated child scopes for (PersistentLocalScope scope : validChildren.removeAll(script)) { scope.invalidate(); } } scriptsToUpdate.addAll(invalidatedScripts); } @Override void refresh(AbstractCompiler compiler, PersistentScope newParent) { checkArgument(newParent == null); // Update the scope if needed. 
if (!this.valid) { checkState(!scriptsToUpdate.isEmpty()); expandInvalidatedScriptPairs(); clearPairsForInvalidatedScripts(); undeclareVarsForInvalidatedScripts(); new ScopeScanner(compiler, factory, this, scriptsToUpdate).populate(); scriptsToUpdate.clear(); this.valid = true; } else { checkState(scriptsToUpdate.isEmpty()); } } void expandInvalidatedScriptPairs() { // Make a copy as before we star to update the set List<Node> scripts = new ArrayList<>(scriptsToUpdate); for (Node script : scripts) { expandInvalidatedScript(script); } } // For every script look for scripts which may contains redeclarations void expandInvalidatedScript(Node script) { Collection<Node> pairs = scriptDeclarationsPairs.get(script); for (Node n : pairs) { if (scriptsToUpdate.add(n)) { expandInvalidatedScript(script); } } } void clearPairsForInvalidatedScripts() { for (Node script : scriptsToUpdate) { scriptDeclarationsPairs.removeAll(script); } } /** undeclare all vars in the invalidated scripts */ void undeclareVarsForInvalidatedScripts() { for (Node script : scriptsToUpdate) { for (Var var : scriptToVarMap.removeAll(script)) { super.undeclareInteral(var); } } } Node getContainingScript(Node n) { while (!n.isScript()) { n = n.getParent(); } return n; } @Override Var declare(String name, Node nameNode, CompilerInput input) { Node declareScript = getContainingScript(nameNode); Var v = super.declare(name, nameNode, input); scriptToVarMap.put(declareScript, v); return v; } /** * link any script that redeclares a variable to the original script so the two scripts * always get built together. 
*/ public void redeclare(Node n) { checkArgument(n.isName()); String name = n.getString(); checkArgument(!Var.ARGUMENTS.equals(name)); Node redeclareScript = getContainingScript(n); Var v = getOwnSlot(name); Node declarationScript = getContainingScript(v.getNode()); if (redeclareScript != declarationScript) { scriptDeclarationsPairs.put(redeclareScript, declarationScript); scriptDeclarationsPairs.put(declarationScript, redeclareScript); } } } private static final ImmutableList<PersistentLocalScope> PRIMORDIAL_LIST = ImmutableList.of(); /** * A subclass of the traditional Scope class that knows about its children, * and has methods for updating the scope hierarchy. */ private static class PersistentLocalScope extends PersistentScope { // A list of Scope within the "change scope" (those not crossing function boundaries) // which were added to this scope. List<PersistentLocalScope> validChildren = PRIMORDIAL_LIST; PersistentLocalScope(PersistentScope parent, Node rootNode) { super(parent, rootNode); parent.addChildScope(this); } @Override void addChildScope(PersistentLocalScope scope) { // Keep track of valid children within the "change scope". if (!NodeUtil.isChangeScopeRoot(scope.getRootNode())) { // The first time we have added to the list, create a real list. if (validChildren == PRIMORDIAL_LIST) { validChildren = new ArrayList<>(); } validChildren.add(scope); } } @Override public boolean isValid() { return valid; } void invalidate() { if (valid) { valid = false; for (PersistentLocalScope child : validChildren) { checkState(!NodeUtil.isChangeScopeRoot(child.getRootNode())); child.invalidate(); } if (validChildren != PRIMORDIAL_LIST) { validChildren.clear(); } } } @Override void refresh(AbstractCompiler compiler, PersistentScope newParent) { checkArgument(newParent != null && newParent.isValid()); checkState(parent != null); // Even if this scope hasn't been invalidated, its parent scopes may have, // so update the scope chaining. 
this.parent = newParent; // Even if the parent hasn't changed the depth might have, update it now. this.depth = parent.getDepth() + 1; // Update the scope if needed. if (!valid) { clearVarsInternal(); new ScopeScanner(compiler, this).populate(); valid = true; // NOTE(johnlenz): It doesn't really matter which parent scope in the "change scope" // invalidates this scope so it doesn't need to update when the parent changes. getParent().addChildScope(this); } } } SyntacticScopeCreator createInternalScopeCreator(AbstractCompiler compiler) { return new SyntacticScopeCreator(compiler, factory, factory); } private static class PersistentScopeFactory implements SyntacticScopeCreator.ScopeFactory, SyntacticScopeCreator.RedeclarationHandler { @Override public PersistentScope create(Scope parent, Node n) { return PersistentScope.create((PersistentScope) parent, n); } @Override public void onRedeclaration(Scope s, String name, Node n, CompilerInput input) { if (s.isGlobal()) { ((PersistentGlobalScope) s).redeclare(n); // TODO(johnlenz): link source script and the redeclaration script so // that the global scope is rebuilt in the presense of redeclarations. } } } }
apache-2.0
neowu/core-ng-project
core-ng-mongo/src/main/java/core/framework/mongo/Count.java
219
package core.framework.mongo;

import com.mongodb.ReadPreference;
import org.bson.conversions.Bson;

/**
 * Parameter object describing a Mongo count operation: the filter documents
 * must match, and the read preference used to route the query.
 *
 * @author neo
 */
public final class Count {
    // Query criteria the count applies to; presumably null means "count all" — confirm against the executing DAO.
    public Bson filter;
    // Which replica-set members may serve the query; presumably null falls back to the collection/driver default — confirm.
    public ReadPreference readPreference;
}
apache-2.0
jbertram/activemq-artemis-old
artemis-core-client/src/main/java/org/apache/activemq/artemis/api/core/JGroupsChannelBroadcastEndpoint.java
1268
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.activemq.artemis.api.core; import org.jgroups.JChannel; public class JGroupsChannelBroadcastEndpoint extends JGroupsBroadcastEndpoint { private final JChannel jChannel; public JGroupsChannelBroadcastEndpoint(JChannel jChannel, final String channelName) throws Exception { super(channelName); this.jChannel = jChannel; } @Override public JChannel createChannel() throws Exception { return jChannel; } }
apache-2.0
drochetti/jnap-core
src/main/java/org/jnap/core/mvc/support/PathNameTransformer.java
125
package org.jnap.core.mvc.support; public interface PathNameTransformer { public String transform(String text); }
apache-2.0
tkao1000/pinot
pinot-core/src/test/java/com/linkedin/pinot/core/data/readers/PinotSegmentRecordReaderTest.java
5883
/**
 * Copyright (C) 2014-2015 LinkedIn Corp. (pinot-core@linkedin.com)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.linkedin.pinot.core.data.readers;

import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.concurrent.TimeUnit;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.RandomStringUtils;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import com.google.common.io.Files;
import com.linkedin.pinot.common.data.DimensionFieldSpec;
import com.linkedin.pinot.common.data.FieldSpec;
import com.linkedin.pinot.common.data.FieldSpec.DataType;
import com.linkedin.pinot.common.data.MetricFieldSpec;
import com.linkedin.pinot.common.data.Schema;
import com.linkedin.pinot.common.data.TimeFieldSpec;
import com.linkedin.pinot.common.data.TimeGranularitySpec;
import com.linkedin.pinot.core.data.GenericRow;
import com.linkedin.pinot.core.indexsegment.generator.SegmentGeneratorConfig;
import com.linkedin.pinot.core.segment.creator.impl.SegmentIndexCreationDriverImpl;

/**
 * Tests the PinotSegmentRecordReader to check that the records being generated
 * are the same as the records used to create the segment.
 */
public class PinotSegmentRecordReaderTest {
  // Column names of the test schema. Declared final: they are constants and must not be reassigned.
  private static final String D_SV_1 = "d_sv_1";
  private static final String D_MV_1 = "d_mv_1";
  private static final String M1 = "m1";
  private static final String M2 = "m2";
  private static final String TIME = "t";

  // Sizes of the generated fixture, extracted from former magic numbers.
  private static final int NUM_ROWS = 10000;
  private static final int NUM_MULTI_VALUES = 5;

  private String segmentName;
  private Schema schema;
  private String segmentOutputDir;
  private File segmentIndexDir;
  private List<GenericRow> rows;
  private TestRecordReader recordReader;

  /** Builds the schema, generates random rows, and creates the segment under a temp dir. */
  @BeforeClass
  public void setup() throws Exception {
    segmentName = "pinotSegmentRecordReaderTest";
    schema = createPinotSchema();
    segmentOutputDir = Files.createTempDir().toString();
    segmentIndexDir = new File(segmentOutputDir, segmentName);
    rows = createTestData();
    recordReader = new TestRecordReader(rows, schema);
    createSegment();
  }

  /** Generates {@link #NUM_ROWS} random rows matching the test schema. */
  private List<GenericRow> createTestData() {
    List<GenericRow> testRows = new ArrayList<>(NUM_ROWS);
    Random random = new Random();
    for (int i = 0; i < NUM_ROWS; i++) {
      Map<String, Object> fields = new HashMap<>();
      fields.put(D_SV_1, D_SV_1 + "_" + RandomStringUtils.randomAlphabetic(2));
      Object[] multiValues = new Object[NUM_MULTI_VALUES];
      for (int j = 0; j < NUM_MULTI_VALUES; j++) {
        multiValues[j] = D_MV_1 + "_" + j + "_" + RandomStringUtils.randomAlphabetic(2);
      }
      fields.put(D_MV_1, multiValues);
      // Mask the sign bit instead of Math.abs(): Math.abs(Integer.MIN_VALUE) is still negative,
      // so the old Math.abs(random.nextInt()) could (rarely) produce a negative metric.
      fields.put(M1, random.nextInt() & Integer.MAX_VALUE);
      // nextFloat() is already in [0, 1), so no abs/masking is needed.
      fields.put(M2, random.nextFloat());
      // Same sign-bit masking rationale as M1, for longs.
      fields.put(TIME, random.nextLong() & Long.MAX_VALUE);
      GenericRow row = new GenericRow();
      row.init(fields);
      testRows.add(row);
    }
    return testRows;
  }

  /** Builds the fixed test schema: one SV dim, one MV dim, two metrics, one time column. */
  private Schema createPinotSchema() {
    Schema testSchema = new Schema();
    testSchema.setSchemaName("schema");
    FieldSpec spec;
    spec = new DimensionFieldSpec(D_SV_1, DataType.STRING, true);
    testSchema.addField(D_SV_1, spec);
    spec = new DimensionFieldSpec(D_MV_1, DataType.STRING, false, ",");
    testSchema.addField(D_MV_1, spec);
    spec = new MetricFieldSpec(M1, DataType.INT);
    testSchema.addField(M1, spec);
    spec = new MetricFieldSpec(M2, DataType.FLOAT);
    testSchema.addField(M2, spec);
    spec = new TimeFieldSpec(new TimeGranularitySpec(DataType.LONG, TimeUnit.HOURS, TIME));
    testSchema.addField(TIME, spec);
    return testSchema;
  }

  /** Drives segment generation from the generated rows; fails fast if no output appeared. */
  private void createSegment() throws Exception {
    SegmentGeneratorConfig segmentGeneratorConfig = new SegmentGeneratorConfig(schema);
    segmentGeneratorConfig.setTableName(segmentName);
    segmentGeneratorConfig.setOutDir(segmentOutputDir);
    segmentGeneratorConfig.setSegmentName(segmentName);
    SegmentIndexCreationDriverImpl driver = new SegmentIndexCreationDriverImpl();
    driver.init(segmentGeneratorConfig, recordReader);
    driver.build();
    if (!segmentIndexDir.exists()) {
      throw new IllegalStateException("Segment generation failed");
    }
  }

  /** Reads the segment back and asserts the rows round-trip unchanged, in order. */
  @Test
  public void testPinotSegmentRecordReader() throws Exception {
    List<GenericRow> outputRows = new ArrayList<>(rows.size());
    PinotSegmentRecordReader pinotSegmentRecordReader = new PinotSegmentRecordReader(segmentIndexDir);
    pinotSegmentRecordReader.init();
    while (pinotSegmentRecordReader.hasNext()) {
      outputRows.add(pinotSegmentRecordReader.next());
    }
    pinotSegmentRecordReader.close();
    Assert.assertEquals(outputRows.size(), rows.size(),
        "Number of rows returned by PinotSegmentRecordReader is incorrect");
    for (int i = 0; i < outputRows.size(); i++) {
      GenericRow outputRow = outputRows.get(i);
      GenericRow row = rows.get(i);
      Assert.assertEquals(outputRow.getValue(D_SV_1), row.getValue(D_SV_1));
      Assert.assertEquals(outputRow.getValue(D_MV_1), row.getValue(D_MV_1));
      Assert.assertEquals(outputRow.getValue(M1), row.getValue(M1));
      Assert.assertEquals(outputRow.getValue(M2), row.getValue(M2));
      Assert.assertEquals(outputRow.getValue(TIME), row.getValue(TIME));
    }
  }

  /** Removes the temp segment directory. */
  @AfterClass
  public void cleanup() {
    FileUtils.deleteQuietly(new File(segmentOutputDir));
  }
}
apache-2.0
karussell/fastutil
src/it/unimi/dsi/fastutil/chars/Char2FloatFunction.java
4322
/* Generic definitions */
/* Assertions (useful to generate conditional code) */
/* Current type and class (and size, if applicable) */
/* Value methods */
/* Interfaces (keys) */
/* Interfaces (values) */
/* Abstract implementations (keys) */
/* Abstract implementations (values) */
/* Static containers (keys) */
/* Static containers (values) */
/* Implementations */
/* Synchronized wrappers */
/* Unmodifiable wrappers */
/* Other wrappers */
/* Methods (keys) */
/* Methods (values) */
/* Methods (keys/values) */
/* Methods that have special names depending on keys (but the special names depend on values) */
/* Equality */
/* Object/Reference-only definitions (keys) */
/* Primitive-type-only definitions (keys) */
/* Object/Reference-only definitions (values) */
/* Primitive-type-only definitions (values) */
/*
 * Copyright (C) 2002-2013 Sebastiano Vigna
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package it.unimi.dsi.fastutil.chars;

import it.unimi.dsi.fastutil.Function;

/** A type-specific {@link Function}; provides some additional methods that use polymorphism to avoid (un)boxing.
 *
 * <P>Type-specific versions of <code>get()</code>, <code>put()</code> and
 * <code>remove()</code> cannot rely on <code>null</code> to denote absence of
 * a key. Rather, they return a {@linkplain #defaultReturnValue() default
 * return value}, which is set to 0 cast to the return type (<code>false</code>
 * for booleans) at creation, but can be changed using the
 * <code>defaultReturnValue()</code> method.
 *
 * <P>For uniformity reasons, even maps returning objects implement the default
 * return value (of course, in this case the default return value is
 * initialized to <code>null</code>).
 *
 * <P><strong>Warning:</strong> to fall in line as much as possible with the
 * {@linkplain java.util.Map standard map interface}, it is strongly suggested
 * that standard versions of <code>get()</code>, <code>put()</code> and
 * <code>remove()</code> for maps with primitive-type values <em>return
 * <code>null</code> to denote missing keys</em> rather than wrap the default
 * return value in an object (of course, for maps with object keys and values
 * this is not possible, as there is no type-specific version).
 *
 * @see Function
 */
public interface Char2FloatFunction extends Function<Character, Float> {

	/** Adds a pair to the map.
	 *
	 * @param key the key.
	 * @param value the value.
	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
	 * @see Function#put(Object,Object)
	 */
	float put( char key, float value );

	/** Returns the value to which the given key is mapped.
	 *
	 * @param key the key.
	 * @return the corresponding value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
	 * @see Function#get(Object)
	 */
	float get( char key );

	/** Removes the mapping with the given key.
	 *
	 * @param key the key.
	 * @return the old value, or the {@linkplain #defaultReturnValue() default return value} if no value was present for the given key.
	 * @see Function#remove(Object)
	 */
	float remove( char key );

	/** Returns true if this function contains a mapping for the given key.
	 *
	 * @param key the key.
	 * @see Function#containsKey(Object)
	 */
	boolean containsKey( char key );

	/** Sets the default return value.
	 *
	 * This value must be returned by type-specific versions of
	 * <code>get()</code>, <code>put()</code> and <code>remove()</code> to
	 * denote that the map does not contain the specified key. It must be
	 * 0/<code>false</code>/<code>null</code> by default.
	 *
	 * @param rv the new default return value.
	 * @see #defaultReturnValue()
	 */
	void defaultReturnValue( float rv );

	/** Gets the default return value.
	 *
	 * @return the current default return value.
	 */
	float defaultReturnValue();
}
apache-2.0
BadWaka/SmallDianPing
app/src/main/java/com/waka/workspace/smalldianping/Constant.java
4878
package com.waka.workspace.smalldianping;

/**
 * Constants class (database names, table/column names, request codes).
 * Created by waka on 2015/12/22.
 */
public class Constant {

    // Database name (full file name, including extension)
    public static final String DATA_BASE_NAME = "SmallDianPing.db";

    // Search-history table
    public static final String TABLE_NAME_SEARCH_HISTORY = "search_history"; // table name
    public static final String TABLE_NAME_ORDER = "orders"; // orders table name
    public static final String TABLE_NAME_STORE = "store"; // store-info table name
    public static final String TABLE_NAME_USER = "user"; // user-info table name
    public static final String COLUMN_NAME_HISTORY_IN_SEARCH_HISTORY = "_history"; // history entry, TEXT PRIMARY KEY

    // Store-info table
    public static final String COLUMN_NAME_STORE_ID_IN_STORE = "_store_id"; // store id, TEXT PRIMARY KEY
    public static final String COLUMN_NAME_STORE_RATING_IN_STORE = "_store_rating"; // store rating, DOUBLE
    public static final String COLUMN_NAME_STORE_AVERAGE_COST_IN_STORE = "_store_avgcost"; // average cost per person, DOUBLE
    public static final String COLUMN_NAME_STORE_IMAGE_IN_STORE = "_store_image"; // store image, BLOB
    public static final String COLUMN_NAME_STORE_NAME_IN_STORE = "_store_name"; // store name
    public static final String COLUMN_NAME_STORE_ADDRESS_IN_STORE = "_store_address"; // store street address
    public static final String COLUMN_NAME_STORE_CITY_IN_STORE = "_store_city"; // city the store is located in
    public static final String COLUMN_NAME_STORE_DETAIL_IN_STORE = "_store_detail"; // detailed store description
    public static final String COLUMN_NAME_STORE_INTRODUCTION_IN_STORE = "_store_introduction"; // short store introduction

    // Order-info table
    public static final String COLUMN_NAME_ORDER_ID_IN_ORDER = "_order_id"; // order id
    public static final String COLUMN_NAME_USER_ID_IN_ORDER = "_user_id"; // ordering customer's user id
    public static final String COLUMN_NAME_STORE_ID_IN_ORDER = "_store_id"; // order's store id
    public static final String COLUMN_NAME_GOODS_ID_IN_ORDER = "_goods_id"; // order's goods id
    public static final String COLUMN_NAME_GOODS_NUM_IN_ORDER = "_goods_num"; // quantity of goods in the order
    public static final String COLUMN_NAME_ORDER_TOTAL_COST_IN_ORDER = "_order_total_cost"; // order total cost
    public static final String COLUMN_NAME_ORDER_KEY_IN_ORDER = "_order_key"; // order redemption code
    public static final String COLUMN_NAME_ORDER_PAY_DATETIME_IN_ORDER = "_order_pay_datetime"; // payment time
    // NOTE(review): "evaulation" is a typo baked into the column names below; do NOT "fix" it here
    // without a matching database schema migration.
    public static final String COLUMN_NAME_ORDER_EVAULATION_IN_ORDER = "_order_evaulation"; // order review text
    public static final String COLUMN_NAME_ORDER_EVAULATION_DATETIME_IN_ORDER = "_order_evaulation_datetime"; // review time
    public static final String COLUMN_NAME_ORDER_RATING_IN_ORDER = "_order_rating"; // order star rating
    public static final String COLUMN_NAME_ORDER_STATE_IN_ORDER = "_order_state"; // order state

    // User info
    public static final String COLUMN_NAME_USERID = "_user_id"; // user id, INTEGER PRIMARY KEY
    public static final String COLUMN_NAME_USER_NAME = "_user_name"; // user name, TEXT
    public static final String COLUMN_NAME_USER_PWD = "_user_pwd"; // user password, TEXT

    // RequestCodes
    public static final int LOGIN_ACTIVITY_REQUEST_CODE = 1001; // request code returned from LoginActivity
    public static final int SEARCH_INPUT_ACTIVITY_REQUEST_CODE = 1002; // request code returned from SearchInputActivity
    public static final int RESULT_LIST_ACTIVITY_REQUEST_CODE = 1003; // request code returned from ResultListActivity
    public static final int RESULT_MAP_ACTIVITY_REQUEST_CODE = 1004; // request code returned from ResultMapActivity
    public static final int CITY_PICKER_DIALOG_ACTIVITY_REQUEST_CODE = 1005; // request code returned from CityPickerDialogActivity
    public static final int STORE_DETAIL_ACTIVITY_REQUEST_CODE = 1006; // request code returned from StoreDetailActivity
    // Original comment said "returned from OrderEvaluateActivity" — NOTE(review): likely a copy-paste
    // slip; presumably OrderDetailActivity. Confirm against the caller.
    public static final int ORDER_DETAIL_ACTIVITY_REQUEST_CODE = 1007;
    public static final int ORDER_EVALUATE_ACTIVITY_REQUEST_CODE = 1008; // request code returned from OrderEvaluateActivity
    public static final int REGIST_ACTIVITY_REQUEST_CODE = 1009; // request code returned from RegistActivity (inferred from name — confirm)
    public static final int CAPTURE_ACTIVITY_REQUEST_CODE = 1010; // request code returned from CaptureActivity (inferred from name — confirm)

    // Commodity (goods) info table
    public static final String TABLE_NAME_COMMODITY = "commoditytable";
    public static final String COLUMN_NAME_COMMODITY_NAME = "_commodityname"; // commodity name
    public static final String COLUMN_NAME_COMMODITY_STOREID = "_storeID"; // id of the store the commodity belongs to
    public static final String COLUMN_NAME_COMMODITY_COMMODITYID = "_commodityID"; // commodity id
    public static final String COLUMN_NAME_COMMODITY_PRICE = "_price"; // commodity price
    public static final String COLUMN_NAME_COMMODITY_SOLD = "_sold"; // commodity sales volume
    public static final String COLUMN_NAME_COMMODITY_PICTURE = "_picture"; // commodity picture
}
apache-2.0
lakshmiDRIP/DRIP
src/main/java/org/drip/sample/treasury/YAS_DBR.java
10722
package org.drip.sample.treasury;

import org.drip.analytics.cashflow.CompositePeriod;
import org.drip.analytics.date.*;
import org.drip.market.otc.*;
import org.drip.param.creator.*;
import org.drip.param.market.CurveSurfaceQuoteContainer;
import org.drip.param.valuation.ValuationParams;
import org.drip.product.creator.*;
import org.drip.product.credit.BondComponent;
import org.drip.product.definition.CalibratableComponent;
import org.drip.product.rates.FixFloatComponent;
import org.drip.quant.common.FormatUtil;
import org.drip.service.env.EnvManager;
import org.drip.state.creator.ScenarioDiscountCurveBuilder;
import org.drip.state.discount.MergedDiscountForwardCurve;
import org.drip.state.identifier.ForwardLabel;

/*!
 * Copyright (C) 2017 Lakshmi Krishnamurthy
 * Copyright (C) 2016 Lakshmi Krishnamurthy
 * Copyright (C) 2015 Lakshmi Krishnamurthy
 *
 * This file is part of DRIP, a free-software/open-source library for buy/side financial/trading model
 * libraries targeting analysts and developers
 * https://lakshmidrip.github.io/DRIP/
 *
 * DRIP is composed of four main libraries:
 *
 * - DRIP Fixed Income - https://lakshmidrip.github.io/DRIP-Fixed-Income/
 * - DRIP Asset Allocation - https://lakshmidrip.github.io/DRIP-Asset-Allocation/
 * - DRIP Numerical Optimizer - https://lakshmidrip.github.io/DRIP-Numerical-Optimizer/
 * - DRIP Statistical Learning - https://lakshmidrip.github.io/DRIP-Statistical-Learning/
 *
 * - DRIP Fixed Income: Library for Instrument/Trading Conventions, Treasury Futures/Options,
 * Funding/Forward/Overnight Curves, Multi-Curve Construction/Valuation, Collateral Valuation and XVA
 * Metric Generation, Calibration and Hedge Attributions, Statistical Curve Construction, Bond RV
 * Metrics, Stochastic Evolution and Option Pricing, Interest Rate Dynamics and Option Pricing, LMM
 * Extensions/Calibrations/Greeks, Algorithmic Differentiation, and Asset Backed Models and Analytics.
 *
 * - DRIP Asset Allocation: Library for model libraries for MPT framework, Black Litterman Strategy
 * Incorporator, Holdings Constraint, and Transaction Costs.
 *
 * - DRIP Numerical Optimizer: Library for Numerical Optimization and Spline Functionality.
 *
 * - DRIP Statistical Learning: Library for Statistical Evaluation and Machine Learning.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 * You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * YAS_DBR contains the sample demonstrating the replication of Bloomberg's Deutsche EUR BUND YAS
 * Functionality.
 *
 * @author Lakshmi Krishnamurthy
 */
public class YAS_DBR {

	/**
	 * Builds a simple fixed-coupon DBR (German Bund) bond component.
	 * Day count is "DCAct_Act_UST"; the instrument name embeds the maturity date.
	 */
	private static BondComponent TSYBond (
		final JulianDate dtEffective,
		final JulianDate dtMaturity,
		final int iFreq,
		final String strCurrency,
		final double dblCoupon)
		throws Exception
	{
		return BondBuilder.CreateSimpleFixed (
			"DBR_" + dtMaturity,
			strCurrency,
			"",
			dblCoupon,
			iFreq,
			"DCAct_Act_UST",
			dtEffective,
			dtMaturity,
			null,
			null
		);
	}

	/**
	 * Creates an OTC fix-float swap component using the "MAIN" jurisdiction convention
	 * for the given currency/tenor; notional fixed at 1, spread at 0.
	 */
	private static final FixFloatComponent OTCIRS (
		final JulianDate dtSpot,
		final String strCurrency,
		final String strMaturityTenor,
		final double dblCoupon)
	{
		FixedFloatSwapConvention ffConv = IBORFixedFloatContainer.ConventionFromJurisdiction (
			strCurrency,
			"ALL",
			strMaturityTenor,
			"MAIN"
		);

		return ffConv.createFixFloatComponent (
			dtSpot,
			strMaturityTenor,
			dblCoupon,
			0.,
			1.
		);
	}

	/*
	 * Sample demonstrating building of rates curve from cash/future/swaps
	 *
	 * USE WITH CARE: This sample ignores errors and does not handle exceptions.
	 */
	private static MergedDiscountForwardCurve BuildRatesCurveFromInstruments (
		final JulianDate dtStart,
		final String[] astrCashTenor,
		final double[] adblCashRate,
		final String[] astrIRSTenor,
		final double[] adblIRSRate,
		final double dblBump,
		final String strCurrency)
		throws Exception
	{
		// One calibration slot per cash instrument plus one per swap.
		int iNumDCInstruments = astrCashTenor.length + adblIRSRate.length;
		int aiDate[] = new int[iNumDCInstruments];
		double adblRate[] = new double[iNumDCInstruments];
		String astrCalibMeasure[] = new String[iNumDCInstruments];
		double adblCompCalibValue[] = new double[iNumDCInstruments];
		CalibratableComponent aCompCalib[] = new CalibratableComponent[iNumDCInstruments];

		// Cash Calibration: deposits effective T+1 business day.

		JulianDate dtCashEffective = dtStart.addBusDays (
			1,
			strCurrency
		);

		for (int i = 0; i < astrCashTenor.length; ++i) {
			astrCalibMeasure[i] = "Rate";
			adblRate[i] = java.lang.Double.NaN;
			adblCompCalibValue[i] = adblCashRate[i] + dblBump;

			aCompCalib[i] = SingleStreamComponentBuilder.Deposit (
				dtCashEffective,
				new JulianDate (aiDate[i] = dtCashEffective.addTenor (astrCashTenor[i]).julian()),
				ForwardLabel.Create (
					strCurrency,
					astrCashTenor[i]
				)
			);
		}

		// IRS Calibration: swaps effective T+2 business days, stored after the cash slots.

		JulianDate dtIRSEffective = dtStart.addBusDays (2, strCurrency);

		for (int i = 0; i < astrIRSTenor.length; ++i) {
			astrCalibMeasure[i + astrCashTenor.length] = "Rate";
			adblRate[i + astrCashTenor.length] = java.lang.Double.NaN;
			adblCompCalibValue[i + astrCashTenor.length] = adblIRSRate[i] + dblBump;

			aiDate[i + astrCashTenor.length] = dtIRSEffective.addTenor (astrIRSTenor[i]).julian();

			aCompCalib[i + astrCashTenor.length] = OTCIRS (
				dtIRSEffective,
				strCurrency,
				astrIRSTenor[i],
				0.
			);
		}

		/*
		 * Build the IR curve from the components, their calibration measures, and their calibration quotes.
		 */

		return ScenarioDiscountCurveBuilder.NonlinearBuild (
			dtStart,
			strCurrency,
			aCompCalib,
			adblCompCalibValue,
			astrCalibMeasure,
			null
		);
	}

	/**
	 * Builds the EUR funding curve from a hard-coded snapshot of one 3M cash quote
	 * and 18 swap quotes (1Y through 50Y), with zero bump.
	 */
	private static final MergedDiscountForwardCurve FundingCurve (
		final JulianDate dtSpot,
		final String strCurrency)
		throws Exception
	{
		String[] astrCashTenor = new String[] {"3M"};
		double[] adblCashRate = new double[] {0.00276};
		String[] astrIRSTenor = new String[] {"1Y", "2Y", "3Y", "4Y", "5Y", "6Y", "7Y", "8Y", "9Y", "10Y",
			"11Y", "12Y", "15Y", "20Y", "25Y", "30Y", "40Y", "50Y"};
		double[] adblIRSRate = new double[] {0.00367, 0.00533, 0.00843, 0.01238, 0.01609, 0.01926,
			0.02191, 0.02406, 0.02588, 0.02741, 0.02870, 0.02982, 0.03208, 0.03372, 0.03445, 0.03484,
			0.03501, 0.03484};

		return BuildRatesCurveFromInstruments (
			dtSpot,
			astrCashTenor,
			adblCashRate,
			astrIRSTenor,
			adblIRSRate,
			0.,
			strCurrency
		);
	}

	/**
	 * Computes and prints YAS-style bond metrics (yield, modified duration, yield01 "risk",
	 * convexity, DV01, principal, and accrued) for the given clean price and notional.
	 */
	private static final void TSYMetrics (
		final BondComponent tsyBond,
		final double dblNotional,
		final JulianDate dtSettle,
		final CurveSurfaceQuoteContainer mktParams,
		final double dblCleanPrice)
		throws Exception
	{
		double dblAccrued = tsyBond.accrued (
			dtSettle.julian(),
			null
		);

		double dblYield = tsyBond.yieldFromPrice (
			new ValuationParams (
				dtSettle,
				dtSettle,
				tsyBond.currency()
			),
			mktParams,
			null,
			dblCleanPrice
		);

		double dblModifiedDuration = tsyBond.modifiedDurationFromPrice (
			new ValuationParams (
				dtSettle,
				dtSettle,
				tsyBond.currency()
			),
			mktParams,
			null,
			dblCleanPrice
		);

		double dblRisk = tsyBond.yield01FromPrice (
			new ValuationParams (
				dtSettle,
				dtSettle,
				tsyBond.currency()
			),
			mktParams,
			null,
			dblCleanPrice
		);

		double dblConvexity = tsyBond.convexityFromPrice (
			new ValuationParams (
				dtSettle,
				dtSettle,
				tsyBond.currency()
			),
			mktParams,
			null,
			dblCleanPrice
		);

		// double dblPreviousCouponDate = tsyBond.previousCouponRate (dtSettle, mktParams);

		System.out.println();

		System.out.println ("\tPrice : " + FormatUtil.FormatDouble (dblCleanPrice, 1, 4, 100.));

		System.out.println ("\tYield : " + FormatUtil.FormatDouble (dblYield, 1, 4, 100.) + "%");

		System.out.println();

		System.out.println ("\tModified Duration : " + FormatUtil.FormatDouble (dblModifiedDuration, 1, 4, 10000.));

		System.out.println ("\tRisk : " + FormatUtil.FormatDouble (dblRisk, 1, 4, 10000.));

		System.out.println ("\tConvexity : " + FormatUtil.FormatDouble (dblConvexity * dblNotional, 1, 4, 1.));

		System.out.println ("\tDV01 : " + FormatUtil.FormatDouble (dblRisk * dblNotional, 1, 0, 1.));

		System.out.println();

		// System.out.println ("\tPrevious Coupon Date : " + new JulianDate (dblPreviousCouponDate));

		System.out.println ("\tFace : " + FormatUtil.FormatDouble (dblNotional, 1, 2, 1.));

		System.out.println ("\tPrincipal : " + FormatUtil.FormatDouble (dblCleanPrice * dblNotional, 1, 2, 1.));

		System.out.println ("\tAccrued : " + FormatUtil.FormatDouble (dblAccrued * dblNotional, 1, 2, 1.));

		// System.out.println ("\tAccrual Days : " + (dtSettle.julian() - dblPreviousCouponDate));
	}

	/**
	 * Entry point: prices a 1.75% Feb-2024 DBR at a clean price of 1.13 as of May 5, 2015,
	 * prints the YAS metrics, and then dumps the coupon cash-flow schedule.
	 */
	public static final void main (
		final String astrArgs[])
		throws Exception
	{
		EnvManager.InitEnv ("");

		JulianDate dtSpot = DateUtil.CreateFromYMD (
			2015,
			DateUtil.MAY,
			5
		);

		JulianDate dtEffective = DateUtil.CreateFromYMD (
			2015,
			DateUtil.FEBRUARY,
			15
		);

		JulianDate dtMaturity = DateUtil.CreateFromYMD (
			2024,
			DateUtil.FEBRUARY,
			15
		);

		int iFreq = 1;
		String strCurrency = "EUR";
		double dblCoupon = 0.0175;
		double dblNotional = 1000000.;
		double dblCleanPrice = 1.13;

		BondComponent tsyBond = TSYBond (
			dtEffective,
			dtMaturity,
			iFreq,
			strCurrency,
			dblCoupon
		);

		System.out.println();

		System.out.println ("\tEffective : " + tsyBond.effectiveDate());

		System.out.println ("\tMaturity : " + tsyBond.maturityDate());

		System.out.println();

		MergedDiscountForwardCurve dc = FundingCurve (
			dtSpot,
			strCurrency
		);

		TSYMetrics (
			tsyBond,
			dblNotional,
			dtSpot,
			MarketParamsBuilder.Create (
				dc,
				null,
				null,
				null,
				null,
				null,
				null
			),
			dblCleanPrice
		);

		System.out.println ("\n\tCashflow\n\t--------");

		for (CompositePeriod p : tsyBond.couponPeriods())
			System.out.println ("\t\t" +
				DateUtil.YYYYMMDD (p.startDate()) + " | " +
				DateUtil.YYYYMMDD (p.endDate()) + " | " +
				DateUtil.YYYYMMDD (p.payDate()) + " | " +
				FormatUtil.FormatDouble (p.couponDCF(), 1, 4, 1.) + " ||"
			);
	}
}
apache-2.0
jonefeewang/armeria
core/src/main/java/com/linecorp/armeria/client/http/HttpSessionChannelFactory.java
4700
/*
 * Copyright 2016 LINE Corporation
 *
 * LINE Corporation licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package com.linecorp.armeria.client.http;

import static java.util.Objects.requireNonNull;

import java.net.InetSocketAddress;
import java.net.SocketAddress;
import java.util.Collections;
import java.util.IdentityHashMap;
import java.util.Map;
import java.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;

import com.linecorp.armeria.client.SessionOptions;
import com.linecorp.armeria.client.SessionProtocolNegotiationCache;
import com.linecorp.armeria.client.SessionProtocolNegotiationException;
import com.linecorp.armeria.client.pool.PoolKey;
import com.linecorp.armeria.common.SessionProtocol;

import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoop;
import io.netty.util.concurrent.Future;
import io.netty.util.concurrent.Promise;

/**
 * Creates a new HTTP session {@link Channel} for a {@link PoolKey} (remote address +
 * session protocol): connects, installs the client pipeline for the requested protocol,
 * and completes the returned future once the session is established (or fails it on
 * connect error / negotiation timeout).
 */
class HttpSessionChannelFactory implements Function<PoolKey, Future<Channel>> {

    // Template bootstrap; cloned once per session protocol in bootstrap().
    private final Bootstrap baseBootstrap;
    private final EventLoop eventLoop;
    // Lazily built per-protocol bootstrap cache.
    // NOTE(review): IdentityHashMap keying assumes SessionProtocol instances are canonical
    // (e.g. enum constants) — confirm against SessionProtocol's definition.
    private final Map<SessionProtocol, Bootstrap> bootstrapMap;
    private final SessionOptions options;

    HttpSessionChannelFactory(Bootstrap bootstrap, SessionOptions options) {
        baseBootstrap = requireNonNull(bootstrap);
        eventLoop = (EventLoop) bootstrap.config().group();
        bootstrapMap = Collections.synchronizedMap(new IdentityHashMap<>());
        this.options = options;
    }

    /**
     * Starts establishing a session for {@code key}; the returned future completes with
     * the connected {@link Channel} or fails with a negotiation/connect error.
     */
    @Override
    public Future<Channel> apply(PoolKey key) {
        final InetSocketAddress remoteAddress = key.remoteAddress();
        final SessionProtocol protocol = key.sessionProtocol();

        if (SessionProtocolNegotiationCache.isUnsupported(remoteAddress, protocol)) {
            // Fail immediately if it is sure that the remote address does not support the requested protocol.
            return eventLoop.newFailedFuture(
                    new SessionProtocolNegotiationException(protocol, "previously failed negotiation"));
        }

        final Promise<Channel> sessionPromise = eventLoop.newPromise();
        connect(remoteAddress, protocol, sessionPromise);
        return sessionPromise;
    }

    // Connects with the per-protocol bootstrap; on success hands off to initSession(),
    // on failure fails the session promise with the connect cause.
    void connect(SocketAddress remoteAddress, SessionProtocol protocol, Promise<Channel> sessionPromise) {
        final Bootstrap bootstrap = bootstrap(protocol);
        final ChannelFuture connectFuture = bootstrap.connect(remoteAddress);

        connectFuture.addListener((ChannelFuture future) -> {
            if (future.isSuccess()) {
                initSession(protocol, future, sessionPromise);
            } else {
                sessionPromise.setFailure(future.cause());
            }
        });
    }

    // Returns (creating on first use) the bootstrap whose pipeline is configured for the
    // given protocol. computeIfAbsent on the synchronized map keeps creation race-free.
    private Bootstrap bootstrap(SessionProtocol sessionProtocol) {
        return bootstrapMap.computeIfAbsent(sessionProtocol, sp -> {
            Bootstrap bs = baseBootstrap.clone();
            bs.handler(new ChannelInitializer<Channel>() {
                @Override
                protected void initChannel(Channel ch) throws Exception {
                    ch.pipeline().addLast(new HttpClientPipelineConfigurator(sp, options));
                }
            });
            return bs;
        });
    }

    // After a successful TCP connect: schedule a session-creation timeout (which closes the
    // channel if it fires first) and install the session handler that completes the promise.
    private void initSession(SessionProtocol protocol, ChannelFuture connectFuture,
                             Promise<Channel> sessionPromise) {
        assert connectFuture.isSuccess();

        final Channel ch = connectFuture.channel();
        final EventLoop eventLoop = ch.eventLoop();
        assert eventLoop.inEventLoop();

        final ScheduledFuture<?> timeoutFuture = eventLoop.schedule(() -> {
            if (sessionPromise.tryFailure(new SessionProtocolNegotiationException(
                    protocol, "connection established, but session creation timed out: " + ch))) {
                ch.close();
            }
        }, options.connectTimeoutMillis(), TimeUnit.MILLISECONDS);

        ch.pipeline().addLast(new HttpSessionHandler(this, ch, sessionPromise, timeoutFuture));
    }
}
apache-2.0
longxingkai/coolweather
src/com/coolweather/model/City.java
629
package com.coolweather.model; public class City { private int id; private String cityName; private String cityCode; private int provinceId; public int getId() { return id; } public void setId(int id) { this.id = id; } public String getCityName() { return cityName; } public void setCityName(String cityName) { this.cityName = cityName; } public String getCityCode() { return cityCode; } public void setCityCode(String cityCode) { this.cityCode = cityCode; } public int getProvinceId() { return provinceId; } public void setProvinceId(int provinceId) { this.provinceId = provinceId; } }
apache-2.0
harfalm/Sakai-10.1
kernel/api/src/main/java/org/sakaiproject/tool/api/Breakdownable.java
4050
/********************************************************************************** * $URL: https://source.sakaiproject.org/svn/kernel/tags/sakai-10.1/api/src/main/java/org/sakaiproject/tool/api/Breakdownable.java $ * $Id: Breakdownable.java 308853 2014-04-25 23:40:21Z enietzel@anisakai.com $ *********************************************************************************** * * Copyright (c) 2014 The Apereo Foundation. * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.tool.api; import java.io.Serializable; /** * Allows a class type to be broken down for caching or export and then later * rebuilt into an equivalent object in memory. * * This follow the typical pattern which is used in EntityBroker for providers * but is currently generally independent from that system. * * Breakdownable classes are classes that are not by themselves serializable, * but they can be converted into a Serializable format using the makeBreakdown. * In addition, the class can be reconstituted using the doRebuild * * @author Aaron Zeckoski (azeckoski @ unicon.net) * @author mgillian */ public interface Breakdownable<T> { /** * @return the class which is managed by this breakdown and rebuild processor */ public Class<T> defineHandledClass(); /** * Return the simplest amount of data needed to rebuild the implementing object later. * e.g. 
For a Site this might only be the id since trying to store the whole site is dangerous (would get out of date) * and too large and non-serializable, the id would be enough to rebuild it later via a lookup * * @param sizeHint a hint about the size to make the returned serialized data * @return serialized version of the data needed to rebuild this object later */ public Serializable makeBreakdown(T object, BreakdownableSize sizeHint); /** * Rebuilds an object using breakdown data. * Should return a fully formed version of the implementing object. * Can assume the current ClassLoader is the one for this Class. * * @param objectData serialized breakdown data (whatever was returned from #makeBreakdown) * @param sizeHint a hint about the size of the serialized data * @return the actual object (this should never be null) * @throws java.lang.IllegalArgumentException if the inputs are invalid * @throws java.lang.RuntimeException if there is a failure attempting to create the object */ public T doRebuild(Serializable objectData, BreakdownableSize sizeHint); /** * OPTIONAL * Allow override of the default handling, if this returns null then the ClassLoader will be the one for this handler. * All invalid outputs are simply ignored and the default is used. * * @return the ClassLoader for the class type handled by this Breakdownable */ public ClassLoader defineClassLoader(); /** * BreakdownableSize is additional information about how the class should be serialized */ public enum BreakdownableSize { /** * Make the resulting data as small as possible, * fast to breakdown but slow to rebuild, * used for data replication and cache store with other objects */ TINY, /** * Make the object data complete, * used for generating JSON or XML formatted data */ EXPORT } }
apache-2.0
lestard/FluxFX
examples/todoflux/src/main/java/todoflux/views/item/ItemOverviewView.java
753
package todoflux.views.item;

import eu.lestard.fluxfx.View;
import eu.lestard.fluxfx.utils.ViewCellFactory;
import javafx.fxml.FXML;
import javafx.scene.control.ListView;
import todoflux.stores.ItemsStore;
import todoflux.stores.TodoItem;

/**
 * View showing the list of all todo items.
 * <p>
 * Each list cell is rendered by its own {@link ItemView}; the cell factory
 * pushes the current {@link TodoItem} into that view whenever the cell's
 * item changes. The backing item list comes straight from the store.
 */
public class ItemOverviewView implements View {

    @FXML
    public ListView<TodoItem> items;

    private final ItemsStore itemStore;

    public ItemOverviewView(ItemsStore itemStore) {
        this.itemStore = itemStore;
    }

    /**
     * Called by the FXML loader after the fields are injected: wires the cell
     * factory and binds the list view to the store's observable item list.
     */
    public void initialize() {
        ViewCellFactory<TodoItem, ItemView> factory =
                new ViewCellFactory<>(ItemView.class, (item, view) -> view.update(item));

        items.setCellFactory(factory);
        items.setItems(itemStore.getItems());
    }
}
apache-2.0
Movilizer/movilizer-spring-connector
src/main/java/com/movilizer/connector/queues/ToMovilizerQueueService.java
3075
package com.movilizer.connector.queues; import com.movilizer.connector.persistence.entities.MasterdataToMovilizerQueue; import com.movilizer.connector.persistence.entities.MoveletToMovilizerQueue; import com.movilizer.connector.persistence.entities.ParticipantToMovilizerQueue; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.springframework.beans.factory.annotation.Qualifier; import org.springframework.stereotype.Service; import javax.annotation.Resource; import java.util.Collection; import java.util.List; @Service public class ToMovilizerQueueService { @Resource @Qualifier("masterdataToMovilizerQueueService") private JavaSpringConnectorQueueService<MasterdataToMovilizerQueue> masterdataService; @Resource @Qualifier("moveletToMovilizerQueueService") private JavaSpringConnectorQueueService<MoveletToMovilizerQueue> moveletService; @Resource @Qualifier("participantToMovilizerQueueService") private JavaSpringConnectorQueueService<ParticipantToMovilizerQueue> participantService; //masterdata public boolean offer(MasterdataToMovilizerQueue masterdataRecord) { return masterdataService.offer(masterdataRecord); } public List<MasterdataToMovilizerQueue> getAllMasterdataUpdatesOrdered() { return masterdataService.getAllOrdered(); } public void removeMasterdata(Collection<MasterdataToMovilizerQueue> records) { masterdataService.remove(records); } public void removeMasterdataByKey(Collection<String> masterdataKeys) { masterdataService.removeByKeys(masterdataKeys); } public MasterdataToMovilizerQueue pollMasterdataUpdate() { return masterdataService.poll(); } //movelet public boolean offer(MoveletToMovilizerQueue moveletRecord) { return moveletService.offer(moveletRecord); } public List<MoveletToMovilizerQueue> getAllMoveletUpdatesOrdered() { return moveletService.getAllOrdered(); } public void removeMovelets(Collection<MoveletToMovilizerQueue> records) { moveletService.remove(records); } public void 
removeMoveletsByKey(Collection<String> moveletKeys) { moveletService.removeByKeys(moveletKeys); } public MoveletToMovilizerQueue pollMoveletUpdate() { return moveletService.poll(); } //participant public boolean offer(ParticipantToMovilizerQueue participantRecord) { return participantService.offer(participantRecord); } public List<ParticipantToMovilizerQueue> getAllParticipantUpdatesOrdered() { return participantService.getAllOrdered(); } public void removeParticipants(Collection<ParticipantToMovilizerQueue> records) { participantService.remove(records); } public void removeParticipantsByDeviceAddress(Collection<String> deviceAddresses) { participantService.removeByKeys(deviceAddresses); } public ParticipantToMovilizerQueue pollParticipantUpdate() { return participantService.poll(); } }
apache-2.0
jiangjiguang/lib-java
src/com/jiangjg/lib/ThingkingInJava/Th19/UpcastEnum.java
318
package com.jiangjg.lib.ThingkingInJava.Th19; enum Search{HITHER,YON} public class UpcastEnum { /** * @param args */ public static void main(String[] args) { Search[] vals = Search.values(); Enum<?> e = Search.HITHER; for(Enum en: e.getClass().getEnumConstants()){ System.out.println(en); } } }
apache-2.0
pmoor/gletscher
src/main/java/ws/moor/gletscher/catalog/CatalogReader.java
3482
/* * Copyright 2021 Patrick Moor <patrick@moor.ws> * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package ws.moor.gletscher.catalog; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import ws.moor.gletscher.blocks.PersistedBlock; import ws.moor.gletscher.proto.Gletscher; import javax.annotation.Nullable; import java.nio.file.InvalidPathException; import java.time.Instant; import java.util.Iterator; public interface CatalogReader { @Nullable CatalogDirectory findDirectory(CatalogPath path); Iterator<CatalogFile> walk(); class CatalogFile { public final CatalogPath path; public final Instant lastModifiedTime; public final ImmutableList<PersistedBlock> blockList; CatalogFile(CatalogPath filePath, Gletscher.FileEntry proto) { this.path = filePath; this.lastModifiedTime = Instant.ofEpochMilli(proto.getLastModifiedMillis()); ImmutableList.Builder<PersistedBlock> builder = ImmutableList.builder(); for (Gletscher.PersistedBlock persistedBlock : proto.getBlockList()) { builder.add(PersistedBlock.fromProto(persistedBlock)); } this.blockList = builder.build(); } public long getOriginalSize() { return blockList.stream().mapToLong(PersistedBlock::getOriginalLength).sum(); } } class CatalogDirectory { private final PersistedBlock address; private final Gletscher.Directory proto; private final ImmutableMap<String, CatalogFile> fileEntries; CatalogDirectory(CatalogPath dirPath, PersistedBlock address, Gletscher.Directory proto) { this.address = 
address; this.proto = proto; ImmutableMap.Builder<String, CatalogReader.CatalogFile> filesBuilder = ImmutableMap.builder(); for (Gletscher.DirectoryEntry entry : proto.getEntryList()) { if (entry.getTypeCase() == Gletscher.DirectoryEntry.TypeCase.FILE) { Gletscher.FileEntry fileProto = entry.getFile(); String fileName = fileProto.getName(); try { CatalogPath filePath = dirPath.makeChild(fileName); filesBuilder.put(fileName, new CatalogFile(filePath, fileProto)); } catch (InvalidPathException ipe) { System.err.printf("\n*** failed to resolve \"%s\" from \"%s\": %s\n", fileName, dirPath, ipe); } } } fileEntries = filesBuilder.build(); } @Nullable public CatalogFile findFileInformation(String fileName) { return fileEntries.get(fileName); } public PersistedBlock getAddress() { return address; } public Gletscher.Directory getProto() { return proto; } } }
apache-2.0
adamcin/net.adamcin.oakpal
core/src/test/java/net/adamcin/oakpal/it/ChecklistIT.java
2355
/* * Copyright 2020 Mark Adamcin * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package net.adamcin.oakpal.it; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import java.util.Collections; import java.util.List; import java.util.stream.Collectors; import java.util.stream.Stream; import net.adamcin.oakpal.core.CheckSpec; import net.adamcin.oakpal.core.ChecklistPlanner; import org.junit.Test; /** * Integration test to enforce expectations for the exported OakPAL checklist. Must be done during ITs because manifest * is generated at package time. 
*/ public class ChecklistIT { public static final String OAKPAL_MODULE_NAME = "net.adamcin.oakpal.core"; public static final String OAKPAL_CHECKLIST_BASIC = "basic"; @Test public void testLoadChecklists() throws Exception { ChecklistPlanner planner = new ChecklistPlanner(Collections.singletonList(OAKPAL_CHECKLIST_BASIC)); planner.discoverChecklists(); assertEquals("expect one init stage, representing the one active checklist", 1, planner.getInitStages().size()); List<CheckSpec> specs = planner.getEffectiveCheckSpecs(Collections.emptyList()); List<String> expectNames = Stream.of( "paths", "subpackages", "acHandling", "filterSets", "overlaps", "composite-store-alignment", "sling-jcr-installer" ).map(name -> OAKPAL_MODULE_NAME + "/" + OAKPAL_CHECKLIST_BASIC + "/" + name) .collect(Collectors.toList()); for (String expectName : expectNames) { assertTrue("expect effective check: " + expectName, specs.stream().anyMatch(spec -> expectName.equals(spec.getName()))); } } }
apache-2.0
Gaonaifeng/eLong-OpenAPI-H5-demo
nb_demo_h5/src/main/java/com/elong/nb/model/elong/EnumValidateResult.java
1681
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.8-b130911.1802 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2016.10.09 at 10:10:23 AM CST // package com.elong.nb.model.elong; import javax.xml.bind.annotation.XmlEnum; import javax.xml.bind.annotation.XmlEnumValue; import javax.xml.bind.annotation.XmlType; /** * <p>Java class for EnumValidateResult. * * <p>The following schema fragment specifies the expected content contained within this class. * <p> * <pre> * &lt;simpleType name="EnumValidateResult"> * &lt;restriction base="{http://www.w3.org/2001/XMLSchema}string"> * &lt;enumeration value="OK"/> * &lt;enumeration value="Product"/> * &lt;enumeration value="Inventory"/> * &lt;enumeration value="Rate"/> * &lt;/restriction> * &lt;/simpleType> * </pre> * */ @XmlType(name = "EnumValidateResult") @XmlEnum public enum EnumValidateResult { OK("OK"), @XmlEnumValue("Product") Product("Product"), @XmlEnumValue("Inventory") Inventory("Inventory"), @XmlEnumValue("Rate") Rate("Rate"); private final String value; EnumValidateResult(String v) { value = v; } public String value() { return value; } public static EnumValidateResult fromValue(String v) { for (EnumValidateResult c: EnumValidateResult.values()) { if (c.value.equals(v)) { return c; } } throw new IllegalArgumentException(v); } }
apache-2.0
KoehlerSB747/sd-tools
src/test/java/org/sd/wordnet/util/TestNormalizeUtil.java
2382
/* Copyright 2008-2016 Semantic Discovery, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.sd.wordnet.util; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; /** * JUnit Tests for the NormalizeUtil class. * <p> * @author Spence Koehler */ public class TestNormalizeUtil extends TestCase { public TestNormalizeUtil(String name) { super(name); } public void testNormalize() { doNormalizeTest("This is a @#$%! Test", "this is a test"); doNormalizeTest("This's 'a' \"Test\"", "this's a test"); doNormalizeTest("sanders'", "sanders"); doNormalizeTest("'a", "'a"); doNormalizeTest("3.14159", "3.14159"); doNormalizeTest(".14159", ".14159"); } public void testTrimPossessive() { doNormalizeTest("Jacob's", "jacob"); doNormalizeTest("brother's", "brother"); doNormalizeTest("MAJESTY'S", "majesty"); doNormalizeTest("ass's", "ass"); } public void testTrimDigits() { doTrimDigitsTest("order1", "order"); doTrimDigitsTest("say9", "say"); doTrimDigitsTest("99", "99"); doTrimDigitsTest("okay", "okay"); doTrimDigitsTest("fall8", "fall"); doTrimDigitsTest("light12", "light"); doTrimDigitsTest("3.14159", "3.14159"); doTrimDigitsTest(".14159", ".14159"); } private final void doNormalizeTest(String input, String expected) { final String norm = NormalizeUtil.normalizeForLookup(input); assertEquals(expected, norm); } private final void doTrimDigitsTest(String input, String expected) { final String trimmed = NormalizeUtil.trimDigits(input); assertEquals(expected, trimmed); 
} public static Test suite() { TestSuite suite = new TestSuite(TestNormalizeUtil.class); return suite; } public static void main(String[] args) { junit.textui.TestRunner.run(suite()); } }
apache-2.0
dagnir/aws-sdk-java
aws-java-sdk-iam/src/main/java/com/amazonaws/services/identitymanagement/model/transform/NoSuchEntityExceptionUnmarshaller.java
1574
/* * Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.identitymanagement.model.transform; import org.w3c.dom.Node; import javax.annotation.Generated; import com.amazonaws.AmazonServiceException; import com.amazonaws.transform.StandardErrorUnmarshaller; import com.amazonaws.services.identitymanagement.model.NoSuchEntityException; @Generated("com.amazonaws:aws-java-sdk-code-generator") public class NoSuchEntityExceptionUnmarshaller extends StandardErrorUnmarshaller { public NoSuchEntityExceptionUnmarshaller() { super(NoSuchEntityException.class); } @Override public AmazonServiceException unmarshall(Node node) throws Exception { // Bail out if this isn't the right error code that this // marshaller understands String errorCode = parseErrorCode(node); if (errorCode == null || !errorCode.equals("NoSuchEntity")) return null; NoSuchEntityException e = (NoSuchEntityException) super.unmarshall(node); return e; } }
apache-2.0
dylanplecki/keycloak
common/src/main/java/org/keycloak/common/util/KeycloakUriBuilder.java
27446
package org.keycloak.common.util;

import java.net.URI;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

/**
 * Mutable builder for URIs that supports RESTEasy-style {name} path templates.
 * Components are stored encoded; encoding is delegated to the project's
 * {@code Encode} helper and template-parameter scanning to {@code PathHelper}
 * (both defined elsewhere — NOTE(review): exact encoding rules live there).
 *
 * @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
 * @version $Revision: 1 $
 */
public class KeycloakUriBuilder {

    // URI components, kept in (raw/encoded) form. port == -1 means "unset".
    private String host;
    private String scheme;
    private int port = -1;

    private String userInfo;
    private String path;
    private String query;
    private String fragment;
    // Scheme-specific part; non-null only for opaque URIs (e.g. mailto:...).
    private String ssp;
    // Raw authority, used only when user-info/host/port could not be split out.
    private String authority;

    public static KeycloakUriBuilder fromUri(URI uri) {
        return new KeycloakUriBuilder().uri(uri);
    }

    public static KeycloakUriBuilder fromUri(String uriTemplate) {
        return new KeycloakUriBuilder().uri(uriTemplate);
    }

    public static KeycloakUriBuilder fromPath(String path) throws IllegalArgumentException {
        return new KeycloakUriBuilder().path(path);
    }

    // Field-by-field copy; deliberately does not use Cloneable/super.clone().
    public KeycloakUriBuilder clone() {
        KeycloakUriBuilder impl = new KeycloakUriBuilder();
        impl.host = host;
        impl.scheme = scheme;
        impl.port = port;
        impl.userInfo = userInfo;
        impl.path = path;
        impl.query = query;
        impl.fragment = fragment;
        impl.ssp = ssp;
        impl.authority = authority;
        return impl;
    }

    // "scheme:opaque-part" where the opaque part does not start with '/'.
    private static final Pattern opaqueUri = Pattern.compile("^([^:/?#]+):([^/].*)");
    // Groups: 2=scheme, 4=authority, 5=path, 7=query, 9=fragment.
    private static final Pattern hierarchicalUri = Pattern.compile("^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\\?([^#]*))?(#(.*))?");
    private static final Pattern hostPortPattern = Pattern.compile("([^/:]+):(\\d+)");

    // Null-safe string equality (predates Objects.equals in this codebase).
    public static boolean compare(String s1, String s2) {
        if (s1 == s2) return true;
        if (s1 == null || s2 == null) return false;
        return s1.equals(s2);
    }

    // Computes a relative URI from 'from' to 'to' when scheme/host/port match;
    // otherwise returns 'to' unchanged.
    public static URI relativize(URI from, URI to) {
        if (!compare(from.getScheme(), to.getScheme())) return to;
        if (!compare(from.getHost(), to.getHost())) return to;
        if (from.getPort() != to.getPort()) return to;
        if (from.getPath() == null && to.getPath() == null) return URI.create("");
        else if (from.getPath() == null) return URI.create(to.getPath());
        else if (to.getPath() == null) return to;

        String fromPath = from.getPath();
        if (fromPath.startsWith("/")) fromPath = fromPath.substring(1);
        String[] fsplit = fromPath.split("/");
        String toPath = to.getPath();
        if (toPath.startsWith("/")) toPath = toPath.substring(1);
        String[] tsplit = toPath.split("/");

        // f = length of the common path prefix.
        int f = 0;
        for (; f < fsplit.length && f < tsplit.length; f++) {
            if (!fsplit[f].equals(tsplit[f])) break;
        }

        // ".." for every remaining 'from' segment, then the remaining 'to' segments.
        KeycloakUriBuilder builder = KeycloakUriBuilder.fromPath("");
        for (int i = f; i < fsplit.length; i++) builder.path("..");
        for (int i = f; i < tsplit.length; i++) builder.path(tsplit[i]);
        return builder.build();
    }

    /**
     * You may put path parameters anywhere within the uriTemplate except port
     *
     * @param uriTemplate
     * @return
     */
    public static KeycloakUriBuilder fromTemplate(String uriTemplate) {
        KeycloakUriBuilder impl = new KeycloakUriBuilder();
        impl.uriTemplate(uriTemplate);
        return impl;
    }

    /**
     * You may put path parameters anywhere within the uriTemplate except port
     *
     * @param uriTemplate
     * @return
     */
    public KeycloakUriBuilder uriTemplate(String uriTemplate) {
        if (uriTemplate == null) throw new IllegalArgumentException("uriTemplate parameter is null");
        Matcher opaque = opaqueUri.matcher(uriTemplate);
        if (opaque.matches()) {
            // Opaque URI: only scheme + ssp are kept; hierarchical parts reset.
            this.authority = null;
            this.host = null;
            this.port = -1;
            this.userInfo = null;
            this.query = null;
            this.scheme = opaque.group(1);
            this.ssp = opaque.group(2);
            return this;
        } else {
            Matcher match = hierarchicalUri.matcher(uriTemplate);
            if (match.matches()) {
                ssp = null;
                return parseHierarchicalUri(uriTemplate, match);
            }
        }
        throw new IllegalArgumentException("Illegal uri template: " + uriTemplate);
    }

    // Fills in fields from a hierarchicalUri match (see group numbers at the pattern).
    protected KeycloakUriBuilder parseHierarchicalUri(String uriTemplate, Matcher match) {
        boolean scheme = match.group(2) != null;
        if (scheme) this.scheme = match.group(2);
        String authority = match.group(4);
        if (authority != null) {
            this.authority = null;
            String host = match.group(4);
            // Split off "user@" if present.
            int at = host.indexOf('@');
            if (at > -1) {
                String user = host.substring(0, at);
                host = host.substring(at + 1);
                this.userInfo = user;
            }
            Matcher hostPortMatch = hostPortPattern.matcher(host);
            if (hostPortMatch.matches()) {
                this.host = hostPortMatch.group(1);
                try {
                    this.port = Integer.parseInt(hostPortMatch.group(2));
                } catch (NumberFormatException e) {
                    throw new IllegalArgumentException("Illegal uri template: " + uriTemplate, e);
                }
            } else {
                this.host = host;
            }
        }
        if (match.group(5) != null) {
            String group = match.group(5);
            // Without a scheme, a ':' in the first path segment would be ambiguous.
            if (!scheme && !"".equals(group) && !group.startsWith("/") && group.indexOf(':') > -1)
                throw new IllegalArgumentException("Illegal uri template: " + uriTemplate);
            if (!"".equals(group)) replacePath(group);
        }
        if (match.group(7) != null) replaceQuery(match.group(7));
        if (match.group(9) != null) fragment(match.group(9));
        return this;
    }

    public KeycloakUriBuilder uri(String uriTemplate) throws IllegalArgumentException {
        return uriTemplate(uriTemplate);
    }

    // Copies the non-null components of an already-parsed URI into this builder.
    public KeycloakUriBuilder uri(URI uri) throws IllegalArgumentException {
        if (uri == null) throw new IllegalArgumentException("URI was null");

        if (uri.getRawFragment() != null) fragment = uri.getRawFragment();

        if (uri.isOpaque()) {
            scheme = uri.getScheme();
            ssp = uri.getRawSchemeSpecificPart();
            return this;
        }

        if (uri.getScheme() == null) {
            // Keep behaving as an opaque builder if we already hold an ssp.
            if (ssp != null) {
                if (uri.getRawSchemeSpecificPart() != null) {
                    ssp = uri.getRawSchemeSpecificPart();
                    return this;
                }
            }
        } else {
            scheme = uri.getScheme();
        }

        ssp = null;
        if (uri.getRawAuthority() != null) {
            if (uri.getRawUserInfo() == null && uri.getHost() == null && uri.getPort() == -1) {
                // Authority could not be decomposed — store it verbatim.
                authority = uri.getRawAuthority();
                userInfo = null;
                host = null;
                port = -1;
            } else {
                authority = null;
                if (uri.getRawUserInfo() != null) {
                    userInfo = uri.getRawUserInfo();
                }
                if (uri.getHost() != null) {
                    host = uri.getHost();
                }
                if (uri.getPort() != -1) {
                    port = uri.getPort();
                }
            }
        }

        if (uri.getRawPath() != null && uri.getRawPath().length() > 0) {
            path = uri.getRawPath();
        }
        if (uri.getRawQuery() != null && uri.getRawQuery().length() > 0) {
            query = uri.getRawQuery();
        }

        return this;
    }

    public KeycloakUriBuilder scheme(String scheme) throws IllegalArgumentException {
        this.scheme = scheme;
        return this;
    }

    // Re-parses "scheme:ssp#fragment" and either stores it as opaque or
    // decomposes it into hierarchical components.
    public KeycloakUriBuilder schemeSpecificPart(String ssp) throws IllegalArgumentException {
        if (ssp == null) throw new IllegalArgumentException("schemeSpecificPart was null");

        StringBuilder sb = new StringBuilder();
        if (scheme != null) sb.append(scheme).append(':');
        if (ssp != null)
            sb.append(ssp);
        if (fragment != null && fragment.length() > 0) sb.append('#').append(fragment);
        URI uri = URI.create(sb.toString());

        if (uri.getRawSchemeSpecificPart() != null && uri.getRawPath() == null) {
            this.ssp = uri.getRawSchemeSpecificPart();
        } else {
            this.ssp = null;
            userInfo = uri.getRawUserInfo();
            host = uri.getHost();
            port = uri.getPort();
            path = uri.getRawPath();
            query = uri.getRawQuery();
        }
        return this;
    }

    public KeycloakUriBuilder userInfo(String ui) {
        this.userInfo = ui;
        return this;
    }

    public KeycloakUriBuilder host(String host) throws IllegalArgumentException {
        if (host != null && host.equals("")) throw new IllegalArgumentException("invalid host");
        this.host = host;
        return this;
    }

    public KeycloakUriBuilder port(int port) throws IllegalArgumentException {
        if (port < -1) throw new IllegalArgumentException("Invalid port value");
        this.port = port;
        return this;
    }

    // Joins segments onto basePath with exactly one '/' between parts;
    // empty segments are skipped; segments are encoded when 'encode' is set.
    protected static String paths(boolean encode, String basePath, String... segments) {
        String path = basePath;
        if (path == null) path = "";
        for (String segment : segments) {
            if ("".equals(segment)) continue;
            if (path.endsWith("/")) {
                if (segment.startsWith("/")) {
                    segment = segment.substring(1);
                    if ("".equals(segment)) continue;
                }
                if (encode) segment = Encode.encodePath(segment);
                path += segment;
            } else {
                if (encode) segment = Encode.encodePath(segment);
                if ("".equals(path)) {
                    path = segment;
                } else if (segment.startsWith("/")) {
                    path += segment;
                } else {
                    path += "/" + segment;
                }
            }
        }
        return path;
    }

    public KeycloakUriBuilder path(String segment) throws IllegalArgumentException {
        if (segment == null) throw new IllegalArgumentException("path was null");
        path = paths(true, path, segment);
        return this;
    }

    // Replaces the matrix parameters of the LAST path segment.
    public KeycloakUriBuilder replaceMatrix(String matrix) throws IllegalArgumentException {
        if (matrix == null) matrix = "";
        if (!matrix.startsWith(";")) matrix = ";" + matrix;
        matrix = Encode.encodePath(matrix);
        if (path == null) {
            path = matrix;
        } else {
            int start = path.lastIndexOf('/');
            if (start < 0) start = 0;
            int matrixIndex = path.indexOf(';', start);
            if (matrixIndex > -1) path = path.substring(0, matrixIndex) + matrix;
            else path += matrix;
        }
        return this;
    }

    public KeycloakUriBuilder replaceQuery(String query) throws IllegalArgumentException {
        if (query == null || query.length() == 0) {
            this.query = null;
            return this;
        }
        this.query = Encode.encodeQueryString(query);
        return this;
    }

    public KeycloakUriBuilder fragment(String fragment) throws IllegalArgumentException {
        if (fragment == null) {
            this.fragment = null;
            return this;
        }
        this.fragment = Encode.encodeFragment(fragment);
        return this;
    }

    /**
     * Only replace path params in path of URI. This changes state of URIBuilder.
     *
     * @param name
     * @param value
     * @param isEncoded
     * @return
     */
    public KeycloakUriBuilder substitutePathParam(String name, Object value, boolean isEncoded) {
        if (path != null) {
            StringBuffer buffer = new StringBuffer();
            replacePathParameter(name, value.toString(), isEncoded, path, buffer, false);
            path = buffer.toString();
        }
        return this;
    }

    public URI buildFromMap(Map<String, ?> values) throws IllegalArgumentException {
        if (values == null) throw new IllegalArgumentException("values parameter is null");
        return buildUriFromMap(values, false, true);
    }

    public URI buildFromEncodedMap(Map<String, ?> values) throws IllegalArgumentException {
        if (values == null) throw new IllegalArgumentException("values parameter is null");
        return buildUriFromMap(values, true, false);
    }

    public URI buildFromMap(Map<String, ?> values, boolean encodeSlashInPath) throws IllegalArgumentException {
        if (values == null) throw new IllegalArgumentException("values parameter is null");
        return buildUriFromMap(values, false, encodeSlashInPath);
    }

    protected URI buildUriFromMap(Map<String, ?> paramMap, boolean fromEncodedMap, boolean encodeSlash) throws IllegalArgumentException {
        String buf = buildString(paramMap, fromEncodedMap, false, encodeSlash);
        try {
            return URI.create(buf);
        } catch (Exception e) {
            throw new RuntimeException("Failed to create URI: " + buf, e);
        }
    }

    // Assembles the URI string, substituting {name} template parameters from
    // paramMap in every component except the port.
    private String buildString(Map<String, ?> paramMap, boolean fromEncodedMap, boolean isTemplate, boolean encodeSlash) {
        // Validate the whole map up front.
        for (Map.Entry<String, ? extends Object> entry : paramMap.entrySet()) {
            if (entry.getKey() == null) throw new IllegalArgumentException("map key is null");
            if (entry.getValue() == null) throw new IllegalArgumentException("map value is null");
        }
        StringBuffer buffer = new StringBuffer();

        if (scheme != null)
            replaceParameter(paramMap, fromEncodedMap, isTemplate, scheme, buffer, encodeSlash).append(":");
        if (ssp != null) {
            buffer.append(ssp);
        } else if (userInfo != null || host != null || port != -1) {
            buffer.append("//");
            if (userInfo != null)
                replaceParameter(paramMap, fromEncodedMap, isTemplate, userInfo, buffer, encodeSlash).append("@");
            if (host != null) {
                if ("".equals(host)) throw new RuntimeException("empty host name");
                replaceParameter(paramMap, fromEncodedMap, isTemplate, host, buffer, encodeSlash);
            }
            if (port != -1) buffer.append(":").append(Integer.toString(port));
        } else if (authority != null) {
            buffer.append("//");
            replaceParameter(paramMap, fromEncodedMap, isTemplate, authority, buffer, encodeSlash);
        }
        if (path != null) {
            StringBuffer tmp = new StringBuffer();
            replaceParameter(paramMap, fromEncodedMap, isTemplate, path, tmp, encodeSlash);
            String tmpPath = tmp.toString();
            // If there is an authority, the path must start with '/'.
            if (userInfo != null || host != null) {
                if (!tmpPath.startsWith("/")) buffer.append("/");
            }
            buffer.append(tmpPath);
        }
        if (query != null) {
            buffer.append("?");
            replaceQueryStringParameter(paramMap, fromEncodedMap, isTemplate, query, buffer);
        }
        if (fragment != null) {
            buffer.append("#");
            replaceParameter(paramMap, fromEncodedMap, isTemplate, fragment, buffer, encodeSlash);
        }
        return buffer.toString();
    }

    // Substitutes only the named template parameter within 'string'.
    protected StringBuffer replacePathParameter(String name, String value, boolean isEncoded, String string, StringBuffer buffer, boolean encodeSlash) {
        Matcher matcher = createUriParamMatcher(string);
        while (matcher.find()) {
            String param = matcher.group(1);
            if (!param.equals(name)) continue;
            if (!isEncoded) {
                if (encodeSlash) value = Encode.encodePath(value);
                else value = Encode.encodePathSegment(value);
            } else {
                value = Encode.encodeNonCodes(value);
            }
            // if there is a $ then we must backslash it or it will screw up regex group substitution
            value = value.replace("$", "\\$");
            matcher.appendReplacement(buffer, value);
        }
        matcher.appendTail(buffer);
        return buffer;
    }

    // Matcher over {name} parameters; PathHelper (defined elsewhere) supplies
    // the pattern and masks nested curly braces first.
    public static Matcher createUriParamMatcher(String string) {
        return PathHelper.URI_PARAM_PATTERN.matcher(PathHelper.replaceEnclosedCurlyBraces(string));
    }

    // Substitutes every {name} in 'string' from paramMap; in template mode a
    // missing parameter is left as-is instead of raising.
    protected StringBuffer replaceParameter(Map<String, ?> paramMap, boolean fromEncodedMap, boolean isTemplate, String string, StringBuffer buffer, boolean encodeSlash) {
        Matcher matcher = createUriParamMatcher(string);
        while (matcher.find()) {
            String param = matcher.group(1);
            Object valObj = paramMap.get(param);
            if (valObj == null && !isTemplate) {
                throw new IllegalArgumentException("NULL value for template parameter: " + param);
            } else if (valObj == null && isTemplate) {
                matcher.appendReplacement(buffer, matcher.group());
                continue;
            }
            String value = valObj.toString();
            if (value != null) {
                if (!fromEncodedMap) {
                    if (encodeSlash) value = Encode.encodePathSegmentAsIs(value);
                    else value = Encode.encodePathAsIs(value);
                } else {
                    if (encodeSlash) value = Encode.encodePathSegmentSaveEncodings(value);
                    else value = Encode.encodePathSaveEncodings(value);
                }
                matcher.appendReplacement(buffer, Matcher.quoteReplacement(value));
            } else {
                throw new IllegalArgumentException("path param " + param + " has not been provided by the parameter map");
            }
        }
        matcher.appendTail(buffer);
        return buffer;
    }

    // Same as replaceParameter but with query-parameter encoding rules.
    protected StringBuffer replaceQueryStringParameter(Map<String, ?> paramMap, boolean fromEncodedMap, boolean isTemplate, String string, StringBuffer buffer) {
        Matcher matcher = createUriParamMatcher(string);
        while (matcher.find()) {
            String param = matcher.group(1);
            Object valObj = paramMap.get(param);
            if (valObj == null && !isTemplate) {
                throw new IllegalArgumentException("NULL value for template parameter: " + param);
            } else if (valObj == null && isTemplate) {
                matcher.appendReplacement(buffer, matcher.group());
                continue;
            }
            String value = valObj.toString();
            if (value != null) {
                if (!fromEncodedMap) {
                    value = Encode.encodeQueryParamAsIs(value);
                } else {
                    value = Encode.encodeQueryParamSaveEncodings(value);
                }
                matcher.appendReplacement(buffer, value);
            } else {
                throw new IllegalArgumentException("path param " + param + " has not been provided by the parameter map");
            }
        }
        matcher.appendTail(buffer);
        return buffer;
    }

    /**
     * Return a unique order list of path params
     *
     * @return
     */
    public List<String> getPathParamNamesInDeclarationOrder() {
        List<String> params = new ArrayList<String>();
        HashSet<String> set = new HashSet<String>();
        if (scheme != null) addToPathParamList(params, set, scheme);
        if (userInfo != null) addToPathParamList(params, set, userInfo);
        if (host != null) addToPathParamList(params, set, host);
        if (path != null) addToPathParamList(params, set, path);
        if (query != null) addToPathParamList(params, set, query);
        if (fragment != null) addToPathParamList(params, set, fragment);

        return params;
    }

    // Appends the {name} parameters found in 'string' to params, de-duplicated via 'set'.
    private void addToPathParamList(List<String> params, HashSet<String> set, String string) {
        Matcher matcher = PathHelper.URI_PARAM_PATTERN.matcher(PathHelper.replaceEnclosedCurlyBraces(string));
        while (matcher.find()) {
            String param = matcher.group(1);
            if (set.contains(param)) continue;
            else {
                set.add(param);
                params.add(param);
            }
        }
    }

    public URI build(Object... values) throws IllegalArgumentException {
        if (values == null) throw new IllegalArgumentException("values parameter is null");
        return buildFromValues(true, false, values);
    }

    // Maps positional values onto the template parameters in declaration order.
    protected URI buildFromValues(boolean encodeSlash, boolean encoded, Object... values) {
        List<String> params = getPathParamNamesInDeclarationOrder();
        if (values.length < params.size())
            throw new IllegalArgumentException("You did not supply enough values to fill path parameters");

        Map<String, Object> pathParams = new HashMap<String, Object>();
        for (int i = 0; i < params.size(); i++) {
            String pathParam = params.get(i);
            Object val = values[i];
            if (val == null) throw new IllegalArgumentException("A value was null");
            pathParams.put(pathParam, val.toString());
        }
        String buf = null;
        try {
            buf = buildString(pathParams, encoded, false, encodeSlash);
            return new URI(buf);
            //return URI.create(buf);
        } catch (Exception e) {
            throw new RuntimeException("Failed to create URI: " + buf, e);
        }
    }

    public KeycloakUriBuilder matrixParam(String name, Object... values) throws IllegalArgumentException {
        if (name == null) throw new IllegalArgumentException("name parameter is null");
        if (values == null) throw new IllegalArgumentException("values parameter is null");
        if (path == null) path = "";
        for (Object val : values) {
            if (val == null) throw new IllegalArgumentException("null value");
            path += ";" + Encode.encodeMatrixParam(name) + "=" + Encode.encodeMatrixParam(val.toString());
        }
        return this;
    }

    private static final Pattern PARAM_REPLACEMENT = Pattern.compile("_resteasy_uri_parameter");

    public KeycloakUriBuilder queryParam(String name, Object... values) throws IllegalArgumentException {
        if (name == null) throw new IllegalArgumentException("name parameter is null");
        if (values == null) throw new IllegalArgumentException("values parameter is null");
        for (Object value : values) {
            if (value == null) throw new IllegalArgumentException("A passed in value was null");
            if (query == null) query = "";
            else query += "&";
            query += Encode.encodeQueryParam(name) + "=" + Encode.encodeQueryParam(value.toString());
        }
        return this;
    }

    // Drops every existing occurrence of 'name' from the query string, then
    // re-appends the supplied values (if any).
    public KeycloakUriBuilder replaceQueryParam(String name, Object... values) throws IllegalArgumentException {
        if (name == null) throw new IllegalArgumentException("name parameter is null");
        if (query == null || query.equals("")) {
            if (values != null) return queryParam(name, values);
            return this;
        }

        String[] params = query.split("&");
        query = null;

        String replacedName = Encode.encodeQueryParam(name);

        for (String param : params) {
            int pos = param.indexOf('=');
            if (pos >= 0) {
                String paramName = param.substring(0, pos);
                if (paramName.equals(replacedName)) continue;
            } else {
                if (param.equals(replacedName)) continue;
            }
            if (query == null) query = "";
            else query += "&";
            query += param;
        }
        // don't set values if values is null
        if (values == null) return this;
        return queryParam(name, values);
    }

    public String getHost() {
        return host;
    }

    public String getScheme() {
        return scheme;
    }

    public int getPort() {
        return port;
    }

    public String getUserInfo() {
        return userInfo;
    }

    public String getPath() {
        return path;
    }

    public String getQuery() {
        return query;
    }

    public String getFragment() {
        return fragment;
    }

    public KeycloakUriBuilder segment(String... segments) throws IllegalArgumentException {
        if (segments == null) throw new IllegalArgumentException("segments parameter was null");
        for (String segment : segments) {
            if (segment == null) throw new IllegalArgumentException("A segment is null");
            path(Encode.encodePathSegment(segment));
        }
        return this;
    }

    public KeycloakUriBuilder replacePath(String path) {
        if (path == null) {
            this.path = null;
            return this;
        }
        this.path = Encode.encodePath(path);
        return this;
    }

    public URI build(Object[] values, boolean encodeSlashInPath) throws IllegalArgumentException {
        if (values == null) throw new IllegalArgumentException("values param is null");
        return buildFromValues(encodeSlashInPath, false, values);
    }

    // Renders the current state with {name} placeholders left intact.
    public String toTemplate() {
        return buildString(new HashMap<String, Object>(), true, true, true);
    }

    public KeycloakUriBuilder resolveTemplate(String name, Object value) throws IllegalArgumentException {
        if (name == null) throw new IllegalArgumentException("name param is null");
        if (value == null) throw new IllegalArgumentException("value param is null");
        HashMap<String, Object> vals = new HashMap<String, Object>();
        vals.put(name, value);
        return resolveTemplates(vals);
    }

    public KeycloakUriBuilder resolveTemplates(Map<String, Object> templateValues) throws IllegalArgumentException {
        if (templateValues == null) throw new IllegalArgumentException("templateValues param null");
        String str = buildString(templateValues, false, true, true);
        return fromTemplate(str);
    }

    public KeycloakUriBuilder resolveTemplate(String name, Object value, boolean encodeSlashInPath) throws IllegalArgumentException {
        if (name == null) throw new IllegalArgumentException("name param is null");
        if (value == null) throw new IllegalArgumentException("value param is null");
        HashMap<String, Object> vals = new HashMap<String, Object>();
        vals.put(name, value);
        String str = buildString(vals, false, true, encodeSlashInPath);
        return fromTemplate(str);
    }

    public KeycloakUriBuilder resolveTemplates(Map<String, Object> templateValues, boolean encodeSlashInPath) throws IllegalArgumentException {
        if (templateValues == null) throw new IllegalArgumentException("templateValues param null");
        String str = buildString(templateValues, false, true, encodeSlashInPath);
        return fromTemplate(str);
    }

    public KeycloakUriBuilder resolveTemplatesFromEncoded(Map<String, Object> templateValues) throws IllegalArgumentException {
        if (templateValues == null) throw new IllegalArgumentException("templateValues param null");
        String str = buildString(templateValues, true, true, true);
        return fromTemplate(str);
    }
}
apache-2.0
GaneshSPatil/gocd
server/src/test-integration/java/com/thoughtworks/go/server/dao/JobInstanceSqlMapDaoIntegrationTest.java
54290
/* * Copyright 2021 ThoughtWorks, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.thoughtworks.go.server.dao; import ch.qos.logback.classic.Level; import com.thoughtworks.go.config.Agents; import com.thoughtworks.go.config.CaseInsensitiveString; import com.thoughtworks.go.config.GoConfigDao; import com.thoughtworks.go.config.PipelineConfig; import com.thoughtworks.go.config.elastic.ClusterProfile; import com.thoughtworks.go.config.elastic.ElasticProfile; import com.thoughtworks.go.domain.*; import com.thoughtworks.go.domain.config.ConfigurationKey; import com.thoughtworks.go.domain.config.ConfigurationProperty; import com.thoughtworks.go.domain.config.ConfigurationValue; import com.thoughtworks.go.helper.BuildPlanMother; import com.thoughtworks.go.helper.JobInstanceMother; import com.thoughtworks.go.helper.PipelineMother; import com.thoughtworks.go.server.cache.GoCache; import com.thoughtworks.go.server.service.InstanceFactory; import com.thoughtworks.go.server.service.JobInstanceService; import com.thoughtworks.go.server.service.ScheduleService; import com.thoughtworks.go.server.transaction.SqlMapClientTemplate; import com.thoughtworks.go.server.transaction.TransactionTemplate; import com.thoughtworks.go.server.ui.SortOrder; import com.thoughtworks.go.util.GoConfigFileHelper; import com.thoughtworks.go.util.LogFixture; import com.thoughtworks.go.util.TimeProvider; import com.thoughtworks.go.util.command.EnvironmentVariableContext; import 
org.assertj.core.api.Assertions; import org.hamcrest.Matchers; import org.joda.time.DateTime; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.dao.DataRetrievalFailureException; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit.jupiter.SpringExtension; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import java.sql.SQLException; import java.util.*; import static com.thoughtworks.go.helper.JobInstanceMother.*; import static com.thoughtworks.go.helper.ModificationsMother.modifySomeFiles; import static com.thoughtworks.go.server.dao.PersistentObjectMatchers.hasSameId; import static com.thoughtworks.go.util.DataStructureUtils.a; import static com.thoughtworks.go.util.GoConstants.DEFAULT_APPROVED_BY; import static com.thoughtworks.go.util.LogFixture.logFixtureFor; import static java.util.stream.Collectors.toList; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.*; @ExtendWith(SpringExtension.class) @ContextConfiguration(locations = { "classpath:/applicationContext-global.xml", "classpath:/applicationContext-dataLocalAccess.xml", "classpath:/testPropertyConfigurer.xml", "classpath:/spring-all-servlet.xml", }) public class JobInstanceSqlMapDaoIntegrationTest { @Autowired private JobInstanceSqlMapDao jobInstanceDao; @Autowired private JobInstanceService jobInstanceService; @Autowired private EnvironmentVariableDao environmentVariableDao; @Autowired private PipelineDao pipelineDao; @Autowired private DatabaseAccessHelper dbHelper; @Autowired private StageDao stageDao; @Autowired private GoCache goCache; @Autowired private 
TransactionTemplate transactionTemplate; @Autowired private ScheduleService scheduleService; @Autowired private GoConfigDao goConfigDao; @Autowired private InstanceFactory instanceFactory; private GoConfigFileHelper configHelper = new GoConfigFileHelper(); private long stageId; private static final String JOB_NAME = "functional"; private static final String JOB_NAME_IN_DIFFERENT_CASE = "FUnctiONAl"; private final String projectOne = "project1"; private static final String STAGE_NAME = "mingle"; private static final String PIPELINE_NAME = "pipeline"; private PipelineConfig pipelineConfig; private Pipeline savedPipeline; private Stage savedStage; private static final Date MOST_RECENT_DATE = new DateTime().plusMinutes(20).toDate(); private int counter; private static final String OTHER_JOB_NAME = "unit"; private DefaultSchedulingContext schedulingContext; private SqlMapClientTemplate actualSqlClientTemplate; @BeforeEach public void setUp() throws Exception { dbHelper.onSetUp(); goCache.clear(); pipelineConfig = PipelineMother.withSingleStageWithMaterials(PIPELINE_NAME, STAGE_NAME, BuildPlanMother.withBuildPlans(JOB_NAME, OTHER_JOB_NAME)); schedulingContext = new DefaultSchedulingContext(DEFAULT_APPROVED_BY); savedPipeline = instanceFactory.createPipelineInstance(pipelineConfig, modifySomeFiles(pipelineConfig), schedulingContext, "md5-test", new TimeProvider()); dbHelper.savePipelineWithStagesAndMaterials(savedPipeline); actualSqlClientTemplate = jobInstanceDao.getSqlMapClientTemplate(); savedStage = savedPipeline.getFirstStage(); stageId = savedStage.getId(); counter = savedPipeline.getFirstStage().getCounter(); JobInstance job = savedPipeline.getStages().first().getJobInstances().first(); job.setIgnored(true); goCache.clear(); configHelper.usingCruiseConfigDao(goConfigDao); configHelper.onSetUp(); } @AfterEach public void teardown() throws Exception { jobInstanceDao.setSqlMapClientTemplate(actualSqlClientTemplate); dbHelper.onTearDown(); configHelper.onTearDown(); } 
@Test public void shouldSaveAndRetrieveIncompleteBuild() { JobInstance expected = scheduled(JOB_NAME, new Date()); expected = jobInstanceDao.save(stageId, expected); assertThat(expected.getId(), is(not(0L))); JobInstance actual = jobInstanceDao.buildByIdWithTransitions(expected.getId()); assertThat(actual, hasSameId(expected)); } @Test public void shouldGetBuildIngBuildAsMostRecentBuildByPipelineLabelAndStageCounter() { JobInstance expected = JobInstanceMother.building(JOB_NAME); expected.setScheduledDate(MOST_RECENT_DATE); expected = jobInstanceDao.save(stageId, expected); JobInstance actual = jobInstanceDao.mostRecentJobWithTransitions( new JobIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), savedPipeline.getLabel(), STAGE_NAME, String.valueOf(counter), JOB_NAME)); assertThat(actual.getId(), is(expected.getId())); assertThat("JobInstance should match", actual.getId(), is(expected.getId())); assertThat(actual.getTransitions(), is(expected.getTransitions())); } @Test public void shouldGetCompletedBuildAsMostRecentBuildByPipelineLabelAndStageCounter() { JobInstance expected = JobInstanceMother.completed(JOB_NAME, JobResult.Unknown); expected.setScheduledDate(MOST_RECENT_DATE); expected = jobInstanceDao.save(stageId, expected); JobInstance actual = jobInstanceDao.mostRecentJobWithTransitions( new JobIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), savedPipeline.getLabel(), STAGE_NAME, String.valueOf(counter), JOB_NAME)); assertThat("JobInstance should match", actual.getId(), is(expected.getId())); } @Test public void shouldGetAllRunningJobs() { //setup schedules 2 jobs, keeping it as is. 
List<JobInstance> runningJobs = jobInstanceDao.getRunningJobs(); assertThat(runningJobs.size(), is(2)); List<String> jobNames = runningJobs.stream().map(JobInstance::getName).collect(toList()); assertThat(jobNames, containsInAnyOrder(JOB_NAME, OTHER_JOB_NAME)); } @Test public void shouldNotIncludeCompletedJobsAsPartOfRunningJobs() { JobInstance completed = JobInstanceMother.completed("Completed_Job", JobResult.Unknown); completed.setScheduledDate(MOST_RECENT_DATE); completed = jobInstanceDao.save(stageId, completed); List<JobInstance> runningJobs = jobInstanceDao.getRunningJobs(); List<String> jobNames = runningJobs.stream().map(JobInstance::getName).collect(toList()); assertThat(runningJobs.size(), is(2)); assertThat(jobNames, containsInAnyOrder(JOB_NAME, OTHER_JOB_NAME)); assertThat(runningJobs.contains(completed), is(false)); } @Test public void shouldFindJobByPipelineCounterWhenTwoPipelinesWithSameLabel() { pipelineConfig.setLabelTemplate("fixed-label"); Pipeline oldPipeline = createNewPipeline(pipelineConfig); Pipeline newPipeline = createNewPipeline(pipelineConfig); JobInstance expected = oldPipeline.getFirstStage().getJobInstances().first(); JobInstance actual = jobInstanceDao.mostRecentJobWithTransitions( new JobIdentifier(oldPipeline, oldPipeline.getFirstStage(), expected)); assertThat(actual.getId(), is(expected.getId())); } private Pipeline createNewPipeline(PipelineConfig pipelineConfig) { Pipeline pipeline = instanceFactory.createPipelineInstance(pipelineConfig, modifySomeFiles(pipelineConfig), new DefaultSchedulingContext( DEFAULT_APPROVED_BY), "md5-test", new TimeProvider()); dbHelper.savePipelineWithStagesAndMaterials(pipeline); return pipeline; } @Test public void shouldFindJobIdByPipelineCounter() { long actual = jobInstanceDao.findOriginalJobIdentifier(new StageIdentifier(savedPipeline, savedStage), JOB_NAME).getBuildId(); assertThat(actual, is(savedStage.getJobInstances().getByName(JOB_NAME).getId())); } @Test public void 
shouldInvalidateSessionAndFetchJobIdentifier_WhenNewJobIsInserted() { configHelper.addPipeline(pipelineConfig); configHelper.turnOffSecurity(); StageIdentifier stageIdentifier = new StageIdentifier(savedPipeline.getName(), savedPipeline.getCounter(), savedPipeline.getLabel(), savedStage.getName(), Integer.toString(savedStage.getCounter() + 1)); assertThat(jobInstanceDao.findOriginalJobIdentifier(stageIdentifier, JOB_NAME), is(nullValue())); dbHelper.passStage(savedStage); Stage stage = scheduleService.rerunStage(savedPipeline.getName(), savedPipeline.getCounter(), savedStage.getName()); JobIdentifier actual = jobInstanceDao.findOriginalJobIdentifier(stageIdentifier, JOB_NAME); assertThat(actual, is(notNullValue())); assertThat(actual, is(new JobIdentifier(stageIdentifier, JOB_NAME, stage.getFirstJob().getId()))); } @Test public void shouldFindJobIdByPipelineLabel() { long actual = jobInstanceDao.findOriginalJobIdentifier(new StageIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), null, STAGE_NAME, String.valueOf(counter)), JOB_NAME).getBuildId(); assertThat(actual, is(savedStage.getJobInstances().getByName(JOB_NAME).getId())); } @Test public void findByJobIdShouldBeJobNameCaseAgnostic() { long actual = jobInstanceDao.findOriginalJobIdentifier(new StageIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), null, STAGE_NAME, String.valueOf(counter)), JOB_NAME_IN_DIFFERENT_CASE).getBuildId(); assertThat(actual, is(savedStage.getJobInstances().getByName(JOB_NAME).getId())); } @Test public void findByJobIdShouldLoadOriginalJobWhenCopiedForJobRerun() { Stage firstOldStage = savedPipeline.getStages().get(0); Stage newStage = instanceFactory.createStageForRerunOfJobs(firstOldStage, a(JOB_NAME), new DefaultSchedulingContext("loser", new Agents()), pipelineConfig.get(0), new TimeProvider(), "md5"); stageDao.saveWithJobs(savedPipeline, newStage); dbHelper.passStage(newStage); JobIdentifier oldJobIdentifierThroughOldJob = jobInstanceDao.findOriginalJobIdentifier(new 
StageIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), null, STAGE_NAME, String.valueOf(counter)), OTHER_JOB_NAME); JobIdentifier oldJobIdentifierThroughCopiedNewJob = jobInstanceDao.findOriginalJobIdentifier(new StageIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), null, STAGE_NAME, String.valueOf(newStage.getCounter())), OTHER_JOB_NAME); JobIdentifier newJobIdentifierThroughRerunJob = jobInstanceDao.findOriginalJobIdentifier(new StageIdentifier(PIPELINE_NAME, savedPipeline.getCounter(), null, STAGE_NAME, String.valueOf(newStage.getCounter())), JOB_NAME); assertThat(oldJobIdentifierThroughOldJob, is(firstOldStage.getJobInstances().getByName(OTHER_JOB_NAME).getIdentifier())); assertThat(oldJobIdentifierThroughCopiedNewJob, is(firstOldStage.getJobInstances().getByName(OTHER_JOB_NAME).getIdentifier())); assertThat(newJobIdentifierThroughRerunJob, is(newStage.getJobInstances().getByName(JOB_NAME).getIdentifier())); } @Test public void findJobIdShouldExcludeIgnoredJob() { JobInstance oldJob = savedStage.getJobInstances().getByName(JOB_NAME); jobInstanceDao.ignore(oldJob); JobInstance expected = JobInstanceMother.scheduled(JOB_NAME); expected = jobInstanceDao.save(stageId, expected); long actual = jobInstanceDao.findOriginalJobIdentifier(new StageIdentifier(savedPipeline, savedStage), JOB_NAME).getBuildId(); assertThat(actual, is(expected.getId())); } @Test public void shouldFindAllInstancesOfJobsThatAreRunOnAllAgents() { List<JobInstance> before = stageDao.mostRecentJobsForStage(PIPELINE_NAME, STAGE_NAME); String uuid1 = UUID.randomUUID().toString(); String uuid2 = UUID.randomUUID().toString(); JobInstance instance1 = savedJobForAgent(JOB_NAME + "-" + uuid1, uuid1, true, false); JobInstance instance2 = savedJobForAgent(JOB_NAME + "-" + uuid2, uuid2, true, false); List<JobInstance> after = stageDao.mostRecentJobsForStage(PIPELINE_NAME, STAGE_NAME); after.removeAll(before); assertThat(after.toArray(), hasItemInArray(hasProperty("name", 
is(instance1.getName())))); assertThat(after.toArray(), hasItemInArray(hasProperty("name", is(instance2.getName())))); assertThat("Expected 2 but got " + after, after.size(), is(2)); } @Test public void shouldFindAllInstancesOfJobsThatAreRunMultipleInstance() { List<JobInstance> before = stageDao.mostRecentJobsForStage(PIPELINE_NAME, STAGE_NAME); JobInstance instance1 = savedJobForAgent(RunMultipleInstance.CounterBasedJobNameGenerator.appendMarker("job", 1), null, false, true); JobInstance instance2 = savedJobForAgent(RunMultipleInstance.CounterBasedJobNameGenerator.appendMarker("job", 2), null, false, true); List<JobInstance> after = stageDao.mostRecentJobsForStage(PIPELINE_NAME, STAGE_NAME); after.removeAll(before); assertThat("Expected 2 but got " + after, after.size(), is(2)); assertThat(after.toArray(), hasItemInArray(hasProperty("name", is(instance1.getName())))); assertThat(after.toArray(), hasItemInArray(hasProperty("name", is(instance2.getName())))); } @Test public void shouldLoadOldestBuild() { JobStateTransition jobStateTransition = jobInstanceDao.oldestBuild(); assertThat(jobStateTransition.getId(), is(stageDao.stageById(stageId).getJobInstances().first().getTransitions().first().getId())); } private JobInstance savedJobForAgent(final String jobName, final String uuid, final boolean runOnAllAgents, final boolean runMultipleInstance) { return (JobInstance) transactionTemplate.execute(new TransactionCallback() { @Override public Object doInTransaction(TransactionStatus status) { JobInstance jobInstance = scheduled(jobName, new DateTime().plusMinutes(1).toDate()); jobInstance.setRunOnAllAgents(runOnAllAgents); jobInstance.setRunMultipleInstance(runMultipleInstance); jobInstanceService.save(savedStage.getIdentifier(), stageId, jobInstance); jobInstance.changeState(JobState.Building); jobInstance.setAgentUuid(uuid); jobInstanceDao.updateStateAndResult(jobInstance); return jobInstance; } }); } @Test public void shouldThrowWhenNoBuildInstanceFound() { 
assertThatThrownBy(() -> jobInstanceDao.buildByIdWithTransitions(999)) .isInstanceOf(DataRetrievalFailureException.class); } @Test public void shouldReturnBuildInstanceIfItExists() { JobInstance jobInstance = JobInstanceMother.completed("Baboon", JobResult.Passed); JobInstance instance = jobInstanceDao.save(stageId, jobInstance); JobInstance actual = jobInstanceDao.buildByIdWithTransitions(instance.getId()); assertThat(actual.isNull(), is(false)); } @Test public void shouldLogStatusUpdatesOfCompletedJobs() { try (LogFixture logFixture = logFixtureFor(JobInstanceSqlMapDao.class, Level.DEBUG)) { JobInstance instance = runningJob("1"); completeJobs(instance); instance.schedule(); jobInstanceDao.updateStateAndResult(instance); assertThat(logFixture.getLog(), logFixture.contains(Level.WARN, "State change for a completed Job is not allowed."), is(true)); } } private JobInstance[] completeJobs(JobInstance... instances) { for (JobInstance instance : instances) { complete(instance); } return instances; } private JobInstance runningJob(final String name) { JobInstance jobInstance = JobInstanceMother.buildingInstance("pipeline", "stage", name, "1"); jobInstanceDao.save(stageId, jobInstance); return jobInstance; } private void complete(JobInstance jobInstance) { jobInstance.completing(JobResult.Passed); jobInstance.completed(new Date()); jobInstanceDao.updateStateAndResult(jobInstance); } @Test public void shouldUpdateBuildResult() { JobInstance jobInstance = JobInstanceMother.scheduled("Baboon"); jobInstanceDao.save(stageId, jobInstance); jobInstance.cancel(); jobInstanceDao.updateStateAndResult(jobInstance); JobInstance instance = jobInstanceDao.buildByIdWithTransitions(jobInstance.getId()); assertThat(instance.getResult(), is(JobResult.Cancelled)); jobInstance.fail(); jobInstanceDao.updateStateAndResult(jobInstance); instance = jobInstanceDao.buildByIdWithTransitions(jobInstance.getId()); assertThat(instance.getResult(), is(JobResult.Failed)); } @Test public void 
shouldDeleteJobPlanAssociatedEntities() { JobInstance jobInstance = JobInstanceMother.building("Baboon"); JobPlan jobPlan = JobInstanceMother.jobPlanWithAssociatedEntities(jobInstance.getName(), jobInstance.getId(), artifactPlans()); jobInstance.setPlan(jobPlan); jobInstanceDao.save(stageId, jobInstance); JobPlan jobPlanFromDb = jobInstanceDao.loadPlan(jobInstance.getId()); assertThat(jobPlanFromDb.getArtifactPlans(), is(jobPlan.getArtifactPlans())); assertThat(jobPlanFromDb.getResources(), is(jobPlan.getResources())); assertThat(jobPlanFromDb.getVariables(), is(jobPlan.getVariables())); assertThat(jobPlanFromDb.getElasticProfile(), is(jobPlan.getElasticProfile())); jobInstanceDao.deleteJobPlanAssociatedEntities(jobInstance); jobPlanFromDb = jobInstanceDao.loadPlan(jobInstance.getId()); assertThat(jobPlanFromDb.getArtifactPlans().size(), is(2)); assertThat(jobPlanFromDb.getArtifactPlans(), Matchers.containsInAnyOrder( new ArtifactPlan(ArtifactPlanType.unit, "unit", "unit"), new ArtifactPlan(ArtifactPlanType.unit, "integration", "integration") )); assertThat(jobPlanFromDb.getResources().size(), is(0)); assertThat(jobPlanFromDb.getVariables().size(), is(0)); assertThat(jobPlanFromDb.getElasticProfile(), is(nullValue())); } @Test public void shouldDeleteVariablesAttachedToJobAfterTheJobReschedules() { JobInstance jobInstance = JobInstanceMother.building("Baboon"); JobPlan jobPlan = JobInstanceMother.jobPlanWithAssociatedEntities(jobInstance.getName(), jobInstance.getId(), artifactPlans()); jobInstance.setPlan(jobPlan); jobInstanceDao.save(stageId, jobInstance); JobPlan jobPlanFromDb = jobInstanceDao.loadPlan(jobInstance.getId()); assertThat(jobPlanFromDb.getArtifactPlans().size(), is(4)); assertThat(jobPlanFromDb.getResources(), is(jobPlan.getResources())); assertThat(jobPlanFromDb.getVariables(), is(jobPlan.getVariables())); assertThat(jobPlanFromDb.getElasticProfile(), is(jobPlan.getElasticProfile())); jobInstance.setState(JobState.Rescheduled); 
jobInstanceDao.ignore(jobInstance); jobPlanFromDb = jobInstanceDao.loadPlan(jobInstance.getId()); assertThat(jobPlanFromDb.getArtifactPlans().size(), is(2)); assertThat(jobPlanFromDb.getArtifactPlans(), Matchers.containsInAnyOrder( new ArtifactPlan(ArtifactPlanType.unit, "unit", "unit"), new ArtifactPlan(ArtifactPlanType.unit, "integration", "integration") )); assertThat(jobPlanFromDb.getResources().size(), is(0)); assertThat(jobPlanFromDb.getVariables().size(), is(0)); assertThat(jobPlanFromDb.getElasticProfile(), is(nullValue())); } @Test public void shouldThrowIfItNotExists() { assertThatThrownBy(() -> jobInstanceDao.buildByIdWithTransitions(1)) .isInstanceOf(DataRetrievalFailureException.class); } @Test public void shouldNotGetJobsFromBeforeTheJobNameIsChanged() throws Exception { String oldName = "oldName"; createSomeJobs(oldName, 15); String newName = "newName"; createSomeJobs(newName, 10); JobInstances myinstances = jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, newName, 25); assertThat(myinstances.size(), is(10)); assertThat(myinstances.get(0).getName(), is(not(oldName))); assertThat(myinstances.get(0).getName(), is(newName)); } private long createSomeJobs(String jobName, int count) { long stageId = 0; for (int i = 0; i < count; i++) { Pipeline newPipeline = createNewPipeline(pipelineConfig); stageId = newPipeline.getFirstStage().getId(); JobInstance job = JobInstanceMother.completed(jobName, JobResult.Passed); jobInstanceDao.save(stageId, job); } return stageId; } private void createCopiedJobs(long stageId, String jobName, int count) { for (int i = 0; i < count; i++) { JobInstance job = JobInstanceMother.completed(jobName, JobResult.Failed); job.setOriginalJobId(1L); jobInstanceDao.save(stageId, job); } } @Test public void shouldGetMostRecentCompletedBuildsWhenTotalBuildsIsLessThan25() throws Exception { JobInstance jobInstance = JobInstanceMother.completed("shouldnotload", JobResult.Passed); jobInstanceDao.save(stageId, jobInstance); 
createSomeJobs(JOB_NAME, 3); JobInstances instances = jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 25); assertThat(instances.size(), is(3)); } @Test public void shouldLoadStageCounter() throws Exception { JobInstance jobInstance = JobInstanceMother.completed("shouldnotload", JobResult.Passed); jobInstanceDao.save(stageId, jobInstance); createSomeJobs(JOB_NAME, 3); JobInstances instances = jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 25); for (JobInstance instance : instances) { Pipeline pipeline = pipelineDao.pipelineWithMaterialsAndModsByBuildId(instance.getId()); String locator = pipeline.getName() + "/" + pipeline.getLabel() + "/" + savedStage.getName() + "/1/" + JOB_NAME; assertThat(instance.getIdentifier().buildLocator(), is(locator)); } } @Test public void shouldGet25Builds() throws Exception { createSomeJobs(JOB_NAME, 30); JobInstances instances = jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 25); assertThat(instances.size(), is(25)); for (JobInstance instance : instances) { assertThat(instance.getIdentifier().getBuildName(), is(JOB_NAME)); } } @Test public void shouldGet25Builds_AlthoughFirst5AreCopied() throws Exception { long stageId = createSomeJobs(JOB_NAME, 30); createCopiedJobs(stageId, JOB_NAME, 5); JobInstances instances = jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 25); assertThat(instances.size(), is(25)); for (JobInstance instance : instances) { assertThat(instance.getIdentifier().getBuildName(), is(JOB_NAME)); assertThat("Should not have retrieved copied-over jobs", instance.isCopy(), is(false)); } } @Test public void shouldGetMostRecentCompletedBuildsWhenTwoStagesWithIdenticalStageNamesAndBuildPlanNames() { Pipeline otherPipeline = PipelineMother.passedPipelineInstance(PIPELINE_NAME + "2", STAGE_NAME, JOB_NAME); dbHelper.savePipelineWithStagesAndMaterials(otherPipeline); for (int i = 0; i < 2; i++) { Pipeline completedPipeline = 
PipelineMother.passedPipelineInstance(PIPELINE_NAME, STAGE_NAME, JOB_NAME); dbHelper.savePipelineWithStagesAndMaterials(completedPipeline); } assertThat(jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 25).size(), is(2)); assertThat(jobInstanceDao.latestCompletedJobs(otherPipeline.getName(), STAGE_NAME, JOB_NAME, 25).size(), is(1)); } @Test public void shouldIgnoreBuildingBuilds() { JobInstance instance = JobInstanceMother.completed("shouldnotload", JobResult.Passed); jobInstanceDao.save(stageId, instance); JobInstance building = JobInstanceMother.building(JOB_NAME); JobInstance saved = jobInstanceDao.save(stageId, building); JobInstances instances = jobInstanceDao.latestCompletedJobs(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 25); assertThat(instances.size(), is(0)); } @Test public void shouldCorrectly_getJobHistoryCount_findJobHistoryPage() throws Exception { // has a scheduled job long stageId = createSomeJobs(JOB_NAME, 2); // create 4 instances completed, scheduled, completed, scheduled createCopiedJobs(stageId, JOB_NAME, 2); JobInstance shouldNotLoadInstance = JobInstanceMother.completed("shouldnotload", JobResult.Passed); // create job with a different name jobInstanceDao.save(stageId, shouldNotLoadInstance); JobInstance building = JobInstanceMother.building(JOB_NAME); // create a building job JobInstance saved = jobInstanceDao.save(stageId, building); int jobHistoryCount = jobInstanceDao.getJobHistoryCount(PIPELINE_NAME, STAGE_NAME, JOB_NAME); assertThat(jobHistoryCount, is(6)); JobInstances instances = jobInstanceDao.findJobHistoryPage(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 4, 0); assertThat(instances.size(), is(4)); assertThat(instances.get(0).getState(), is(JobState.Building)); assertThat(instances.get(1).getState(), is(JobState.Completed)); assertThat(instances.get(2).getState(), is(JobState.Scheduled)); assertThat(instances.get(3).getState(), is(JobState.Completed)); assertJobHistoryCorrectness(instances, JOB_NAME); instances = 
jobInstanceDao.findJobHistoryPage(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 4, 4); assertThat(instances.size(), is(2)); assertThat(instances.get(0).getState(), is(JobState.Scheduled)); assertThat(instances.get(1).getState(), is(JobState.Scheduled)); assertJobHistoryCorrectness(instances, JOB_NAME); } private void assertJobHistoryCorrectness(JobInstances instances, String jobName) { for (JobInstance instance : instances) { assertThat(instance.getIdentifier().getBuildName(), is(jobName)); assertThat("Should not have retrieved copied-over jobs", instance.isCopy(), is(false)); } } @Test public void shouldLoadRerunOfCounterValueForScheduledBuilds() { List<JobPlan> jobPlans = jobInstanceDao.orderedScheduledBuilds(); assertThat(jobPlans.size(), is(2)); assertThat(jobPlans.get(0).getIdentifier().getRerunOfCounter(), is(nullValue())); assertThat(jobPlans.get(1).getIdentifier().getRerunOfCounter(), is(nullValue())); dbHelper.passStage(savedStage); Stage stage = instanceFactory.createStageForRerunOfJobs(savedStage, a(JOB_NAME), schedulingContext, pipelineConfig.getStage(new CaseInsensitiveString(STAGE_NAME)), new TimeProvider(), "md5"); dbHelper.saveStage(savedPipeline, stage, stage.getOrderId() + 1); jobPlans = jobInstanceDao.orderedScheduledBuilds(); assertThat(jobPlans.size(), is(1)); assertThat(jobPlans.get(0).getIdentifier().getRerunOfCounter(), is(savedStage.getCounter())); } @Test public void shouldGetAllScheduledBuildsInOrder() { // in setup, we created 2 scheduled builds assertThat(jobInstanceDao.orderedScheduledBuilds().size(), is(2)); JobIdentifier jobIdentifier = new JobIdentifier(PIPELINE_NAME, "LABEL-1", STAGE_NAME, "1", JOB_NAME); long newestId = schedule(JOB_NAME, stageId, new Date(10001), jobIdentifier); long olderId = schedule(JOB_NAME, stageId, new Date(10000), jobIdentifier); long oldestId = schedule(JOB_NAME, stageId, new Date(999), jobIdentifier); List<JobPlan> jobPlans = jobInstanceDao.orderedScheduledBuilds(); assertThat(jobPlans.size(), is(5)); 
assertJobInstance(jobPlans.get(0), oldestId, PIPELINE_NAME, STAGE_NAME); assertJobInstance(jobPlans.get(1), olderId, PIPELINE_NAME, STAGE_NAME); assertJobInstance(jobPlans.get(2), newestId, PIPELINE_NAME, STAGE_NAME); } private long schedule(String jobName, long stageId, Date date, JobIdentifier jobIdentifier) { JobInstance newest = new JobInstance(jobName); newest.setScheduledDate(date); jobInstanceDao.save(stageId, newest); jobInstanceDao.save(newest.getId(), new DefaultJobPlan(new Resources(), new ArrayList<>(), -1, jobIdentifier, null, new EnvironmentVariables(), new EnvironmentVariables(), null, null)); return newest.getId(); } private void assertJobInstance(JobPlan actual, long expect, String pipelineName, String stageName) { assertThat(actual.getPipelineName(), is(pipelineName)); assertThat(actual.getStageName(), is(stageName)); assertThat("JobInstance should match", actual.getJobId(), is(expect)); } @Test public void shouldUpdateStateTransitions() { JobInstance expected = scheduled(JOB_NAME, new Date(1000)); jobInstanceDao.save(stageId, expected); JobInstance actual = jobInstanceDao.buildByIdWithTransitions(expected.getId()); assertThat(actual.getTransitions(), Matchers.iterableWithSize(1)); expected.changeState(JobState.Assigned); jobInstanceDao.updateStateAndResult(expected); actual = jobInstanceDao.buildByIdWithTransitions(expected.getId()); assertThat(actual.getTransitions(), Matchers.iterableWithSize(2)); for (JobStateTransition transition : actual.getTransitions()) { assertThat(transition.getStageId(), is(stageId)); } } @Test public void shouldUpdateBuildStatus() { JobInstance expected = scheduled(JOB_NAME); jobInstanceDao.save(stageId, expected); expected.changeState(JobState.Building); jobInstanceDao.updateStateAndResult(expected); JobInstance actual = jobInstanceDao.buildByIdWithTransitions(expected.getId()); assertThat(actual.getState(), is(JobState.Building)); assertThat(actual.getTransitions().size(), is(2)); } @Test public void 
shouldUpdateAssignedInfo() { JobInstance expected = scheduled(JOB_NAME); jobInstanceDao.save(stageId, expected); expected.changeState(JobState.Building); expected.setAgentUuid("uuid"); jobInstanceDao.updateAssignedInfo(expected); JobInstance actual = jobInstanceDao.buildByIdWithTransitions(expected.getId()); assertThat(actual.getState(), is(JobState.Building)); assertThat(actual.getTransitions().size(), is(2)); assertThat(actual.getAgentUuid(), is("uuid")); } @Test public void shouldUpdateCompletingInfo() { JobInstance expected = scheduled(JOB_NAME); jobInstanceDao.save(stageId, expected); expected.changeState(JobState.Completing); expected.setResult(JobResult.Failed); jobInstanceDao.updateStateAndResult(expected); JobInstance actual = jobInstanceDao.buildByIdWithTransitions(expected.getId()); assertThat(actual.getState(), is(JobState.Completing)); assertThat(actual.getTransitions().size(), is(2)); assertThat(actual.getResult(), is(JobResult.Failed)); } @Test public void shouldSaveTransitionsCorrectly() { JobInstance jobInstance = scheduled(projectOne, new Date(1)); jobInstance.completing(JobResult.Failed, new Date(3)); jobInstanceDao.save(stageId, jobInstance); JobInstance loaded = jobInstanceDao.buildByIdWithTransitions(jobInstance.getId()); JobStateTransitions actualTransitions = loaded.getTransitions(); assertThat(actualTransitions, Matchers.iterableWithSize(2)); assertThat(actualTransitions.first().getCurrentState(), is(JobState.Scheduled)); } @Test public void shouldSaveResources() { JobInstance instance = jobInstanceDao.save(stageId, new JobInstance(JOB_NAME)); instance.setIdentifier(new JobIdentifier(savedPipeline, savedStage, instance)); JobPlan plan = new DefaultJobPlan(new Resources("something"), new ArrayList<>(), instance.getId(), instance.getIdentifier(), null, new EnvironmentVariables(), new EnvironmentVariables(), null, null); jobInstanceDao.save(instance.getId(), plan); JobPlan retrieved = jobInstanceDao.loadPlan(plan.getJobId()); 
assertThat(retrieved, is(plan)); } @Test public void shouldSaveJobAgentMetadata_WhenClusterProfileIsAssociatedWithElasticAgentProfile() { JobInstance instance = jobInstanceDao.save(stageId, new JobInstance(JOB_NAME)); instance.setIdentifier(new JobIdentifier(savedPipeline, savedStage, instance)); ElasticProfile elasticProfile = new ElasticProfile("foo", "clusterId", Arrays.asList(new ConfigurationProperty(new ConfigurationKey("key"), new ConfigurationValue("value")))); ClusterProfile clusterProfile = new ClusterProfile("clusterId", "cd.go.elastic-agent:docker", Arrays.asList(new ConfigurationProperty(new ConfigurationKey("key"), new ConfigurationValue("value")))); JobPlan plan = new DefaultJobPlan(new Resources("something"), new ArrayList<>(), instance.getId(), instance.getIdentifier(), null, new EnvironmentVariables(), new EnvironmentVariables(), elasticProfile, clusterProfile); jobInstanceDao.save(instance.getId(), plan); JobPlan retrieved = jobInstanceDao.loadPlan(plan.getJobId()); assertThat(retrieved.getElasticProfile(), is(elasticProfile)); assertThat(retrieved.getClusterProfile(), is(clusterProfile)); } @Test public void shouldNotThrowUpWhenJobAgentMetadataIsNull() { JobInstance instance = jobInstanceDao.save(stageId, new JobInstance(JOB_NAME)); instance.setIdentifier(new JobIdentifier(savedPipeline, savedStage, instance)); ElasticProfile elasticProfile = null; JobPlan plan = new DefaultJobPlan(new Resources("something"), new ArrayList<>(), instance.getId(), instance.getIdentifier(), null, new EnvironmentVariables(), new EnvironmentVariables(), elasticProfile, null); jobInstanceDao.save(instance.getId(), plan); JobPlan retrieved = jobInstanceDao.loadPlan(plan.getJobId()); assertThat(retrieved.getElasticProfile(), is(elasticProfile)); } @Test public void shouldSaveEnvironmentVariables() { JobInstance instance = jobInstanceDao.save(stageId, new JobInstance(JOB_NAME)); instance.setIdentifier(new JobIdentifier(savedPipeline, savedStage, instance)); 
EnvironmentVariables variables = new EnvironmentVariables(); variables.add("VARIABLE_NAME", "variable value"); variables.add("TRIGGER_VAR", "junk val"); JobPlan plan = new DefaultJobPlan(new Resources(), new ArrayList<>(), instance.getId(), instance.getIdentifier(), null, variables, new EnvironmentVariables(), null, null); jobInstanceDao.save(instance.getId(), plan); environmentVariableDao.save(savedPipeline.getId(), EnvironmentVariableType.Trigger, environmentVariables("TRIGGER_VAR", "trigger val")); JobPlan retrieved = jobInstanceDao.loadPlan(plan.getJobId()); assertThat(retrieved.getVariables(), is(plan.getVariables())); EnvironmentVariableContext context = new EnvironmentVariableContext(); retrieved.applyTo(context); assertThat(context.getProperty("VARIABLE_NAME"), is("variable value")); assertThat(context.getProperty("TRIGGER_VAR"), is("trigger val")); } private EnvironmentVariables environmentVariables(String name, String value) { return new EnvironmentVariables(Arrays.asList(new EnvironmentVariable(name, value, false))); } @Test public void shouldLoadEnvironmentVariablesForScheduledJobs() { JobInstance newInstance = new JobInstance(JOB_NAME); newInstance.schedule(); JobInstance instance = jobInstanceDao.save(stageId, newInstance); instance.setIdentifier(new JobIdentifier(savedPipeline, savedStage, instance)); EnvironmentVariables variables = new EnvironmentVariables(); variables.add("VARIABLE_NAME", "variable value"); variables.add("TRIGGER_VAR", "junk val"); JobPlan plan = new DefaultJobPlan(new Resources(), new ArrayList<>(), instance.getId(), instance.getIdentifier(), null, variables, new EnvironmentVariables(), null, null); jobInstanceDao.save(instance.getId(), plan); environmentVariableDao.save(savedPipeline.getId(), EnvironmentVariableType.Trigger, environmentVariables("TRIGGER_VAR", "trigger val")); List<JobPlan> retrieved = jobInstanceDao.orderedScheduledBuilds(); JobPlan reloadedPlan = planForJob(retrieved, plan.getJobId()); 
EnvironmentVariableContext context = new EnvironmentVariableContext(); reloadedPlan.applyTo(context); assertThat(reloadedPlan.getVariables(), is(plan.getVariables())); assertThat(context.getProperty("VARIABLE_NAME"), is("variable value")); assertThat(context.getProperty("TRIGGER_VAR"), is("trigger val")); } private JobPlan planForJob(List<JobPlan> retrieved, long expectedJobId) { for (JobPlan loadedJobPlan : retrieved) { if (loadedJobPlan.getJobId() == expectedJobId) { return loadedJobPlan; } } return null; } @Test public void shouldLoadArtifactsAndResourcesForAssignment() { JobInstance instance = jobInstanceDao.save(stageId, new JobInstance(projectOne)); instance.setIdentifier(new JobIdentifier(savedPipeline, savedStage, instance)); Resources resources = new Resources("one, two, three"); JobPlan savedPlan = new DefaultJobPlan(resources, artifactPlans(), instance.getId(), instance.getIdentifier(), null, new EnvironmentVariables(), new EnvironmentVariables(), null, null); jobInstanceDao.save(instance.getId(), savedPlan); final List<JobPlan> planList = jobInstanceDao.orderedScheduledBuilds(); final List<JobPlan> plans = findPlans(planList, projectOne); assertThat(plans.size(), is(1)); assertThat(plans.get(0).getResources(), is(resources)); } @Test public void shouldLoadJobIdentifierForAssignment() { JobInstance jobInstance = scheduled(projectOne); jobInstanceDao.save(stageId, jobInstance); JobPlan job = findPlan(jobInstanceDao.orderedScheduledBuilds(), projectOne); assertThat(job.getIdentifier(), is(jobIdentifier(jobInstance))); } @Test public void shouldLoadAgentUuidForAssignment() { JobInstance jobInstance = scheduled(projectOne); jobInstance.setAgentUuid("uuid1"); jobInstanceDao.save(stageId, jobInstance); JobPlan job = findPlan(jobInstanceDao.orderedScheduledBuilds(), projectOne); assertThat(job.getAgentUuid(), is("uuid1")); } @Test public void shouldLoadRunOnAllAgentsForAssignment() { JobInstance jobInstance = scheduled(projectOne); 
jobInstance.setRunOnAllAgents(true); jobInstanceDao.save(stageId, jobInstance); JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(jobInstance.getId()); assertThat(reloaded.isRunOnAllAgents(), is(true)); } @Test public void shouldLoadRunMultipleInstanceForAssignment() { JobInstance jobInstance = scheduled(projectOne); jobInstance.setRunMultipleInstance(true); jobInstanceDao.save(stageId, jobInstance); JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(jobInstance.getId()); assertThat(reloaded.isRunMultipleInstance(), is(true)); } private JobIdentifier jobIdentifier(JobInstance jobInstance) { return new JobIdentifier(savedPipeline, savedStage, jobInstance); } private JobPlan findPlan(List<JobPlan> list, String jobName) { final List<JobPlan> planList = findPlans(list, jobName); if (planList.size() > 0) { return planList.get(0); } return null; } private List<JobPlan> findPlans(List<JobPlan> list, String jobName) { List<JobPlan> result = new ArrayList<>(); for (JobPlan buildNameBean : list) { if (jobName.equals(buildNameBean.getName())) { result.add(buildNameBean); } } return result; } @Test public void shouldGetLatestInProgressBuildByAgentUuid() { JobInstance buildingJob = building(projectOne, new Date(1)); final String uuid = "uuid"; buildingJob.setAgentUuid(uuid); jobInstanceDao.save(stageId, buildingJob); JobInstance completedJob = JobInstanceMother.completed("anotherBuild", JobResult.Passed); completedJob.setAgentUuid(uuid); jobInstanceDao.save(stageId, completedJob); JobInstance jobInstance = jobInstanceDao.getLatestInProgressBuildByAgentUuid(uuid); assertThat(jobInstance, hasSameId(buildingJob)); assertThat(jobInstance.getIdentifier(), is(jobIdentifier(jobInstance))); } @Test public void shouldGetInProgressJobs() { JobInstance buildingJob1 = building(projectOne, new Date(1)); buildingJob1.setAgentUuid("uuid1"); jobInstanceDao.save(stageId, buildingJob1); JobInstance buildingJob2 = building("project2", new Date(2)); 
buildingJob2.setAgentUuid("uuid2"); jobInstanceDao.save(stageId, buildingJob2); JobInstance buildingJob3 = building("project3", new Date(3)); buildingJob3.setAgentUuid("uuid3"); jobInstanceDao.save(stageId, buildingJob3); List<String> liveAgentIds = new ArrayList<String>() { { add("uuid1"); add("uuid2"); } }; JobInstances list = jobInstanceDao.findHungJobs(liveAgentIds); assertThat(list.size(), is(1)); JobInstance reloaded = list.get(0); assertThat(reloaded, hasSameId(buildingJob3)); assertThat(reloaded.getIdentifier(), is(jobIdentifier(buildingJob3))); } @Test public void shouldIgnore() { JobInstance instance = scheduled(projectOne); jobInstanceDao.save(stageId, instance); jobInstanceDao.ignore(instance); JobInstance reloaded = jobInstanceDao.buildByIdWithTransitions(instance.getId()); assertThat(reloaded.isIgnored(), is(true)); } @Test public void shouldGetCompletedJobsOnAgentForARange() { String agentUuid = "special_uuid"; JobInstance buildingJob = building("job1", new Date(1)); buildingJob.setAgentUuid(agentUuid); jobInstanceDao.save(stageId, buildingJob); JobInstance completedJob = completed("job2", JobResult.Passed, new Date(1)); completedJob.setAgentUuid(agentUuid); jobInstanceDao.save(stageId, completedJob); JobInstance cancelledJob = cancelled("job3"); cancelledJob.setAgentUuid("something_different");//Different UUID. 
Should not be considered jobInstanceDao.save(stageId, cancelledJob); JobInstance rescheduledJob = rescheduled("rescheduled", agentUuid); jobInstanceDao.save(stageId, rescheduledJob); jobInstanceDao.ignore(rescheduledJob); List<JobInstance> jobInstances = jobInstanceDao.completedJobsOnAgent(agentUuid, JobInstanceService.JobHistoryColumns.stage, SortOrder.ASC, 0, 10); assertThat(jobInstances.size(), is(2)); JobInstance actual = jobInstances.get(0); assertThat(actual.getName(), is(completedJob.getName())); completedJob.setIdentifier(actual.getIdentifier()); assertThat(actual, is(completedJob)); actual = jobInstances.get(1); assertThat(actual.getName(), is(rescheduledJob.getName())); rescheduledJob.setIdentifier(actual.getIdentifier()); assertThat(actual, is(rescheduledJob)); } @Test public void shouldGetTotalNumberOfCompletedJobsForAnAgent() { String agentUuid = "special_uuid"; JobInstance buildingJob = building("job1", new Date(1)); buildingJob.setAgentUuid(agentUuid); jobInstanceDao.save(stageId, buildingJob); JobInstance completedJob = completed("job2", JobResult.Passed, new Date(1)); completedJob.setAgentUuid(agentUuid); jobInstanceDao.save(stageId, completedJob); JobInstance rescheduledJob = rescheduled("rescheduled", agentUuid); jobInstanceDao.save(stageId, rescheduledJob); jobInstanceDao.ignore(rescheduledJob); JobInstance cancelledJob = cancelled("job3"); cancelledJob.setAgentUuid("something_different");//Different UUID. 
Should not be counted jobInstanceDao.save(stageId, cancelledJob); JobInstance simpleJob = failed("simpleJob"); simpleJob.setAgentUuid(agentUuid); jobInstanceDao.save(stageId, simpleJob); assertThat(jobInstanceDao.totalCompletedJobsOnAgent(agentUuid), is(3)); } @Test public void shouldGetJobInstanceBasedOnParametersProvided() throws SQLException { long stageId = createSomeJobs(JOB_NAME, 1); // create 2 instances completed, scheduled JobInstance jobInstance = jobInstanceDao.findJobInstance(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 1, 1); assertThat(jobInstance.isNull(), is(false)); } @Test public void shouldReturnNullJobInstanceWhenTheSaidCountersAreNotYetRun() throws SQLException { long stageId = createSomeJobs(JOB_NAME, 1); // create 2 instances completed, scheduled JobInstance jobInstance = jobInstanceDao.findJobInstance(PIPELINE_NAME, STAGE_NAME, JOB_NAME, 10, 10); assertThat(jobInstance.isNull(), is(true)); } private List<ArtifactPlan> artifactPlans() { List<ArtifactPlan> artifactPlans = new ArrayList<>(); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.file, "src", "dest")); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.file, "src1", "dest2")); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.unit, "unit", "unit")); artifactPlans.add(new ArtifactPlan(ArtifactPlanType.unit, "integration", "integration")); return artifactPlans; } @Test public void shouldReturnTheLatestAndOldestRunForGivenIdentifier() { JobInstances jobInstances = new JobInstances(); String pipelineName = PIPELINE_NAME + "-" + UUID.randomUUID(); pipelineConfig = PipelineMother.withSingleStageWithMaterials(pipelineName, STAGE_NAME, BuildPlanMother.withBuildPlans(JOB_NAME)); Pipeline newPipeline = createNewPipeline(pipelineConfig); jobInstances.add(newPipeline.getFirstStage().getFirstJob()); for (int i = 0; i < 2; i++) { stageId = newPipeline.getFirstStage().getId(); JobInstance scheduled = JobInstanceMother.completed(JOB_NAME); jobInstanceDao.save(stageId, scheduled); 
jobInstances.add(scheduled); } PipelineRunIdInfo runIdInfo = jobInstanceDao.getOldestAndLatestJobInstanceId(pipelineName, STAGE_NAME, JOB_NAME); assertThat(runIdInfo.getLatestRunId(), is(jobInstances.last().getId())); assertThat(runIdInfo.getOldestRunId(), is(jobInstances.first().getId())); } @Test public void findDetailedJobHistoryViaCursor_getLatestRecords() { JobInstances jobInstances = new JobInstances(); String pipelineName = PIPELINE_NAME + "-" + UUID.randomUUID(); pipelineConfig = PipelineMother.withSingleStageWithMaterials(pipelineName, STAGE_NAME, BuildPlanMother.withBuildPlans(JOB_NAME)); Pipeline newPipeline = createNewPipeline(pipelineConfig); jobInstances.add(newPipeline.getFirstStage().getFirstJob()); for (int i = 0; i < 4; i++) { stageId = newPipeline.getFirstStage().getId(); JobInstance scheduled = JobInstanceMother.completed(JOB_NAME); jobInstanceDao.save(stageId, scheduled); jobInstances.add(scheduled); } JobInstances history = jobInstanceDao.findDetailedJobHistoryViaCursor(pipelineName, STAGE_NAME, JOB_NAME, FeedModifier.Latest, 0, 3); Collections.reverse(jobInstances); assertThat(history.size(), is(3)); assertThat(history.get(0).getId(), is(jobInstances.get(0).getId())); assertThat(history.get(1).getId(), is(jobInstances.get(1).getId())); assertThat(history.get(2).getId(), is(jobInstances.get(2).getId())); } @Test public void findDetailedJobHistoryViaCursor_getRecordsAfterTheSpecifiedCursor() { //older records JobInstances jobInstances = new JobInstances(); String pipelineName = PIPELINE_NAME + "-" + UUID.randomUUID(); pipelineConfig = PipelineMother.withSingleStageWithMaterials(pipelineName, STAGE_NAME, BuildPlanMother.withBuildPlans(JOB_NAME)); Pipeline newPipeline = createNewPipeline(pipelineConfig); jobInstances.add(newPipeline.getFirstStage().getFirstJob()); for (int i = 0; i < 4; i++) { stageId = newPipeline.getFirstStage().getId(); JobInstance scheduled = JobInstanceMother.completed(JOB_NAME); jobInstanceDao.save(stageId, scheduled); 
jobInstances.add(scheduled); } JobInstances history = jobInstanceDao.findDetailedJobHistoryViaCursor(pipelineName, STAGE_NAME, JOB_NAME, FeedModifier.After, jobInstances.get(2).getId(), 3); Assertions.assertThat(history).hasSize(2); Assertions.assertThat(history.stream().map(JobInstance::getId).collect(toList())) .containsExactly(jobInstances.get(1).getId(), jobInstances.get(0).getId()); } @Test public void findDetailedJobHistoryViaCursor_getRecordsBeforeTheSpecifiedCursor() { //newer records JobInstances jobInstances = new JobInstances(); String pipelineName = PIPELINE_NAME + "-" + UUID.randomUUID(); pipelineConfig = PipelineMother.withSingleStageWithMaterials(pipelineName, STAGE_NAME, BuildPlanMother.withBuildPlans(JOB_NAME)); Pipeline newPipeline = createNewPipeline(pipelineConfig); jobInstances.add(newPipeline.getFirstStage().getFirstJob()); for (int i = 0; i < 4; i++) { stageId = newPipeline.getFirstStage().getId(); JobInstance scheduled = JobInstanceMother.completed(JOB_NAME); jobInstanceDao.save(stageId, scheduled); jobInstances.add(scheduled); } Collections.reverse(jobInstances); JobInstances history = jobInstanceDao.findDetailedJobHistoryViaCursor(pipelineName, STAGE_NAME, JOB_NAME, FeedModifier.Before, jobInstances.get(2).getId(), 3); Assertions.assertThat(history).hasSize(2); Assertions.assertThat(history.stream().map(JobInstance::getId).collect(toList())) .containsExactly(jobInstances.get(0).getId(), jobInstances.get(1).getId()); } }
apache-2.0
leonchen83/redis-cluster-watchdog
src/main/java/com/moilioncircle/redis/cluster/watchdog/message/RCmbMessage.java
736
/* * Copyright 2016-2018 Leon Chen * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.moilioncircle.redis.cluster.watchdog.message; /** * @author Leon Chen * @since 1.0.0 */ public interface RCmbMessage { }
apache-2.0
Zelldon/Texpad
Camunda-Texpad/src/main/java/de/zell/texpad/camunda/resources/TexpadServlet.java
1860
/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package de.zell.texpad.camunda.resources; import de.zell.texpad.camunda.entities.TexEntity; import java.util.logging.Logger; import javax.ws.rs.Consumes; import javax.ws.rs.InternalServerErrorException; import javax.ws.rs.POST; import javax.ws.rs.Path; import javax.ws.rs.Produces; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.UriBuilder; import org.camunda.bpm.BpmPlatform; import org.camunda.bpm.engine.ProcessEngine; import org.camunda.bpm.engine.runtime.ProcessInstance; /** * * @author Christopher Zell <christopher.zell@camunda.com> */ @Path(TexpadServlet.PROC_DEF_KEY) public class TexpadServlet { public static final String PROC_DEF_KEY = "Texpad"; private final UriBuilder builder = UriBuilder.fromResource(TexpadServlet.class); private static final Logger LOGGER = Logger.getLogger(TexpadServlet.class.getName()); @POST @Consumes(MediaType.APPLICATION_JSON) @Produces(MediaType.APPLICATION_JSON) public Response buildTexFile(TexEntity entity) { startProcessInstance(entity); return Response.created(builder.clone().path("").build()).build(); } private void startProcessInstance(TexEntity entity) { ProcessEngine engine = BpmPlatform.getProcessEngineService().getDefaultProcessEngine(); if (engine == null) throw new InternalServerErrorException("No process engine available!"); // ProcessDefinition processDefinition = engine.getRepositoryService().createProcessDefinitionQuery().processDefinitionKey(PROC_DEF_KEY).singleResult(); ProcessInstance instance = engine.getRuntimeService().startProcessInstanceByKey(PROC_DEF_KEY, entity.getAttributes()); } }
apache-2.0
FTFL02-ANDROID/Faravy
iCareFaravy/src/com/faravy/icare/AddCareCenterActivity.java
2976
package com.faravy.icare; import java.util.ArrayList; import android.app.ActionBar; import android.app.Activity; import android.content.Intent; import android.graphics.Color; import android.graphics.drawable.ColorDrawable; import android.os.Bundle; import android.view.Menu; import android.view.MenuItem; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.EditText; import android.widget.ImageButton; import android.widget.ListView; import android.widget.Toast; import com.faravy.adapter.DoctorAdapter; import com.faravy.database.DoctorDataSource; import com.faravy.database.HealthCenterDataSource; import com.faravy.modelclass.Doctor; import com.faravy.modelclass.HealthCenter; import com.faravy.modelclass.Profile; public class AddCareCenterActivity extends Activity { HealthCenterDataSource mDataSource = null; HealthCenter mHealthCenter; EditText mEtName; EditText mEtAddress; EditText mEtLatitude; EditText mEtLongitude; String mName; String mAddress; String mLatitude; String mLongitude; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_care_center); ActionBar ab = getActionBar(); ColorDrawable colorDrawable = new ColorDrawable( Color.parseColor("#0080FF")); ab.setBackgroundDrawable(colorDrawable); mEtName = (EditText) findViewById(R.id.addName); mEtAddress = (EditText) findViewById(R.id.addAddress); mEtLatitude = (EditText) findViewById(R.id.addLatitude); mEtLongitude = (EditText) findViewById(R.id.addLongitude); mDataSource = new HealthCenterDataSource(this); } @Override public boolean onCreateOptionsMenu(Menu menu) { // Inflate the menu; this adds items to the action bar if it is present. getMenuInflater().inflate(R.menu.add_profile, menu); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle action bar item clicks here. 
The action bar will // automatically handle clicks on the Home/Up button, so long // as you specify a parent activity in AndroidManifest.xml. int id = item.getItemId(); if (id == R.id.save) { save(); return true; } return super.onOptionsItemSelected(item); } private void save() { mName = mEtName.getText().toString(); mAddress = mEtAddress.getText().toString(); mLatitude = mEtLatitude.getText().toString(); mLongitude = mEtLongitude.getText().toString(); mHealthCenter = new HealthCenter(mName, mAddress, mLatitude, mLongitude); long inserted = mDataSource.insertData(mHealthCenter); if (inserted >= 0) { Toast.makeText(getApplicationContext(), "Data inserted", Toast.LENGTH_LONG).show(); Intent mIntent = new Intent(getApplicationContext(), DrawerActivity.class); startActivity(mIntent); finish(); } else { Toast.makeText(getApplicationContext(), "Insertion failed", Toast.LENGTH_LONG).show(); } } }
apache-2.0
horie-t/anago
src/com/akashi_tech/anago/succinct/package-info.java
732
/* * Copyright 2015 HORIE Tetsuya. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Succinct data structure package. */ /** * @author tetsuya * */ package com.akashi_tech.anago.succinct;
apache-2.0
aws/aws-sdk-java
aws-java-sdk-auditmanager/src/main/java/com/amazonaws/services/auditmanager/model/transform/EvidenceInsightsJsonUnmarshaller.java
3335
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.auditmanager.model.transform;

import java.math.*;

import javax.annotation.Generated;

import com.amazonaws.services.auditmanager.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;

import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;

/**
 * EvidenceInsights JSON Unmarshaller
 *
 * <p>Code-generated streaming unmarshaller: walks the Jackson token stream and
 * populates an {@link EvidenceInsights} from the three expected integer fields.
 * Do not hand-edit the parsing loop — it is the generator's standard pattern.
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class EvidenceInsightsJsonUnmarshaller implements Unmarshaller<EvidenceInsights, JsonUnmarshallerContext> {

    /**
     * Reads one EvidenceInsights object from the current position of the
     * streaming context.
     *
     * @param context streaming JSON parse context positioned at (or just
     *                before) the object to read
     * @return the populated EvidenceInsights, or {@code null} if the current
     *         JSON value is an explicit null
     */
    public EvidenceInsights unmarshall(JsonUnmarshallerContext context) throws Exception {
        EvidenceInsights evidenceInsights = new EvidenceInsights();
        // Depth bookkeeping: fields of this object live exactly one level
        // below the depth at which parsing started.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL) {
            // JSON null for the whole structure maps to a null object.
            return null;
        }

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                // Each recognized field name advances the stream and delegates
                // to the Integer unmarshaller for its value.
                if (context.testExpression("noncompliantEvidenceCount", targetDepth)) {
                    context.nextToken();
                    evidenceInsights.setNoncompliantEvidenceCount(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("compliantEvidenceCount", targetDepth)) {
                    context.nextToken();
                    evidenceInsights.setCompliantEvidenceCount(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
                if (context.testExpression("inconclusiveEvidenceCount", targetDepth)) {
                    context.nextToken();
                    evidenceInsights.setInconclusiveEvidenceCount(context.getUnmarshaller(Integer.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the stream closes back to (or above) the depth at
                // which this object started.
                if (context.getLastParsedParentElement() == null || context.getLastParsedParentElement().equals(currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return evidenceInsights;
    }

    // Lazily created shared instance. NOTE(review): initialization is not
    // thread-safe (no synchronization); this matches the AWS SDK generator's
    // standard pattern — at worst two instances are created, which is benign
    // because the unmarshaller is stateless.
    private static EvidenceInsightsJsonUnmarshaller instance;

    public static EvidenceInsightsJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new EvidenceInsightsJsonUnmarshaller();
        return instance;
    }
}
apache-2.0
google/depan
DepanNodeUI/prod/src/com/google/devtools/depan/nodes/filters/persistence/NodeKindDocConfig.java
1275
/* * Copyright 2017 The Depan Project Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.depan.nodes.filters.persistence; import com.google.devtools.depan.persistence.AbstractDocXmlPersist; import com.google.devtools.depan.persistence.plugins.ResourceDocumentConfig; import org.eclipse.core.resources.IFile; /** * @author <a href="mailto:leeca@pnambic.com">Lee Carver</a> */ public class NodeKindDocConfig implements ResourceDocumentConfig { @Override public boolean forExtension(String docExt) { return NodeKindResources.EXTENSION.equals(docExt); } @Override public AbstractDocXmlPersist<?> getDocXmlPersist( IFile file, boolean readable) { return NodeKindDocXmlPersist.build(readable) ; } }
apache-2.0
everttigchelaar/camel-svn
camel-core/src/test/java/org/apache/camel/processor/ChannelTest.java
2427
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.processor;

import org.apache.camel.ContextTestSupport;
import org.apache.camel.Exchange;
import org.apache.camel.Processor;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;

/**
 * Verifies that the channel's dead-letter error handler redelivers a failed
 * exchange: the route's processor throws once (first invocation only), the
 * error handler retries, and both messages still reach mock:result.
 *
 * @version
 */
public class ChannelTest extends ContextTestSupport {

    // Shared across the route's anonymous Processor and the test method;
    // static because the processor instance is created inside the route.
    private static int counter;

    @Override
    protected void setUp() throws Exception {
        disableJMX();
        super.setUp();
    }

    public void testChannel() throws Exception {
        // Reset so exactly the first process() call throws.
        counter = 0;

        MockEndpoint mock = getMockEndpoint("mock:result");
        // Both bodies must arrive: the first after a redelivery, the second directly.
        mock.expectedMessageCount(2);

        template.sendBody("direct:start", "Hello World");
        template.sendBody("direct:start", "Bye World");

        assertMockEndpointsSatisfied();
    }

    @Override
    protected RouteBuilder createRouteBuilder() throws Exception {
        return new RouteBuilder() {
            @Override
            public void configure() throws Exception {
                // Dead-letter channel with up to 2 immediate redeliveries;
                // stack traces suppressed to keep the test log quiet.
                errorHandler(deadLetterChannel("mock:dead").maximumRedeliveries(2).redeliveryDelay(0).logStackTrace(false));

                from("direct:start")
                    .process(new Processor() {
                        public void process(Exchange exchange) throws Exception {
                            // Fail only the very first invocation; the error
                            // handler's redelivery then succeeds.
                            if (counter++ < 1) {
                                throw new IllegalArgumentException("Damn");
                            }
                        }
                    }).delay(10).to("mock:result"); // delay(10): presumably 10 ms — confirm against Camel delay() docs
            }
        };
    }
}
apache-2.0
mixed2004/borisovm
chapter_002/src/main/java/ru/job4j/strategypattern/package-info.java
164
/**
 * Package for the Strategy design pattern exercise.
 * <p>
 * NOTE(review): no package members are visible from this file; the summary is
 * derived from the package name only.
 *
 * @author Maxim Borisov (mailto: mixed2004@mail.ru)
 * @version 1
 * @since 25.02.2017
 */
package ru.job4j.strategypattern;
apache-2.0
digitalpetri/opc-ua-stack
stack-core/src/main/java/com/digitalpetri/opcua/stack/core/types/structured/CreateSessionRequest.java
6487
/*
 * Copyright 2015 Kevin Herron
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.digitalpetri.opcua.stack.core.types.structured;

import com.digitalpetri.opcua.stack.core.Identifiers;
import com.digitalpetri.opcua.stack.core.serialization.DelegateRegistry;
import com.digitalpetri.opcua.stack.core.serialization.UaDecoder;
import com.digitalpetri.opcua.stack.core.serialization.UaEncoder;
import com.digitalpetri.opcua.stack.core.serialization.UaRequestMessage;
import com.digitalpetri.opcua.stack.core.types.UaDataType;
import com.digitalpetri.opcua.stack.core.types.builtin.ByteString;
import com.digitalpetri.opcua.stack.core.types.builtin.NodeId;
import com.digitalpetri.opcua.stack.core.types.builtin.unsigned.UInteger;

/**
 * Immutable OPC UA CreateSessionRequest structure: all fields are set at
 * construction and exposed through getters only. Follows this stack's
 * generated-structure pattern (static encode/decode delegates registered in
 * the static initializer below).
 * <p>
 * NOTE(review): field semantics (nonce, certificate, timeout units) are
 * defined by the OPC UA specification, not visible in this file — consult
 * OPC UA Part 4 (CreateSession service) for the contract of each field.
 */
@UaDataType("CreateSessionRequest")
public class CreateSessionRequest implements UaRequestMessage {

    // NodeIds identifying this data type and its two wire encodings.
    public static final NodeId TypeId = Identifiers.CreateSessionRequest;
    public static final NodeId BinaryEncodingId = Identifiers.CreateSessionRequest_Encoding_DefaultBinary;
    public static final NodeId XmlEncodingId = Identifiers.CreateSessionRequest_Encoding_DefaultXml;

    protected final RequestHeader _requestHeader;
    protected final ApplicationDescription _clientDescription;
    protected final String _serverUri;
    protected final String _endpointUrl;
    protected final String _sessionName;
    protected final ByteString _clientNonce;
    protected final ByteString _clientCertificate;
    protected final Double _requestedSessionTimeout;
    protected final UInteger _maxResponseMessageSize;

    /**
     * No-arg constructor producing an all-null instance; required by the
     * stack's serialization machinery.
     */
    public CreateSessionRequest() {
        this._requestHeader = null;
        this._clientDescription = null;
        this._serverUri = null;
        this._endpointUrl = null;
        this._sessionName = null;
        this._clientNonce = null;
        this._clientCertificate = null;
        this._requestedSessionTimeout = null;
        this._maxResponseMessageSize = null;
    }

    /**
     * Full constructor; arguments are stored as-is (no copies, no null checks).
     */
    public CreateSessionRequest(RequestHeader _requestHeader, ApplicationDescription _clientDescription, String _serverUri, String _endpointUrl, String _sessionName, ByteString _clientNonce, ByteString _clientCertificate, Double _requestedSessionTimeout, UInteger _maxResponseMessageSize) {
        this._requestHeader = _requestHeader;
        this._clientDescription = _clientDescription;
        this._serverUri = _serverUri;
        this._endpointUrl = _endpointUrl;
        this._sessionName = _sessionName;
        this._clientNonce = _clientNonce;
        this._clientCertificate = _clientCertificate;
        this._requestedSessionTimeout = _requestedSessionTimeout;
        this._maxResponseMessageSize = _maxResponseMessageSize;
    }

    public RequestHeader getRequestHeader() { return _requestHeader; }

    public ApplicationDescription getClientDescription() { return _clientDescription; }

    public String getServerUri() { return _serverUri; }

    public String getEndpointUrl() { return _endpointUrl; }

    public String getSessionName() { return _sessionName; }

    public ByteString getClientNonce() { return _clientNonce; }

    public ByteString getClientCertificate() { return _clientCertificate; }

    public Double getRequestedSessionTimeout() { return _requestedSessionTimeout; }

    public UInteger getMaxResponseMessageSize() { return _maxResponseMessageSize; }

    @Override
    public NodeId getTypeId() { return TypeId; }

    @Override
    public NodeId getBinaryEncodingId() { return BinaryEncodingId; }

    @Override
    public NodeId getXmlEncodingId() { return XmlEncodingId; }

    /**
     * Serializes the request field-by-field. Field order here must mirror
     * {@link #decode} exactly — it defines the wire layout. Null header and
     * client description are replaced with empty defaults before encoding.
     */
    public static void encode(CreateSessionRequest createSessionRequest, UaEncoder encoder) {
        encoder.encodeSerializable("RequestHeader", createSessionRequest._requestHeader != null ? createSessionRequest._requestHeader : new RequestHeader());
        encoder.encodeSerializable("ClientDescription", createSessionRequest._clientDescription != null ? createSessionRequest._clientDescription : new ApplicationDescription());
        encoder.encodeString("ServerUri", createSessionRequest._serverUri);
        encoder.encodeString("EndpointUrl", createSessionRequest._endpointUrl);
        encoder.encodeString("SessionName", createSessionRequest._sessionName);
        encoder.encodeByteString("ClientNonce", createSessionRequest._clientNonce);
        encoder.encodeByteString("ClientCertificate", createSessionRequest._clientCertificate);
        encoder.encodeDouble("RequestedSessionTimeout", createSessionRequest._requestedSessionTimeout);
        encoder.encodeUInt32("MaxResponseMessageSize", createSessionRequest._maxResponseMessageSize);
    }

    /**
     * Deserializes a request; must read fields in the same order as
     * {@link #encode}.
     */
    public static CreateSessionRequest decode(UaDecoder decoder) {
        RequestHeader _requestHeader = decoder.decodeSerializable("RequestHeader", RequestHeader.class);
        ApplicationDescription _clientDescription = decoder.decodeSerializable("ClientDescription", ApplicationDescription.class);
        String _serverUri = decoder.decodeString("ServerUri");
        String _endpointUrl = decoder.decodeString("EndpointUrl");
        String _sessionName = decoder.decodeString("SessionName");
        ByteString _clientNonce = decoder.decodeByteString("ClientNonce");
        ByteString _clientCertificate = decoder.decodeByteString("ClientCertificate");
        Double _requestedSessionTimeout = decoder.decodeDouble("RequestedSessionTimeout");
        UInteger _maxResponseMessageSize = decoder.decodeUInt32("MaxResponseMessageSize");

        return new CreateSessionRequest(_requestHeader, _clientDescription, _serverUri, _endpointUrl, _sessionName, _clientNonce, _clientCertificate, _requestedSessionTimeout, _maxResponseMessageSize);
    }

    static {
        // Register encode/decode delegates for both wire encodings with the
        // stack's global serialization registry when the class is loaded.
        DelegateRegistry.registerEncoder(CreateSessionRequest::encode, CreateSessionRequest.class, BinaryEncodingId, XmlEncodingId);
        DelegateRegistry.registerDecoder(CreateSessionRequest::decode, CreateSessionRequest.class, BinaryEncodingId, XmlEncodingId);
    }
}
apache-2.0
zyxist/chainsaw
src/main/java/com/zyxist/chainsaw/jigsaw/JigsawCLI.java
2727
/* * Copyright 2017-2018 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.zyxist.chainsaw.jigsaw; import com.zyxist.chainsaw.jigsaw.cli.*; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; /** * A model of Jigsaw command-line interface, used for generating the necessary * JVM and compiler CLI flags. */ public class JigsawCLI { private final String modulePath; private ModuleFlag moduleFlag = new ModuleFlag(); private ModuleVersionFlag moduleVersion = new ModuleVersionFlag(); private final AddModuleFlag addModuleFlag = new AddModuleFlag(); private final PatchListFlag patchList = new PatchListFlag(); private final ExportListFlag exportList = new ExportListFlag(); private final ReadListFlag readList = new ReadListFlag(); private final OpenListFlag openList = new OpenListFlag(); public JigsawCLI(String modulePath) { this.modulePath = modulePath; } public JigsawCLI module(String moduleName, String mainClassName) { this.moduleFlag = new ModuleFlag(moduleName, mainClassName); return this; } public JigsawCLI version(String version) { this.moduleVersion = new ModuleVersionFlag(version); return this; } public AddModuleFlag addModules() { return this.addModuleFlag; } public PatchListFlag patchList() { return this.patchList; } public ExportListFlag exportList() { return this.exportList; } public ReadListFlag readList() { return this.readList; } public OpenListFlag openList() { return openList; } public void 
toArgs(List<String> args) { generateModulePath(args); moduleVersion.toArgs(args); addModuleFlag.toArgs(args); exportList.toArgs(args); openList.toArgs(args); readList.toArgs(args); patchList.toArgs(args); moduleFlag.toArgs(args); } public List<String> generateArgs() { List<String> args = new ArrayList<>(); toArgs(args); return args; } @Override public String toString() { return generateArgs().stream().collect(Collectors.joining(" ")); } public String getModulePath() { return modulePath; } private void generateModulePath(List<String> args) { args.add(JigsawFlags.MODULE_PATH); args.add(modulePath); } }
apache-2.0
jakewins/holon
src/main/java/holon/contrib/template/mustache/UncachedMustacheFactory.java
1381
package holon.contrib.template.mustache; import java.io.File; import java.io.Reader; import java.io.Writer; import com.github.mustachejava.Mustache; import com.github.mustachejava.MustacheFactory; import com.github.mustachejava.MustacheVisitor; import com.github.mustachejava.ObjectHandler; public class UncachedMustacheFactory implements MustacheFactory { private final File basePath; public UncachedMustacheFactory( File basePath ) { this.basePath = basePath; } @Override public MustacheVisitor createMustacheVisitor() { throw new UnsupportedOperationException(); } @Override public Reader getReader( String s ) { throw new UnsupportedOperationException(); } @Override public void encode( String s, Writer writer ) { throw new UnsupportedOperationException(); } @Override public ObjectHandler getObjectHandler() { throw new UnsupportedOperationException(); } @Override public Mustache compile( String templatePath ) { return new UncachedMustache(templatePath, basePath); } @Override public Mustache compile( Reader reader, String s ) { throw new UnsupportedOperationException(); } @Override public String translate( String s ) { throw new UnsupportedOperationException(); } }
apache-2.0
softindex/datakernel
core-datastream/src/main/java/io/datakernel/datastream/processor/StreamBuffer.java
4134
/* * Copyright (C) 2015 SoftIndex LLC. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package io.datakernel.datastream.processor; import io.datakernel.common.ApplicationSettings; import io.datakernel.common.Check; import io.datakernel.datastream.*; import static io.datakernel.common.Preconditions.checkArgument; import static io.datakernel.common.Preconditions.checkState; import static java.lang.Integer.numberOfLeadingZeros; /** * A stream transformer that changes each item according to given function. 
*/ public final class StreamBuffer<T> implements StreamTransformer<T, T> { private static final boolean CHECK = Check.isEnabled(StreamBuffer.class); private static final boolean NULLIFY_ON_TAKE_OUT = ApplicationSettings.getBoolean(StreamBuffer.class, "nullifyOnTakeOut", true); private final Input input; private final Output output; private final Object[] elements; private int tail; private int head; private final int bufferMinSize; private final int bufferMaxSize; private final StreamDataAcceptor<T> toBuffer; private StreamBuffer(int bufferMinSize, int bufferMaxSize) { checkArgument(bufferMaxSize > 0 && bufferMinSize >= 0); this.bufferMinSize = bufferMinSize; this.bufferMaxSize = bufferMaxSize; this.elements = new Object[1 << (32 - numberOfLeadingZeros(this.bufferMaxSize - 1))]; this.input = new Input(); this.output = new Output(); this.toBuffer = item -> { doAdd(item); if (size() >= bufferMaxSize) { input.suspend(); output.flush(); } }; input.getAcknowledgement() .whenException(output::closeEx); output.getEndOfStream() .whenResult(input::acknowledge) .whenException(input::closeEx); } public static <T> StreamBuffer<T> create(int bufferMinSize, int bufferMaxSize) { return new StreamBuffer<T>(bufferMinSize, bufferMaxSize); } public boolean isSaturated() { return size() >= bufferMaxSize; } public boolean isExhausted() { return size() <= bufferMinSize; } public boolean isEmpty() { return tail == head; } public int size() { return tail - head; } private void doAdd(T value) { elements[(tail++) & (elements.length - 1)] = value; } @Override public StreamConsumer<T> getInput() { return input; } @Override public StreamSupplier<T> getOutput() { return output; } private final class Input extends AbstractStreamConsumer<T> { @Override protected void onStarted() { sync(); } @Override protected void onEndOfStream() { output.flush(); } } private final class Output extends AbstractStreamSupplier<T> { @Override protected void onResumed() { flush(); } void flush() { int head = 
StreamBuffer.this.head; int tail = StreamBuffer.this.tail; StreamDataAcceptor<T> acceptor; while (true) { acceptor = getDataAcceptor(); if (acceptor == null) break; if (head == tail) break; int pos = (head++) & (elements.length - 1); //noinspection unchecked T item = (T) elements[pos]; if (NULLIFY_ON_TAKE_OUT) { elements[pos] = null; } acceptor.accept(item); } if (CHECK) checkState(tail == StreamBuffer.this.tail, "New items have been added to buffer while flushing"); StreamBuffer.this.head = head; if (isEmpty() && input.isEndOfStream()) { sendEndOfStream(); } sync(); } @Override protected void onSuspended() { sync(); } } private void sync() { if (size() >= bufferMaxSize) { input.suspend(); } else if (size() <= bufferMinSize) { if (isEmpty() && output.isReady()) { input.resume(output.getDataAcceptor()); } else { input.resume(toBuffer); } } } }
apache-2.0
Chaparqanatoos/kaggle-knowledge
src/main/java/com/github/chaparqanatoos/kaggle/knowledge/amazon/Trainer.java
10214
package com.github.chaparqanatoos.kaggle.knowledge.amazon; import java.io.BufferedReader; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.io.Reader; import java.io.Writer; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Scanner; import java.util.Set; import java.util.TreeMap; import edu.berkeley.compbio.jlibsvm.ImmutableSvmParameter; import edu.berkeley.compbio.jlibsvm.ImmutableSvmParameterPoint; import edu.berkeley.compbio.jlibsvm.SVM; import edu.berkeley.compbio.jlibsvm.binary.BinaryClassificationProblem; import edu.berkeley.compbio.jlibsvm.binary.BinaryModel; import edu.berkeley.compbio.jlibsvm.binary.BooleanClassificationProblemImpl; import edu.berkeley.compbio.jlibsvm.binary.C_SVC; import edu.berkeley.compbio.jlibsvm.kernel.GaussianRBFKernel; import edu.berkeley.compbio.jlibsvm.kernel.LinearKernel; import edu.berkeley.compbio.jlibsvm.scaler.LinearScalingModelLearner; import edu.berkeley.compbio.jlibsvm.util.SparseVector; public class Trainer { /** * @param args */ public static void main(String[] args) { if (args.length < 2) { System.err .println("Usage: java com.kaggle.nabeelmukhtar.amazon.Trainer data/train.csv.train.split.csv data/train.csv.test.split.csv"); System.exit(-1); } System.out.println("Started training....."); BinaryModel<Boolean, SparseVector> model = train(args[0]); System.out.println("Finished training....."); // System.out.println("Starting evalation....."); // double score = evaluate(args[1], model); // System.out.println("Finished evaluation....."); // System.out.println("Score: " + score); // System.out.println("Starting prediction....."); // Map<Integer, Boolean> results = predict(args[1], model); // System.out.println("Finished prediction....."); // // saveResults(results, "data/submission.csv"); } public static BinaryModel<Boolean, SparseVector> train(String fileName) { float gamma = 0.2f; GaussianRBFKernel kernel = new 
GaussianRBFKernel(gamma); Set<SparseVector> trueExamples = new HashSet<SparseVector>(); Set<SparseVector> falseExamples = new HashSet<SparseVector>(); Map<SparseVector, Integer> exampleIds = new HashMap<SparseVector, Integer>(); loadTrainingData(fileName, trueExamples, falseExamples, exampleIds); BinaryClassificationProblem<Boolean, SparseVector> problem = new BooleanClassificationProblemImpl<Boolean, SparseVector>( Boolean.class, Boolean.TRUE, trueExamples, Boolean.FALSE, falseExamples, exampleIds); SVM<Boolean, SparseVector, BinaryClassificationProblem<Boolean, SparseVector>> svm = new C_SVC<Boolean, SparseVector>(); double C_arr[] = {0.01, 0.1, 0.5, 1.0, 10.0, 100.0}; // double p_arr[] = {0.1, 0.5, 1.0, 10.0, 100.0}; // double eps_arr[] = {10.0, 1.0, 0.1, 0.01, 0.001, 0.0001, 0.00001}; // double nu_arr[] = {0.1, 0.5, 1.0, 10.0, 100.0}; for (int i = 0; i < C_arr.length; i++) { // for (int j = 0; j < p_arr.length; j++) { // for (int k = 0; k < eps_arr.length; k++) { // for (int l = 0; l < nu_arr.length; l++) { ImmutableSvmParameterPoint.Builder<Boolean, SparseVector> builder = new ImmutableSvmParameterPoint.Builder<Boolean, SparseVector>(); builder.kernel = kernel; // builder.nu = (float) nu_arr[l]; builder.cache_size = 100; builder.eps = 0.0001f; // builder.p = (float) p_arr[j]; builder.C = (float) C_arr[i]; builder.shrinking = true; builder.probability = false; builder.redistributeUnbalancedC = true; // builder.gridsearchBinaryMachinesIndependently = true; builder.scaleBinaryMachinesIndependently = true; builder.scalingModelLearner = new LinearScalingModelLearner(500, true); ImmutableSvmParameter<Boolean, SparseVector> param = builder.build(); System.out.println("C:" + C_arr[i]); do_cross_validation(svm, problem, param); // } // } // } } // set svm parameters // BinaryModel<Boolean, SparseVector> model = (BinaryModel<Boolean, SparseVector>) svm.train(problem, param); // return null; } public static double evaluate(String testFileName, BinaryModel<Boolean, 
SparseVector> model) { Set<SparseVector> trueExamples = new HashSet<SparseVector>(); Set<SparseVector> falseExamples = new HashSet<SparseVector>(); Map<SparseVector, Integer> exampleIds = new HashMap<SparseVector, Integer>(); loadTrainingData(testFileName, trueExamples, falseExamples, exampleIds); int tp = 0, tn = 0, fp = 0, fn = 0; for (SparseVector x : falseExamples) { if (model.predictLabel(x)) { fp++; } else { tn++; } } for (SparseVector x : trueExamples) { if (model.predictLabel(x)) { tp++; } else { fn++; } } double prec = (double) tp / (double) (tp + fp); double recall = (double) tp / (double) (tp + fn); double f = (2 * prec * recall) / (prec + recall); System.out.println("prec:" + prec + ", recall:" + recall + ", f-measure:" + f); return f; } public static Map<Integer, Boolean> predict(String testFileName, BinaryModel<Boolean, SparseVector> model) { Map<Integer, SparseVector> tests = new HashMap<Integer, SparseVector>(); loadTestData(testFileName, tests); Map<Integer, Boolean> results = new TreeMap<Integer, Boolean>(); for (Integer id : tests.keySet()) { results.put(id, model.predictLabel(tests.get(id))); } return results; } private static void loadTrainingData(String fileName, Set<SparseVector> trueExamples, Set<SparseVector> falseExamples, Map<SparseVector, Integer> exampleIds) { BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(fileName)); Scanner scanner = new Scanner(reader); CSVParser parser = new CSVParser(); int id = 1; while (scanner.hasNextLine()) { AmazonEacRecord record = parser.parse(scanner.nextLine(), true); record.setId(id); SparseVector vector = convertToSparseVector(record); if (record.isAction()) { trueExamples.add(vector); } else { falseExamples.add(vector); } exampleIds.put(vector, id); id++; } System.out.println("Loaded " + id + " records...."); } catch (Exception e) { e.printStackTrace(); } finally { closeReader(reader); } } private static void loadTestData(String fileName, Map<Integer, SparseVector> tests) 
{ BufferedReader reader = null; try { reader = new BufferedReader(new FileReader(fileName)); Scanner scanner = new Scanner(reader); CSVParser parser = new CSVParser(); boolean header = true; while (scanner.hasNextLine()) { if (header) { // skip.... scanner.nextLine(); header = false; } else { AmazonEacRecord record = parser.parse(scanner.nextLine(), false); tests.put(record.getId(), convertToSparseVector(record)); } } } catch (Exception e) { e.printStackTrace(); } finally { closeReader(reader); } } private static void saveResults(Map<Integer, Boolean> results, String outputFileName) { PrintWriter output = null; try { output = new PrintWriter(new FileWriter(outputFileName)); // header output.println("id,ACTION"); for (Integer id : results.keySet()) { output.println(id + "," + (results.get(id) ? "1" : "0")); } } catch (Exception e) { e.printStackTrace(); } finally { closeWriter(output); } } private static SparseVector convertToSparseVector(AmazonEacRecord record) { SparseVector vector = new SparseVector(10); vector.indexes[0] = 0; vector.values[0] = record.getRoleCode(); vector.indexes[1] = 1; vector.values[1] = record.getManagerId(); vector.indexes[2] = 2; vector.values[2] = record.getRoleRollup1(); vector.indexes[3] = 3; vector.values[3] = record.getRoleRollup2(); vector.indexes[4] = 4; vector.values[4] = record.getRoleDepartmentName(); vector.indexes[5] = 5; vector.values[5] = record.getRoleTitle(); vector.indexes[6] = 6; vector.values[6] = record.getResource(); vector.indexes[7] = 7; vector.values[7] = record.getRoleFamily(); // vector.indexes[8] = 8; // vector.values[8] = record.getRoleFamilyDescription(); return vector; } private static void do_cross_validation(SVM<Boolean, SparseVector, BinaryClassificationProblem<Boolean, SparseVector>> svm, BinaryClassificationProblem<Boolean, SparseVector> problem, ImmutableSvmParameter<Boolean, SparseVector> param) { // int i; int total_correct = 0; int total_unknown = 0; double total_error = 0; double sumv = 0, sumy = 0, 
sumvv = 0, sumyy = 0, sumvy = 0; // double[] target = new double[problem.l]; int numExamples = problem.getNumExamples(); Map cvResult = svm.discreteCrossValidation(problem, param); for (SparseVector p : problem.getBooleanExamples().keySet()) // for (i = 0; i < numExamples; i++) { Object prediction = cvResult.get(p); if (prediction == null) { ++total_unknown; } else if (prediction.equals(problem.getTargetValue(p))) { ++total_correct; } } int classifiedExamples = numExamples - total_unknown; System.out.print("Cross Validation Classified = " + 100.0 * classifiedExamples / numExamples + "%\n"); System.out.print("Cross Validation Accuracy (of those classified) = " + 100.0 * total_correct / classifiedExamples + "%\n"); System.out.print("Cross Validation Accuracy (of total) = " + 100.0 * total_correct / numExamples + "%\n"); } /** * */ protected static void closeReader(Reader reader) { if (reader != null) { try { reader.close(); } catch (IOException e) { e.printStackTrace(); } } } /** * */ protected static void closeWriter(Writer writer) { if (writer != null) { try { writer.close(); } catch (IOException e) { e.printStackTrace(); } } } }
apache-2.0
datanucleus/tests
jdo/rdbms/src/java/org/datanucleus/samples/rdbms/schema/SchemaClass2.java
1155
/********************************************************************** Copyright (c) 2008 Andy Jefferson and others. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. Contributors: ... **********************************************************************/ package org.datanucleus.samples.rdbms.schema; /** * Test class for schema handling. */ public class SchemaClass2 { private long id; String name; long value; public long getId() { return id; } public String getName() { return name; } public long getValue() { return value; } }
apache-2.0