gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/* * Copyright (C) 2011 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.net; import static com.google.common.base.Charsets.UTF_8; import static com.google.common.net.MediaType.ANY_APPLICATION_TYPE; import static com.google.common.net.MediaType.ANY_AUDIO_TYPE; import static com.google.common.net.MediaType.ANY_IMAGE_TYPE; import static com.google.common.net.MediaType.ANY_TEXT_TYPE; import static com.google.common.net.MediaType.ANY_TYPE; import static com.google.common.net.MediaType.ANY_VIDEO_TYPE; import static com.google.common.net.MediaType.HTML_UTF_8; import static com.google.common.net.MediaType.JPEG; import static com.google.common.net.MediaType.PLAIN_TEXT_UTF_8; import com.google.common.annotations.GwtCompatible; import com.google.common.base.Optional; import com.google.common.collect.ImmutableListMultimap; import com.google.common.collect.ImmutableMultimap; import com.google.common.testing.EqualsTester; import junit.framework.TestCase; import java.nio.charset.IllegalCharsetNameException; import java.nio.charset.UnsupportedCharsetException; /** * Tests for {@link MediaType}. 
* * @author Gregory Kick */ @GwtCompatible(emulated = true) public class MediaTypeTest extends TestCase { public void testCreate_invalidType() { try { MediaType.create("te><t", "plaintext"); fail(); } catch (IllegalArgumentException expected) {} } public void testCreate_invalidSubtype() { try { MediaType.create("text", "pl@intext"); fail(); } catch (IllegalArgumentException expected) {} } public void testCreate_wildcardTypeDeclaredSubtype() { try { MediaType.create("*", "text"); fail(); } catch (IllegalArgumentException expected) {} } public void testCreateApplicationType() { MediaType newType = MediaType.createApplicationType("yams"); assertEquals("application", newType.type()); assertEquals("yams", newType.subtype()); } public void testCreateAudioType() { MediaType newType = MediaType.createAudioType("yams"); assertEquals("audio", newType.type()); assertEquals("yams", newType.subtype()); } public void testCreateImageType() { MediaType newType = MediaType.createImageType("yams"); assertEquals("image", newType.type()); assertEquals("yams", newType.subtype()); } public void testCreateTextType() { MediaType newType = MediaType.createTextType("yams"); assertEquals("text", newType.type()); assertEquals("yams", newType.subtype()); } public void testCreateVideoType() { MediaType newType = MediaType.createVideoType("yams"); assertEquals("video", newType.type()); assertEquals("yams", newType.subtype()); } public void testGetType() { assertEquals("text", MediaType.parse("text/plain").type()); assertEquals("application", MediaType.parse("application/atom+xml; charset=utf-8").type()); } public void testGetSubtype() { assertEquals("plain", MediaType.parse("text/plain").subtype()); assertEquals("atom+xml", MediaType.parse("application/atom+xml; charset=utf-8").subtype()); } private static final ImmutableListMultimap<String, String> PARAMETERS = ImmutableListMultimap.of("a", "1", "a", "2", "b", "3"); public void testGetParameters() { assertEquals(ImmutableListMultimap.of(), 
MediaType.parse("text/plain").parameters()); assertEquals(ImmutableListMultimap.of("charset", "utf-8"), MediaType.parse("application/atom+xml; charset=utf-8").parameters()); assertEquals(PARAMETERS, MediaType.parse("application/atom+xml; a=1; a=2; b=3").parameters()); } public void testWithoutParameters() { assertSame(MediaType.parse("image/gif"), MediaType.parse("image/gif").withoutParameters()); assertEquals(MediaType.parse("image/gif"), MediaType.parse("image/gif; foo=bar").withoutParameters()); } public void testWithParameters() { assertEquals(MediaType.parse("text/plain; a=1; a=2; b=3"), MediaType.parse("text/plain").withParameters(PARAMETERS)); assertEquals(MediaType.parse("text/plain; a=1; a=2; b=3"), MediaType.parse("text/plain; a=1; a=2; b=3").withParameters(PARAMETERS)); } public void testWithParameters_invalidAttribute() { MediaType mediaType = MediaType.parse("text/plain"); ImmutableListMultimap<String, String> parameters = ImmutableListMultimap.of("a", "1", "@", "2", "b", "3"); try { mediaType.withParameters(parameters); fail(); } catch (IllegalArgumentException expected) {} } public void testWithParameter() { assertEquals(MediaType.parse("text/plain; a=1"), MediaType.parse("text/plain").withParameter("a", "1")); assertEquals(MediaType.parse("text/plain; a=1"), MediaType.parse("text/plain; a=1; a=2").withParameter("a", "1")); assertEquals(MediaType.parse("text/plain; a=3"), MediaType.parse("text/plain; a=1; a=2").withParameter("a", "3")); assertEquals(MediaType.parse("text/plain; a=1; a=2; b=3"), MediaType.parse("text/plain; a=1; a=2").withParameter("b", "3")); } public void testWithParameter_invalidAttribute() { MediaType mediaType = MediaType.parse("text/plain"); try { mediaType.withParameter("@", "2"); fail(); } catch (IllegalArgumentException expected) {} } public void testWithCharset() { assertEquals(MediaType.parse("text/plain; charset=utf-8"), MediaType.parse("text/plain").withCharset(UTF_8)); assertEquals(MediaType.parse("text/plain; 
charset=utf-8"), MediaType.parse("text/plain; charset=utf-16").withCharset(UTF_8)); } public void testHasWildcard() { assertFalse(PLAIN_TEXT_UTF_8.hasWildcard()); assertFalse(JPEG.hasWildcard()); assertTrue(ANY_TYPE.hasWildcard()); assertTrue(ANY_APPLICATION_TYPE.hasWildcard()); assertTrue(ANY_AUDIO_TYPE.hasWildcard()); assertTrue(ANY_IMAGE_TYPE.hasWildcard()); assertTrue(ANY_TEXT_TYPE.hasWildcard()); assertTrue(ANY_VIDEO_TYPE.hasWildcard()); } public void testIs() { assertTrue(PLAIN_TEXT_UTF_8.is(ANY_TYPE)); assertTrue(JPEG.is(ANY_TYPE)); assertTrue(ANY_TEXT_TYPE.is(ANY_TYPE)); assertTrue(PLAIN_TEXT_UTF_8.is(ANY_TEXT_TYPE)); assertTrue(PLAIN_TEXT_UTF_8.withoutParameters().is(ANY_TEXT_TYPE)); assertFalse(JPEG.is(ANY_TEXT_TYPE)); assertTrue(PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8)); assertTrue(PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8.withoutParameters())); assertFalse(PLAIN_TEXT_UTF_8.withoutParameters().is(PLAIN_TEXT_UTF_8)); assertFalse(PLAIN_TEXT_UTF_8.is(HTML_UTF_8)); assertFalse(PLAIN_TEXT_UTF_8.withParameter("charset", "UTF-16").is(PLAIN_TEXT_UTF_8)); assertFalse(PLAIN_TEXT_UTF_8.is(PLAIN_TEXT_UTF_8.withParameter("charset", "UTF-16"))); } public void testParse_empty() { try { MediaType.parse(""); fail(); } catch (IllegalArgumentException expected) {} } public void testParse_badInput() { try { MediaType.parse("/"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("te<t/plain"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/pl@in"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain;"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; "); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a"); fail(); } catch 
(IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a="); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=@"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=\"@"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=1;"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=1; "); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=1; b"); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=1; b="); fail(); } catch (IllegalArgumentException expected) {} try { MediaType.parse("text/plain; a=\u2025"); fail(); } catch (IllegalArgumentException expected) {} } public void testGetCharset() { assertEquals(Optional.absent(), MediaType.parse("text/plain").charset()); assertEquals(Optional.of(UTF_8), MediaType.parse("text/plain; charset=utf-8").charset()); } public void testGetCharset_tooMany() { MediaType mediaType = MediaType.parse("text/plain; charset=utf-8; charset=utf-16"); try { mediaType.charset(); fail(); } catch (IllegalStateException expected) {} } public void testGetCharset_illegalCharset() { MediaType mediaType = MediaType.parse( "text/plain; charset=\"!@#$%^&*()\""); try { mediaType.charset(); fail(); } catch (IllegalCharsetNameException expected) {} } public void testGetCharset_unsupportedCharset() { MediaType mediaType = MediaType.parse( "text/plain; charset=utf-wtf"); try { mediaType.charset(); fail(); } catch (UnsupportedCharsetException expected) {} } public void testEquals() { new EqualsTester() .addEqualityGroup(MediaType.create("text", "plain"), MediaType.create("TEXT", "PLAIN"), MediaType.parse("text/plain"), MediaType.parse("TEXT/PLAIN"), MediaType.create("text", "plain").withParameter("a", "1").withoutParameters()) .addEqualityGroup( MediaType.create("text", 
"plain").withCharset(UTF_8), MediaType.create("text", "plain").withParameter("CHARSET", "UTF-8"), MediaType.create("text", "plain").withParameters( ImmutableMultimap.of("charset", "utf-8")), MediaType.parse("text/plain;charset=utf-8"), MediaType.parse("text/plain; charset=utf-8"), MediaType.parse("text/plain; charset=utf-8"), MediaType.parse("text/plain; \tcharset=utf-8"), MediaType.parse("text/plain; \r\n\tcharset=utf-8"), MediaType.parse("text/plain; CHARSET=utf-8"), MediaType.parse("text/plain; charset=\"utf-8\""), MediaType.parse("text/plain; charset=\"\\u\\tf-\\8\""), MediaType.parse("text/plain; charset=UTF-8")) .addEqualityGroup(MediaType.parse("text/plain; charset=utf-8; charset=utf-8")) .addEqualityGroup(MediaType.create("text", "plain").withParameter("a", "value"), MediaType.create("text", "plain").withParameter("A", "value")) .addEqualityGroup(MediaType.create("text", "plain").withParameter("a", "VALUE"), MediaType.create("text", "plain").withParameter("A", "VALUE")) .addEqualityGroup( MediaType.create("text", "plain") .withParameters(ImmutableListMultimap.of("a", "1", "a", "2")), MediaType.create("text", "plain") .withParameters(ImmutableListMultimap.of("a", "2", "a", "1"))) .addEqualityGroup(MediaType.create("text", "csv")) .addEqualityGroup(MediaType.create("application", "atom+xml")) .testEquals(); } public void testToString() { assertEquals("text/plain", MediaType.create("text", "plain").toString()); assertEquals("text/plain; something=\"cr@zy\"; something-else=\"crazy with spaces\"", MediaType.create("text", "plain") .withParameter("something", "cr@zy") .withParameter("something-else", "crazy with spaces") .toString()); } }
/* * Copyright (c) 2011-2013, Peter Abeles. All Rights Reserved. * * This file is part of BoofCV (http://boofcv.org). * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package boofcv.alg.geo.calibration; import georegression.geometry.RotationMatrixGenerator; import georegression.struct.point.Point2D_F64; import georegression.struct.se.Se3_F64; import org.ddogleg.optimization.FactoryOptimization; import org.ddogleg.optimization.UnconstrainedLeastSquares; import org.ejml.data.DenseMatrix64F; import java.util.ArrayList; import java.util.List; /** * <p> * Full implementation of the Zhang99 camera calibration algorithm using planar calibration targets. First * linear approximations of camera parameters are computed, which are then refined using non-linear estimation. * </p> * * <p> * When processing the results be sure to take in account the coordinate system being left or right handed. Calibration * works just fine with either coordinate system, but most 3D geometric algorithms assume a right handed coordinate * system while most images are left handed. * </p> * * <p> * A listener can be provide that will give status updates and allows requests for early termination. If a request * for early termination is made then a RuntimeException will be thrown. * </p> * * <p> * [1] Zhengyou Zhang, "Flexible Camera Calibration By Viewing a Plane From Unknown Orientations,", * International Conference on Computer Vision (ICCV'99), Corfu, Greece, pages 666-673, September 1999. 
* </p> * * @author Peter Abeles */ public class CalibrationPlanarGridZhang99 { // estimation algorithms private Zhang99ComputeTargetHomography computeHomography; private Zhang99CalibrationMatrixFromHomographies computeK; private RadialDistortionEstimateLinear computeRadial; private Zhang99DecomposeHomography decomposeH = new Zhang99DecomposeHomography(); // contains found parameters private Zhang99Parameters optimized; // description of the calibration target with point locations private PlanarCalibrationTarget target; // if true the intrinsic calibration matrix will have the skew parameter set to zero private boolean assumeZeroSkew; // optimization algorithm private UnconstrainedLeastSquares optimizer; // provides information on calibration status private Listener listener; /** * Configures calibration process. * * @param target Description of the known calibration target * @param assumeZeroSkew Should it assumed the camera has zero skew. Typically true. * @param numRadialParam Number of radial distortion parameters to consider. Typically 0,1,2. */ public CalibrationPlanarGridZhang99(PlanarCalibrationTarget target, boolean assumeZeroSkew, int numRadialParam) { computeHomography = new Zhang99ComputeTargetHomography(target); computeK = new Zhang99CalibrationMatrixFromHomographies(assumeZeroSkew); computeRadial = new RadialDistortionEstimateLinear(target,numRadialParam); this.target = target; this.assumeZeroSkew = assumeZeroSkew; optimized = new Zhang99Parameters(assumeZeroSkew,numRadialParam); } /** * Used to listen in on progress and request that processing be stopped * * @param listener The listener */ public void setListener(Listener listener) { this.listener = listener; } /** * Processes observed calibration point coordinates and computes camera intrinsic and extrinsic * parameters. * * @param observations Set of observed grid locations in pixel coordinates. 
* @return true if successful and false if it failed
 */
public boolean process( List<List<Point2D_F64>> observations ) {
	// one set of extrinsic parameters (motion) is estimated per observed view
	optimized.setNumberOfViews(observations.size());

	// compute initial parameter estimates using linear algebra
	Zhang99Parameters initial = initialParam(observations);
	if( initial == null )
		return false;

	status("Non-linear refinement");
	// perform non-linear optimization to improve results
	if( !optimizedParam(observations,target.points,initial,optimized,optimizer))
		return false;

	return true;
}

/**
 * Find an initial estimate for calibration parameters using linear techniques.
 * Pipeline: per-view homographies -> calibration matrix K -> per-view motion by
 * decomposing each homography -> linear radial-distortion estimate.
 *
 * @param observations Observed grid locations in pixel coordinates, one list per view.
 * @return Initial parameter estimate, or null if a homography could not be computed.
 */
protected Zhang99Parameters initialParam( List<List<Point2D_F64>> observations )
{
	status("Estimating Homographies");
	List<DenseMatrix64F> homographies = new ArrayList<DenseMatrix64F>();
	List<Se3_F64> motions = new ArrayList<Se3_F64>();

	for( List<Point2D_F64> obs : observations ) {
		if( !computeHomography.computeHomography(obs) )
			return null; // homography estimation failed for this view; abort

		DenseMatrix64F H = computeHomography.getHomography();
		homographies.add(H);
	}

	status("Estimating Calibration Matrix");
	computeK.process(homographies);
	DenseMatrix64F K = computeK.getCalibrationMatrix();

	// with K known, each homography yields the view's rotation and translation
	decomposeH.setCalibrationMatrix(K);
	for( DenseMatrix64F H : homographies ) {
		motions.add(decomposeH.decompose(H));
	}

	status("Estimating Radial Distortion");
	computeRadial.process(K,homographies,observations);

	double distort[] = computeRadial.getParameters();

	return convertIntoZhangParam(motions, K,assumeZeroSkew, distort);
}

/**
 * Forwards a status message to the registered listener, if any.  If the listener
 * requests a stop, processing is aborted by throwing a RuntimeException (as
 * documented in the class javadoc).
 *
 * @param message Human readable description of the current stage.
 */
private void status( String message ) {
	if( listener != null ) {
		if( !listener.zhangUpdate(message) )
			throw new RuntimeException("User requested termination of calibration");
	}
}

/**
 * Use non-linear optimization to improve the parameter estimates
 *
 * @param observations Observations of calibration points in each image
 * @param grid Location of calibration points on calibration target
 * @param initial Initial estimate of calibration parameters.
 * @param found The refined calibration parameters.
* @param optimizer Algorithm used to optimize parameters.  May be null, in which case
 *                  a default Levenberg-Marquardt optimizer is created.
 * @return true (the current implementation always reports success once iteration stops)
 */
public boolean optimizedParam( List<List<Point2D_F64>> observations ,
							   List<Point2D_F64> grid ,
							   Zhang99Parameters initial ,
							   Zhang99Parameters found ,
							   UnconstrainedLeastSquares optimizer )
{
	if( optimizer == null ) {
		// default: Levenberg-Marquardt.  Alternatives tried during development:
//			optimizer = FactoryOptimization.leastSquaresTrustRegion(1,
//					RegionStepType.DOG_LEG_FTF,true);
		optimizer = FactoryOptimization.leastSquaresLM(1e-3,true);
//			optimizer = FactoryOptimization.leastSquareLevenberg(1e-3);
	}

	// encode the initial estimate into a flat parameter vector
	double model[] = new double[ initial.size() ];
	initial.convertToParam(model);

	Zhang99OptimizationFunction func = new Zhang99OptimizationFunction(
			initial.createNew(), grid,observations);

	// Both the numerical and analytical Jacobian appear to provide the same results, but the
	// unit test tolerance is so crude that I trust the numerical Jacobian more
//		Zhang99OptimizationJacobian jacobian = new Zhang99OptimizationJacobian(
//				initial.assumeZeroSkew,initial.distortion.length,observations.size(),grid);

	// null Jacobian => optimizer falls back to a numerical Jacobian
	optimizer.setFunction(func,null);
	// f-test tolerance scales with the number of views; g-test tolerance is 1e-10
	optimizer.initialize(model,1e-10,1e-25*observations.size());

	// iterate until converged or the 500-iteration cap is hit,
	// reporting progress to the listener every 25 iterations
	for( int i = 0; i < 500; i++ ) {
		if( optimizer.iterate() ) {
			break;
		} else {
			if( i % 25 == 0 )
				status("Progress "+(100*i/500.0)+"%");
		}
	}

	double param[] = optimizer.getParameters();
	found.setFromParam(param);

	return true;
}

/**
 * Converts results found by the linear algorithms into {@link Zhang99Parameters}
 *
 * @param motions Extrinsic motion (rotation and translation) for each view.
 * @param K 3x3 intrinsic calibration matrix.
 * @param assumeZeroSkew If true the skew parameter is assumed to be zero.
 * @param distort Radial distortion coefficients.
 */
public static Zhang99Parameters convertIntoZhangParam(List<Se3_F64> motions,
													  DenseMatrix64F K,
													  boolean assumeZeroSkew,
													  double[] distort)
{
	Zhang99Parameters ret = new Zhang99Parameters();

	ret.assumeZeroSkew = assumeZeroSkew;

	// intrinsic parameters read straight out of K:
	// focal lengths (a,b), skew (c), and principal point (x0,y0)
	ret.a = K.get(0,0);
	ret.b = K.get(1,1);
	ret.c = K.get(0,1);
	ret.x0 = K.get(0,2);
	ret.y0 = K.get(1,2);

	ret.distortion = distort;

	ret.views = new Zhang99Parameters.View[motions.size()];
	for( int i = 0; i < ret.views.length; i++ ) {
		Se3_F64 m = motions.get(i);

		Zhang99Parameters.View v = new Zhang99Parameters.View();
		v.T = m.getT();
RotationMatrixGenerator.matrixToRodrigues(m.getR(), v.rotation);

		ret.views[i] = v;
	}

	return ret;
}

/**
 * Applies radial distortion to the point, in place.  Computes
 * a = sum_i radial[i] * r^(2*(i+1)) with r^2 = x^2 + y^2, then scales the
 * point by (1 + a).
 *
 * @param pt point in calibrated pixel coordinates; modified in place
 * @param radial radial distortion parameters
 */
public static void applyDistortion(Point2D_F64 pt, double[] radial)
{
	double a = 0;
	double r2 = pt.x*pt.x + pt.y*pt.y;
	double r = r2; // running power of r^2: r^2, r^4, r^6, ...
	for( int i = 0; i < radial.length; i++ ) {
		a += radial[i]*r;
		r *= r2;
	}

	pt.x += pt.x*a;
	pt.y += pt.y*a;
}

/**
 * Specify which optimization algorithm to use.  If never called (or set to null)
 * a default Levenberg-Marquardt optimizer is created internally.
 */
public void setOptimizer(UnconstrainedLeastSquares optimizer) {
	this.optimizer = optimizer;
}

/**
 * Returns the refined calibration parameters computed by {@link #process}.
 */
public Zhang99Parameters getOptimized() {
	return optimized;
}

/**
 * Provides status updates on calibration progress and allows the caller to
 * request early termination.
 */
public static interface Listener
{
	/**
	 * Called to report the current processing stage and to ask whether
	 * processing should continue.
	 *
	 * @param taskName Name of the task being performed
	 * @return true to continue and false to request a stop
	 */
	public boolean zhangUpdate( String taskName );
}
}
package org.sagebionetworks.table.query; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.util.List; import org.junit.Test; import org.sagebionetworks.table.query.model.ColumnReference; import org.sagebionetworks.table.query.model.Predicate; import org.sagebionetworks.table.query.model.QuerySpecification; import org.sagebionetworks.table.query.model.SQLElement; import org.sagebionetworks.table.query.model.SearchCondition; import org.sagebionetworks.table.query.model.SelectList; import org.sagebionetworks.table.query.model.SetFunctionSpecification; import org.sagebionetworks.table.query.model.TableExpression; import org.sagebionetworks.table.query.model.UnsignedNumericLiteral; import org.sagebionetworks.table.query.model.ValueExpressionPrimary; public class TableQueryParserTest { @Test public void testSignedInteger() throws ParseException { UnsignedNumericLiteral element = new TableQueryParser(" 1234567890 ").unsignedNumericLiteral(); assertEquals("1234567890", element.toSql()); } @Test public void testColumnReferenceLiteralLHS() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo"); ColumnReference columnReference = parser.columnReference(); assertNotNull(columnReference); String sql = toSQL(columnReference); assertEquals("foo", sql); } @Test public void testColumnReferenceLiteralLHSAndRHS() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo.bar"); ColumnReference columnReference = parser.columnReference(); assertNotNull(columnReference); String sql = toSQL(columnReference); assertEquals("foo.bar", sql); } @Test public void testColumnReferenceStringLHS() throws ParseException{ TableQueryParser parser = new TableQueryParser("\"with space\""); ColumnReference columnReference = parser.columnReference(); assertNotNull(columnReference); String sql = toSQL(columnReference); 
assertEquals("\"with space\"", sql); } @Test public void testColumnReferenceStringLHSandRHS() throws ParseException{ TableQueryParser parser = new TableQueryParser("\"with space\".\"cat's\""); ColumnReference columnReference = parser.columnReference(); assertNotNull(columnReference); String sql = toSQL(columnReference); assertEquals("\"with space\".\"cat's\"", sql); } @Test public void testSetFunctionSpecification() throws ParseException{ TableQueryParser parser = new TableQueryParser("count( distinct \"name\")"); SetFunctionSpecification setFunction = parser.setFunctionSpecification(); assertNotNull(setFunction); String sql = toSQL(setFunction); assertEquals("COUNT(DISTINCT \"name\")", sql); } @Test public void testValueExpressionPrimaryColumnReference() throws ParseException{ TableQueryParser parser = new TableQueryParser("\"with space\".\"cat's\""); ValueExpressionPrimary valueExpressionPrimary = parser.valueExpressionPrimary(); assertNotNull(valueExpressionPrimary); String sql = toSQL(valueExpressionPrimary); assertEquals("\"with space\".\"cat's\"", sql); } @Test public void testSelectListStart() throws ParseException{ TableQueryParser parser = new TableQueryParser("*"); SelectList element = parser.selectList(); assertNotNull(element); String sql = toSQL(element); assertEquals("*", sql); } @Test public void testSelectListSingleLiteral() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo, \"bar\", max(cats)"); SelectList element = parser.selectList(); assertNotNull(element); String sql = toSQL(element); assertEquals("foo, \"bar\", MAX(cats)", sql); } @Test public void testPredicateComparison() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo.bar >= 10.1e-10"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("foo.bar >= 1.01E-9", sql); } @Test public void testPredicateSignedComparison() throws ParseException { TableQueryParser parser = new TableQueryParser("foo.bar >= -200"); 
Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("foo.bar >= -200", sql); } @Test public void testPredicateNull() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo is not null"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("foo IS NOT NULL", sql); } @Test public void testPredicateNotBetween() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo not between a and b"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("foo NOT BETWEEN a AND b", sql); } @Test public void testPredicateBetween() throws ParseException{ TableQueryParser parser = new TableQueryParser("foo between a and b"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("foo BETWEEN a AND b", sql); } @Test public void testPredicateNotLike() throws ParseException{ TableQueryParser parser = new TableQueryParser("bar not like '%a'"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("bar NOT LIKE '%a'", sql); } @Test public void testPredicateLike() throws ParseException{ TableQueryParser parser = new TableQueryParser("bar like '%a'"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("bar LIKE '%a'", sql); } @Test public void testPredicateNotIn() throws ParseException{ TableQueryParser parser = new TableQueryParser("bar not in(a, b,c)"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("bar NOT IN ( a, b, c )", sql); } @Test public void testPredicateIn() throws ParseException{ TableQueryParser parser = new TableQueryParser("bar in(a, b,c)"); Predicate element = parser.predicate(); String sql = toSQL(element); assertEquals("bar IN ( a, b, c )", sql); } @Test public void testSearchConditionOr() throws ParseException{ TableQueryParser parser = new TableQueryParser("bar <> a or foo > 3"); SearchCondition element = 
parser.searchCondition(); String sql = toSQL(element); assertEquals("bar <> a OR foo > 3", sql); } @Test public void testSearchConditionAnd() throws ParseException{ TableQueryParser parser = new TableQueryParser("bar =1 and foo = 2"); SearchCondition element = parser.searchCondition(); String sql = toSQL(element); assertEquals("bar = 1 AND foo = 2", sql); } @Test public void testSearchConditionNestedOr() throws ParseException{ TableQueryParser parser = new TableQueryParser("(bar =1 and foo = 2) or www is not null"); SearchCondition element = parser.searchCondition(); String sql = toSQL(element); assertEquals("( bar = 1 AND foo = 2 ) OR www IS NOT NULL", sql); } @Test public void testSearchConditionNestedAnd() throws ParseException{ TableQueryParser parser = new TableQueryParser("(bar =1 and foo = 2) and www is not null"); SearchCondition element = parser.searchCondition(); String sql = toSQL(element); assertEquals("( bar = 1 AND foo = 2 ) AND www IS NOT NULL", sql); } @Test public void testTableExpression() throws ParseException{ TableQueryParser parser = new TableQueryParser("from syn123"); TableExpression element = parser.tableExpression(); assertNotNull(element); String sql = toSQL(element); assertEquals("FROM syn123", sql); } @Test public void testTableExpressionWithWhere() throws ParseException{ TableQueryParser parser = new TableQueryParser("from syn123 where a > 'b'"); TableExpression element = parser.tableExpression(); assertNotNull(element); String sql = toSQL(element); assertEquals("FROM syn123 WHERE a > 'b'", sql); } /** * We must be able to pares all of the example SQL. 
*/ @Test public void testAllExamples(){ List<SQLExample> exampleList = SQLExampleProvider.getSQLExamples(); assertNotNull(exampleList); assertTrue(exampleList.size() > 2); // Test each example for(SQLExample example: exampleList){ try{ // Make sure we can parse the SQL System.out.println("Parsing: "+example.getSql()); TableQueryParser.parserQuery(example.getSql()); } catch(ParseException e){ e.printStackTrace(); fail("Failed to parse: '"+example.getSql()+"' Error: "+e.getMessage()); } } } @Test public void testSelectStar() throws ParseException{ // Parse the query into a basic model object QuerySpecification sq = TableQueryParser.parserQuery("select * from syn123"); assertNotNull(sq); assertEquals(null, sq.getSetQuantifier()); assertNotNull(sq.getSelectList()); assertEquals("Simple select * was missing the asterisk", Boolean.TRUE, sq.getSelectList().getAsterisk()); assertEquals("Select * should not have any columns", null, sq.getSelectList().getColumns()); assertNotNull(sq.getTableExpression()); assertNotNull(sq.getTableExpression().getFromClause()); assertNotNull(sq.getTableExpression().getFromClause().getTableReference()); assertNotNull(sq.getTableExpression().getFromClause().getTableReference().getTableName()); assertEquals("syn123", sq.getTableExpression().getFromClause().getTableReference().getTableName()); } @Test public void testSelectTableNameSigned() throws ParseException { // Parse the query into a basic model object QuerySpecification sq = TableQueryParser.parserQuery("select * from syn123"); assertNotNull(sq); assertNotNull(sq.getTableExpression()); assertNotNull(sq.getTableExpression().getFromClause()); assertNotNull(sq.getTableExpression().getFromClause().getTableReference()); assertNotNull(sq.getTableExpression().getFromClause().getTableReference().getTableName()); assertEquals("syn123", sq.getTableExpression().getFromClause().getTableReference().getTableName()); } /** * Helper to convert a SQLElement to its SQL string. 
* @param element the parsed element to render
 * @return the SQL string produced by the element's toString()
 */
public static String toSQL(SQLElement element){
	return element.toString();
}

@Test
public void testSelectMultipleColumns() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select foo, bar, foobar from syn123");
	String sql = toSQL(element);
	assertEquals("SELECT foo, bar, foobar FROM syn123", sql);
}

// Escaped double quotes inside a quoted identifier must survive a parse/render round trip.
@Test
public void testSelectDoubleQuotedColumnName() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select \"foo \"\"&\"\" Bar\" from syn123");
	String sql = toSQL(element);
	assertEquals("SELECT \"foo \"\"&\"\" Bar\" FROM syn123", sql);
}

@Test
public void testSelectGroupBy() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select foo, count(bar) from syn456 group by foo");
	String sql = toSQL(element);
	assertEquals("SELECT foo, COUNT(bar) FROM syn456 GROUP BY foo", sql);
}

// Exercises every clause at once: where, group by, order by, limit and offset,
// including an escaped single quote inside a string literal.
@Test
public void testQueryAllParts() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select foo, count(bar) from syn456 where bar = 'cat''s' group by foo order by bar limit 1 offset 2");
	String sql = toSQL(element);
	assertEquals("SELECT foo, COUNT(bar) FROM syn456 WHERE bar = 'cat''s' GROUP BY foo ORDER BY bar LIMIT 1 OFFSET 2", sql);
}

@Test (expected=ParseException.class)
public void testQueryEndOfFile() throws ParseException{
	// There must not be anything at the end of the query.
	TableQueryParser.parserQuery("select foo from syn456 limit 1 offset 2 select foo");
}

/**
 * See PLFM-2618
 * Validate that literals can contain key words but not be keywords.
* @throws ParseException
 */
@Test
public void testKeyWordsInLiterals() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select doesNotExist, isIn, string, sAnd, sNot, WeLikeIt from SyN456 limit 1 offset 2");
	assertNotNull(element);
	String sql = toSQL(element);
	assertEquals("SELECT doesNotExist, isIn, string, sAnd, sNot, WeLikeIt FROM SyN456 LIMIT 1 OFFSET 2", sql);
}

/**
 * See PLFM-3878: COUNT(DISTINCT ...) with multiple columns must parse.
 * @throws ParseException
 */
// BUG FIX: the @Test annotation was missing, so JUnit never ran this method.
@Test
public void testCountDistinctMultipleColumns() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select count(distinct one, two) from SyN456");
	assertNotNull(element);
	String sql = toSQL(element);
	// While the test was dead it asserted the empty string, which toSql() cannot
	// produce for a successfully parsed query.  NOTE(review): expected value below
	// follows the rendering convention of the other tests — confirm against the parser.
	assertEquals("SELECT COUNT(DISTINCT one, two) FROM SyN456", sql);
}

/**
 * Test for PLFM-4566: division in the select list.
 * @throws ParseException
 */
@Test
public void testArithmetic() throws ParseException{
	QuerySpecification element = TableQueryParser.parserQuery("select foo/100 from syn123");
	assertEquals("SELECT foo/100 FROM syn123", element.toSql());
}

/**
 * Test for PLFM-4510
 * Make sure that all ASCII values are recognized by the parser.
 * A ParseException is acceptable; a TokenMgrError means the lexer
 * failed to handle the character at all.
 */
@Test
public void testAsciiTokens() {
	// note: covers 0-255, i.e. the full extended-ASCII range
	for (char c = 0; c < 256; c++) {
		try {
			TableQueryParser.parserQuery("select foo" + c + " from syn123");
		} catch (ParseException pe) {
			// No problem
		} catch (TokenMgrError tme) {
			fail("Encountered an unexpected TokenMgrError: " + tme);
		}
	}
}
}
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: RowProcessor.proto package org.apache.hadoop.hbase.protobuf.generated; public final class RowProcessorProtos { private RowProcessorProtos() {} public static void registerAllExtensions( com.google.protobuf.ExtensionRegistry registry) { } public interface RowProcessorRequestOrBuilder extends com.google.protobuf.MessageOrBuilder { // required string rowProcessorClassName = 1; boolean hasRowProcessorClassName(); String getRowProcessorClassName(); // optional string rowProcessorInitializerMessageName = 2; boolean hasRowProcessorInitializerMessageName(); String getRowProcessorInitializerMessageName(); // optional bytes rowProcessorInitializerMessage = 3; boolean hasRowProcessorInitializerMessage(); com.google.protobuf.ByteString getRowProcessorInitializerMessage(); } public static final class RowProcessorRequest extends com.google.protobuf.GeneratedMessage implements RowProcessorRequestOrBuilder { // Use RowProcessorRequest.newBuilder() to construct. 
private RowProcessorRequest(Builder builder) { super(builder); } private RowProcessorRequest(boolean noInit) {} private static final RowProcessorRequest defaultInstance; public static RowProcessorRequest getDefaultInstance() { return defaultInstance; } public RowProcessorRequest getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable; } private int bitField0_; // required string rowProcessorClassName = 1; public static final int ROWPROCESSORCLASSNAME_FIELD_NUMBER = 1; private java.lang.Object rowProcessorClassName_; public boolean hasRowProcessorClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getRowProcessorClassName() { java.lang.Object ref = rowProcessorClassName_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { rowProcessorClassName_ = s; } return s; } } private com.google.protobuf.ByteString getRowProcessorClassNameBytes() { java.lang.Object ref = rowProcessorClassName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); rowProcessorClassName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional string rowProcessorInitializerMessageName = 2; public static final int ROWPROCESSORINITIALIZERMESSAGENAME_FIELD_NUMBER = 2; private java.lang.Object rowProcessorInitializerMessageName_; public boolean hasRowProcessorInitializerMessageName() { return 
((bitField0_ & 0x00000002) == 0x00000002); } public String getRowProcessorInitializerMessageName() { java.lang.Object ref = rowProcessorInitializerMessageName_; if (ref instanceof String) { return (String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; String s = bs.toStringUtf8(); if (com.google.protobuf.Internal.isValidUtf8(bs)) { rowProcessorInitializerMessageName_ = s; } return s; } } private com.google.protobuf.ByteString getRowProcessorInitializerMessageNameBytes() { java.lang.Object ref = rowProcessorInitializerMessageName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((String) ref); rowProcessorInitializerMessageName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } // optional bytes rowProcessorInitializerMessage = 3; public static final int ROWPROCESSORINITIALIZERMESSAGE_FIELD_NUMBER = 3; private com.google.protobuf.ByteString rowProcessorInitializerMessage_; public boolean hasRowProcessorInitializerMessage() { return ((bitField0_ & 0x00000004) == 0x00000004); } public com.google.protobuf.ByteString getRowProcessorInitializerMessage() { return rowProcessorInitializerMessage_; } private void initFields() { rowProcessorClassName_ = ""; rowProcessorInitializerMessageName_ = ""; rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRowProcessorClassName()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, getRowProcessorClassNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { output.writeBytes(2, 
getRowProcessorInitializerMessageNameBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { output.writeBytes(3, rowProcessorInitializerMessage_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(1, getRowProcessorClassNameBytes()); } if (((bitField0_ & 0x00000002) == 0x00000002)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(2, getRowProcessorInitializerMessageNameBytes()); } if (((bitField0_ & 0x00000004) == 0x00000004)) { size += com.google.protobuf.CodedOutputStream .computeBytesSize(3, rowProcessorInitializerMessage_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) obj; boolean result = true; result = result && (hasRowProcessorClassName() == other.hasRowProcessorClassName()); if (hasRowProcessorClassName()) { result = result && getRowProcessorClassName() .equals(other.getRowProcessorClassName()); } result = result && (hasRowProcessorInitializerMessageName() == other.hasRowProcessorInitializerMessageName()); if (hasRowProcessorInitializerMessageName()) { result = result && getRowProcessorInitializerMessageName() 
.equals(other.getRowProcessorInitializerMessageName()); } result = result && (hasRowProcessorInitializerMessage() == other.hasRowProcessorInitializerMessage()); if (hasRowProcessorInitializerMessage()) { result = result && getRowProcessorInitializerMessage() .equals(other.getRowProcessorInitializerMessage()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRowProcessorClassName()) { hash = (37 * hash) + ROWPROCESSORCLASSNAME_FIELD_NUMBER; hash = (53 * hash) + getRowProcessorClassName().hashCode(); } if (hasRowProcessorInitializerMessageName()) { hash = (37 * hash) + ROWPROCESSORINITIALIZERMESSAGENAME_FIELD_NUMBER; hash = (53 * hash) + getRowProcessorInitializerMessageName().hashCode(); } if (hasRowProcessorInitializerMessage()) { hash = (37 * hash) + ROWPROCESSORINITIALIZERMESSAGE_FIELD_NUMBER; hash = (53 * hash) + getRowProcessorInitializerMessage().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorRequest_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); rowProcessorClassName_ = ""; bitField0_ = (bitField0_ & ~0x00000001); rowProcessorInitializerMessageName_ = ""; bitField0_ = (bitField0_ & ~0x00000002); rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY; bitField0_ = (bitField0_ & ~0x00000004); return this; } public Builder 
clone() { return create().mergeFrom(buildPartial()); } public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDescriptor(); } public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest getDefaultInstanceForType() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance(); } public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest build() { org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } private org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildParsed() throws com.google.protobuf.InvalidProtocolBufferException { org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException( result).asInvalidProtocolBufferException(); } return result; } public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest buildPartial() { org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest(this); int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) == 0x00000001)) { to_bitField0_ |= 0x00000001; } result.rowProcessorClassName_ = rowProcessorClassName_; if (((from_bitField0_ & 0x00000002) == 0x00000002)) { to_bitField0_ |= 0x00000002; } result.rowProcessorInitializerMessageName_ = rowProcessorInitializerMessageName_; if (((from_bitField0_ & 0x00000004) == 0x00000004)) { to_bitField0_ |= 0x00000004; } result.rowProcessorInitializerMessage_ = rowProcessorInitializerMessage_; 
result.bitField0_ = to_bitField0_; onBuilt(); return result; } public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest) { return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest other) { if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance()) return this; if (other.hasRowProcessorClassName()) { setRowProcessorClassName(other.getRowProcessorClassName()); } if (other.hasRowProcessorInitializerMessageName()) { setRowProcessorInitializerMessageName(other.getRowProcessorInitializerMessageName()); } if (other.hasRowProcessorInitializerMessage()) { setRowProcessorInitializerMessage(other.getRowProcessorInitializerMessage()); } this.mergeUnknownFields(other.getUnknownFields()); return this; } public final boolean isInitialized() { if (!hasRowProcessorClassName()) { return false; } return true; } public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { com.google.protobuf.UnknownFieldSet.Builder unknownFields = com.google.protobuf.UnknownFieldSet.newBuilder( this.getUnknownFields()); while (true) { int tag = input.readTag(); switch (tag) { case 0: this.setUnknownFields(unknownFields.build()); onChanged(); return this; default: { if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) { this.setUnknownFields(unknownFields.build()); onChanged(); return this; } break; } case 10: { bitField0_ |= 0x00000001; rowProcessorClassName_ = input.readBytes(); break; } case 18: { bitField0_ |= 0x00000002; rowProcessorInitializerMessageName_ = input.readBytes(); break; } case 26: { bitField0_ |= 
0x00000004; rowProcessorInitializerMessage_ = input.readBytes(); break; } } } } private int bitField0_; // required string rowProcessorClassName = 1; private java.lang.Object rowProcessorClassName_ = ""; public boolean hasRowProcessorClassName() { return ((bitField0_ & 0x00000001) == 0x00000001); } public String getRowProcessorClassName() { java.lang.Object ref = rowProcessorClassName_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); rowProcessorClassName_ = s; return s; } else { return (String) ref; } } public Builder setRowProcessorClassName(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; rowProcessorClassName_ = value; onChanged(); return this; } public Builder clearRowProcessorClassName() { bitField0_ = (bitField0_ & ~0x00000001); rowProcessorClassName_ = getDefaultInstance().getRowProcessorClassName(); onChanged(); return this; } void setRowProcessorClassName(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000001; rowProcessorClassName_ = value; onChanged(); } // optional string rowProcessorInitializerMessageName = 2; private java.lang.Object rowProcessorInitializerMessageName_ = ""; public boolean hasRowProcessorInitializerMessageName() { return ((bitField0_ & 0x00000002) == 0x00000002); } public String getRowProcessorInitializerMessageName() { java.lang.Object ref = rowProcessorInitializerMessageName_; if (!(ref instanceof String)) { String s = ((com.google.protobuf.ByteString) ref).toStringUtf8(); rowProcessorInitializerMessageName_ = s; return s; } else { return (String) ref; } } public Builder setRowProcessorInitializerMessageName(String value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000002; rowProcessorInitializerMessageName_ = value; onChanged(); return this; } public Builder clearRowProcessorInitializerMessageName() { bitField0_ = (bitField0_ & ~0x00000002); rowProcessorInitializerMessageName_ = 
getDefaultInstance().getRowProcessorInitializerMessageName(); onChanged(); return this; } void setRowProcessorInitializerMessageName(com.google.protobuf.ByteString value) { bitField0_ |= 0x00000002; rowProcessorInitializerMessageName_ = value; onChanged(); } // optional bytes rowProcessorInitializerMessage = 3; private com.google.protobuf.ByteString rowProcessorInitializerMessage_ = com.google.protobuf.ByteString.EMPTY; public boolean hasRowProcessorInitializerMessage() { return ((bitField0_ & 0x00000004) == 0x00000004); } public com.google.protobuf.ByteString getRowProcessorInitializerMessage() { return rowProcessorInitializerMessage_; } public Builder setRowProcessorInitializerMessage(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000004; rowProcessorInitializerMessage_ = value; onChanged(); return this; } public Builder clearRowProcessorInitializerMessage() { bitField0_ = (bitField0_ & ~0x00000004); rowProcessorInitializerMessage_ = getDefaultInstance().getRowProcessorInitializerMessage(); onChanged(); return this; } // @@protoc_insertion_point(builder_scope:RowProcessorRequest) } static { defaultInstance = new RowProcessorRequest(true); defaultInstance.initFields(); } // @@protoc_insertion_point(class_scope:RowProcessorRequest) } public interface RowProcessorResultOrBuilder extends com.google.protobuf.MessageOrBuilder { // required bytes rowProcessorResult = 1; boolean hasRowProcessorResult(); com.google.protobuf.ByteString getRowProcessorResult(); } public static final class RowProcessorResult extends com.google.protobuf.GeneratedMessage implements RowProcessorResultOrBuilder { // Use RowProcessorResult.newBuilder() to construct. 
private RowProcessorResult(Builder builder) { super(builder); } private RowProcessorResult(boolean noInit) {} private static final RowProcessorResult defaultInstance; public static RowProcessorResult getDefaultInstance() { return defaultInstance; } public RowProcessorResult getDefaultInstanceForType() { return defaultInstance; } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable; } private int bitField0_; // required bytes rowProcessorResult = 1; public static final int ROWPROCESSORRESULT_FIELD_NUMBER = 1; private com.google.protobuf.ByteString rowProcessorResult_; public boolean hasRowProcessorResult() { return ((bitField0_ & 0x00000001) == 0x00000001); } public com.google.protobuf.ByteString getRowProcessorResult() { return rowProcessorResult_; } private void initFields() { rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasRowProcessorResult()) { memoizedIsInitialized = 0; return false; } memoizedIsInitialized = 1; return true; } public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeBytes(1, rowProcessorResult_); } getUnknownFields().writeTo(output); } private int memoizedSerializedSize = -1; public int getSerializedSize() { int size = memoizedSerializedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) == 0x00000001)) { size += 
com.google.protobuf.CodedOutputStream .computeBytesSize(1, rowProcessorResult_); } size += getUnknownFields().getSerializedSize(); memoizedSerializedSize = size; return size; } private static final long serialVersionUID = 0L; @java.lang.Override protected java.lang.Object writeReplace() throws java.io.ObjectStreamException { return super.writeReplace(); } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)) { return super.equals(obj); } org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other = (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) obj; boolean result = true; result = result && (hasRowProcessorResult() == other.hasRowProcessorResult()); if (hasRowProcessorResult()) { result = result && getRowProcessorResult() .equals(other.getRowProcessorResult()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; } @java.lang.Override public int hashCode() { int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasRowProcessorResult()) { hash = (37 * hash) + ROWPROCESSORRESULT_FIELD_NUMBER; hash = (53 * hash) + getRowProcessorResult().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); return hash; } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); 
} public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return newBuilder().mergeFrom(data, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom(java.io.InputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { Builder builder = newBuilder(); if (builder.mergeDelimitedFrom(input, extensionRegistry)) { return builder.buildParsed(); } else { return null; } } public static org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return newBuilder().mergeFrom(input).buildParsed(); } public static 
org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return newBuilder().mergeFrom(input, extensionRegistry) .buildParsed(); } public static Builder newBuilder() { return Builder.create(); } public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult prototype) { return newBuilder().mergeFrom(prototype); } public Builder toBuilder() { return newBuilder(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } public static final class Builder extends com.google.protobuf.GeneratedMessage.Builder<Builder> implements org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_descriptor; } protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable() { return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.internal_static_RowProcessorResult_fieldAccessorTable; } // Construct using org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { } } private static Builder create() { return new Builder(); } public Builder clear() { super.clear(); rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY; 
bitField0_ = (bitField0_ & ~0x00000001);
return this;
}

// ---------------------------------------------------------------------------
// NOTE(review): protoc-generated code (see the @@protoc_insertion_point
// markers below). Comments were added for readability only; do not hand-edit
// the logic — regenerate from RowProcessor.proto instead.
// ---------------------------------------------------------------------------

// Returns a fresh Builder pre-populated with this builder's current state.
public Builder clone() {
  return create().mergeFrom(buildPartial());
}

public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
  return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDescriptor();
}

public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult getDefaultInstanceForType() {
  return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance();
}

// Builds the message; throws (unchecked) if required fields are unset.
public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult build() {
  org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Like build(), but reports missing required fields as a checked
// InvalidProtocolBufferException (used on the parsing path).
private org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildParsed()
    throws com.google.protobuf.InvalidProtocolBufferException {
  org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(
      result).asInvalidProtocolBufferException();
  }
  return result;
}

// Copies the builder state into a new message without checking required
// fields; the has-bits are translated from builder to message form.
public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult buildPartial() {
  org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult result = new org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.rowProcessorResult_ = rowProcessorResult_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}

public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) {
    return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult other) {
  if (other == org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()) return this;
  if (other.hasRowProcessorResult()) {
    setRowProcessorResult(other.getRowProcessorResult());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  return this;
}

// rowProcessorResult is the only required field of this message.
public final boolean isInitialized() {
  if (!hasRowProcessorResult()) {
    return false;
  }
  return true;
}

// Wire-format parser: tag 0 means end of stream; tag 10 is field 1
// (rowProcessorResult, length-delimited); anything else is preserved as an
// unknown field. The default-before-case-10 ordering is protoc's.
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
    com.google.protobuf.UnknownFieldSet.newBuilder(
      this.getUnknownFields());
  while (true) {
    int tag = input.readTag();
    switch (tag) {
      case 0:
        this.setUnknownFields(unknownFields.build());
        onChanged();
        return this;
      default: {
        if (!parseUnknownField(input, unknownFields,
                               extensionRegistry, tag)) {
          this.setUnknownFields(unknownFields.build());
          onChanged();
          return this;
        }
        break;
      }
      case 10: {
        bitField0_ |= 0x00000001;
        rowProcessorResult_ = input.readBytes();
        break;
      }
    }
  }
}

// Bit 0 tracks presence of rowProcessorResult.
private int bitField0_;

// required bytes rowProcessorResult = 1;
private com.google.protobuf.ByteString rowProcessorResult_ = com.google.protobuf.ByteString.EMPTY;

public boolean hasRowProcessorResult() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}

public com.google.protobuf.ByteString getRowProcessorResult() {
  return rowProcessorResult_;
}

public Builder setRowProcessorResult(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
  rowProcessorResult_ = value;
  onChanged();
  return this;
}

// Clears the has-bit and restores the field to its default (empty) value.
public Builder clearRowProcessorResult() {
  bitField0_ = (bitField0_ & ~0x00000001);
  rowProcessorResult_ = getDefaultInstance().getRowProcessorResult();
  onChanged();
  return this;
}

// @@protoc_insertion_point(builder_scope:RowProcessorResult)
}

static {
  defaultInstance = new RowProcessorResult(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:RowProcessorResult)
}

// Generated RPC service with one method, process(RowProcessorRequest) ->
// RowProcessorResult; provides async (Stub) and blocking (BlockingStub)
// client wrappers plus reflective server-side adapters.
public static abstract class RowProcessorService
    implements com.google.protobuf.Service {
  protected RowProcessorService() {}

  public interface Interface {
    public abstract void process(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done);
  }

  // Wraps an Interface implementation as a generic protobuf Service.
  public static com.google.protobuf.Service newReflectiveService(
      final Interface impl) {
    return new RowProcessorService() {
      @java.lang.Override
      public void process(
          com.google.protobuf.RpcController controller,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done) {
        impl.process(controller, request, done);
      }
    };
  }

  // Wraps a BlockingInterface implementation as a generic BlockingService,
  // dispatching by method index (0 = process).
  public static com.google.protobuf.BlockingService
      newReflectiveBlockingService(final BlockingInterface impl) {
    return new com.google.protobuf.BlockingService() {
      public final com.google.protobuf.Descriptors.ServiceDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }

      public final com.google.protobuf.Message callBlockingMethod(
          com.google.protobuf.Descriptors.MethodDescriptor method,
          com.google.protobuf.RpcController controller,
          com.google.protobuf.Message request)
          throws com.google.protobuf.ServiceException {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.callBlockingMethod() given method descriptor for " +
            "wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return impl.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)request);
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }

      public final com.google.protobuf.Message
          getRequestPrototype(
          com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getRequestPrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }

      public final com.google.protobuf.Message
          getResponsePrototype(
          com.google.protobuf.Descriptors.MethodDescriptor method) {
        if (method.getService() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "Service.getResponsePrototype() given method " +
            "descriptor for wrong service type.");
        }
        switch(method.getIndex()) {
          case 0:
            return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance();
          default:
            throw new java.lang.AssertionError("Can't get here.");
        }
      }
    };
  }

  public abstract void process(
      com.google.protobuf.RpcController controller,
      org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
      com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done);

  public static final com.google.protobuf.Descriptors.ServiceDescriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.getDescriptor().getServices().get(0);
  }

  public final com.google.protobuf.Descriptors.ServiceDescriptor
      getDescriptorForType() {
    return getDescriptor();
  }

  // Async server-side dispatch: narrows the generic callback to the
  // concrete response type before invoking the abstract process().
  public final void callMethod(
      com.google.protobuf.Descriptors.MethodDescriptor method,
      com.google.protobuf.RpcController controller,
      com.google.protobuf.Message request,
      com.google.protobuf.RpcCallback<
        com.google.protobuf.Message> done) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.callMethod() given method descriptor for wrong " +
        "service type.");
    }
    switch(method.getIndex()) {
      case 0:
        this.process(controller, (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest)request,
          com.google.protobuf.RpcUtil.<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult>specializeCallback(
            done));
        return;
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }

  public final com.google.protobuf.Message
      getRequestPrototype(
      com.google.protobuf.Descriptors.MethodDescriptor method) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.getRequestPrototype() given method " +
        "descriptor for wrong service type.");
    }
    switch(method.getIndex()) {
      case 0:
        return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.getDefaultInstance();
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }

  public final com.google.protobuf.Message
      getResponsePrototype(
      com.google.protobuf.Descriptors.MethodDescriptor method) {
    if (method.getService() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException(
        "Service.getResponsePrototype() given method " +
        "descriptor for wrong service type.");
    }
    switch(method.getIndex()) {
      case 0:
        return org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance();
      default:
        throw new java.lang.AssertionError("Can't get here.");
    }
  }

  public static Stub newStub(
      com.google.protobuf.RpcChannel channel) {
    return new Stub(channel);
  }

  // Async client stub: forwards process() over the supplied RpcChannel.
  public static final class Stub extends org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorService implements Interface {
    private Stub(com.google.protobuf.RpcChannel channel) {
      this.channel = channel;
    }

    private final com.google.protobuf.RpcChannel channel;

    public com.google.protobuf.RpcChannel getChannel() {
      return channel;
    }

    public void process(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request,
        com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult> done) {
      channel.callMethod(
        getDescriptor().getMethods().get(0),
        controller,
        request,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance(),
        com.google.protobuf.RpcUtil.generalizeCallback(
          done,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class,
          org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance()));
    }
  }

  public static BlockingInterface newBlockingStub(
      com.google.protobuf.BlockingRpcChannel channel) {
    return new BlockingStub(channel);
  }

  public interface BlockingInterface {
    public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request)
        throws com.google.protobuf.ServiceException;
  }

  // Blocking client stub: forwards process() over a BlockingRpcChannel.
  private static final class BlockingStub implements BlockingInterface {
    private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
      this.channel = channel;
    }

    private final com.google.protobuf.BlockingRpcChannel channel;

    public org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult process(
        com.google.protobuf.RpcController controller,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest request)
        throws com.google.protobuf.ServiceException {
      return (org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult) channel.callBlockingMethod(
        getDescriptor().getMethods().get(0),
        controller,
        request,
        org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.getDefaultInstance());
    }
  }
}

// Descriptor/accessor-table plumbing for the two messages in this file.
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_RowProcessorRequest_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_RowProcessorRequest_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
  internal_static_RowProcessorResult_descriptor;
private static
  com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internal_static_RowProcessorResult_fieldAccessorTable;

public static com.google.protobuf.Descriptors.FileDescriptor
    getDescriptor() {
  return descriptor;
}

private static com.google.protobuf.Descriptors.FileDescriptor
    descriptor;

// Serialized form of RowProcessor.proto; the assigner callback wires up the
// message descriptors and field accessor tables once the file descriptor is
// built.
static {
  java.lang.String[] descriptorData = {
    "\n\022RowProcessor.proto\"\210\001\n\023RowProcessorReq" +
    "uest\022\035\n\025rowProcessorClassName\030\001 \002(\t\022*\n\"r" +
    "owProcessorInitializerMessageName\030\002 \001(\t\022" +
    "&\n\036rowProcessorInitializerMessage\030\003 \001(\014\"" +
    "0\n\022RowProcessorResult\022\032\n\022rowProcessorRes" +
    "ult\030\001 \002(\0142K\n\023RowProcessorService\0224\n\007proc" +
    "ess\022\024.RowProcessorRequest\032\023.RowProcessor" +
    "ResultBH\n*org.apache.hadoop.hbase.protob" +
    "uf.generatedB\022RowProcessorProtosH\001\210\001\001\240\001\001"
  };
  com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
    new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
      public com.google.protobuf.ExtensionRegistry assignDescriptors(
          com.google.protobuf.Descriptors.FileDescriptor root) {
        descriptor = root;
        internal_static_RowProcessorRequest_descriptor =
          getDescriptor().getMessageTypes().get(0);
        internal_static_RowProcessorRequest_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_RowProcessorRequest_descriptor,
            new java.lang.String[] { "RowProcessorClassName", "RowProcessorInitializerMessageName", "RowProcessorInitializerMessage", },
            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.class,
            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorRequest.Builder.class);
        internal_static_RowProcessorResult_descriptor =
          getDescriptor().getMessageTypes().get(1);
        internal_static_RowProcessorResult_fieldAccessorTable = new
          com.google.protobuf.GeneratedMessage.FieldAccessorTable(
            internal_static_RowProcessorResult_descriptor,
            new java.lang.String[] { "RowProcessorResult", },
            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.class,
            org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.RowProcessorResult.Builder.class);
        return null;
      }
    };
  com.google.protobuf.Descriptors.FileDescriptor
    .internalBuildGeneratedFileFrom(descriptorData,
      new com.google.protobuf.Descriptors.FileDescriptor[] {
      }, assigner);
}

// @@protoc_insertion_point(outer_class_scope)
}
/* * Copyright (C) 2013 salesforce.com, inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.auraframework.impl.root.parser.handler; import java.util.List; import java.util.Map; import java.util.Set; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import org.auraframework.Aura; import org.auraframework.builder.RootDefinitionBuilder; import org.auraframework.def.AttributeDef; import org.auraframework.def.AttributeDefRef; import org.auraframework.def.BaseComponentDef; import org.auraframework.def.BaseComponentDef.WhitespaceBehavior; import org.auraframework.def.ComponentDef; import org.auraframework.def.ComponentDefRef; import org.auraframework.def.ControllerDef; import org.auraframework.def.DefDescriptor; import org.auraframework.def.DocumentationDef; import org.auraframework.def.FlavoredStyleDef; import org.auraframework.def.HelperDef; import org.auraframework.def.InterfaceDef; import org.auraframework.def.MethodDef; import org.auraframework.def.ModelDef; import org.auraframework.def.ProviderDef; import org.auraframework.def.RendererDef; import org.auraframework.def.RequiredVersionDef; import org.auraframework.def.ResourceDef; import org.auraframework.def.SVGDef; import org.auraframework.def.StyleDef; import org.auraframework.def.design.DesignDef; import org.auraframework.expression.PropertyReference; import org.auraframework.impl.css.util.Flavors; import org.auraframework.impl.root.AttributeDefImpl; import 
org.auraframework.impl.root.AttributeDefRefImpl;
import org.auraframework.impl.root.RequiredVersionDefImpl;
import org.auraframework.impl.root.component.BaseComponentDefImpl.Builder;
import org.auraframework.impl.root.event.RegisterEventDefImpl;
import org.auraframework.impl.system.DefDescriptorImpl;
import org.auraframework.impl.system.SubDefDescriptorImpl;
import org.auraframework.impl.util.TextTokenizer;
import org.auraframework.system.AuraContext;
import org.auraframework.system.MasterDefRegistry;
import org.auraframework.system.Source;
import org.auraframework.system.SubDefDescriptor;
import org.auraframework.throwable.quickfix.QuickFixException;
import org.auraframework.util.AuraTextUtil;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;

/**
 * Base XML tag handler for component-like root definitions (components and
 * applications). Parses the root tag's attributes and child tags into a
 * {@link BaseComponentDefImpl.Builder}, resolving implicit companion
 * definitions (client controller/renderer/helper, style, model, provider,
 * documentation, design, SVG) by naming convention.
 */
public abstract class BaseComponentDefHandler<T extends BaseComponentDef, B extends Builder<T>> extends RootTagHandler<T> {

    private static final String ATTRIBUTE_RENDER = "render";
    protected static final String ATTRIBUTE_TEMPLATE = "template";
    private static final String ATTRIBUTE_PROVIDER = "provider";
    private static final String ATTRIBUTE_EXTENSIBLE = "extensible";
    private static final String ATTRIBUTE_ABSTRACT = "abstract";
    private static final String ATTRIBUTE_IMPLEMENTS = "implements";
    private static final String ATTRIBUTE_EXTENDS = "extends";
    private static final String ATTRIBUTE_STYLE = "style";
    private static final String ATTRIBUTE_HELPER = "helper";
    private static final String ATTRIBUTE_RENDERER = "renderer";
    private static final String ATTRIBUTE_MODEL = "model";
    private static final String ATTRIBUTE_CONTROLLER = "controller";
    private static final String ATTRIBUTE_WHITESPACE = "whitespace";
    private static final String ATTRIBUTE_TOKEN_OVERRIDES = "tokens";
    private static final String ATTRIBUTE_DEFAULT_FLAVOR = "defaultFlavor";
    private static final String ATTRIBUTE_DYNAMICALLY_FLAVORABLE = "dynamicallyFlavorable";

    /** Attributes every namespace may use on this tag. */
    protected static final Set<String> ALLOWED_ATTRIBUTES = new ImmutableSet.Builder<String>()
            .add(ATTRIBUTE_IMPLEMENTS, ATTRIBUTE_ACCESS, ATTRIBUTE_MODEL, ATTRIBUTE_CONTROLLER, ATTRIBUTE_EXTENDS,
                    ATTRIBUTE_EXTENSIBLE, ATTRIBUTE_ABSTRACT, RootTagHandler.ATTRIBUTE_API_VERSION)
            .addAll(RootTagHandler.ALLOWED_ATTRIBUTES).build();

    /**
     * Superset of {@link #ALLOWED_ATTRIBUTES} additionally permitted in
     * internal (privileged) namespaces.
     */
    protected static final Set<String> INTERNAL_ALLOWED_ATTRIBUTES = new ImmutableSet.Builder<String>().add(
            ATTRIBUTE_RENDER, ATTRIBUTE_TEMPLATE, ATTRIBUTE_PROVIDER, ATTRIBUTE_STYLE, ATTRIBUTE_HELPER,
            ATTRIBUTE_RENDERER, ATTRIBUTE_WHITESPACE, ATTRIBUTE_TOKEN_OVERRIDES, ATTRIBUTE_DEFAULT_FLAVOR,
            ATTRIBUTE_DYNAMICALLY_FLAVORABLE)
            .addAll(ALLOWED_ATTRIBUTES).addAll(RootTagHandler.INTERNAL_ALLOWED_ATTRIBUTES)
            .build();

    // Counter used to mint unique descriptors for nested sub-definitions.
    private int innerCount = 0;
    // Accumulates the tag's body (child defrefs and text) until createDefinition().
    private final List<ComponentDefRef> body = Lists.newArrayList();
    protected B builder;

    public BaseComponentDefHandler() {
        super();
    }

    public BaseComponentDefHandler(DefDescriptor<T> componentDefDescriptor, Source<?> source, XMLStreamReader xmlReader) {
        super(componentDefDescriptor, source, xmlReader);
        builder = createBuilder();
        builder.setLocation(getLocation());
        builder.setDescriptor(componentDefDescriptor);
        if (source != null) {
            builder.setOwnHash(source.getHash());
        }
        builder.events = Maps.newHashMap();
        builder.interfaces = Sets.newLinkedHashSet();
        builder.eventHandlers = Lists.newArrayList();
        builder.imports = Lists.newArrayList();
        builder.controllerDescriptors = Lists.newArrayList();
        builder.facets = Lists.newArrayList();
    }

    /**
     * Returns the attribute names legal on this tag for the current
     * namespace.
     *
     * <p>BUGFIX(review): previously the internal branch returned
     * {@code RootTagHandler.INTERNAL_ALLOWED_ATTRIBUTES}, bypassing this
     * class's own {@link #INTERNAL_ALLOWED_ATTRIBUTES} (which was then dead)
     * and thereby rejecting every component-level attribute — including
     * render/template/provider/style/helper/renderer consumed by
     * {@link #readAttributes()} — for internal namespaces.
     */
    @Override
    public Set<String> getAllowedAttributes() {
        return isInInternalNamespace ? INTERNAL_ALLOWED_ATTRIBUTES : ALLOWED_ATTRIBUTES;
    }

    /**
     * Dispatches each child tag to its specialized handler, enforcing
     * name-uniqueness and attribute/event/method cross-conflicts; anything
     * unrecognized is treated as a component def ref in the body.
     */
    @Override
    protected void handleChildTag() throws XMLStreamException, QuickFixException {
        String tag = getTagName();
        if (AttributeDefHandler.TAG.equalsIgnoreCase(tag)) {
            AttributeDefHandler<T> handler = new AttributeDefHandler<>(this, xmlReader, source);
            AttributeDefImpl attributeDef = handler.getElement();
            DefDescriptor<AttributeDef> attributeDesc = attributeDef.getDescriptor();
            // Descriptor used only to detect a method with the same name.
            DefDescriptor<MethodDef> methodDef = DefDescriptorImpl.getInstance(attributeDesc.getName(), MethodDef.class);
            if (builder.getAttributeDefs().containsKey(attributeDesc)) {
                tagError("There is already an attribute named '%s' on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        attributeDesc.getName(),
                        "%s", "%s");
            }
            if (builder.getMethodDefs().containsKey(methodDef)) {
                tagError("The attribute '%s' conflicts with a method of the same name on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        attributeDesc.getName(),
                        "%s", "%s");
            }
            builder.getAttributeDefs().put(attributeDef.getDescriptor(), attributeDef);
        } else if (isInInternalNamespace && RequiredVersionDefHandler.TAG.equalsIgnoreCase(tag)) {
            RequiredVersionDefHandler<T> handler = new RequiredVersionDefHandler<>(this, xmlReader, source);
            RequiredVersionDefImpl requiredVersionDef = handler.getElement();
            DefDescriptor<RequiredVersionDef> requiredVersionDesc = requiredVersionDef.getDescriptor();
            if (builder.getRequiredVersionDefs().containsKey(requiredVersionDesc)) {
                tagError("There is already a namespace '%s' on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        requiredVersionDesc.getName(),
                        "%s", "%s");
            }
            builder.getRequiredVersionDefs().put(requiredVersionDesc, requiredVersionDef);
        } else if (RegisterEventHandler.TAG.equalsIgnoreCase(tag)) {
            RegisterEventHandler<T> handler = new RegisterEventHandler<>(this, xmlReader, source);
            RegisterEventDefImpl regDef = handler.getElement();
            DefDescriptor<MethodDef> methodDef = DefDescriptorImpl.getInstance(regDef.getAttributeName(), MethodDef.class);
            if (builder.events.containsKey(regDef.getAttributeName())) {
                tagError("There is already an event named '%s' registered on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        regDef.getAttributeName(),
                        "%s", "%s");
            }
            if (builder.getMethodDefs().containsKey(methodDef)) {
                tagError("The event '%s' conflicts with a method of the same name on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        regDef.getAttributeName(),
                        "%s", "%s");
            }
            builder.events.put(regDef.getAttributeName(), regDef);
        } else if (EventHandlerDefHandler.TAG.equalsIgnoreCase(tag)) {
            builder.eventHandlers.add(new EventHandlerDefHandler(this, xmlReader, source).getElement());
        } else if (LibraryDefRefHandler.TAG.equalsIgnoreCase(tag)) {
            builder.imports.add(new LibraryDefRefHandler(this, xmlReader, source).getElement());
        } else if (AttributeDefRefHandler.TAG.equalsIgnoreCase(tag)) {
            builder.facets.add(new AttributeDefRefHandler<>(this, xmlReader, source).getElement());
        } else if (DependencyDefHandler.TAG.equalsIgnoreCase(tag)) {
            builder.addDependency(new DependencyDefHandler<>(this, xmlReader, source).getElement());
        } else if (ClientLibraryDefHandler.TAG.equalsIgnoreCase(tag)) {
            builder.addClientLibrary(new ClientLibraryDefHandler<>(this, xmlReader, source).getElement());
        } else if (MethodDefHandler.TAG.equalsIgnoreCase(tag)) {
            MethodDefHandler<T> handler = new MethodDefHandler<>(this, xmlReader, source);
            MethodDef methodDef = handler.getElement();
            DefDescriptor<MethodDef> methodDesc = methodDef.getDescriptor();
            String methodName = methodDesc.getName();
            if (builder.getAttributeDefs().containsKey(DefDescriptorImpl.getInstance(methodName, AttributeDef.class))) {
                tagError("The method '%s' conflicts with an attribute of the same name on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        methodName,
                        "%s", "%s");
            }
            if (builder.events.containsKey(methodName)) {
                tagError("The method '%s' conflicts with an event of the same name on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        methodName,
                        "%s", "%s");
            }
            if (builder.getMethodDefs().containsKey(methodDesc)) {
                tagError("There is already a method named '%s' on %s '%s'.",
                        handler.getParentHandler().getDefDescriptor(),
                        methodName,
                        "%s", "%s");
            }
            builder.getMethodDefs().put(methodDef.getDescriptor(), methodDef);
        } else {
            // if it wasn't one of the above, it must be a defref, or an error
            ComponentDefRef cdr = getDefRefHandler(this).getElement();
            if (cdr.isFlavorable() || cdr.hasFlavorableChild()) {
                builder.setHasFlavorableChild(true);
            }
            body.add(cdr);
        }
    }

    /** Creates the concrete builder for the definition type being parsed. */
    protected abstract B createBuilder();

    @Override
    protected RootDefinitionBuilder<T> getBuilder() {
        return builder;
    }

    /**
     * Collects text children into the body; whitespace-only text is dropped
     * when the whitespace behavior is OPTIMIZE.
     */
    @Override
    protected void handleChildText() throws XMLStreamException, QuickFixException {
        String text = xmlReader.getText();
        boolean skip = getWhitespaceBehavior() == WhitespaceBehavior.OPTIMIZE
                ? AuraTextUtil.isNullEmptyOrWhitespace(text)
                : AuraTextUtil.isNullOrEmpty(text);
        if (!skip) {
            TextTokenizer tokenizer = TextTokenizer.tokenize(text, getLocation(), getWhitespaceBehavior());
            body.addAll(tokenizer.asComponentDefRefs(this));
        }
    }

    /**
     * Bases the decision for allowing embedded scripts on the system
     * attribute isTemplate.
     *
     * @return true if isTemplate is true
     */
    @Override
    public boolean getAllowsScript() {
        return builder.isTemplate;
    }

    /**
     * Reads the root tag's attributes into the builder, falling back to
     * convention-named companion definitions (js:// and apex:// controller,
     * model, renderer, helper; css:// style; markup:// documentation, design
     * and SVG) when the attribute is absent but the definition exists.
     */
    @SuppressWarnings("unchecked")
    @Override
    protected void readAttributes() throws QuickFixException {
        AuraContext context = Aura.getContextService().getCurrentContext();
        MasterDefRegistry mdr = context.getDefRegistry();
        // Pushed so nested descriptor lookups resolve against this def;
        // popped in the finally block below.
        context.pushCallingDescriptor(builder.getDescriptor());
        try {
            super.readAttributes();
            String controllerName = getAttributeValue(ATTRIBUTE_CONTROLLER);
            DefDescriptor<ControllerDef> controllerDescriptor = null;
            if (controllerName != null) {
                controllerDescriptor = getDefDescriptor(controllerName, ControllerDef.class);
            } else {
                String apexControllerName = String.format("apex://%s.%sController",
                        defDescriptor.getNamespace(),
                        AuraTextUtil.initCap(defDescriptor.getName()));
                DefDescriptor<ControllerDef> apexDescriptor = DefDescriptorImpl
                        .getInstance(apexControllerName, ControllerDef.class);
                if (mdr.exists(apexDescriptor)) {
                    controllerDescriptor = apexDescriptor;
                }
            }
            if (controllerDescriptor != null) {
                builder.controllerDescriptors.add(controllerDescriptor);
            }

            String modelName = getAttributeValue(ATTRIBUTE_MODEL);
            if (modelName != null) {
                builder.modelDefDescriptor = DefDescriptorImpl.getInstance(modelName, ModelDef.class);
            } else {
                // Prefer a js:// model, then an apex:// model, by convention.
                String jsModelName = String.format("js://%s.%s",
                        defDescriptor.getNamespace(), defDescriptor.getName());
                DefDescriptor<ModelDef> jsDescriptor = DefDescriptorImpl
                        .getInstance(jsModelName, ModelDef.class);
                if (mdr.exists(jsDescriptor)) {
                    builder.modelDefDescriptor = jsDescriptor;
                } else {
                    String apexModelName = String.format("apex://%s.%sModel",
                            defDescriptor.getNamespace(),
                            AuraTextUtil.initCap(defDescriptor.getName()));
                    DefDescriptor<ModelDef> apexDescriptor = DefDescriptorImpl
                            .getInstance(apexModelName, ModelDef.class);
                    if (mdr.exists(apexDescriptor)) {
                        builder.modelDefDescriptor = apexDescriptor;
                    }
                }
            }

            // See if there is a clientController that has the same qname.
            String jsDescriptorName = String.format("js://%s.%s",
                    defDescriptor.getNamespace(), defDescriptor.getName());
            DefDescriptor<ControllerDef> jsDescriptor = DefDescriptorImpl
                    .getInstance(jsDescriptorName, ControllerDef.class);
            if (mdr.exists(jsDescriptor)) {
                builder.controllerDescriptors.add(jsDescriptor);
            }

            //
            // TODO: W-1501702
            // Need to handle dual renderers for aura:placeholder
            //
            String rendererName = getAttributeValue(ATTRIBUTE_RENDERER);
            if (rendererName != null) {
                List<String> rendererNames = AuraTextUtil.splitSimpleAndTrim(rendererName, ",", 0);
                for (String renderer : rendererNames) {
                    builder.addRenderer(renderer);
                }
            } else {
                // See if there is a clientRenderer that has the same qname.
                DefDescriptor<RendererDef> jsRendererDescriptor = DefDescriptorImpl
                        .getInstance(jsDescriptorName, RendererDef.class);
                if (mdr.exists(jsRendererDescriptor)) {
                    builder.addRenderer(jsRendererDescriptor.getQualifiedName());
                }
            }

            String helperName = getAttributeValue(ATTRIBUTE_HELPER);
            if (helperName != null) {
                List<String> helperNames = AuraTextUtil.splitSimpleAndTrim(helperName, ",", 0);
                for (String helper : helperNames) {
                    builder.addHelper(helper);
                }
            } else {
                // See if there is a helper that has the same qname.
                DefDescriptor<HelperDef> jsHelperDescriptor = DefDescriptorImpl
                        .getInstance(jsDescriptorName, HelperDef.class);
                if (mdr.exists(jsHelperDescriptor)) {
                    builder.addHelper(jsHelperDescriptor.getQualifiedName());
                }
            }

            DefDescriptor<ResourceDef> jsResourceDescriptor = DefDescriptorImpl
                    .getInstance(jsDescriptorName, ResourceDef.class);
            if (mdr.exists(jsResourceDescriptor)) {
                builder.addResource(jsResourceDescriptor.getQualifiedName());
            }

            // See if there is a style that has the same qname.
            String styleName = getAttributeValue(ATTRIBUTE_STYLE);
            if (AuraTextUtil.isNullEmptyOrWhitespace(styleName)) {
                styleName = String.format("css://%s.%s",
                        defDescriptor.getNamespace(), defDescriptor.getName());
            }
            DefDescriptor<StyleDef> cssDescriptor = DefDescriptorImpl.getInstance(styleName, StyleDef.class);
            if (mdr.exists(cssDescriptor)) {
                builder.styleDescriptor = cssDescriptor;
            }
            DefDescriptor<ResourceDef> cssResourceDescriptor = DefDescriptorImpl.getInstance(styleName, ResourceDef.class);
            if (mdr.exists(cssResourceDescriptor)) {
                builder.addResource(cssResourceDescriptor.getQualifiedName());
            }

            // see if there is a flavored style def that has the same qname
            DefDescriptor<FlavoredStyleDef> flavorDesc = Flavors.standardFlavorDescriptor(defDescriptor);
            if (mdr.exists(flavorDesc)) {
                builder.flavoredStyleDescriptor = flavorDesc;
            }

            String extendsName = getAttributeValue(ATTRIBUTE_EXTENDS);
            if (extendsName != null) {
                builder.extendsDescriptor = getDefDescriptor(extendsName,
                        (Class<T>) defDescriptor.getDefType().getPrimaryInterface());
            }

            String implementsNames = getAttributeValue(ATTRIBUTE_IMPLEMENTS);
            if (implementsNames != null) {
                for (String implementsName : AuraTextUtil.splitSimple(",", implementsNames)) {
                    builder.interfaces.add(getDefDescriptor((implementsName.trim()), InterfaceDef.class));
                }
            }

            builder.isAbstract = getBooleanAttributeValue(ATTRIBUTE_ABSTRACT);
            // if a component is abstract, it should be extensible by default
            if (builder.isAbstract && getAttributeValue(ATTRIBUTE_EXTENSIBLE) == null) {
                // JBUCH: HALO: TODO: THEN THIS SHOULD THROW AN ERROR.
                builder.isExtensible = true;
            } else {
                builder.isExtensible = getBooleanAttributeValue(ATTRIBUTE_EXTENSIBLE);
            }

            String providerName = getAttributeValue(ATTRIBUTE_PROVIDER);
            if (providerName != null) {
                List<String> providerNames = AuraTextUtil.splitSimpleAndTrim(providerName, ",", 0);
                for (String provider : providerNames) {
                    builder.addProvider(provider);
                }
            } else {
                String apexProviderName = String.format("apex://%s.%sProvider",
                        defDescriptor.getNamespace(),
                        AuraTextUtil.initCap(defDescriptor.getName()));
                DefDescriptor<ProviderDef> apexDescriptor = DefDescriptorImpl
                        .getInstance(apexProviderName, ProviderDef.class);
                if (mdr.exists(apexDescriptor)) {
                    builder.addProvider(apexDescriptor.getQualifiedName());
                }
            }

            String templateName = getAttributeValue(ATTRIBUTE_TEMPLATE);
            if (templateName != null) {
                builder.templateDefDescriptor = DefDescriptorImpl.getInstance(templateName, ComponentDef.class);
            }

            DefDescriptor<DocumentationDef> documentationDescriptor = DefDescriptorImpl.getAssociateDescriptor(
                    builder.getDescriptor(), DocumentationDef.class, DefDescriptor.MARKUP_PREFIX);
            if (mdr.exists(documentationDescriptor)) {
                builder.setDocumentation(documentationDescriptor.getQualifiedName());
            }

            DefDescriptor<DesignDef> designDescriptor = DefDescriptorImpl.getAssociateDescriptor(
                    builder.getDescriptor(), DesignDef.class, DefDescriptor.MARKUP_PREFIX);
            if (mdr.exists(designDescriptor)) {
                builder.designDefDescriptor = designDescriptor;
            }

            DefDescriptor<SVGDef> svgDescriptor = DefDescriptorImpl.getAssociateDescriptor(
                    builder.getDescriptor(), SVGDef.class, DefDescriptor.MARKUP_PREFIX);
            if (mdr.exists(svgDescriptor)) {
                builder.svgDefDescriptor = svgDescriptor;
            }

            builder.render = getAttributeValue(ATTRIBUTE_RENDER);

            String whitespaceVal = getAttributeValue(ATTRIBUTE_WHITESPACE);
            builder.whitespaceBehavior = whitespaceVal == null ? WhitespaceBehavior.OPTIMIZE
                    : WhitespaceBehavior.valueOf(whitespaceVal.toUpperCase());

            builder.setAccess(readAccessAttribute());

            String tokenOverrides = getAttributeValue(ATTRIBUTE_TOKEN_OVERRIDES);
            if (!AuraTextUtil.isNullEmptyOrWhitespace(tokenOverrides)) {
                builder.setTokenOverrides(tokenOverrides);
            }

            // flavor overrides can only be parsed in the app handler for now--
            // need to figure out a solution to bring it here (see notes there)

            String defaultFlavor = getAttributeValue(ATTRIBUTE_DEFAULT_FLAVOR);
            if (!AuraTextUtil.isNullEmptyOrWhitespace(defaultFlavor)) {
                builder.setDefaultFlavor(defaultFlavor);
            }

            if (getBooleanAttributeValue(ATTRIBUTE_DYNAMICALLY_FLAVORABLE)) {
                builder.setDynamicallyFlavorable(true);
            }
        } finally {
            context.popCallingDescriptor();
        }
    }

    public void setRender(String val) {
        builder.render = val;
    }

    @Override
    public void setWhitespaceBehavior(WhitespaceBehavior val) {
        builder.whitespaceBehavior = val;
    }

    @Override
    public WhitespaceBehavior getWhitespaceBehavior() {
        return builder.whitespaceBehavior;
    }

    /**
     * Mints a unique sub-definition descriptor of the form
     * {@code <type><n>} scoped to this definition.
     */
    public SubDefDescriptor<ComponentDef, T> createSubComponentDefDescriptor(String type) {
        return SubDefDescriptorImpl.getInstance(type + (innerCount++), getDefDescriptor(), ComponentDef.class);
    }

    @SuppressWarnings("unchecked")
    public void addSubDef(SubDefDescriptor<ComponentDef, ? extends BaseComponentDef> descriptor, ComponentDef inner) {
        builder.addSubDef((SubDefDescriptor<ComponentDef, T>) descriptor, inner);
    }

    /**
     * Finalizes parsing: wraps any accumulated body into the implicit
     * {@code body} attribute facet, applies required-version defs, and builds
     * the definition.
     */
    @Override
    protected T createDefinition() throws QuickFixException {
        if (!body.isEmpty()) {
            AttributeDefRefImpl.Builder atBuilder = new AttributeDefRefImpl.Builder();
            atBuilder.setDescriptor(DefDescriptorImpl.getInstance(AttributeDefRefImpl.BODY_ATTRIBUTE_NAME, AttributeDef.class));
            atBuilder.setLocation(getLocation());
            atBuilder.setValue(body);
            AttributeDefRef adr = atBuilder.build();
            builder.facets.add(adr);
        }
        Map<DefDescriptor<RequiredVersionDef>, RequiredVersionDef> requiredVersionDefs = readRequiredVersionDefs(defDescriptor);
        if (requiredVersionDefs != null) {
            builder.setRequiredVersionDefs(requiredVersionDefs);
        }
        return builder.build();
    }

    @Override
    public void addExpressionReferences(Set<PropertyReference> propRefs) {
        builder.addAllExpressionRefs(propRefs);
    }
}
package com.beef.dataorigin.web.datacommittask.dao; import java.beans.IntrospectionException; import java.lang.reflect.InvocationTargetException; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.List; import org.apache.log4j.Logger; import com.beef.dataorigin.context.data.MDBTable; import com.beef.dataorigin.web.dao.DODataDao; import com.beef.dataorigin.web.data.DODataModificationCommitTask; import com.beef.dataorigin.web.data.DODataModificationCommitTaskBundle; import com.beef.dataorigin.web.util.DOServiceMsgUtil; import com.beef.dataorigin.web.util.DOServiceUtil; import com.salama.service.clouddata.util.dao.QueryDataDao; import com.salama.service.clouddata.util.dao.UpdateDataDao; import com.salama.util.db.JDBCUtil; public class DODataModificationCommitTaskSchedulerDao { private final static Logger logger = Logger.getLogger(DODataModificationCommitTaskSchedulerDao.class); public static enum DataModificationCommitTaskModType {ModTypeInsert, ModTypeUpdate, ModTypeDelete}; public final static int TASK_MOD_TYPE_UPDATE = 0; public final static int TASK_MOD_TYPE_INSERT = 1; public final static int TASK_MOD_TYPE_DELETE = -1; public final static int TASK_COMMIT_STATUS_WAIT_TO_COMMIT = 0; public final static int TASK_COMMIT_STATUS_SUCCESS = 1; public final static int TASK_COMMIT_STATUS_FAIL = -1; public final static int TASK_BUNDLE_STATUS_WAIT_TO_START = 0; public final static int TASK_BUNDLE_STATUS_STARTED = 1; public final static int TASK_BUNDLE_STATUS_FINISHED = 2; /* private static final String SQL_FIND_TASK_BUNDLE_WAIT_TO_EXECUTE = "select " + " `table_name`, schedule_commit_time, count(1) as cnt" + " from DODataModificationCommitTask" + " where commit_status = 0" + " group by `table_name`, schedule_commit_time" + " order by schedule_commit_time limit ?, ?" 
; public static List<DODataModificationCommitTaskBundle> findTaskBundleWaitToExecute(Connection conn, int maxCount) throws SQLException { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(SQL_FIND_TASK_BUNDLE_WAIT_TO_EXECUTE); int index = 1; stmt.setInt(index++, 0); stmt.setInt(index++, maxCount); ResultSet rs = stmt.executeQuery(); List<DODataModificationCommitTaskBundle> dataList = new ArrayList<DODataModificationCommitTaskBundle>(); DODataModificationCommitTaskBundle data; while(rs.next()) { data = new DODataModificationCommitTaskBundle(); data.setTable_name(rs.getString("table_name")); data.setData_row_count_of_total(rs.getInt("cnt")); data.setSchedule_commit_time(rs.getLong("schedule_commit_time")); data.setData_row_count_of_did_commit(0); data.setTask_bundle_status(TASK_BUNDLE_STATUS_WAIT_TO_START); dataList.add(data); } return dataList; } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } */ private static final String SQL_FIND_TASK_BUNDLE_WAIT_TO_EXECUTE = "select " + " *" + " from DODataModificationCommitTaskBundle" + " where task_bundle_status = 0 and schedule_commit_time <= ?" + " order by schedule_commit_time limit ?, ?" 
; public static List<DODataModificationCommitTaskBundle> findTaskBundleWaitToExecute(Connection conn, int maxCount, long maxScheduleTime ) throws SQLException, IntrospectionException, IllegalAccessException, InstantiationException, InvocationTargetException { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(SQL_FIND_TASK_BUNDLE_WAIT_TO_EXECUTE); int index = 1; stmt.setLong(index++, maxScheduleTime); stmt.setInt(index++, 0); stmt.setInt(index++, maxCount); ResultSet rs = stmt.executeQuery(); List<DODataModificationCommitTaskBundle> dataList = new ArrayList<DODataModificationCommitTaskBundle>(); DODataModificationCommitTaskBundle data; boolean isIgnorePropertiesNotExist = true; while(rs.next()) { data = (DODataModificationCommitTaskBundle) JDBCUtil.ResultSetToData(rs, DODataModificationCommitTaskBundle.class, isIgnorePropertiesNotExist); dataList.add(data); } return dataList; } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } private static final String SQL_FIND_TASK_TO_EXECUTE = "select * " + " from DODataModificationCommitTask" + " where `table_name` = ? and schedule_commit_time = ? and commit_status = 0 " + " limit ?, ?" 
; public static List<DODataModificationCommitTask> findTaskToExecute( Connection conn, int maxCount, String table_name, long schedule_commit_time ) throws SQLException, InstantiationException, InvocationTargetException, IllegalAccessException { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(SQL_FIND_TASK_TO_EXECUTE); int index = 1; stmt.setString(index++, table_name); stmt.setLong(index++, schedule_commit_time); stmt.setInt(index++, 0); stmt.setInt(index++, maxCount); List<DODataModificationCommitTask> dataList = QueryDataDao.findData(stmt, DODataModificationCommitTask.class); return dataList; } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } private static final String SQL_UPDATE_COMMIT_TASK_STATUS = "update `DODataModificationCommitTask` set" + " commit_time = ?, retried_count = retried_count + 1, " + " commit_status = ?, error_msg = ? " + " where `table_name` = ? and schedule_commit_time = ? and sql_primary_key = ?" ; public static int updateDataCommitTaskStatus( Connection conn, DODataModificationCommitTask dataCommitTask ) throws SQLException, InstantiationException, InvocationTargetException, IllegalAccessException { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(SQL_UPDATE_COMMIT_TASK_STATUS); int index = 1; stmt.setLong(index++, dataCommitTask.getCommit_time()); stmt.setInt(index++, dataCommitTask.getCommit_status()); stmt.setString(index++, dataCommitTask.getError_msg()); stmt.setString(index++, dataCommitTask.getTable_name()); stmt.setLong(index++, dataCommitTask.getSchedule_commit_time()); stmt.setString(index++, dataCommitTask.getSql_primary_key()); return stmt.executeUpdate(); } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } private static final String SQL_GROUP_COUNT_DATA_COMMIT_TASK = "select " + " `table_name`, schedule_commit_time, commit_status, count(1) as cnt" + " from DODataModificationCommitTask" + " where table_name = ? 
and schedule_commit_time = ? " + " group by `table_name`, schedule_commit_time, commit_status " ; private static final String SQL_REFRESH_DATA_COMMIT_TASK_BUNDLE = " update DODataModificationCommitTaskBundle set" + " data_row_count_of_total = ?, " + " data_row_count_of_did_commit = ?, " + " update_time = ? " + " where table_name = ? and schedule_commit_time = ?" ; public static int refreshDataCommitTaskBundle( Connection conn, String table_name, long schedule_commit_time ) throws SQLException { PreparedStatement stmt = null; int countOfWaitToCommit = 0; int countOfSuccess = 0; int countOfFail = 0; try { stmt = conn.prepareStatement(SQL_GROUP_COUNT_DATA_COMMIT_TASK); int index = 1; stmt.setString(index++, table_name); stmt.setLong(index++, schedule_commit_time); ResultSet rs = stmt.executeQuery(); int cnt; int commit_status; while(rs.next()) { commit_status = rs.getInt("commit_status"); cnt = rs.getInt("cnt"); if(commit_status == TASK_COMMIT_STATUS_WAIT_TO_COMMIT) { countOfWaitToCommit = cnt; } else if(commit_status == TASK_COMMIT_STATUS_SUCCESS) { countOfSuccess = cnt; } else if(commit_status == TASK_COMMIT_STATUS_FAIL) { countOfFail = cnt; } } } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } try { int totalDataCount = countOfWaitToCommit + countOfSuccess + countOfFail; int committedDataCount = countOfSuccess + countOfFail; long curTime = System.currentTimeMillis(); stmt = conn.prepareStatement(SQL_REFRESH_DATA_COMMIT_TASK_BUNDLE); int index = 1; stmt.setInt(index++, totalDataCount); stmt.setInt(index++, committedDataCount); stmt.setLong(index++, curTime); stmt.setString(index++, table_name); stmt.setLong(index++, schedule_commit_time); int updCnt = stmt.executeUpdate(); if(updCnt == 0) { //insert DODataModificationCommitTaskBundle data = new DODataModificationCommitTaskBundle(); data.setTable_name(table_name); data.setSchedule_commit_time(schedule_commit_time); data.setData_row_count_of_total(totalDataCount); 
data.setData_row_count_of_did_commit(committedDataCount); data.setCommit_start_time(0); data.setCommit_finish_time(0); data.setUpdate_time(curTime); updCnt = UpdateDataDao.insertData(conn, "DODataModificationCommitTaskBundle", data); } return updCnt; } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } public static int createDataCommitTask( Connection conn, MDBTable mDBTable, Object data, DataModificationCommitTaskModType modType, long schedule_commit_time, String adminId ) throws SQLException, InstantiationException, InvocationTargetException, IllegalAccessException, IllegalArgumentException, IntrospectionException { String sqlPrimaryKey = DODataDao.makeSqlConditionOfPrimaryKey(conn, mDBTable.getTableName(), data); return createDataCommitTaskBySqlPK(conn, mDBTable, sqlPrimaryKey, modType, schedule_commit_time, adminId); } public static int createDataCommitTaskBySqlPK( Connection conn, MDBTable mDBTable, String sqlPrimaryKey, DataModificationCommitTaskModType modType, long schedule_commit_time, String adminId ) throws SQLException, InstantiationException, InvocationTargetException, IllegalAccessException, IllegalArgumentException, IntrospectionException { DODataModificationCommitTask dataCommitTask = new DODataModificationCommitTask(); if(modType == DataModificationCommitTaskModType.ModTypeInsert) { dataCommitTask.setMod_type(TASK_MOD_TYPE_INSERT); } else if(modType == DataModificationCommitTaskModType.ModTypeUpdate) { dataCommitTask.setMod_type(TASK_MOD_TYPE_UPDATE); } else { dataCommitTask.setMod_type(TASK_MOD_TYPE_DELETE); } dataCommitTask.setCommit_status(TASK_COMMIT_STATUS_WAIT_TO_COMMIT); dataCommitTask.setSchedule_commit_time(schedule_commit_time); dataCommitTask.setSql_primary_key(sqlPrimaryKey); dataCommitTask.setTable_name(mDBTable.getTableName()); dataCommitTask.setUpdate_admin(adminId); dataCommitTask.setUpdate_time(System.currentTimeMillis()); //insert or update to DB int updCnt = 0; try { updCnt = 
UpdateDataDao.insertData(conn, "DODataModificationCommitTask", dataCommitTask); } catch(SQLException sqle) { if(sqle.getClass().getSimpleName().equalsIgnoreCase("MySQLIntegrityConstraintViolationException")) { //duplicated key, then update updCnt = UpdateDataDao.updateData(conn, "DODataModificationCommitTask", dataCommitTask, new String[] {"table_name", "schedule_commit_time", "sql_primary_key"} ); } else { throw sqle; } } return updCnt; } private static final String SQL_UPDATE_DATA_COMMIT_TASK_BUNDLE_SCHEDULE_TIME = " update DODataModificationCommitTaskBundle set" + " schedule_commit_time = ?, " + " update_time = ? " + " where table_name = ? and schedule_commit_time = ? " ; public static int updateDataCommitTaskBundleScheduleTime( Connection conn, String table_name, long schedule_commit_time, long newSchedule_commit_time ) throws SQLException, InstantiationException, InvocationTargetException, IllegalAccessException, IllegalArgumentException, IntrospectionException { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(SQL_UPDATE_DATA_COMMIT_TASK_BUNDLE_SCHEDULE_TIME); int index = 1; stmt.setLong(index++, newSchedule_commit_time); stmt.setLong(index++, System.currentTimeMillis()); stmt.setString(index++, table_name); stmt.setLong(index++, schedule_commit_time); return stmt.executeUpdate(); } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } private static final String SQL_UPDATE_DATA_COMMIT_TASK_SCHEDULE_TIME = " update DODataModificationCommitTask set" + " schedule_commit_time = ?, " + " update_time = ? " + " where table_name = ? and schedule_commit_time = ? 
and commit_status = 0" ; public static int updateDataCommitTaskScheduleTime( Connection conn, String table_name, long schedule_commit_time, long newSchedule_commit_time ) throws SQLException, InstantiationException, InvocationTargetException, IllegalAccessException, IllegalArgumentException, IntrospectionException { PreparedStatement stmt = null; try { stmt = conn.prepareStatement(SQL_UPDATE_DATA_COMMIT_TASK_SCHEDULE_TIME); int index = 1; stmt.setLong(index++, newSchedule_commit_time); stmt.setLong(index++, System.currentTimeMillis()); stmt.setString(index++, table_name); stmt.setLong(index++, schedule_commit_time); return stmt.executeUpdate(); } finally { try { stmt.close(); } catch(Throwable e) { logger.error(null, e); } } } public static DODataModificationCommitTaskBundle copyData(DODataModificationCommitTaskBundle taskBundle) { DODataModificationCommitTaskBundle newTaskBundle = new DODataModificationCommitTaskBundle(); newTaskBundle.setCommit_finish_time(taskBundle.getCommit_finish_time()); newTaskBundle.setCommit_start_time(taskBundle.getCommit_start_time()); newTaskBundle.setData_row_count_of_did_commit(taskBundle.getData_row_count_of_did_commit()); newTaskBundle.setData_row_count_of_total(taskBundle.getData_row_count_of_total()); newTaskBundle.setSchedule_commit_time(taskBundle.getSchedule_commit_time()); newTaskBundle.setTable_name(taskBundle.getTable_name()); newTaskBundle.setTask_bundle_status(taskBundle.getTask_bundle_status()); newTaskBundle.setUpdate_time(taskBundle.getUpdate_time()); return newTaskBundle; } }
// Copyright 2016 Google, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
////////////////////////////////////////////////////////////////////////////////

package com.firebase.jobdispatcher;

import static android.content.Context.BIND_AUTO_CREATE;
import static com.firebase.jobdispatcher.TestUtil.encodeContentUriJob;
import static com.firebase.jobdispatcher.TestUtil.encodeRecurringContentUriJob;
import static com.firebase.jobdispatcher.TestUtil.getContentUriTrigger;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertFalse;
import static junit.framework.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.inOrder;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import static org.mockito.Mockito.when;

import android.app.Service;
import android.content.Context;
import android.content.Intent;
import android.net.Uri;
import android.os.Binder;
import android.os.Build.VERSION_CODES;
import android.os.Bundle;
import android.os.IBinder;
import android.os.Messenger;
import android.os.Parcel;
import android.provider.ContactsContract;
import android.provider.ContactsContract.Contacts;
import android.provider.MediaStore.Images.Media;
import android.support.annotation.NonNull;
import com.firebase.jobdispatcher.ExecutionDelegator.JobFinishedCallback;
import com.firebase.jobdispatcher.GooglePlayReceiverTest.ShadowMessenger;
import com.firebase.jobdispatcher.JobInvocation.Builder;
import com.firebase.jobdispatcher.TestUtil.InspectableBinder;
import com.google.android.gms.gcm.PendingCallback;
import java.util.ArrayList;
import java.util.Arrays;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InOrder;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.robolectric.Robolectric;
import org.robolectric.RobolectricTestRunner;
import org.robolectric.annotation.Config;
import org.robolectric.annotation.Implements;

/** Tests for the {@link GooglePlayReceiver} class. */
@RunWith(RobolectricTestRunner.class)
@Config(
    manifest = Config.NONE,
    sdk = 21,
    shadows = {ShadowMessenger.class})
public class GooglePlayReceiverTest {

  /**
   * The default ShadowMessenger implementation causes NPEs when using the {@link
   * Messenger#Messenger(Handler)} constructor. We create our own empty Shadow so we can just use
   * the standard Android implementation, which is totally fine.
   *
   * @see <a href="https://github.com/robolectric/robolectric/issues/2246">Robolectric issue</a>
   */
  @Implements(Messenger.class)
  public static class ShadowMessenger {}

  // Spy on the real Robolectric-built service so lifecycle calls (stopSelf etc.) are verifiable.
  private GooglePlayReceiver receiver;

  @Mock private Messenger messengerMock;
  @Mock private Context contextMock;
  @Mock private IBinder binderMock;
  @Mock private JobCallback callbackMock;
  @Mock private ExecutionDelegator executionDelegatorMock;
  @Mock private JobCallback jobCallbackMock;
  @Mock private JobServiceConnection jobServiceConnectionMock;
  @Mock private Driver driverMock;
  @Mock private ConstraintChecker contraintCheckerMock;
  @Captor private ArgumentCaptor<Job> jobArgumentCaptor;
  @Captor ArgumentCaptor<JobServiceConnection> jobServiceConnectionCaptor;

  // Minimal valid invocation reused by several tests.
  private final Builder jobInvocationBuilder =
      new Builder()
          .setTag("tag")
          .setService(TestJobService.class.getName())
          .setTrigger(Trigger.NOW);

  @Before
  public void setUp() throws Exception {
    MockitoAnnotations.initMocks(this);

    receiver = spy(Robolectric.buildService(GooglePlayReceiver.class).create().get());
    when(contraintCheckerMock.areConstraintsSatisfied(any(JobInvocation.class))).thenReturn(true);
    when(receiver.getExecutionDelegator()).thenReturn(executionDelegatorMock);
    receiver.setGooglePlayDriver(driverMock);
    receiver.setValidationEnforcer(new ValidationEnforcer(new NoopJobValidator()));
    when(contextMock.getPackageName()).thenReturn("foo.bar.whatever");
  }

  @After
  public void tearDown() {
    // Static state must be cleared so tests don't leak callbacks/connections into each other.
    GooglePlayReceiver.clearCallbacks();
    ExecutionDelegator.cleanServiceConnections();
  }

  // onSchedule with no job running must be a no-op (no exception).
  @Test
  public void onReschedule_notRunning_noException() {
    Job job =
        TestUtil.getBuilderWithNoopValidator()
            .setService(TestJobService.class)
            .setTrigger(Trigger.NOW)
            .setTag("TAG")
            .build();
    GooglePlayReceiver.onSchedule(job);
  }

  // onSchedule for a tag different from the prepared job must be a no-op.
  @Test
  public void onReschedule_notRunningWrongTag_noException() {
    Bundle bundle = TestUtil.getBundleForContentJobExecution();
    Job job =
        TestUtil.getBuilderWithNoopValidator()
            .setService(TestJobService.class)
            .setTrigger(Trigger.NOW)
            .setTag("TAG")
            .build();
    receiver.prepareJob(jobCallbackMock, bundle);
    GooglePlayReceiver.onSchedule(job);
  }

  // Rescheduling a job that is currently bound must stop it and drop its service connection.
  @Test
  public void onReschedule_stopJob() {
    Bundle bundle = TestUtil.getBundleForContentJobExecution();
    JobCoder prefixedCoder = new JobCoder(BundleProtocol.PACKED_PARAM_BUNDLE_PREFIX);
    JobInvocation invocation = prefixedCoder.decodeIntentBundle(bundle);

    Job job =
        TestUtil.getBuilderWithNoopValidator()
            .setService(TestJobService.class)
            .setTrigger(invocation.getTrigger())
            .setTag(invocation.getTag())
            .build();

    receiver.prepareJob(jobCallbackMock, bundle);

    when(contextMock.bindService(
            any(Intent.class), any(JobServiceConnection.class), eq(BIND_AUTO_CREATE)))
        .thenReturn(true);

    new ExecutionDelegator(contextMock, mock(JobFinishedCallback.class), contraintCheckerMock)
        .executeJob(invocation);

    verify(contextMock)
        .bindService(any(Intent.class), jobServiceConnectionCaptor.capture(), eq(BIND_AUTO_CREATE));

    assertTrue(jobServiceConnectionCaptor.getValue().hasJobInvocation(invocation));

    GooglePlayReceiver.onSchedule(job);

    assertFalse(jobServiceConnectionCaptor.getValue().hasJobInvocation(invocation));
    assertNull(
        "JobServiceConnection should be removed.",
        ExecutionDelegator.getJobServiceConnection(invocation.getService()));
  }

  // Finishing a job that was never prepared must be silently ignored.
  @Test
  public void onJobFinished_unknownJobCallbackIsNotPresent_ignoreNoException() {
    receiver.onJobFinished(jobInvocationBuilder.build(), JobService.RESULT_SUCCESS);
    verifyZeroInteractions(driverMock);
  }

  // Non-recurring content-URI job: result is sent to the callback, no reschedule via the driver.
  @Test
  public void onJobFinished_notRecurringContentJob_sendResult() {
    jobInvocationBuilder.setTrigger(
        Trigger.contentUriTrigger(Arrays.asList(new ObservedUri(Contacts.CONTENT_URI, 0))));

    JobInvocation jobInvocation =
        receiver.prepareJob(callbackMock, TestUtil.getBundleForContentJobExecution());

    receiver.onJobFinished(jobInvocation, JobService.RESULT_SUCCESS);

    verify(callbackMock).jobFinished(JobService.RESULT_SUCCESS);
    verifyZeroInteractions(driverMock);
  }

  // Recurring content-URI job that succeeds must be rescheduled through the driver.
  @Test
  public void onJobFinished_successRecurringContentJob_reschedule() {
    JobInvocation jobInvocation =
        receiver.prepareJob(callbackMock, getBundleForContentJobExecutionRecurring());

    receiver.onJobFinished(jobInvocation, JobService.RESULT_SUCCESS);

    verify(driverMock).schedule(jobArgumentCaptor.capture());

    // No need to callback when job finished.
    // Reschedule request is treated as two events: completion of old job and scheduling of new
    // job with the same parameters.
    verifyZeroInteractions(callbackMock);

    Job rescheduledJob = jobArgumentCaptor.getValue();
    TestUtil.assertJobsEqual(jobInvocation, rescheduledJob);
  }

  // Recurring content-URI job that asks for a retry sends the result instead of rescheduling.
  @Test
  public void onJobFinished_failWithRetryRecurringContentJob_sendResult() {
    JobInvocation jobInvocation =
        receiver.prepareJob(callbackMock, getBundleForContentJobExecutionRecurring());

    receiver.onJobFinished(jobInvocation, JobService.RESULT_FAIL_RETRY);

    // If a job finishes with RESULT_FAIL_RETRY we don't need to send a reschedule request.
    // Rescheduling will erase previously triggered URIs.
    verify(callbackMock).jobFinished(JobService.RESULT_FAIL_RETRY);
    verifyZeroInteractions(driverMock);
  }

  // prepareJob(Intent) must surface the triggered content URIs on the resulting invocation.
  @Test
  public void prepareJob() {
    Intent intent = new Intent();

    Bundle encode = encodeContentUriJob(getContentUriTrigger(), TestUtil.JOB_CODER);
    intent.putExtra(GooglePlayJobWriter.REQUEST_PARAM_EXTRAS, encode);

    Parcel container = Parcel.obtain();
    container.writeStrongBinder(new Binder());
    PendingCallback pcb = new PendingCallback(container);
    intent.putExtra("callback", pcb);

    ArrayList<Uri> uris = new ArrayList<>();
    uris.add(ContactsContract.AUTHORITY_URI);
    uris.add(Media.EXTERNAL_CONTENT_URI);
    intent.putParcelableArrayListExtra(BundleProtocol.PACKED_PARAM_TRIGGERED_URIS, uris);

    JobInvocation jobInvocation = receiver.prepareJob(intent);
    assertEquals(jobInvocation.getTriggerReason().getTriggeredContentUris(), uris);
  }

  // An empty bundle cannot be decoded; the callback must receive a no-retry failure.
  @Test
  public void prepareJob_messenger() {
    JobInvocation jobInvocation = receiver.prepareJob(callbackMock, new Bundle());
    assertNull(jobInvocation);
    verify(callbackMock).jobFinished(JobService.RESULT_FAIL_NORETRY);
  }

  @Test
  public void prepareJob_messenger_noExtras() {
    Bundle bundle = TestUtil.getBundleForContentJobExecution();

    JobInvocation jobInvocation = receiver.prepareJob(callbackMock, bundle);

    assertEquals(jobInvocation.getTriggerReason().getTriggeredContentUris(), TestUtil.URIS);
  }

  /** Builds an execution bundle for a recurring content-URI job with TestUtil's standard URIs. */
  @NonNull
  private Bundle getBundleForContentJobExecutionRecurring() {
    Bundle bundle = new Bundle();

    Bundle encode = encodeRecurringContentUriJob(getContentUriTrigger(), TestUtil.JOB_CODER);
    bundle.putBundle(GooglePlayJobWriter.REQUEST_PARAM_EXTRAS, encode);

    bundle.putParcelableArrayList(BundleProtocol.PACKED_PARAM_TRIGGERED_URIS, TestUtil.URIS);
    return bundle;
  }

  // Binding twice with the execute action must return the same binder instance.
  @Test
  public void onBind() {
    Intent intent = new Intent(GooglePlayReceiver.ACTION_EXECUTE);
    IBinder binderA = receiver.onBind(intent);
    IBinder binderB = receiver.onBind(intent);
    assertEquals(binderA, binderB);
  }

  @Test
  public void onBind_nullIntent() {
    IBinder binder = receiver.onBind(null);
    assertNull(binder);
  }

  @Test
  public void onBind_wrongAction() {
    Intent intent = new Intent("test");
    IBinder binder = receiver.onBind(intent);
    assertNull(binder);
  }

  // Binding is only supported on LOLLIPOP+; KITKAT must get null.
  @Test
  @Config(sdk = VERSION_CODES.KITKAT)
  public void onBind_wrongBuild() {
    Intent intent = new Intent(GooglePlayReceiver.ACTION_EXECUTE);
    IBinder binder = receiver.onBind(intent);
    assertNull(binder);
  }

  @Test
  public void onStartCommand_nullIntent() {
    assertResultWasStartNotSticky(receiver.onStartCommand(null, 0, 101));
    verify(receiver).stopSelf(101);
  }

  @Test
  public void onStartCommand_initAction() {
    Intent initIntent = new Intent("com.google.android.gms.gcm.SERVICE_ACTION_INITIALIZE");
    assertResultWasStartNotSticky(receiver.onStartCommand(initIntent, 0, 101));
    verify(receiver).stopSelf(101);
  }

  // Unknown actions stop the service per start-id, in order.
  @Test
  public void onStartCommand_unknownAction() {
    Intent unknownIntent = new Intent("com.example.foo.bar");
    assertResultWasStartNotSticky(receiver.onStartCommand(unknownIntent, 0, 101));
    assertResultWasStartNotSticky(receiver.onStartCommand(unknownIntent, 0, 102));
    assertResultWasStartNotSticky(receiver.onStartCommand(unknownIntent, 0, 103));
    InOrder inOrder = inOrder(receiver);
    inOrder.verify(receiver).stopSelf(101);
    inOrder.verify(receiver).stopSelf(102);
    inOrder.verify(receiver).stopSelf(103);
  }

  @Test
  public void onStartCommand_executeActionWithEmptyExtras() {
    Intent execIntent = new Intent("com.google.android.gms.gcm.ACTION_TASK_READY");
    assertResultWasStartNotSticky(receiver.onStartCommand(execIntent, 0, 101));
    verify(receiver).stopSelf(101);
  }

  // A valid execute intent must delegate the job and only stopSelf once the job finishes.
  @Test
  public void onStartCommand_executeAction() {
    JobInvocation job =
        new JobInvocation.Builder()
            .setTag("tag")
            .setService("com.example.foo.FooService")
            .setTrigger(Trigger.NOW)
            .setRetryStrategy(RetryStrategy.DEFAULT_EXPONENTIAL)
            .setLifetime(Lifetime.UNTIL_NEXT_BOOT)
            .setConstraints(new int[] {Constraint.DEVICE_IDLE})
            .build();
    Intent execIntent =
        new Intent("com.google.android.gms.gcm.ACTION_TASK_READY")
            .putExtra(
                "extras",
                new JobCoder(BundleProtocol.PACKED_PARAM_BUNDLE_PREFIX).encode(job, new Bundle()))
            .putExtra("callback", new InspectableBinder().toPendingCallback());
    assertResultWasStartNotSticky(receiver.onStartCommand(execIntent, 0, 101));

    verify(receiver, never()).stopSelf(anyInt());
    verify(executionDelegatorMock).executeJob(any(JobInvocation.class));

    receiver.onJobFinished(job, JobService.RESULT_SUCCESS);

    verify(receiver).stopSelf(101);
  }

  // Helper: every onStartCommand path must return START_NOT_STICKY.
  private static void assertResultWasStartNotSticky(int result) {
    assertEquals(
        "Result for onStartCommand wasn't START_NOT_STICKY", Service.START_NOT_STICKY, result);
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */
// NOTE(review): auto-generated data model -- per the header above, do not hand-edit logic;
// regenerate from the Deployment Manager discovery document instead.

package com.google.api.services.deploymentmanager.model;

/**
 * Model definition for ResourceUpdate.
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Google Cloud Deployment Manager API. For a detailed
 * explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class ResourceUpdate extends com.google.api.client.json.GenericJson {

  /**
   * The Access Control Policy to set on this resource after updating the resource itself.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ResourceAccessControl accessControl;

  /**
   * Output only. If errors are generated during update of the resource, this field will be
   * populated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Error error;

  /**
   * Output only. The expanded properties of the resource with reference values expanded. Returned
   * as serialized YAML.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String finalProperties;

  /**
   * Output only. The intent of the resource: PREVIEW, UPDATE, or CANCEL.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String intent;

  /**
   * Output only. URL of the manifest representing the update configuration of this resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String manifest;

  /**
   * Output only. The set of updated properties for this resource, before references are expanded.
   * Returned as serialized YAML.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String properties;

  /**
   * Output only. The state of the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String state;

  /**
   * Output only. If warning messages are generated during processing of this resource, this field
   * will be populated.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<Warnings> warnings;

  static {
    // hack to force ProGuard to consider Warnings used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(Warnings.class);
  }

  /**
   * The Access Control Policy to set on this resource after updating the resource itself.
   * @return value or {@code null} for none
   */
  public ResourceAccessControl getAccessControl() {
    return accessControl;
  }

  /**
   * The Access Control Policy to set on this resource after updating the resource itself.
   * @param accessControl accessControl or {@code null} for none
   */
  public ResourceUpdate setAccessControl(ResourceAccessControl accessControl) {
    this.accessControl = accessControl;
    return this;
  }

  /**
   * Output only. If errors are generated during update of the resource, this field will be
   * populated.
   * @return value or {@code null} for none
   */
  public Error getError() {
    return error;
  }

  /**
   * Output only. If errors are generated during update of the resource, this field will be
   * populated.
   * @param error error or {@code null} for none
   */
  public ResourceUpdate setError(Error error) {
    this.error = error;
    return this;
  }

  /**
   * Output only. The expanded properties of the resource with reference values expanded. Returned
   * as serialized YAML.
   * @return value or {@code null} for none
   */
  public java.lang.String getFinalProperties() {
    return finalProperties;
  }

  /**
   * Output only. The expanded properties of the resource with reference values expanded. Returned
   * as serialized YAML.
   * @param finalProperties finalProperties or {@code null} for none
   */
  public ResourceUpdate setFinalProperties(java.lang.String finalProperties) {
    this.finalProperties = finalProperties;
    return this;
  }

  /**
   * Output only. The intent of the resource: PREVIEW, UPDATE, or CANCEL.
   * @return value or {@code null} for none
   */
  public java.lang.String getIntent() {
    return intent;
  }

  /**
   * Output only. The intent of the resource: PREVIEW, UPDATE, or CANCEL.
   * @param intent intent or {@code null} for none
   */
  public ResourceUpdate setIntent(java.lang.String intent) {
    this.intent = intent;
    return this;
  }

  /**
   * Output only. URL of the manifest representing the update configuration of this resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getManifest() {
    return manifest;
  }

  /**
   * Output only. URL of the manifest representing the update configuration of this resource.
   * @param manifest manifest or {@code null} for none
   */
  public ResourceUpdate setManifest(java.lang.String manifest) {
    this.manifest = manifest;
    return this;
  }

  /**
   * Output only. The set of updated properties for this resource, before references are expanded.
   * Returned as serialized YAML.
   * @return value or {@code null} for none
   */
  public java.lang.String getProperties() {
    return properties;
  }

  /**
   * Output only. The set of updated properties for this resource, before references are expanded.
   * Returned as serialized YAML.
   * @param properties properties or {@code null} for none
   */
  public ResourceUpdate setProperties(java.lang.String properties) {
    this.properties = properties;
    return this;
  }

  /**
   * Output only. The state of the resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getState() {
    return state;
  }

  /**
   * Output only. The state of the resource.
   * @param state state or {@code null} for none
   */
  public ResourceUpdate setState(java.lang.String state) {
    this.state = state;
    return this;
  }

  /**
   * Output only. If warning messages are generated during processing of this resource, this field
   * will be populated.
   * @return value or {@code null} for none
   */
  public java.util.List<Warnings> getWarnings() {
    return warnings;
  }

  /**
   * Output only. If warning messages are generated during processing of this resource, this field
   * will be populated.
   * @param warnings warnings or {@code null} for none
   */
  public ResourceUpdate setWarnings(java.util.List<Warnings> warnings) {
    this.warnings = warnings;
    return this;
  }

  @Override
  public ResourceUpdate set(String fieldName, Object value) {
    return (ResourceUpdate) super.set(fieldName, value);
  }

  @Override
  public ResourceUpdate clone() {
    return (ResourceUpdate) super.clone();
  }

  /**
   * Output only. If errors are generated during update of the resource, this field will be populated.
   */
  public static final class Error extends com.google.api.client.json.GenericJson {

    /**
     * [Output Only] The array of errors encountered while processing this operation.
     * The value may be {@code null}.
     */
    @com.google.api.client.util.Key
    private java.util.List<Errors> errors;

    static {
      // hack to force ProGuard to consider Errors used, since otherwise it would be stripped out
      // see https://github.com/google/google-api-java-client/issues/543
      com.google.api.client.util.Data.nullOf(Errors.class);
    }

    /**
     * [Output Only] The array of errors encountered while processing this operation.
     * @return value or {@code null} for none
     */
    public java.util.List<Errors> getErrors() {
      return errors;
    }

    /**
     * [Output Only] The array of errors encountered while processing this operation.
     * @param errors errors or {@code null} for none
     */
    public Error setErrors(java.util.List<Errors> errors) {
      this.errors = errors;
      return this;
    }

    @Override
    public Error set(String fieldName, Object value) {
      return (Error) super.set(fieldName, value);
    }

    @Override
    public Error clone() {
      return (Error) super.clone();
    }

    /**
     * Model definition for ResourceUpdateErrorErrors.
     */
    public static final class Errors extends com.google.api.client.json.GenericJson {

      /**
       * [Output Only] The error type identifier for this error.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String code;

      /**
       * [Output Only] Indicates the field in the request that caused the error. This property is
       * optional.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String location;

      /**
       * [Output Only] An optional, human-readable error message.
       * The value may be {@code null}.
       */
      @com.google.api.client.util.Key
      private java.lang.String message;

      /**
       * [Output Only] The error type identifier for this error.
       * @return value or {@code null} for none
       */
      public java.lang.String getCode() {
        return code;
      }

      /**
       * [Output Only] The error type identifier for this error.
       * @param code code or {@code null} for none
       */
      public Errors setCode(java.lang.String code) {
        this.code = code;
        return this;
      }

      /**
       * [Output Only] Indicates the field in the request that caused the error. This property is
       * optional.
       * @return value or {@code null} for none
       */
      public java.lang.String getLocation() {
        return location;
      }

      /**
       * [Output Only] Indicates the field in the request that caused the error. This property is
       * optional.
* @param location location or {@code null} for none */ public Errors setLocation(java.lang.String location) { this.location = location; return this; } /** * [Output Only] An optional, human-readable error message. * @return value or {@code null} for none */ public java.lang.String getMessage() { return message; } /** * [Output Only] An optional, human-readable error message. * @param message message or {@code null} for none */ public Errors setMessage(java.lang.String message) { this.message = message; return this; } @Override public Errors set(String fieldName, Object value) { return (Errors) super.set(fieldName, value); } @Override public Errors clone() { return (Errors) super.clone(); } } } /** * Model definition for ResourceUpdateWarnings. */ public static final class Warnings extends com.google.api.client.json.GenericJson { /** * [Output Only] A warning code, if applicable. For example, Compute Engine returns * NO_RESULTS_ON_PAGE if there are no results in the response. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String code; /** * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": * "scope", "value": "zones/us-east1-d" } * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<Data> data; static { // hack to force ProGuard to consider Data used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(Data.class); } /** * [Output Only] A human-readable description of the warning code. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String message; /** * [Output Only] A warning code, if applicable. For example, Compute Engine returns * NO_RESULTS_ON_PAGE if there are no results in the response. 
* @return value or {@code null} for none */ public java.lang.String getCode() { return code; } /** * [Output Only] A warning code, if applicable. For example, Compute Engine returns * NO_RESULTS_ON_PAGE if there are no results in the response. * @param code code or {@code null} for none */ public Warnings setCode(java.lang.String code) { this.code = code; return this; } /** * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": * "scope", "value": "zones/us-east1-d" } * @return value or {@code null} for none */ public java.util.List<Data> getData() { return data; } /** * [Output Only] Metadata about this warning in key: value format. For example: "data": [ { "key": * "scope", "value": "zones/us-east1-d" } * @param data data or {@code null} for none */ public Warnings setData(java.util.List<Data> data) { this.data = data; return this; } /** * [Output Only] A human-readable description of the warning code. * @return value or {@code null} for none */ public java.lang.String getMessage() { return message; } /** * [Output Only] A human-readable description of the warning code. * @param message message or {@code null} for none */ public Warnings setMessage(java.lang.String message) { this.message = message; return this; } @Override public Warnings set(String fieldName, Object value) { return (Warnings) super.set(fieldName, value); } @Override public Warnings clone() { return (Warnings) super.clone(); } /** * Model definition for ResourceUpdateWarningsData. */ public static final class Data extends com.google.api.client.json.GenericJson { /** * [Output Only] A key that provides more detail on the warning being returned. For example, for * warnings where there are no results in a list request for a particular zone, this key might be * scope and the key value might be the zone name. 
Other examples might be a key indicating a * deprecated resource and a suggested replacement, or a warning about invalid network settings * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP * forwarding). * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String key; /** * [Output Only] A warning data value corresponding to the key. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String value; /** * [Output Only] A key that provides more detail on the warning being returned. For example, for * warnings where there are no results in a list request for a particular zone, this key might be * scope and the key value might be the zone name. Other examples might be a key indicating a * deprecated resource and a suggested replacement, or a warning about invalid network settings * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP * forwarding). * @return value or {@code null} for none */ public java.lang.String getKey() { return key; } /** * [Output Only] A key that provides more detail on the warning being returned. For example, for * warnings where there are no results in a list request for a particular zone, this key might be * scope and the key value might be the zone name. Other examples might be a key indicating a * deprecated resource and a suggested replacement, or a warning about invalid network settings * (for example, if an instance attempts to perform IP forwarding but is not enabled for IP * forwarding). * @param key key or {@code null} for none */ public Data setKey(java.lang.String key) { this.key = key; return this; } /** * [Output Only] A warning data value corresponding to the key. * @return value or {@code null} for none */ public java.lang.String getValue() { return value; } /** * [Output Only] A warning data value corresponding to the key. 
* @param value value or {@code null} for none */ public Data setValue(java.lang.String value) { this.value = value; return this; } @Override public Data set(String fieldName, Object value) { return (Data) super.set(fieldName, value); } @Override public Data clone() { return (Data) super.clone(); } } } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.externalSystem.service.project;

import com.intellij.facet.Facet;
import com.intellij.facet.FacetModel;
import com.intellij.facet.FacetTypeId;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.externalSystem.project.ArtifactExternalDependenciesImporter;
import com.intellij.openapi.externalSystem.project.PackagingModifiableModel;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootModel;
import com.intellij.openapi.roots.ProjectModelExternalSource;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.LibraryTable;
import com.intellij.openapi.roots.libraries.LibraryTablesRegistrar;
import com.intellij.openapi.roots.ui.configuration.FacetsProvider;
import com.intellij.openapi.roots.ui.configuration.ModulesProvider;
import com.intellij.packaging.artifacts.*;
import com.intellij.packaging.elements.CompositePackagingElement;
import com.intellij.packaging.elements.ManifestFileProvider;
import com.intellij.packaging.elements.PackagingElementResolvingContext;
import com.intellij.packaging.impl.artifacts.DefaultManifestFileProvider;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.util.Collection;
import java.util.List;

/**
 * Packaging-related modifiable model backed by an {@link IdeModifiableModelsProvider}.
 * <p>
 * Lazily creates a {@link ModifiableArtifactModel} for the project and exposes a
 * {@link PackagingElementResolvingContext} whose modules, facets and libraries are all taken from
 * the same provider, so that artifact changes are resolved against the in-flight (not yet
 * committed) project model. Not thread-safe: the lazy fields are initialized without
 * synchronization — presumably callers stay on a single thread; TODO confirm against usage.
 */
public class PackagingModifiableModelImpl implements PackagingModifiableModel {
  private final Project myProject;
  private final IdeModifiableModelsProvider myModelsProvider;
  // Lazily created by getModifiableArtifactModel(); may stay null if never requested.
  private ModifiableArtifactModel myModifiableArtifactModel;
  // Lazily created by getPackagingElementResolvingContext().
  private MyPackagingElementResolvingContext myPackagingElementResolvingContext;
  private final ArtifactExternalDependenciesImporter myArtifactExternalDependenciesImporter;

  /**
   * @param project                the project whose artifacts are being modified
   * @param modifiableModelsProvider provider of in-flight modifiable module/facet/library models
   */
  public PackagingModifiableModelImpl(@NotNull Project project,
                                      @NotNull IdeModifiableModelsProvider modifiableModelsProvider) {
    myProject = project;
    myModelsProvider = modifiableModelsProvider;
    myArtifactExternalDependenciesImporter = new ArtifactExternalDependenciesImporterImpl();
  }

  /**
   * Returns the (lazily created) modifiable artifact model. When the provider is the UI flavor,
   * its own artifact model is reused so UI edits and this model stay in sync; otherwise a fresh
   * model is created from the ArtifactManager.
   */
  @Override
  @NotNull
  public ModifiableArtifactModel getModifiableArtifactModel() {
    if (myModifiableArtifactModel == null) {
      myModifiableArtifactModel = myModelsProvider instanceof IdeUIModifiableModelsProvider
                                  ? ((IdeUIModifiableModelsProvider)myModelsProvider).getModifiableArtifactModel()
                                  : doGetModifiableArtifactModel();
    }
    return myModifiableArtifactModel;
  }

  // Creates the model under a read action; falls back to a no-op DummyArtifactModel when the
  // ArtifactManager is unavailable (e.g. the artifacts subsystem is not loaded).
  private ModifiableArtifactModel doGetModifiableArtifactModel() {
    return ReadAction.compute(() -> {
      ArtifactManager artifactManager = ArtifactManager.getInstance(myProject);
      return artifactManager != null ? artifactManager.createModifiableModel() : new DummyArtifactModel();
    });
  }

  /** Returns the (lazily created) resolving context bound to this model's providers. */
  @NotNull
  @Override
  public PackagingElementResolvingContext getPackagingElementResolvingContext() {
    if (myPackagingElementResolvingContext == null) {
      myPackagingElementResolvingContext = new MyPackagingElementResolvingContext();
    }
    return myPackagingElementResolvingContext;
  }

  @Override
  public ArtifactExternalDependenciesImporter getArtifactExternalDependenciesImporter() {
    return myArtifactExternalDependenciesImporter;
  }

  /**
   * Applies pending external-dependency changes first (this may touch/create the artifact model),
   * then commits the artifact model if one was created.
   */
  @Override
  public void commit() {
    myArtifactExternalDependenciesImporter.applyChanges(getModifiableArtifactModel(),
                                                        getPackagingElementResolvingContext());
    if (myModifiableArtifactModel != null) {
      myModifiableArtifactModel.commit();
    }
  }

  /** Disposes the artifact model if it was ever created; safe to call when nothing was touched. */
  @Override
  public void dispose() {
    if (myModifiableArtifactModel != null) {
      myModifiableArtifactModel.dispose();
    }
  }

  /**
   * Resolving context that answers every query from the enclosing model's provider, so artifact
   * elements resolve against uncommitted modules/facets/libraries.
   */
  private class MyPackagingElementResolvingContext implements PackagingElementResolvingContext {
    private final ModulesProvider myModulesProvider = new MyModulesProvider();
    private final MyFacetsProvider myFacetsProvider = new MyFacetsProvider();
    private final ManifestFileProvider myManifestFileProvider = new DefaultManifestFileProvider(this);

    @Override
    @NotNull
    public Project getProject() {
      return myProject;
    }

    @Override
    @NotNull
    public ArtifactModel getArtifactModel() {
      return PackagingModifiableModelImpl.this.getModifiableArtifactModel();
    }

    @Override
    @NotNull
    public ModulesProvider getModulesProvider() {
      return myModulesProvider;
    }

    @Override
    @NotNull
    public FacetsProvider getFacetsProvider() {
      return myFacetsProvider;
    }

    /**
     * Looks up a library: project-level libraries come from the modifiable provider (uncommitted
     * state); any other level is resolved through the registrar's committed tables.
     */
    @Override
    public Library findLibrary(@NotNull String level, @NotNull String libraryName) {
      if (level.equals(LibraryTablesRegistrar.PROJECT_LEVEL)) {
        return myModelsProvider.getLibraryByName(libraryName);
      }
      final LibraryTable table = LibraryTablesRegistrar.getInstance().getLibraryTableByLevel(level, myProject);
      return table != null ? table.getLibraryByName(libraryName) : null;
    }

    @NotNull
    @Override
    public ManifestFileProvider getManifestFileProvider() {
      return myManifestFileProvider;
    }
  }

  /** ModulesProvider view over the provider's modifiable module/facet models. */
  private class MyModulesProvider implements ModulesProvider {
    @Override
    public Module @NotNull [] getModules() {
      return myModelsProvider.getModules();
    }

    @Override
    public Module getModule(@NotNull String name) {
      return myModelsProvider.findIdeModule(name);
    }

    @Override
    public ModuleRootModel getRootModel(@NotNull Module module) {
      return myModelsProvider.getModifiableRootModel(module);
    }

    @NotNull
    @Override
    public FacetModel getFacetModel(@NotNull Module module) {
      return myModelsProvider.getModifiableFacetModel(module);
    }
  }

  /** FacetsProvider view over the provider's modifiable facet models. */
  private class MyFacetsProvider implements FacetsProvider {
    @Override
    public Facet @NotNull [] getAllFacets(Module module) {
      return myModelsProvider.getModifiableFacetModel(module).getAllFacets();
    }

    @Override
    @NotNull
    public <F extends Facet> Collection<F> getFacetsByType(Module module, FacetTypeId<F> type) {
      return myModelsProvider.getModifiableFacetModel(module).getFacetsByType(type);
    }

    @Override
    public <F extends Facet> F findFacet(Module module, FacetTypeId<F> type, String name) {
      return myModelsProvider.getModifiableFacetModel(module).findFacet(type, name);
    }
  }

  /**
   * No-op artifact model used when no ArtifactManager exists: read-style queries return empty/null
   * results, listener registration and commit/dispose do nothing, and every mutating or
   * artifact-producing call throws {@link UnsupportedOperationException}.
   */
  private static class DummyArtifactModel implements ModifiableArtifactModel {
    @NotNull
    @Override
    public ModifiableArtifact addArtifact(@NotNull String name, @NotNull ArtifactType artifactType) {
      throw new UnsupportedOperationException();
    }

    @NotNull
    @Override
    public ModifiableArtifact addArtifact(@NotNull String name,
                                          @NotNull ArtifactType artifactType,
                                          CompositePackagingElement<?> rootElement) {
      throw new UnsupportedOperationException();
    }

    @NotNull
    @Override
    public ModifiableArtifact addArtifact(@NotNull String name,
                                          @NotNull ArtifactType artifactType,
                                          CompositePackagingElement<?> rootElement,
                                          @Nullable ProjectModelExternalSource externalSource) {
      throw new UnsupportedOperationException();
    }

    @Override
    public void removeArtifact(@NotNull Artifact artifact) {
    }

    @NotNull
    @Override
    public ModifiableArtifact getOrCreateModifiableArtifact(@NotNull Artifact artifact) {
      throw new UnsupportedOperationException();
    }

    @Nullable
    @Override
    public Artifact getModifiableCopy(@NotNull Artifact artifact) {
      return null;
    }

    @Override
    public void addListener(@NotNull ArtifactListener listener) {
    }

    @Override
    public void removeListener(@NotNull ArtifactListener listener) {
    }

    @Override
    public boolean isModified() {
      return false;
    }

    @Override
    public void commit() {
    }

    @Override
    public void dispose() {
    }

    @Override
    public Artifact @NotNull [] getArtifacts() {
      return new Artifact[0];
    }

    @Nullable
    @Override
    public Artifact findArtifact(@NotNull String name) {
      return null;
    }

    @NotNull
    @Override
    public Artifact getArtifactByOriginal(@NotNull Artifact artifact) {
      throw new UnsupportedOperationException();
    }

    @NotNull
    @Override
    public Artifact getOriginalArtifact(@NotNull Artifact artifact) {
      throw new UnsupportedOperationException();
    }

    @NotNull
    @Override
    public Collection<? extends Artifact> getArtifactsByType(@NotNull ArtifactType type) {
      throw new UnsupportedOperationException();
    }

    @Override
    public List<? extends Artifact> getAllArtifactsIncludingInvalid() {
      throw new UnsupportedOperationException();
    }
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.wicket.util.parse.metapattern; import java.util.Arrays; import java.util.List; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.wicket.IClusterable; /** * Useful class for constructing readable and reusable regular expressions. * <p> * MetaPatterns can be constructed from a simple regular expression String, from other MetaPatterns * (copy constructor), from a list of MetaPatterns or from an array of MetaPatterns. In this way, it * is easy to build up larger patterns while transparently binding the capturing groups of each * MetaPattern for easy object oriented access to capturing group matches. * <p> * A given MetaPattern can be converted to a Matcher or Pattern. Groups within the MetaPattern can * be used to automatically reference capturing group values when a match is made with a Matcher * object. * <p> * A variety of static constants are provided for use in constructing compound MetaPatterns. Also, a * number of simple parsers have been constructed using MetaPatterns in the parsers subpackage. 
* * @author Jonathan Locke */ public class MetaPattern implements IClusterable { private static final long serialVersionUID = 1L; /** * Compiled regular expression pattern, or null if patterns variable is valid instead */ private Pattern pattern; /** List of patterns, or null if pattern variable is valid instead */ private List<MetaPattern> patterns; /** The compiled MetaPattern */ private Pattern compiledPattern; // Regexps that are used multiple times in defining meta patterns private static final String _DOUBLE_QUOTED_STRING = "\"[^\"]*?\""; private static final String _SINGLE_QUOTED_STRING = "'[^']*?\'"; private static final String _STRING = "(?:[\\w\\-\\.]+|" + _DOUBLE_QUOTED_STRING + "|" + _SINGLE_QUOTED_STRING + ")"; private static final String _OPTIONAL_STRING = _STRING + "?"; private static final String _VARIABLE_NAME = "[A-Za-z_][A-Za-z0-9_]*"; private static final String _XML_NAME = "[A-Za-z_][A-Za-z0-9_.-]*"; // Delimiters and punctuation /** Constant for whitespace. */ public static final MetaPattern WHITESPACE = new MetaPattern("\\s+"); /** Constant for optional whitespace. */ public static final MetaPattern OPTIONAL_WHITESPACE = new MetaPattern("\\s*"); /** Constant for non-word. */ public static final MetaPattern NON_WORD = new MetaPattern("\\W+"); /** Constant for comma. */ public static final MetaPattern COMMA = new MetaPattern(","); /** Constant for colon. */ public static final MetaPattern COLON = new MetaPattern(":"); /** Constant for semicolon. */ public static final MetaPattern SEMICOLON = new MetaPattern(";"); /** Constant for slash. */ public static final MetaPattern SLASH = new MetaPattern("/"); /** Constant for backslash. */ public static final MetaPattern BACKSLASH = new MetaPattern("\\\\"); /** Constant for dot. */ public static final MetaPattern DOT = new MetaPattern("\\."); /** Constant for plus. */ public static final MetaPattern PLUS = new MetaPattern("\\+"); /** Constant for minus. 
*/ public static final MetaPattern MINUS = new MetaPattern("-"); /** Constant for dash. */ public static final MetaPattern DASH = new MetaPattern("-"); /** Constant for underscore. */ public static final MetaPattern UNDERSCORE = new MetaPattern("_"); /** Constant for ampersand. */ public static final MetaPattern AMPERSAND = new MetaPattern("&"); /** Constant for percent. */ public static final MetaPattern PERCENT = new MetaPattern("%"); /** Constant for dollar. */ public static final MetaPattern DOLLAR_SIGN = new MetaPattern("$"); /** Constant for pound. */ public static final MetaPattern POUND_SIGN = new MetaPattern("#"); /** Constant for at. */ public static final MetaPattern AT_SIGN = new MetaPattern("@"); /** Constant for excl. */ public static final MetaPattern EXCLAMATION_POINT = new MetaPattern("!"); /** Constant for tilde. */ public static final MetaPattern TILDE = new MetaPattern("~"); /** Constant for equals. */ public static final MetaPattern EQUALS = new MetaPattern("="); /** Constant for star. */ public static final MetaPattern STAR = new MetaPattern("\\*"); /** Constant for pipe. */ public static final MetaPattern PIPE = new MetaPattern("\\|"); /** Constant for left paren. */ public static final MetaPattern LEFT_PAREN = new MetaPattern("\\("); /** Constant for right paren. */ public static final MetaPattern RIGHT_PAREN = new MetaPattern("\\)"); /** Constant for left curly braces. */ public static final MetaPattern LEFT_CURLY = new MetaPattern("\\{"); /** Constant for right curly braces. */ public static final MetaPattern RIGHT_CURLY = new MetaPattern("\\}"); /** Constant for left square bracket. */ public static final MetaPattern LEFT_SQUARE = new MetaPattern("\\["); /** Constant for right square bracket. */ public static final MetaPattern RIGHT_SQUARE = new MetaPattern("\\]"); /** Constant for digit. */ public static final MetaPattern DIGIT = new MetaPattern("\\d"); /** Constant for digits. 
*/ public static final MetaPattern DIGITS = new MetaPattern("\\d+"); /** Constant for an integer (of any size). */ public static final MetaPattern INTEGER = new MetaPattern("-?\\d+"); /** Constant for a floating point number. */ public static final MetaPattern FLOATING_POINT_NUMBER = new MetaPattern( "-?\\d+\\.?\\d*|-?\\.\\d+"); /** Constant for a positive integer. */ public static final MetaPattern POSITIVE_INTEGER = new MetaPattern("\\d+"); /** Constant for hex digit. */ public static final MetaPattern HEXADECIMAL_DIGIT = new MetaPattern("[0-9a-fA-F]"); /** Constant for hex digits. */ public static final MetaPattern HEXADECIMAL_DIGITS = new MetaPattern("[0-9a-fA-F]+"); /** Constant for anything (string). */ public static final MetaPattern ANYTHING = new MetaPattern(".*"); /** Constant for anything non-empty (string). */ public static final MetaPattern ANYTHING_NON_EMPTY = new MetaPattern(".+"); /** Constant for a word. */ public static final MetaPattern WORD = new MetaPattern("\\w+"); /** Constant for an optional word. */ public static final MetaPattern OPTIONAL_WORD = new MetaPattern("\\w*"); /** Constant for a variable name. */ public static final MetaPattern VARIABLE_NAME = new MetaPattern(_VARIABLE_NAME); /** Constant for an XML element name. */ public static final MetaPattern XML_ELEMENT_NAME = new MetaPattern(_XML_NAME); /** Constant for an XML attribute name. */ public static final MetaPattern XML_ATTRIBUTE_NAME = new MetaPattern(_XML_NAME); /** Constant for perl interpolation. */ public static final MetaPattern PERL_INTERPOLATION = new MetaPattern("$\\{" + _VARIABLE_NAME + "\\}"); /** Constant for a double quoted string. */ public static final MetaPattern DOUBLE_QUOTED_STRING = new MetaPattern(_DOUBLE_QUOTED_STRING); /** Constant for a string. */ public static final MetaPattern STRING = new MetaPattern(_STRING); /** Constant for an optional string. 
*/ public static final MetaPattern OPTIONAL_STRING = new MetaPattern(_OPTIONAL_STRING); /** * Constructor for a simple pattern. * * @param pattern * The regular expression pattern to compile */ public MetaPattern(final String pattern) { this.pattern = Pattern.compile(pattern); } /** * Copy constructor. * * @param pattern * The meta pattern to copy */ public MetaPattern(final MetaPattern pattern) { this.pattern = pattern.pattern; patterns = pattern.patterns; compiledPattern = pattern.compiledPattern; } /** * Constructs from an array of MetaPatterns. * * @param patterns * Array of MetaPatterns */ public MetaPattern(final MetaPattern[] patterns) { this(Arrays.asList(patterns)); } /** * Constructs from a list of MetaPatterns * * @param patterns * List of MetaPatterns */ public MetaPattern(final List<MetaPattern> patterns) { this.patterns = patterns; } /** * Creates a matcher against a given input character sequence. * * @param input * The input to match against * @return The matcher */ public final Matcher matcher(final CharSequence input) { return matcher(input, 0); } /** * Creates a matcher with the given regexp compile flags. Once you call this method with a given * regexp compile flag value, the pattern will be compiled. Calling it again with a different * value for flags will not recompile the pattern. * * @param input * The input to match * @param flags * One or more of the standard Java regular expression compile flags (see * {@link Pattern#compile(String, int)}) * @return The matcher */ public final Matcher matcher(final CharSequence input, final int flags) { compile(flags); return compiledPattern.matcher(input); } /** * Gets the regular expression Pattern for this MetaPattern by compiling it. * * @return Pattern compiled with default Java regular expression compile flags */ public final Pattern pattern() { return pattern(0); } /** * Gets the regular expression Pattern for this MetaPattern by compiling it using the given * flags. 
* * @param flags * One or more of the standard Java regular expression compile flags (see * {@link Pattern#compile(String, int)}) * @return Equivalent Java regular expression Pattern compiled with the given flags */ public final Pattern pattern(final int flags) { compile(flags); return compiledPattern; } /** * Converts this MetaPattern to a String. * * @return A String representing this MetaPattern * @see java.lang.Object#toString() */ @Override public String toString() { if (pattern != null) { return pattern.pattern(); } else { final StringBuffer buffer = new StringBuffer(); for (int i = 0; i < patterns.size(); i++) { buffer.append(patterns.get(i)); } return buffer.toString(); } } /** * Compiles this MetaPattern with the given Java regular expression flags. * * @param flags * One or more of the standard Java regular expression compile flags (see * {@link Pattern#compile(String, int)}) */ private synchronized void compile(final int flags) { if (compiledPattern == null) { bind(1); compiledPattern = Pattern.compile(toString(), flags); } } /** * Binds this MetaPattern to one or more capturing groups. Since MetaPatterns can nest, the * binding process can recurse. * * @param group * The initial capturing group number * @return The final capturing group (for use in recursion) */ private int bind(int group) { if (this instanceof Group) { ((Group)this).bind(group++); } if (patterns != null) { for (int i = 0; i < patterns.size(); i++) { group = (patterns.get(i)).bind(group); } } return group; } }
package com.citruspay.sample; import android.app.ProgressDialog; import android.content.Context; import android.content.Intent; import android.content.SharedPreferences; import android.graphics.Color; import android.os.Bundle; import android.os.Handler; import android.support.design.widget.TextInputLayout; import android.support.v4.content.ContextCompat; import android.support.v7.app.AlertDialog; import android.support.v7.widget.AppCompatRadioButton; import android.support.v7.widget.SwitchCompat; import android.support.v7.widget.Toolbar; import android.text.Editable; import android.text.TextUtils; import android.text.TextWatcher; import android.util.Log; import android.view.View; import android.widget.EditText; import android.widget.RadioGroup; import android.widget.TextView; import android.widget.Toast; import com.citrus.sdk.Callback; import com.citrus.sdk.CitrusClient; import com.citrus.sdk.CitrusUser; import com.citrus.sdk.TransactionResponse; import com.citrus.sdk.logger.CitrusLogger; import com.citrus.sdk.response.CitrusError; import com.citrus.sdk.response.CitrusResponse; import com.citruspay.sdkui.ui.utils.CitrusFlowManager; import com.citruspay.sdkui.ui.utils.PPConfig; import com.citruspay.sdkui.ui.utils.ResultModel; import java.util.regex.Matcher; import java.util.regex.Pattern; public class MainActivity extends com.citruspay.sample.BaseActivity implements View.OnClickListener { public static final String returnUrlLoadMoney = "https://salty-plateau-1529.herokuapp" + ".com/redirectUrlLoadCash.php"; public static final String TAG = "MainActivity"; private static final String USER_EMAIL = "user_email"; private static final String USER_MOBILE = "user_mobile"; private static String USER_DETAILS = "user_details"; private static final String PHONE_PATTERN = "^[987]\\d{9}$"; private static final long MENU_DELAY = 300; public static String dummyAmount = "5"; String userMobile, userEmail; private TextView logoutBtn; SharedPreferences settings; 
SharedPreferences.Editor editor; SharedPreferences userDetailsPreference; EditText email_et, mobile_et, amount_et; TextInputLayout email_til, mobile_til; RadioGroup radioGroup_color_theme, radioGroup_select_env; SwitchCompat switch_disable_wallet, switch_disable_netBanks, switch_disable_cards; int style = -1; private boolean isOverrideResultScreen; private AppCompatRadioButton radio_btn_default; private AppPreference app_stored_pref; private AppCompatRadioButton radio_btn_theme_purple, radio_btn_theme_pink, radio_btn_theme_green, radio_btn_theme_grey; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); app_stored_pref = new AppPreference(); Toolbar toolbar = (Toolbar) findViewById(R.id.custom_toolbar); setSupportActionBar(toolbar); toolbar.setTitleTextColor(Color.WHITE); toolbar.setTitle(getString(R.string.app_name)); settings = getSharedPreferences("settings", MODE_PRIVATE); logoutBtn = (TextView) findViewById(R.id.logout_button); email_et = (EditText) findViewById(R.id.email_et); mobile_et = (EditText) findViewById(R.id.mobile_et); amount_et = (EditText) findViewById(R.id.amount_et); email_til = (TextInputLayout) findViewById(R.id.email_til); mobile_til = (TextInputLayout) findViewById(R.id.mobile_til); radioGroup_color_theme = (RadioGroup) findViewById(R.id.radio_grp_color_theme); radio_btn_default = (AppCompatRadioButton) findViewById(R.id.radio_btn_theme_default); radio_btn_theme_pink = (AppCompatRadioButton) findViewById(R.id.radio_btn_theme_pink); radio_btn_theme_purple = (AppCompatRadioButton) findViewById(R.id.radio_btn_theme_purple); radio_btn_theme_green = (AppCompatRadioButton) findViewById(R.id.radio_btn_theme_green); radio_btn_theme_grey = (AppCompatRadioButton) findViewById(R.id.radio_btn_theme_grey); switch_disable_wallet = (SwitchCompat) findViewById(R.id.switch_disable_wallet); switch_disable_netBanks = (SwitchCompat) findViewById(R.id.switch_disable_netbanks); switch_disable_cards = (SwitchCompat) 
findViewById(R.id.switch_disable_cards); AppCompatRadioButton radio_btn_sandbox = (AppCompatRadioButton) findViewById(R.id.radio_btn_sandbox); AppCompatRadioButton radio_btn_production = (AppCompatRadioButton) findViewById(R.id.radio_btn_production); radioGroup_select_env = (RadioGroup) findViewById(R.id.radio_grp_env); initListeners(); //Set Up SharedPref setUpUserDetails(); if (settings.getBoolean("is_prod_env", false)) { ((com.citruspay.sample.BaseApplication) getApplication()).setAppEnvironment(com.citruspay.sample.AppEnvironment.PRODUCTION); radio_btn_production.setChecked(true); } else { ((com.citruspay.sample.BaseApplication) getApplication()).setAppEnvironment(com.citruspay.sample.AppEnvironment.SANDBOX); radio_btn_sandbox.setChecked(true); } setupCitrusConfigs(); } private void setUpUserDetails() { userDetailsPreference = getSharedPreferences(USER_DETAILS, MODE_PRIVATE); userEmail = userDetailsPreference.getString(USER_EMAIL, AppPreference.dummyEmail); userMobile = userDetailsPreference.getString(USER_MOBILE, AppPreference.dummyMobile); email_et.setText(userEmail); mobile_et.setText(userMobile); amount_et.setText(dummyAmount); restoreAppPref(); } private void restoreAppPref() { //Set Up Disable Options switch_disable_wallet.setChecked(app_stored_pref.isDisableWallet()); switch_disable_cards.setChecked(app_stored_pref.isDisableSavedCards()); switch_disable_netBanks.setChecked(app_stored_pref.isDisableNetBanking()); //Set Up saved theme pref switch (app_stored_pref.getSelectedTheme()) { case -1: radio_btn_default.setChecked(true); break; case R.style.AppTheme_pink: radio_btn_theme_pink.setChecked(true); break; case R.style.AppTheme_Grey: radio_btn_theme_grey.setChecked(true); break; case R.style.AppTheme_purple: radio_btn_theme_purple.setChecked(true); break; case R.style.AppTheme_Green: radio_btn_theme_green.setChecked(true); break; default: radio_btn_default.setChecked(true); break; } } @Override protected void onResume() { super.onResume(); if 
(!isFinishing()) { initViews(); } } private void initViews() { try { if (CitrusClient.getInstance(this) != null) { CitrusClient.getInstance(this).isUserSignedIn(new Callback<Boolean>() { @Override public void success(Boolean aBoolean) { if (aBoolean) { logoutBtn.setVisibility(View.VISIBLE); } else { logoutBtn.setVisibility(View.GONE); } } @Override public void error(CitrusError citrusError) { CitrusLogger.d("IsUserSignedInError", citrusError); } }); } } catch (Exception e) { e.printStackTrace(); logoutBtn.setVisibility(View.GONE); } } private void selectProdEnv() { new Handler(getMainLooper()).postDelayed(() -> { ((com.citruspay.sample.BaseApplication) getApplication()).setAppEnvironment(com.citruspay.sample.AppEnvironment.PRODUCTION); editor = settings.edit(); editor.putBoolean("is_prod_env", true); editor.apply(); logoutBtn.setVisibility(View.GONE); CitrusClient.getInstance(MainActivity.this).signOut(new Callback<CitrusResponse>() { @Override public void success(CitrusResponse citrusResponse) { } @Override public void error(CitrusError error) { } }); CitrusClient.getInstance(MainActivity.this).destroy(); setupCitrusConfigs(); }, MENU_DELAY); } private void selectSandBoxEnv() { new Handler(getMainLooper()).postDelayed(() -> { ((com.citruspay.sample.BaseApplication) getApplication()).setAppEnvironment(com.citruspay.sample.AppEnvironment.SANDBOX); editor = settings.edit(); editor.putBoolean("is_prod_env", false); editor.apply(); logoutBtn.setVisibility(View.GONE); CitrusClient.getInstance(MainActivity.this).signOut(new Callback<CitrusResponse>() { @Override public void success(CitrusResponse citrusResponse) { } @Override public void error(CitrusError error) { } }); CitrusClient.getInstance(MainActivity.this).destroy(); setupCitrusConfigs(); }, MENU_DELAY); } private void setupCitrusConfigs() { com.citruspay.sample.AppEnvironment appEnvironment = ((com.citruspay.sample.BaseApplication) getApplication()).getAppEnvironment(); if (appEnvironment == 
com.citruspay.sample.AppEnvironment.PRODUCTION) { Toast.makeText(MainActivity.this, "Environment Set to Production", Toast.LENGTH_SHORT).show(); } else { Toast.makeText(MainActivity.this, "Environment Set to SandBox", Toast.LENGTH_SHORT).show(); } CitrusFlowManager.initCitrusConfig(appEnvironment.getSignUpId(), appEnvironment.getSignUpSecret(), appEnvironment.getSignInId(), appEnvironment.getSignInSecret(), ContextCompat.getColor(this, R.color.white), MainActivity.this, appEnvironment.getEnvironment(), appEnvironment.getVanity(), appEnvironment.getBillUrl(), returnUrlLoadMoney); //To Set the Log Level of Core SDK & Plug & Play PPConfig.getInstance().setLogLevel(this, CitrusLogger.LogLevel.DEBUG); //To Set the User details CitrusUser.Address customAddress = new CitrusUser.Address("Street1", "Street2", "City", "State", "Country", "411045"); PPConfig.getInstance().setUserDetails("Custom_FName", "Custom_LName", customAddress); } @Override protected int getLayoutResource() { return R.layout.activity_main; } @Override protected void onActivityResult(int requestCode, int resultCode, Intent data) { super.onActivityResult(requestCode, resultCode, data); Log.d("MainActivity", "request code " + requestCode + " resultcode " + resultCode); if (requestCode == CitrusFlowManager.REQUEST_CODE_PAYMENT && resultCode == RESULT_OK && data != null) { // You will get data here if transaction flow is started through pay options other than wallet TransactionResponse transactionResponse = data.getParcelableExtra(CitrusFlowManager .INTENT_EXTRA_TRANSACTION_RESPONSE); // You will get data here if transaction flow is started through wallet ResultModel resultModel = data.getParcelableExtra(CitrusFlowManager.ARG_RESULT); // Check which object is non-null if (transactionResponse != null && transactionResponse.getJsonResponse() != null) { // Decide what to do with this data Log.d(TAG, "Transaction response : " + transactionResponse.getJsonResponse()); } else if (resultModel != null && 
resultModel.getTransactionResponse() != null) { // Decide what to do with this data Log.d(TAG, "Result response : " + resultModel.getTransactionResponse().getTransactionId()); } else if (resultModel != null && resultModel.getError() != null) { Log.d(TAG, "Error response : " + resultModel.getError().getTransactionResponse()); } else { Log.d(TAG, "Both objects are null!"); } } } @Override public void onClick(View v) { userEmail = email_et.getText().toString().trim(); userMobile = mobile_et.getText().toString().trim(); if (v.getId() == R.id.logout_button || validateDetails(userEmail, userMobile)) { switch (v.getId()) { case R.id.quick_pay: startShoppingFlow(); break; case R.id.wallet_button: startWalletFlow(); break; case R.id.logout_button: logoutUser(); break; } } } private void startShoppingFlow() { CitrusClient.getInstance(this).isUserSignedIn(new Callback<Boolean>() { @Override public void success(Boolean success) { if (success) { if (isDifferentUser(userMobile)) { showUserLoggedInDialog(true); } else { initQuickPayFlow(); } } else { initQuickPayFlow(); } } @Override public void error(CitrusError error) { } }); } private void initQuickPayFlow() { saveUserDetails(userEmail, userMobile); if (isOverrideResultScreen) { Toast.makeText(MainActivity.this, "Result Screen will Override", Toast.LENGTH_SHORT).show(); } if (style != -1) { CitrusFlowManager.startShoppingFlowStyle(MainActivity.this, userEmail, userMobile, TextUtils.isEmpty(amount_et.getText().toString()) ? dummyAmount : amount_et.getText().toString(), style, isOverrideResultScreen); } else { CitrusFlowManager.startShoppingFlowStyle(MainActivity.this, userEmail, userMobile, TextUtils.isEmpty(amount_et.getText().toString()) ? 
dummyAmount : amount_et.getText().toString(), R.style.AppTheme_default, isOverrideResultScreen); } } public void startWalletFlow() { CitrusClient.getInstance(this).isUserSignedIn(new Callback<Boolean>() { @Override public void success(Boolean success) { if (success) { if (isDifferentUser(userMobile)) { showUserLoggedInDialog(false); } else { initWalletFlow(); } } else { initWalletFlow(); } } @Override public void error(CitrusError error) { } }); } private void initWalletFlow() { saveUserDetails(userEmail, userMobile); if (style != -1) { CitrusFlowManager.startWalletFlowStyle(MainActivity.this, userEmail, userMobile, style); } else { CitrusFlowManager.startWalletFlow(MainActivity.this, userEmail, userMobile); } } private void storeAppPref() { app_stored_pref.setDisableWallet(switch_disable_wallet.isChecked()); app_stored_pref.setDisableSavedCards(switch_disable_cards.isChecked()); app_stored_pref.setDisableNetBanking(switch_disable_netBanks.isChecked()); app_stored_pref.setSelectedTheme(style); } private void logoutUser() { logoutUser(MainActivity.this); logoutBtn.setVisibility(View.GONE); } private void initListeners() { email_et.addTextChangedListener(new EditTextInputWatcher(email_til)); mobile_et.addTextChangedListener(new EditTextInputWatcher(mobile_til)); radioGroup_color_theme.setOnCheckedChangeListener((group, checkedId) -> { switch (checkedId) { case R.id.radio_btn_theme_default: style = -1; isOverrideResultScreen = false; break; case R.id.radio_btn_theme_pink: style = R.style.AppTheme_pink; isOverrideResultScreen = true; break; case R.id.radio_btn_theme_grey: style = R.style.AppTheme_Grey; isOverrideResultScreen = false; break; case R.id.radio_btn_theme_purple: style = R.style.AppTheme_purple; isOverrideResultScreen = false; break; case R.id.radio_btn_theme_green: style = R.style.AppTheme_Green; isOverrideResultScreen = false; break; default: style = -1; isOverrideResultScreen = false; break; } }); radioGroup_select_env.setOnCheckedChangeListener((group, 
checkedId) -> { switch (checkedId) { case R.id.radio_btn_sandbox: selectSandBoxEnv(); break; case R.id.radio_btn_production: selectProdEnv(); break; } }); switch_disable_cards.setOnCheckedChangeListener((buttonView, isChecked) -> { PPConfig.getInstance().disableSavedCards(isChecked); }); switch_disable_netBanks.setOnCheckedChangeListener((buttonView, isChecked) -> { PPConfig.getInstance().disableNetBanking(isChecked); }); switch_disable_wallet.setOnCheckedChangeListener((buttonView, isChecked) -> { PPConfig.getInstance().disableWallet(isChecked); }); } public static class EditTextInputWatcher implements TextWatcher { private TextInputLayout textInputLayout; EditTextInputWatcher(TextInputLayout textInputLayout) { this.textInputLayout = textInputLayout; } @Override public void beforeTextChanged(CharSequence s, int start, int count, int after) { } @Override public void onTextChanged(CharSequence s, int start, int before, int count) { } @Override public void afterTextChanged(Editable s) { if (s.toString().length() > 0) { textInputLayout.setError(null); textInputLayout.setErrorEnabled(false); } } } public boolean validateDetails(String email, String mobile) { email = email.trim(); mobile = mobile.trim(); if (TextUtils.isEmpty(mobile)) { setErrorInputLayout(mobile_et, getString(R.string.err_phone_empty), mobile_til); return false; } else if (!isValidPhone(mobile)) { setErrorInputLayout(mobile_et, getString(R.string.err_phone_not_valid), mobile_til); return false; } else if (TextUtils.isEmpty(email)) { setErrorInputLayout(email_et, getString(R.string.err_email_empty), email_til); return false; } else if (!isValidEmail(email)) { setErrorInputLayout(email_et, getString(R.string.email_not_valid), email_til); return false; } else { return true; } } public static void setErrorInputLayout(EditText editText, String msg, TextInputLayout textInputLayout) { textInputLayout.setError(msg); editText.requestFocus(); } public static boolean isValidEmail(String strEmail) { return 
strEmail != null && android.util.Patterns.EMAIL_ADDRESS.matcher(strEmail).matches(); } public static boolean isValidPhone(String phone) { Pattern pattern = Pattern.compile(PHONE_PATTERN); Matcher matcher = pattern.matcher(phone); return matcher.matches(); } private void showUserLoggedInDialog(boolean isQuickPayFlow) { AlertDialog.Builder alertDialog = new AlertDialog.Builder(this); alertDialog.setTitle("Already logged in."); alertDialog.setMessage("Different User is already logged in\n do you want to logout ?"); alertDialog.setPositiveButton("Yes", (dialog, which) -> { logoutUser(); if (isQuickPayFlow) { initQuickPayFlow(); } else { initWalletFlow(); } }); alertDialog.setNegativeButton("No", (dialog, which) -> CitrusFlowManager.startWalletFlow(MainActivity.this, userEmail, userMobile)); alertDialog.show(); } public boolean isDifferentUser(String userMobile) { String savedUserMobile = userDetailsPreference.getString(USER_MOBILE, ""); return !savedUserMobile.equals(userMobile); } private void saveUserDetails(String email, String mobile) { SharedPreferences.Editor userDetailsEditor = userDetailsPreference.edit(); userDetailsEditor.putString(USER_EMAIL, email); userDetailsEditor.putString(USER_MOBILE, mobile); userDetailsEditor.apply(); storeAppPref(); } public void logoutUser(final Context context) { final ProgressDialog mProgressDialog = new ProgressDialog(context); mProgressDialog.setCanceledOnTouchOutside(false); mProgressDialog.setCancelable(false); mProgressDialog.setMessage("Logging user out..."); mProgressDialog.show(); CitrusClient.getInstance(context).signOut(new Callback<CitrusResponse>() { @Override public void success(CitrusResponse citrusResponse) { mProgressDialog.dismiss(); Toast.makeText(context, "User is successfully logged out", Toast.LENGTH_SHORT) .show(); } @Override public void error(CitrusError error) { mProgressDialog.dismiss(); Toast.makeText(context, "User could not be logged out", Toast.LENGTH_SHORT).show(); } }); } }
/* * Copyright 1999-2004 Carnegie Mellon University. * Portions Copyright 2002-2004 Sun Microsystems, Inc. * Portions Copyright 2002-2004 Mitsubishi Electric Research Laboratories. * All Rights Reserved. Use is subject to license terms. * * See the file "license.terms" for information on usage and * redistribution of this file, and for a DISCLAIMER OF ALL * WARRANTIES. * */ package edu.cmu.sphinx.tools.audio; import edu.cmu.sphinx.frontend.Data; import edu.cmu.sphinx.frontend.DataEndSignal; import edu.cmu.sphinx.frontend.DoubleData; import edu.cmu.sphinx.frontend.FloatData; import edu.cmu.sphinx.frontend.FrontEnd; import edu.cmu.sphinx.frontend.util.StreamDataSource; import javax.swing.*; import javax.swing.event.ChangeEvent; import javax.swing.event.ChangeListener; import java.awt.*; import java.awt.image.BufferedImage; import java.awt.image.FilteredImageSource; import java.awt.image.ImageFilter; import java.awt.image.ReplicateScaleFilter; import java.util.ArrayList; import java.util.Arrays; /** Converts a set of log magnitude Spectrum data into a graphical representation. */ public class CepstrumPanel extends JPanel { /** Where the spectrogram will live. */ protected BufferedImage spectrogram; /** A scaled version of the spectrogram image. */ protected Image scaledSpectrogram; /** The zooming factor. */ protected float zoom = 1.0f; /** Offset factor - what will be subtracted from the image to adjust for noise level. */ protected double offsetFactor; /** The audio data. */ protected AudioData audio; /** The frontEnd (the source of features */ protected FrontEnd frontEnd; /** The source of audio (the first stage of the frontend) */ protected StreamDataSource dataSource; /** Creates a new <code>JPanel</code> with a double buffer and a flow layout. */ public CepstrumPanel() { } /** * Creates a new SpectrogramPanel for the given AudioData. 
* * @param frontEnd the front end to use * @param dataSource the source of audio * @param audioData the AudioData */ public CepstrumPanel(FrontEnd frontEnd, StreamDataSource dataSource, AudioData audioData) { audio = audioData; this.frontEnd = frontEnd; this.dataSource = dataSource; audio.addChangeListener(new ChangeListener() { @Override public void stateChanged(ChangeEvent event) { computeCepstrum(); } }); } static final int HSCALE = 10; /** Actually creates the Spectrogram image. */ protected void computeCepstrum() { try { AudioDataInputStream is = new AudioDataInputStream(audio); dataSource.setInputStream(is, "live audio"); /* Run through all the spectra one at a time and convert * them to an log intensity value. */ ArrayList<float[]> intensitiesList = new ArrayList<float[]>(); float maxIntensity[] = new float[100]; Arrays.fill(maxIntensity, Float.MIN_VALUE); Data spectrum = frontEnd.getData(); while (!(spectrum instanceof DataEndSignal)) { if (spectrum instanceof FloatData) { float[] spectrumData = ((FloatData) spectrum).getValues(); float[] intensities = new float[spectrumData.length]; for (int i = 0; i < intensities.length; i++) { intensities[i] = spectrumData[i]; if (Math.abs(intensities[i]) > maxIntensity[i]) { maxIntensity[i] = Math.abs(intensities[i]); } } intensitiesList.add(intensities); } if (spectrum instanceof DoubleData) { double[] spectrumData = ((DoubleData) spectrum).getValues(); float[] intensities = new float[spectrumData.length]; for (int i = 0; i < intensities.length; i++) { intensities[i] = (float)spectrumData[i]; if (Math.abs(intensities[i]) > maxIntensity[i]) { maxIntensity[i] = Math.abs(intensities[i]); } } intensitiesList.add(intensities); } spectrum = frontEnd.getData(); } is.close(); int width = intensitiesList.size(); int ncep = intensitiesList.get(0).length; int height = ncep * HSCALE; Dimension d = new Dimension(width, height); setMinimumSize(d); setMaximumSize(d); setPreferredSize(d); /* Create the image for displaying the data. 
*/ spectrogram = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); for (int i = 0; i < width; i++) { float[] intensities = intensitiesList.get(i); for (int j = ncep - 1; j >= 0; j--) { /* Adjust the grey value to make a value of 0 to mean * white and a value of 0xff to mean black. */ int grey = 0x7f - (int) (intensities[j] / maxIntensity[j] * 0x7f); /* Turn the grey into a pixel value. */ int pixel = ((grey << 16) & 0xff0000) | ((grey << 8) & 0xff00) | (grey & 0xff); for (int k = 0; k < HSCALE; k++) spectrogram.setRGB(i, height - 1 - j * HSCALE - k, pixel); } } ImageFilter scaleFilter = new ReplicateScaleFilter((int) (zoom * width), height); scaledSpectrogram = createImage(new FilteredImageSource(spectrogram.getSource(), scaleFilter)); Dimension sz = getSize(); repaint(0, 0, 0, sz.width - 1, sz.height - 1); } catch (Exception e) { e.printStackTrace(); } } /** * Updates the offset factor used to calculate the greyscale values from the intensities. This also calculates and * populates all the greyscale values in the image. * * @param offsetFactor the offset factor used to calculate the greyscale values from the intensities; this is used * to adjust the level of background noise that shows up in the image */ public void setOffsetFactor(double offsetFactor) { this.offsetFactor = offsetFactor; computeCepstrum(); } /** Zoom the image, preparing for new display. */ protected void zoomSet(float zoom) { this.zoom = zoom; if (spectrogram != null) { int width = spectrogram.getWidth(); int height = spectrogram.getHeight(); ImageFilter scaleFilter = new ReplicateScaleFilter((int) (zoom * width), height); scaledSpectrogram = createImage(new FilteredImageSource(spectrogram.getSource(), scaleFilter)); Dimension d = new Dimension((int) (width * zoom), height); setMinimumSize(d); setMaximumSize(d); setPreferredSize(d); repaint(); } } /** * Paint the component. This will be called by AWT/Swing. * * @param g The <code>Graphics</code> to draw on. 
*/ @Override public void paint(Graphics g) { /** * Fill in the whole image with white. */ Dimension sz = getSize(); g.setColor(Color.WHITE); g.fillRect(0, 0, sz.width - 1, sz.height - 1); if (spectrogram != null) { g.drawImage(scaledSpectrogram, 0, 0, null); } } }
/* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ /* * MIWrapper.java * Copyright (C) 2005 University of Waikato, Hamilton, New Zealand * */ package weka.classifiers.mi; import weka.classifiers.SingleClassifierEnhancer; import weka.core.Capabilities; import weka.core.Instance; import weka.core.Instances; import weka.core.MultiInstanceCapabilitiesHandler; import weka.core.Option; import weka.core.OptionHandler; import weka.core.RevisionUtils; import weka.core.SelectedTag; import weka.core.Tag; import weka.core.TechnicalInformation; import weka.core.TechnicalInformationHandler; import weka.core.Utils; import weka.core.Capabilities.Capability; import weka.core.TechnicalInformation.Field; import weka.core.TechnicalInformation.Type; import weka.filters.Filter; import weka.filters.unsupervised.attribute.MultiInstanceToPropositional; import java.util.Enumeration; import java.util.Vector; /** <!-- globalinfo-start --> * A simple Wrapper method for applying standard propositional learners to multi-instance data.<br/> * <br/> * For more information see:<br/> * <br/> * E. T. Frank, X. Xu (2003). Applying propositional learning algorithms to multi-instance data. Department of Computer Science, University of Waikato, Hamilton, NZ. 
* <p/> <!-- globalinfo-end --> * <!-- technical-bibtex-start --> * BibTeX: * <pre> * &#64;techreport{Frank2003, * address = {Department of Computer Science, University of Waikato, Hamilton, NZ}, * author = {E. T. Frank and X. Xu}, * institution = {University of Waikato}, * month = {06}, * title = {Applying propositional learning algorithms to multi-instance data}, * year = {2003} * } * </pre> * <p/> <!-- technical-bibtex-end --> * <!-- options-start --> * Valid options are: <p/> * * <pre> -P [1|2|3] * The method used in testing: * 1.arithmetic average * 2.geometric average * 3.max probability of positive bag. * (default: 1)</pre> * * <pre> -A [0|1|2|3] * The type of weight setting for each single-instance: * 0.keep the weight to be the same as the original value; * 1.weight = 1.0 * 2.weight = 1.0/Total number of single-instance in the * corresponding bag * 3. weight = Total number of single-instance / (Total * number of bags * Total number of single-instance * in the corresponding bag). * (default: 3)</pre> * * <pre> -D * If set, classifier is run in debug mode and * may output additional info to the console</pre> * * <pre> -W * Full name of base classifier. 
* (default: weka.classifiers.rules.ZeroR)</pre> * * <pre> * Options specific to classifier weka.classifiers.rules.ZeroR: * </pre> * * <pre> -D * If set, classifier is run in debug mode and * may output additional info to the console</pre> * <!-- options-end --> * * @author Eibe Frank (eibe@cs.waikato.ac.nz) * @author Xin Xu (xx5@cs.waikato.ac.nz) * @version $Revision: 9144 $ */ public class MIWrapper extends SingleClassifierEnhancer implements MultiInstanceCapabilitiesHandler, OptionHandler, TechnicalInformationHandler { /** for serialization */ static final long serialVersionUID = -7707766152904315910L; /** The number of the class labels */ protected int m_NumClasses; /** arithmetic average */ public static final int TESTMETHOD_ARITHMETIC = 1; /** geometric average */ public static final int TESTMETHOD_GEOMETRIC = 2; /** max probability of positive bag */ public static final int TESTMETHOD_MAXPROB = 3; /** the test methods */ public static final Tag[] TAGS_TESTMETHOD = { new Tag(TESTMETHOD_ARITHMETIC, "arithmetic average"), new Tag(TESTMETHOD_GEOMETRIC, "geometric average"), new Tag(TESTMETHOD_MAXPROB, "max probability of positive bag") }; /** the test method */ protected int m_Method = TESTMETHOD_GEOMETRIC; /** Filter used to convert MI dataset into single-instance dataset */ protected MultiInstanceToPropositional m_ConvertToProp = new MultiInstanceToPropositional(); /** the single-instance weight setting method */ protected int m_WeightMethod = MultiInstanceToPropositional.WEIGHTMETHOD_INVERSE2; /** * Returns a string describing this filter * * @return a description of the filter suitable for * displaying in the explorer/experimenter gui */ public String globalInfo() { return "A simple Wrapper method for applying standard propositional learners " + "to multi-instance data.\n\n" + "For more information see:\n\n" + getTechnicalInformation().toString(); } /** * Returns an instance of a TechnicalInformation object, containing * detailed information about the 
technical background of this class, * e.g., paper reference or book this class is based on. * * @return the technical information about this class */ public TechnicalInformation getTechnicalInformation() { TechnicalInformation result; result = new TechnicalInformation(Type.TECHREPORT); result.setValue(Field.AUTHOR, "E. T. Frank and X. Xu"); result.setValue(Field.TITLE, "Applying propositional learning algorithms to multi-instance data"); result.setValue(Field.YEAR, "2003"); result.setValue(Field.MONTH, "06"); result.setValue(Field.INSTITUTION, "University of Waikato"); result.setValue(Field.ADDRESS, "Department of Computer Science, University of Waikato, Hamilton, NZ"); return result; } /** * Returns an enumeration describing the available options. * * @return an enumeration of all the available options. */ public Enumeration listOptions() { Vector result = new Vector(); result.addElement(new Option( "\tThe method used in testing:\n" + "\t1.arithmetic average\n" + "\t2.geometric average\n" + "\t3.max probability of positive bag.\n" + "\t(default: 1)", "P", 1, "-P [1|2|3]")); result.addElement(new Option( "\tThe type of weight setting for each single-instance:\n" + "\t0.keep the weight to be the same as the original value;\n" + "\t1.weight = 1.0\n" + "\t2.weight = 1.0/Total number of single-instance in the\n" + "\t\tcorresponding bag\n" + "\t3. weight = Total number of single-instance / (Total\n" + "\t\tnumber of bags * Total number of single-instance \n" + "\t\tin the corresponding bag).\n" + "\t(default: 3)", "A", 1, "-A [0|1|2|3]")); Enumeration enu = super.listOptions(); while (enu.hasMoreElements()) { result.addElement(enu.nextElement()); } return result.elements(); } /** * Parses a given list of options. <p/> * <!-- options-start --> * Valid options are: <p/> * * <pre> -P [1|2|3] * The method used in testing: * 1.arithmetic average * 2.geometric average * 3.max probability of positive bag. 
* (default: 1)</pre> * * <pre> -A [0|1|2|3] * The type of weight setting for each single-instance: * 0.keep the weight to be the same as the original value; * 1.weight = 1.0 * 2.weight = 1.0/Total number of single-instance in the * corresponding bag * 3. weight = Total number of single-instance / (Total * number of bags * Total number of single-instance * in the corresponding bag). * (default: 3)</pre> * * <pre> -D * If set, classifier is run in debug mode and * may output additional info to the console</pre> * * <pre> -W * Full name of base classifier. * (default: weka.classifiers.rules.ZeroR)</pre> * * <pre> * Options specific to classifier weka.classifiers.rules.ZeroR: * </pre> * * <pre> -D * If set, classifier is run in debug mode and * may output additional info to the console</pre> * <!-- options-end --> * * @param options the list of options as an array of strings * @throws Exception if an option is not supported */ public void setOptions(String[] options) throws Exception { setDebug(Utils.getFlag('D', options)); String methodString = Utils.getOption('P', options); if (methodString.length() != 0) { setMethod( new SelectedTag(Integer.parseInt(methodString), TAGS_TESTMETHOD)); } else { setMethod( new SelectedTag(TESTMETHOD_ARITHMETIC, TAGS_TESTMETHOD)); } String weightString = Utils.getOption('A', options); if (weightString.length() != 0) { setWeightMethod( new SelectedTag( Integer.parseInt(weightString), MultiInstanceToPropositional.TAGS_WEIGHTMETHOD)); } else { setWeightMethod( new SelectedTag( MultiInstanceToPropositional.WEIGHTMETHOD_INVERSE2, MultiInstanceToPropositional.TAGS_WEIGHTMETHOD)); } super.setOptions(options); } /** * Gets the current settings of the Classifier. 
* * @return an array of strings suitable for passing to setOptions */ public String[] getOptions() { Vector result; String[] options; int i; result = new Vector(); result.add("-P"); result.add("" + m_Method); result.add("-A"); result.add("" + m_WeightMethod); options = super.getOptions(); for (i = 0; i < options.length; i++) result.add(options[i]); return (String[]) result.toArray(new String[result.size()]); } /** * Returns the tip text for this property * * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String weightMethodTipText() { return "The method used for weighting the instances."; } /** * The new method for weighting the instances. * * @param method the new method */ public void setWeightMethod(SelectedTag method){ if (method.getTags() == MultiInstanceToPropositional.TAGS_WEIGHTMETHOD) m_WeightMethod = method.getSelectedTag().getID(); } /** * Returns the current weighting method for instances. * * @return the current weighting method */ public SelectedTag getWeightMethod(){ return new SelectedTag( m_WeightMethod, MultiInstanceToPropositional.TAGS_WEIGHTMETHOD); } /** * Returns the tip text for this property * * @return tip text for this property suitable for * displaying in the explorer/experimenter gui */ public String methodTipText() { return "The method used for testing."; } /** * Set the method used in testing. * * @param method the index of method to use. */ public void setMethod(SelectedTag method) { if (method.getTags() == TAGS_TESTMETHOD) m_Method = method.getSelectedTag().getID(); } /** * Get the method used in testing. * * @return the index of method used in testing. */ public SelectedTag getMethod() { return new SelectedTag(m_Method, TAGS_TESTMETHOD); } /** * Returns default capabilities of the classifier. 
   *
   * @return the capabilities of this classifier
   */
  public Capabilities getCapabilities() {
    Capabilities result = super.getCapabilities();

    // class
    result.disableAllClasses();
    result.disableAllClassDependencies();
    // only re-enable the class types the base classifier can actually handle
    if (super.getCapabilities().handles(Capability.NOMINAL_CLASS))
      result.enable(Capability.NOMINAL_CLASS);
    if (super.getCapabilities().handles(Capability.BINARY_CLASS))
      result.enable(Capability.BINARY_CLASS);
    result.enable(Capability.RELATIONAL_ATTRIBUTES);
    result.enable(Capability.MISSING_CLASS_VALUES);
    result.disable(Capability.MISSING_VALUES);

    // other
    result.enable(Capability.ONLY_MULTIINSTANCE);

    return result;
  }

  /**
   * Returns the capabilities of this multi-instance classifier for the
   * relational data.
   *
   * @return the capabilities of this object
   * @see Capabilities
   */
  public Capabilities getMultiInstanceCapabilities() {
    Capabilities result = super.getCapabilities();

    // class
    result.disableAllClasses();
    result.enable(Capability.NO_CLASS);

    return result;
  }

  /**
   * Builds the classifier.
   *
   * @param data the training data to be used for generating the classifier.
   * @throws Exception if the classifier could not be built successfully
   */
  public void buildClassifier(Instances data) throws Exception {
    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    Instances train = new Instances(data);
    train.deleteWithMissingClass();

    if (m_Classifier == null) {
      throw new Exception("A base classifier has not been specified!");
    }

    if (getDebug())
      System.out.println(Thread.currentThread().getStackTrace()[1].getClassName()
          + "Start training ...");
    m_NumClasses = train.numClasses();

    // convert the training dataset into single-instance dataset
    m_ConvertToProp.setWeightMethod(getWeightMethod());
    m_ConvertToProp.setInputFormat(train);
    train = Filter.useFilter(train, m_ConvertToProp);
    train.deleteAttributeAt(0); // remove the bag index attribute

    m_Classifier.buildClassifier(train);
  }

  /**
   * Computes the distribution for a given exemplar.
   *
   * @param exmp the exemplar for which distribution is computed
   * @return the distribution
   * @throws Exception if the distribution can't be computed successfully
   */
  public double[] distributionForInstance(Instance exmp)
    throws Exception {

    // wrap the single bag in a one-row dataset so the filter can process it
    Instances testData = new Instances(exmp.dataset(), 0);
    testData.add(exmp);

    // convert the training dataset into single-instance dataset;
    // original weights are kept at prediction time
    m_ConvertToProp.setWeightMethod(
        new SelectedTag(
            MultiInstanceToPropositional.WEIGHTMETHOD_ORIGINAL,
            MultiInstanceToPropositional.TAGS_WEIGHTMETHOD));
    testData = Filter.useFilter(testData, m_ConvertToProp);
    testData.deleteAttributeAt(0); // remove the bag index attribute

    // Compute the log-probability of the bag
    double[] distribution = new double[m_NumClasses];
    double nI = (double) testData.numInstances();
    double[] maxPr = new double[m_NumClasses];

    for (int i = 0; i < nI; i++) {
      double[] dist = m_Classifier.distributionForInstance(testData.instance(i));
      for (int j = 0; j < m_NumClasses; j++) {

        switch (m_Method) {
          case TESTMETHOD_ARITHMETIC:
            // mean of the per-instance probabilities
            distribution[j] += dist[j] / nI;
            break;
          case TESTMETHOD_GEOMETRIC:
            // Avoid 0/1 probability
            if (dist[j] < 0.001)
              dist[j] = 0.001;
            else if (dist[j] > 0.999)
              dist[j] = 0.999;

            // geometric mean computed in log space
            distribution[j] += Math.log(dist[j]) / nI;
            break;
          case TESTMETHOD_MAXPROB:
            // track per-class maximum over the bag's instances
            if (dist[j] > maxPr[j])
              maxPr[j] = dist[j];
            break;
        }
      }
    }

    if (m_Method == TESTMETHOD_GEOMETRIC)
      for (int j = 0; j < m_NumClasses; j++)
        distribution[j] = Math.exp(distribution[j]);

    if (m_Method == TESTMETHOD_MAXPROB) { // for positive bag
      // NOTE(review): only indices 0 and 1 are written here, so this branch
      // assumes a two-class problem with class index 1 as the positive bag
      // -- confirm before using MAXPROB with more than two classes.
      distribution[1] = maxPr[1];
      distribution[0] = 1 - distribution[1];
    }

    if (Utils.eq(Utils.sum(distribution), 0)) {
      // degenerate case: fall back to the uniform distribution
      for (int i = 0; i < distribution.length; i++)
        distribution[i] = 1.0 / (double) distribution.length;
    } else {
      Utils.normalize(distribution);
    }

    return distribution;
  }

  /**
   * Gets a string describing the classifier.
   *
   * @return a string describing the classifier built.
   */
  public String toString() {
    return "MIWrapper with base classifier: \n" + m_Classifier.toString();
  }

  /**
   * Returns the revision string.
   *
   * @return the revision
   */
  public String getRevision() {
    return RevisionUtils.extract("$Revision: 9144 $");
  }

  /**
   * Main method for testing this class.
   *
   * @param argv should contain the command line arguments to the
   * scheme (see Evaluation)
   */
  public static void main(String[] argv) {
    runClassifier(new MIWrapper(), argv);
  }
}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import static org.apache.hadoop.fs.permission.AclEntryScope.*; import static org.apache.hadoop.fs.permission.AclEntryType.*; import static org.apache.hadoop.fs.permission.FsAction.*; import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.*; import static org.apache.hadoop.hdfs.server.namenode.AclTransformation.*; import static org.junit.Assert.*; import java.util.List; import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.junit.Test; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclEntryScope; import org.apache.hadoop.fs.permission.AclEntryType; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.hdfs.protocol.AclException; import org.apache.hadoop.hdfs.server.namenode.AclTransformation; /** * Tests operations that modify ACLs. All tests in this suite have been * cross-validated against Linux setfacl/getfacl to check for consistency of the * HDFS implementation. 
 */
public class TestAclTransformation {

  // An ACL spec containing 33 named-user entries; the *TooLarge tests expect
  // AclException for it -- presumably the transformation enforces a 32-entry
  // cap; confirm against AclTransformation.
  private static final List<AclEntry> ACL_SPEC_TOO_LARGE;
  static {
    ACL_SPEC_TOO_LARGE = Lists.newArrayListWithCapacity(33);
    for (int i = 0; i < 33; ++i) {
      ACL_SPEC_TOO_LARGE.add(aclEntry(ACCESS, USER, "user" + i, ALL));
    }
  }

  // Removing named entries drops them and recalculates the access mask.
  @Test
  public void testFilterAclEntriesByAclSpec() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, USER, "diana", READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, GROUP, "sales", READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "execs", READ_WRITE))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "diana"),
      aclEntry(ACCESS, GROUP, "sales"));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, GROUP, "execs", READ_WRITE))
      .add(aclEntry(ACCESS, MASK, READ_WRITE))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // A spec naming entries that don't exist leaves the ACL untouched.
  @Test
  public void testFilterAclEntriesByAclSpecUnchanged() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", ALL))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "sales", ALL))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "clark"),
      aclEntry(ACCESS, GROUP, "execs"));
    assertEquals(existing, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // After removal, the access mask is recalculated from remaining entries.
  @Test
  public void testFilterAclEntriesByAclSpecAccessMaskCalculated()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ_WRITE))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "diana"));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // Removal in the default scope recalculates the default mask.
  @Test
  public void testFilterAclEntriesByAclSpecDefaultMaskCalculated()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, USER, "diana", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, "diana"));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // Removing an access entry must not disturb the default mask.
  @Test
  public void testFilterAclEntriesByAclSpecDefaultMaskPreserved()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ_WRITE))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "diana", ALL))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "diana"));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "diana", ALL))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // Removing a default entry must not disturb the access mask.
  @Test
  public void testFilterAclEntriesByAclSpecAccessMaskPreserved()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, USER, "diana", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, "diana"));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // Unnamed default user entry is restored from the access user entry.
  @Test
  public void testFilterAclEntriesByAclSpecAutomaticDefaultUser()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, READ_WRITE))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // Unnamed default group entry is restored from the access group entry.
  @Test
  public void testFilterAclEntriesByAclSpecAutomaticDefaultGroup()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, GROUP));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // Unnamed default other entry is restored from the access other entry.
  @Test
  public void testFilterAclEntriesByAclSpecAutomaticDefaultOther()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, OTHER));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, READ))
      .build();
    assertEquals(expected, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // An empty spec is a no-op.
  @Test
  public void testFilterAclEntriesByAclSpecEmptyAclSpec() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.<AclEntry>newArrayList();
    assertEquals(existing, filterAclEntriesByAclSpec(existing, aclSpec));
  }

  // The access mask can't be removed while named access entries remain.
  @Test(expected=AclException.class)
  public void testFilterAclEntriesByAclSpecRemoveAccessMaskRequired()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, MASK));
    filterAclEntriesByAclSpec(existing, aclSpec);
  }

  // The default mask can't be removed while named default entries remain.
  @Test(expected=AclException.class)
  public void testFilterAclEntriesByAclSpecRemoveDefaultMaskRequired()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, MASK));
    filterAclEntriesByAclSpec(existing, aclSpec);
  }

  // Oversized specs are rejected outright.
  @Test(expected=AclException.class)
  public void testFilterAclEntriesByAclSpecInputTooLarge()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    filterAclEntriesByAclSpec(existing, ACL_SPEC_TOO_LARGE);
  }

  // Filtering defaults keeps only the access-scoped entries.
  @Test
  public void testFilterDefaultAclEntries() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "sales", READ_EXECUTE))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, GROUP, "sales", READ_EXECUTE))
      .add(aclEntry(DEFAULT, MASK, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, READ_EXECUTE))
      .build();
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "sales", READ_EXECUTE))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    assertEquals(expected, filterDefaultAclEntries(existing));
  }

  // An ACL without defaults passes through untouched.
  @Test
  public void testFilterDefaultAclEntriesUnchanged() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", ALL))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "sales", ALL))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    assertEquals(existing, filterDefaultAclEntries(existing));
  }

  // Merging a named entry inserts it in order and adds a mask.
  @Test
  public void testMergeAclEntries() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", ALL));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", ALL))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Merging a spec identical to the existing ACL is a no-op.
  @Test
  public void testMergeAclEntriesUnchanged() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", ALL))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "sales", ALL))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", ALL))
      .add(aclEntry(DEFAULT, GROUP, READ_EXECUTE))
      .add(aclEntry(DEFAULT, GROUP, "sales", ALL))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, ALL),
      aclEntry(ACCESS, USER, "bruce", ALL),
      aclEntry(ACCESS, GROUP, READ_EXECUTE),
      aclEntry(ACCESS, GROUP, "sales", ALL),
      aclEntry(ACCESS, MASK, ALL),
      aclEntry(ACCESS, OTHER, NONE),
      aclEntry(DEFAULT, USER, ALL),
      aclEntry(DEFAULT, USER, "bruce", ALL),
      aclEntry(DEFAULT, GROUP, READ_EXECUTE),
      aclEntry(DEFAULT, GROUP, "sales", ALL),
      aclEntry(DEFAULT, MASK, ALL),
      aclEntry(DEFAULT, OTHER, NONE));
    assertEquals(existing, mergeAclEntries(existing, aclSpec));
  }

  // New entries sorting before an existing one are inserted in order.
  @Test
  public void testMergeAclEntriesMultipleNewBeforeExisting()
      throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "diana", READ))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, MASK, READ_EXECUTE))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
      aclEntry(ACCESS, USER, "clark", READ_EXECUTE),
      aclEntry(ACCESS, USER, "diana", READ_EXECUTE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_EXECUTE))
      .add(aclEntry(ACCESS, USER, "clark", READ_EXECUTE))
      .add(aclEntry(ACCESS, USER, "diana", READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, MASK, READ_EXECUTE))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Merging recalculates the access mask from the union of group-class perms.
  @Test
  public void testMergeAclEntriesAccessMaskCalculated() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
      aclEntry(ACCESS, USER, "diana", READ));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_EXECUTE))
      .add(aclEntry(ACCESS, USER, "diana", READ))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ_EXECUTE))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Merging default entries recalculates the default mask.
  @Test
  public void testMergeAclEntriesDefaultMaskCalculated() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, "bruce", READ_WRITE),
      aclEntry(DEFAULT, USER, "diana", READ_EXECUTE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE))
      .add(aclEntry(DEFAULT, USER, "diana", READ_EXECUTE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Merging an access entry must not disturb the default mask.
  // NOTE(review): the spec here uses the qualified FsAction.READ_EXECUTE
  // instead of the static import used everywhere else; equivalent, but
  // inconsistent style.
  @Test
  public void testMergeAclEntriesDefaultMaskPreserved() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "diana", ALL))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "diana", FsAction.READ_EXECUTE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "diana", READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ_EXECUTE))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "diana", ALL))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Merging a default entry must not disturb the access mask.
  @Test
  public void testMergeAclEntriesAccessMaskPreserved() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, USER, "diana", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ_WRITE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, "diana", READ_EXECUTE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ))
      .add(aclEntry(ACCESS, USER, "diana", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ))
      .add(aclEntry(DEFAULT, USER, "diana", READ_EXECUTE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, READ_EXECUTE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Missing unnamed default user is filled in from the access scope.
  @Test
  public void testMergeAclEntriesAutomaticDefaultUser() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, GROUP, READ_EXECUTE),
      aclEntry(DEFAULT, OTHER, READ));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, GROUP, READ_EXECUTE))
      .add(aclEntry(DEFAULT, OTHER, READ))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Missing unnamed default group is filled in from the access scope.
  @Test
  public void testMergeAclEntriesAutomaticDefaultGroup() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, READ_EXECUTE),
      aclEntry(DEFAULT, OTHER, READ));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, READ_EXECUTE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, OTHER, READ))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Missing unnamed default other is filled in from the access scope.
  @Test
  public void testMergeAclEntriesAutomaticDefaultOther() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, READ_EXECUTE),
      aclEntry(DEFAULT, GROUP, READ_EXECUTE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .add(aclEntry(DEFAULT, USER, READ_EXECUTE))
      .add(aclEntry(DEFAULT, GROUP, READ_EXECUTE))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // A mask given explicitly in the spec is used as-is, not recalculated.
  @Test
  public void testMergeAclEntriesProvidedAccessMask() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", READ_EXECUTE),
      aclEntry(ACCESS, MASK, ALL));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // An explicit default mask in the spec is also used as-is.
  @Test
  public void testMergeAclEntriesProvidedDefaultMask() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(DEFAULT, USER, ALL),
      aclEntry(DEFAULT, GROUP, READ),
      aclEntry(DEFAULT, MASK, ALL),
      aclEntry(DEFAULT, OTHER, NONE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, mergeAclEntries(existing, aclSpec));
  }

  // Merging an empty spec is a no-op.
  @Test
  public void testMergeAclEntriesEmptyAclSpec() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, READ))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, READ))
      .build();
    List<AclEntry> aclSpec = Lists.<AclEntry>newArrayList();
    assertEquals(existing, mergeAclEntries(existing, aclSpec));
  }

  // Oversized specs are rejected.
  @Test(expected=AclException.class)
  public void testMergeAclEntriesInputTooLarge() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    mergeAclEntries(existing, ACL_SPEC_TOO_LARGE);
  }

  // A merge whose result would exceed the entry limit is rejected.
  @Test(expected=AclException.class)
  public void testMergeAclEntriesResultTooLarge() throws AclException {
    ImmutableList.Builder<AclEntry> aclBuilder =
      new ImmutableList.Builder<AclEntry>()
        .add(aclEntry(ACCESS, USER, ALL));
    for (int i = 1; i <= 28; ++i) {
      aclBuilder.add(aclEntry(ACCESS, USER, "user" + i, READ));
    }
    aclBuilder
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, MASK, READ))
      .add(aclEntry(ACCESS, OTHER, NONE));
    List<AclEntry> existing = aclBuilder.build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", READ));
    mergeAclEntries(existing, aclSpec);
  }

  // A spec containing the same named entry twice is invalid.
  @Test(expected=AclException.class)
  public void testMergeAclEntriesDuplicateEntries() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, "bruce", ALL),
      aclEntry(ACCESS, USER, "diana", READ_WRITE),
      aclEntry(ACCESS, USER, "clark", READ),
      aclEntry(ACCESS, USER, "bruce", READ_EXECUTE));
    mergeAclEntries(existing, aclSpec);
  }

  // Mask entries may not carry a name.
  @Test(expected=AclException.class)
  public void testMergeAclEntriesNamedMask() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, MASK, "bruce", READ_EXECUTE));
    mergeAclEntries(existing, aclSpec);
  }

  // Other entries may not carry a name.
  @Test(expected=AclException.class)
  public void testMergeAclEntriesNamedOther() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, GROUP, READ))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, OTHER, "bruce", READ_EXECUTE));
    mergeAclEntries(existing, aclSpec);
  }

  // Replacing discards the old ACL entirely in favor of the spec.
  @Test
  public void testReplaceAclEntries() throws AclException {
    List<AclEntry> existing = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", ALL))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .build();
    List<AclEntry> aclSpec = Lists.newArrayList(
      aclEntry(ACCESS, USER, ALL),
      aclEntry(ACCESS, USER, "bruce", READ_WRITE),
      aclEntry(ACCESS, GROUP, READ_EXECUTE),
      aclEntry(ACCESS, GROUP, "sales", ALL),
      aclEntry(ACCESS, MASK, ALL),
      aclEntry(ACCESS, OTHER, NONE),
      aclEntry(DEFAULT, USER, ALL),
      aclEntry(DEFAULT, USER, "bruce", READ_WRITE),
      aclEntry(DEFAULT, GROUP, READ_EXECUTE),
      aclEntry(DEFAULT, GROUP, "sales", ALL),
      aclEntry(DEFAULT, MASK, ALL),
      aclEntry(DEFAULT, OTHER, NONE));
    List<AclEntry> expected = new ImmutableList.Builder<AclEntry>()
      .add(aclEntry(ACCESS, USER, ALL))
      .add(aclEntry(ACCESS, USER, "bruce", READ_WRITE))
      .add(aclEntry(ACCESS, GROUP, READ_EXECUTE))
      .add(aclEntry(ACCESS, GROUP, "sales", ALL))
      .add(aclEntry(ACCESS, MASK, ALL))
      .add(aclEntry(ACCESS, OTHER, NONE))
      .add(aclEntry(DEFAULT, USER, ALL))
      .add(aclEntry(DEFAULT, USER, "bruce", READ_WRITE))
      .add(aclEntry(DEFAULT, GROUP, READ_EXECUTE))
      .add(aclEntry(DEFAULT, GROUP, "sales", ALL))
      .add(aclEntry(DEFAULT, MASK, ALL))
      .add(aclEntry(DEFAULT, OTHER, NONE))
      .build();
    assertEquals(expected, replaceAclEntries(existing, aclSpec));
  }

  @Test
  public void testReplaceAclEntriesUnchanged() throws AclException {
    List<AclEntry> existing =
new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", ALL)) .add(aclEntry(ACCESS, GROUP, READ_EXECUTE)) .add(aclEntry(ACCESS, GROUP, "sales", ALL)) .add(aclEntry(ACCESS, MASK, ALL)) .add(aclEntry(ACCESS, OTHER, NONE)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", ALL)) .add(aclEntry(DEFAULT, GROUP, READ_EXECUTE)) .add(aclEntry(DEFAULT, GROUP, "sales", ALL)) .add(aclEntry(DEFAULT, MASK, ALL)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, USER, "bruce", ALL), aclEntry(ACCESS, GROUP, READ_EXECUTE), aclEntry(ACCESS, GROUP, "sales", ALL), aclEntry(ACCESS, MASK, ALL), aclEntry(ACCESS, OTHER, NONE), aclEntry(DEFAULT, USER, ALL), aclEntry(DEFAULT, USER, "bruce", ALL), aclEntry(DEFAULT, GROUP, READ_EXECUTE), aclEntry(DEFAULT, GROUP, "sales", ALL), aclEntry(DEFAULT, MASK, ALL), aclEntry(DEFAULT, OTHER, NONE)); assertEquals(existing, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesAccessMaskCalculated() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, READ)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, USER, "bruce", READ), aclEntry(ACCESS, USER, "diana", READ_WRITE), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, READ)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ)) .add(aclEntry(ACCESS, USER, "diana", READ_WRITE)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, MASK, READ_WRITE)) .add(aclEntry(ACCESS, OTHER, READ)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesDefaultMaskCalculated() throws AclException { 
List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, READ)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, READ), aclEntry(DEFAULT, USER, ALL), aclEntry(DEFAULT, USER, "bruce", READ), aclEntry(DEFAULT, USER, "diana", READ_WRITE), aclEntry(DEFAULT, GROUP, ALL), aclEntry(DEFAULT, OTHER, READ)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, READ)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, USER, "diana", READ_WRITE)) .add(aclEntry(DEFAULT, GROUP, ALL)) .add(aclEntry(DEFAULT, MASK, ALL)) .add(aclEntry(DEFAULT, OTHER, READ)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesDefaultMaskPreserved() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ)) .add(aclEntry(ACCESS, USER, "diana", READ_WRITE)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, MASK, READ_WRITE)) .add(aclEntry(ACCESS, OTHER, READ)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "diana", ALL)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, MASK, READ)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, USER, "bruce", READ), aclEntry(ACCESS, USER, "diana", READ_WRITE), aclEntry(ACCESS, GROUP, ALL), aclEntry(ACCESS, OTHER, READ)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ)) .add(aclEntry(ACCESS, USER, "diana", READ_WRITE)) 
.add(aclEntry(ACCESS, GROUP, ALL)) .add(aclEntry(ACCESS, MASK, ALL)) .add(aclEntry(ACCESS, OTHER, READ)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "diana", ALL)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, MASK, READ)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesAccessMaskPreserved() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ)) .add(aclEntry(ACCESS, USER, "diana", READ_WRITE)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, MASK, READ)) .add(aclEntry(ACCESS, OTHER, READ)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, USER, "diana", READ_WRITE)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, MASK, READ_WRITE)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(DEFAULT, USER, ALL), aclEntry(DEFAULT, USER, "bruce", READ), aclEntry(DEFAULT, GROUP, READ), aclEntry(DEFAULT, OTHER, NONE)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, USER, "bruce", READ)) .add(aclEntry(ACCESS, USER, "diana", READ_WRITE)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, MASK, READ)) .add(aclEntry(ACCESS, OTHER, READ)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, MASK, READ)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesAutomaticDefaultUser() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, 
NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, NONE), aclEntry(DEFAULT, USER, "bruce", READ), aclEntry(DEFAULT, GROUP, READ_WRITE), aclEntry(DEFAULT, MASK, READ_WRITE), aclEntry(DEFAULT, OTHER, READ)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, GROUP, READ_WRITE)) .add(aclEntry(DEFAULT, MASK, READ_WRITE)) .add(aclEntry(DEFAULT, OTHER, READ)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesAutomaticDefaultGroup() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, NONE), aclEntry(DEFAULT, USER, READ_WRITE), aclEntry(DEFAULT, USER, "bruce", READ), aclEntry(DEFAULT, MASK, READ), aclEntry(DEFAULT, OTHER, READ)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .add(aclEntry(DEFAULT, USER, READ_WRITE)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, MASK, READ)) .add(aclEntry(DEFAULT, OTHER, READ)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesAutomaticDefaultOther() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); 
List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, NONE), aclEntry(DEFAULT, USER, READ_WRITE), aclEntry(DEFAULT, USER, "bruce", READ), aclEntry(DEFAULT, GROUP, READ_WRITE), aclEntry(DEFAULT, MASK, READ_WRITE)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .add(aclEntry(DEFAULT, USER, READ_WRITE)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, GROUP, READ_WRITE)) .add(aclEntry(DEFAULT, MASK, READ_WRITE)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test public void testReplaceAclEntriesOnlyDefaults() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(DEFAULT, USER, "bruce", READ)); List<AclEntry> expected = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .add(aclEntry(DEFAULT, USER, ALL)) .add(aclEntry(DEFAULT, USER, "bruce", READ)) .add(aclEntry(DEFAULT, GROUP, READ)) .add(aclEntry(DEFAULT, MASK, READ)) .add(aclEntry(DEFAULT, OTHER, NONE)) .build(); assertEquals(expected, replaceAclEntries(existing, aclSpec)); } @Test(expected=AclException.class) public void testReplaceAclEntriesInputTooLarge() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); replaceAclEntries(existing, ACL_SPEC_TOO_LARGE); } @Test(expected=AclException.class) public void testReplaceAclEntriesResultTooLarge() throws AclException { List<AclEntry> existing = new 
ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayListWithCapacity(32); aclSpec.add(aclEntry(ACCESS, USER, ALL)); for (int i = 1; i <= 29; ++i) { aclSpec.add(aclEntry(ACCESS, USER, "user" + i, READ)); } aclSpec.add(aclEntry(ACCESS, GROUP, READ)); aclSpec.add(aclEntry(ACCESS, OTHER, NONE)); // The ACL spec now has 32 entries. Automatic mask calculation will push it // over the limit to 33. replaceAclEntries(existing, aclSpec); } @Test(expected=AclException.class) public void testReplaceAclEntriesDuplicateEntries() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, USER, "bruce", ALL), aclEntry(ACCESS, USER, "diana", READ_WRITE), aclEntry(ACCESS, USER, "clark", READ), aclEntry(ACCESS, USER, "bruce", READ_EXECUTE), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, NONE)); replaceAclEntries(existing, aclSpec); } @Test(expected=AclException.class) public void testReplaceAclEntriesNamedMask() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, NONE), aclEntry(ACCESS, MASK, "bruce", READ_EXECUTE)); replaceAclEntries(existing, aclSpec); } @Test(expected=AclException.class) public void testReplaceAclEntriesNamedOther() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); 
List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, GROUP, READ), aclEntry(ACCESS, OTHER, NONE), aclEntry(ACCESS, OTHER, "bruce", READ_EXECUTE)); replaceAclEntries(existing, aclSpec); } @Test(expected=AclException.class) public void testReplaceAclEntriesMissingUser() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, "bruce", READ_WRITE), aclEntry(ACCESS, GROUP, READ_EXECUTE), aclEntry(ACCESS, GROUP, "sales", ALL), aclEntry(ACCESS, MASK, ALL), aclEntry(ACCESS, OTHER, NONE)); replaceAclEntries(existing, aclSpec); } @Test(expected=AclException.class) public void testReplaceAclEntriesMissingGroup() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, USER, "bruce", READ_WRITE), aclEntry(ACCESS, GROUP, "sales", ALL), aclEntry(ACCESS, MASK, ALL), aclEntry(ACCESS, OTHER, NONE)); replaceAclEntries(existing, aclSpec); } @Test(expected=AclException.class) public void testReplaceAclEntriesMissingOther() throws AclException { List<AclEntry> existing = new ImmutableList.Builder<AclEntry>() .add(aclEntry(ACCESS, USER, ALL)) .add(aclEntry(ACCESS, GROUP, READ)) .add(aclEntry(ACCESS, OTHER, NONE)) .build(); List<AclEntry> aclSpec = Lists.newArrayList( aclEntry(ACCESS, USER, ALL), aclEntry(ACCESS, USER, "bruce", READ_WRITE), aclEntry(ACCESS, GROUP, READ_EXECUTE), aclEntry(ACCESS, GROUP, "sales", ALL), aclEntry(ACCESS, MASK, ALL)); replaceAclEntries(existing, aclSpec); } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with this
 * work for additional information regarding copyright ownership. The ASF
 * licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.apache.hadoop.yarn.server.timelineservice.storage.common;

import org.apache.hadoop.hbase.ArrayBackedTag;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.Tag;
import org.apache.hadoop.hbase.TagUtil;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationCompactionDimension;
import org.apache.hadoop.yarn.server.timelineservice.storage.flow.AggregationOperation;

import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * A utility class used by the hbase-server module: static helpers for
 * converting timeline-service aggregation attributes to/from HBase
 * {@link Tag}s and {@link Cell}s, and test helpers that flush/compact
 * regions and validate coprocessor loading.
 */
public final class HBaseTimelineServerUtils {
  // Utility class: no instances.
  private HBaseTimelineServerUtils() {
  }

  /**
   * Creates a {@link Tag} from the input attribute.
   *
   * <p>The attribute key is tried first as an {@link AggregationOperation},
   * then as an {@link AggregationCompactionDimension}; the first match wins.
   *
   * @param attribute Attribute from which tag has to be fetched.
   * @return a HBase Tag, or {@code null} if the key matches neither an
   *         aggregation operation nor an aggregation compaction dimension.
   */
  public static Tag getTagFromAttribute(Map.Entry<String, byte[]> attribute) {
    // attribute could be either an Aggregation Operation or
    // an Aggregation Dimension
    // Get the Tag type from either
    AggregationOperation aggOp = AggregationOperation
        .getAggregationOperation(attribute.getKey());
    if (aggOp != null) {
      Tag t = createTag(aggOp.getTagType(), attribute.getValue());
      return t;
    }

    AggregationCompactionDimension aggCompactDim =
        AggregationCompactionDimension.getAggregationCompactionDimension(
            attribute.getKey());
    if (aggCompactDim != null) {
      Tag t = createTag(aggCompactDim.getTagType(), attribute.getValue());
      return t;
    }
    // Key is neither an operation nor a dimension.
    return null;
  }

  /**
   * Creates a new cell based on the input cell but with the new value.
   *
   * <p>Row, family, qualifier and timestamp are copied from the original
   * cell; the type is forced to {@code Put}.
   *
   * @param origCell Original cell
   * @param newValue new cell value
   * @return cell
   * @throws IOException while creating new cell.
   */
  public static Cell createNewCell(Cell origCell, byte[] newValue)
      throws IOException {
    return CellUtil.createCell(CellUtil.cloneRow(origCell),
        CellUtil.cloneFamily(origCell), CellUtil.cloneQualifier(origCell),
        origCell.getTimestamp(), KeyValue.Type.Put.getCode(), newValue);
  }

  /**
   * Creates a cell with the given inputs.
   *
   * @param row row of the cell to be created
   * @param family column family name of the new cell
   * @param qualifier qualifier for the new cell
   * @param ts timestamp of the new cell
   * @param newValue value of the new cell
   * @param tags tags in the new cell, serialized as a byte array
   *        (see {@link #convertTagListToByteArray(List)})
   * @return cell of type {@code Put}
   * @throws IOException while creating the cell.
   */
  public static Cell createNewCell(byte[] row, byte[] family, byte[] qualifier,
      long ts, byte[] newValue, byte[] tags) throws IOException {
    return CellUtil.createCell(row, family, qualifier, ts, KeyValue.Type.Put,
        newValue, tags);
  }

  /**
   * Create a Tag.
   * @param tagType tag type
   * @param tag the content of the tag in byte array.
   * @return an instance of Tag
   */
  public static Tag createTag(byte tagType, byte[] tag) {
    return new ArrayBackedTag(tagType, tag);
  }

  /**
   * Create a Tag whose content is the UTF-8 bytes of the given string.
   * @param tagType tag type
   * @param tag the content of the tag in String.
   * @return an instance of Tag
   */
  public static Tag createTag(byte tagType, String tag) {
    return createTag(tagType, Bytes.toBytes(tag));
  }

  /**
   * Convert a cell to a list of tags.
   * @param cell the cell to convert
   * @return a list of tags parsed from the cell's tag region
   */
  public static List<Tag> convertCellAsTagList(Cell cell) {
    return TagUtil.asList(
        cell.getTagsArray(), cell.getTagsOffset(), cell.getTagsLength());
  }

  /**
   * Convert a list of tags to a byte array.
   * @param tags the list of tags to convert
   * @return byte array representation of the list of tags
   */
  public static byte[] convertTagListToByteArray(List<Tag> tags) {
    return TagUtil.fromList(tags);
  }

  /**
   * Returns the app id from the list of tags.
   *
   * @param tags cell tags to be looked into
   * @return the value of the first APPLICATION_ID tag as a String, or
   *         {@code null} if no such tag is present
   */
  public static String getAggregationCompactionDimension(List<Tag> tags) {
    String appId = null;
    for (Tag t : tags) {
      if (AggregationCompactionDimension.APPLICATION_ID.getTagType() == t
          .getType()) {
        // First match wins; tag value holds the application id string.
        appId = Bytes.toString(Tag.cloneValue(t));
        return appId;
      }
    }
    return appId;
  }

  /**
   * Returns the first seen aggregation operation as seen in the list of input
   * tags or null otherwise.
   *
   * <p>Note: "first" is in {@link AggregationOperation#values()} declaration
   * order, not tag order in the list.
   *
   * @param tags list of HBase tags.
   * @return AggregationOperation, or {@code null} if no tag type matches
   */
  public static AggregationOperation getAggregationOperationFromTagsList(
      List<Tag> tags) {
    for (AggregationOperation aggOp : AggregationOperation.values()) {
      for (Tag tag : tags) {
        if (tag.getType() == aggOp.getTagType()) {
          return aggOp;
        }
      }
    }
    return null;
  }

  // flush and compact all the regions of the primary table

  /**
   * Flush and compact all regions of a table.
   * @param server region server
   * @param table the table to flush and compact
   * @return the number of regions flushed and compacted
   * @throws IOException if the flush or compaction fails
   */
  public static int flushCompactTableRegions(HRegionServer server,
      TableName table) throws IOException {
    List<HRegion> regions = server.getRegions(table);
    for (HRegion region : regions) {
      // Force a memstore flush, then a (major) compaction of the region.
      region.flush(true);
      region.compact(true);
    }
    return regions.size();
  }

  /**
   * Check the existence of FlowRunCoprocessor in a table.
   *
   * <p>Every region of the table must agree with {@code existenceExpected};
   * the first mismatching region causes an exception.
   *
   * @param server region server
   * @param table table to check
   * @param existenceExpected true if the FlowRunCoprocessor is expected
   *        to be loaded in the table, false otherwise
   * @throws Exception if any region's coprocessor state does not match the
   *         expectation
   */
  public static void validateFlowRunCoprocessor(HRegionServer server,
      TableName table, boolean existenceExpected) throws Exception {
    List<HRegion> regions = server.getRegions(table);
    for (HRegion region : regions) {
      boolean found = false;
      Set<String> coprocs = region.getCoprocessorHost().getCoprocessors();
      for (String coprocName : coprocs) {
        // Substring match: coprocessor names include their class name.
        if (coprocName.contains("FlowRunCoprocessor")) {
          found = true;
        }
      }
      if (found != existenceExpected) {
        throw new Exception("FlowRunCoprocessor is" +
            (existenceExpected ? " not " : " ") + "loaded in table " + table);
      }
    }
  }
}
/*
 * Copyright LWJGL. All rights reserved.
 * License terms: http://lwjgl.org/license.php
 * MACHINE GENERATED FILE, DO NOT EDIT
 */
package org.lwjgl.opengl;

import java.nio.*;

import static org.lwjgl.system.Checks.*;
import static org.lwjgl.system.JNI.*;
import static org.lwjgl.system.MemoryUtil.*;

/**
 * Native bindings to the <a href="http://www.opengl.org/registry/specs/ARB/transpose_matrix.txt">ARB_transpose_matrix</a> extension.
 *
 * <p>New functions and tokens are added allowing application matrices stored in row major order rather than column major order to be transferred to the
 * OpenGL implementation. This allows an application to use standard C-language 2-dimensional arrays ({@code m[row][col]}) and have the array indices match the
 * expected matrix row and column indexes. These arrays are referred to as transpose matrices since they are the transpose of the standard matrices passed
 * to OpenGL.</p>
 *
 * <p>This extension adds an interface for transfering data to and from the OpenGL pipeline, it does not change any OpenGL processing or imply any changes in
 * state representation.</p>
 *
 * <p>Promoted to core in {@link GL13 OpenGL 1.3}.</p>
 */
public class ARBTransposeMatrix {

	/** Accepted by the {@code pname} parameter of GetBooleanv, GetIntegerv, GetFloatv, and GetDoublev. */
	public static final int
		GL_TRANSPOSE_MODELVIEW_MATRIX_ARB  = 0x84E3,
		GL_TRANSPOSE_PROJECTION_MATRIX_ARB = 0x84E4,
		GL_TRANSPOSE_TEXTURE_MATRIX_ARB    = 0x84E5,
		GL_TRANSPOSE_COLOR_MATRIX_ARB      = 0x84E6;

	// Static-only binding class; instantiation is an error.
	protected ARBTransposeMatrix() {
		throw new UnsupportedOperationException();
	}

	// Returns true only when all four extension entry points were resolved.
	static boolean isAvailable(GLCapabilities caps) {
		return checkFunctions(
			caps.glLoadTransposeMatrixfARB, caps.glLoadTransposeMatrixdARB, caps.glMultTransposeMatrixfARB, caps.glMultTransposeMatrixdARB
		);
	}

	// --- [ glLoadTransposeMatrixfARB ] ---

	/**
	 * Sets the current matrix to a 4 &times; 4 matrix in row-major order.
	 *
	 * <p>The matrix is stored as 16 consecutive values, i.e. as:</p>
	 *
	 * <table class="lwjgl matrix">
	 * <tr><td>a1</td><td>a2</td><td>a3</td><td>a4</td></tr>
	 * <tr><td>a5</td><td>a6</td><td>a7</td><td>a8</td></tr>
	 * <tr><td>a9</td><td>a10</td><td>a11</td><td>a12</td></tr>
	 * <tr><td>a13</td><td>a14</td><td>a15</td><td>a16</td></tr>
	 * </table>
	 *
	 * <p>Unsafe version: {@code m} is a raw pointer; the caller must guarantee
	 * it addresses 16 readable floats — no bounds check is performed here.</p>
	 *
	 * @param m the matrix data
	 */
	public static void nglLoadTransposeMatrixfARB(long m) {
		long __functionAddress = GL.getCapabilities().glLoadTransposeMatrixfARB;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, m);
	}

	/**
	 * Sets the current matrix to a 4 &times; 4 matrix in row-major order.
	 *
	 * <p>The matrix is stored as 16 consecutive values, i.e. as:</p>
	 *
	 * <table class="lwjgl matrix">
	 * <tr><td>a1</td><td>a2</td><td>a3</td><td>a4</td></tr>
	 * <tr><td>a5</td><td>a6</td><td>a7</td><td>a8</td></tr>
	 * <tr><td>a9</td><td>a10</td><td>a11</td><td>a12</td></tr>
	 * <tr><td>a13</td><td>a14</td><td>a15</td><td>a16</td></tr>
	 * </table>
	 *
	 * @param m the matrix data (must have at least 16 remaining elements)
	 */
	public static void glLoadTransposeMatrixfARB(FloatBuffer m) {
		if ( CHECKS )
			checkBuffer(m, 16);
		nglLoadTransposeMatrixfARB(memAddress(m));
	}

	// --- [ glLoadTransposeMatrixdARB ] ---

	/**
	 * Double version of {@link #glLoadTransposeMatrixfARB LoadTransposeMatrixfARB}.
	 *
	 * <p>Unsafe version: {@code m} must address 16 readable doubles.</p>
	 *
	 * @param m the matrix data
	 */
	public static void nglLoadTransposeMatrixdARB(long m) {
		long __functionAddress = GL.getCapabilities().glLoadTransposeMatrixdARB;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, m);
	}

	/**
	 * Double version of {@link #glLoadTransposeMatrixfARB LoadTransposeMatrixfARB}.
	 *
	 * @param m the matrix data (must have at least 16 remaining elements)
	 */
	public static void glLoadTransposeMatrixdARB(DoubleBuffer m) {
		if ( CHECKS )
			checkBuffer(m, 16);
		nglLoadTransposeMatrixdARB(memAddress(m));
	}

	// --- [ glMultTransposeMatrixfARB ] ---

	/**
	 * Multiplies the current matrix with a 4 &times; 4 matrix in row-major order. See {@link #glLoadTransposeMatrixfARB LoadTransposeMatrixfARB} for details.
	 *
	 * <p>Unsafe version: {@code m} must address 16 readable floats.</p>
	 *
	 * @param m the matrix data
	 */
	public static void nglMultTransposeMatrixfARB(long m) {
		long __functionAddress = GL.getCapabilities().glMultTransposeMatrixfARB;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, m);
	}

	/**
	 * Multiplies the current matrix with a 4 &times; 4 matrix in row-major order. See {@link #glLoadTransposeMatrixfARB LoadTransposeMatrixfARB} for details.
	 *
	 * @param m the matrix data (must have at least 16 remaining elements)
	 */
	public static void glMultTransposeMatrixfARB(FloatBuffer m) {
		if ( CHECKS )
			checkBuffer(m, 16);
		nglMultTransposeMatrixfARB(memAddress(m));
	}

	// --- [ glMultTransposeMatrixdARB ] ---

	/**
	 * Double version of {@link #glMultTransposeMatrixfARB MultTransposeMatrixfARB}.
	 *
	 * <p>Unsafe version: {@code m} must address 16 readable doubles.</p>
	 *
	 * @param m the matrix data
	 */
	public static void nglMultTransposeMatrixdARB(long m) {
		long __functionAddress = GL.getCapabilities().glMultTransposeMatrixdARB;
		if ( CHECKS )
			checkFunctionAddress(__functionAddress);
		callPV(__functionAddress, m);
	}

	/**
	 * Double version of {@link #glMultTransposeMatrixfARB MultTransposeMatrixfARB}.
	 *
	 * @param m the matrix data (must have at least 16 remaining elements)
	 */
	public static void glMultTransposeMatrixdARB(DoubleBuffer m) {
		if ( CHECKS )
			checkBuffer(m, 16);
		nglMultTransposeMatrixdARB(memAddress(m));
	}

	/** Array version of: {@link #glLoadTransposeMatrixfARB LoadTransposeMatrixfARB} */
	public static void glLoadTransposeMatrixfARB(float[] m) {
		long __functionAddress = GL.getCapabilities().glLoadTransposeMatrixfARB;
		if ( CHECKS ) {
			checkFunctionAddress(__functionAddress);
			checkBuffer(m, 16);
		}
		callPV(__functionAddress, m);
	}

	/** Array version of: {@link #glLoadTransposeMatrixdARB LoadTransposeMatrixdARB} */
	public static void glLoadTransposeMatrixdARB(double[] m) {
		long __functionAddress = GL.getCapabilities().glLoadTransposeMatrixdARB;
		if ( CHECKS ) {
			checkFunctionAddress(__functionAddress);
			checkBuffer(m, 16);
		}
		callPV(__functionAddress, m);
	}

	/** Array version of: {@link #glMultTransposeMatrixfARB MultTransposeMatrixfARB} */
	public static void glMultTransposeMatrixfARB(float[] m) {
		long __functionAddress = GL.getCapabilities().glMultTransposeMatrixfARB;
		if ( CHECKS ) {
			checkFunctionAddress(__functionAddress);
			checkBuffer(m, 16);
		}
		callPV(__functionAddress, m);
	}

	/** Array version of: {@link #glMultTransposeMatrixdARB MultTransposeMatrixdARB} */
	public static void glMultTransposeMatrixdARB(double[] m) {
		long __functionAddress = GL.getCapabilities().glMultTransposeMatrixdARB;
		if ( CHECKS ) {
			checkFunctionAddress(__functionAddress);
			checkBuffer(m, 16);
		}
		callPV(__functionAddress, m);
	}

}
package org.ripple.power.txns.btc;

import java.io.EOFException;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import org.ripple.power.Helper;

/**
 * <p>
 * The Bitcoin block chain contains all of the transactions that have occurred
 * and is available to everyone. The block chain consists of a series of blocks
 * starting with the genesis block (block 0) and continuing to the chain head
 * (the latest block in the chain).
 * </p>
 *
 * <p>
 * Each block is composed of one or more transactions. The first transaction is
 * called the coinbase transaction and it assigns the block reward to the miner
 * who solved the block hash. The remaining transactions move coins from Input A
 * to Output B. A single transaction can contain multiple inputs and multiple
 * outputs. The sum of the inputs minus the sum of the output represents the
 * mining fee for that transaction.
 * </p>
 *
 * <p>
 * A block has the following format:
 * </p>
 *
 * <pre>
 *   Size        Field          Description
 *   ====        =====          ===========
 *   80 bytes    BlockHeader    Consists of 6 fields that are hashed to calculate the block hash
 *   VarInt      TxCount        Number of transactions in the block
 *   Variable    Transactions   The transactions in the block
 * </pre>
 *
 * <p>
 * The block header has the following format:
 * </p>
 *
 * <pre>
 *   Size        Field          Description
 *   ====        =====          ===========
 *   4 bytes     Version        The block version number
 *   32 bytes    PrevBlockHash  The hash of the preceding block in the chain
 *   32 bytes    MerkleRoot     The Merkle root for the transactions in the block
 *   4 bytes     Time           The time the block was mined
 *   4 bytes     Difficulty     The target difficulty
 *   4 bytes     Nonce          The nonce used to generate the required hash
 * </pre>
 */
public class Block implements ByteSerializable {

    /** The serialized byte stream */
    private byte[] blockData;

    /** The block version */
    private int blockVersion;

    /** The block hash calculated from the block header */
    private Sha256Hash blockHash;

    /** The hash for the previous block in the chain */
    private Sha256Hash prevBlockHash;

    /** The Merkle root for the transactions in the block */
    private Sha256Hash merkleRoot;

    /**
     * The Merkle tree for the transactions in the block (built lazily).
     * NOTE(review): lazy initialization here and in calculateMerkleRoot() is
     * not synchronized — assumes single-threaded access; confirm with callers.
     */
    private List<byte[]> merkleTree;

    /** The block timestamp (seconds since the Unix epoch) */
    private long timeStamp;

    /** The target difficulty (compact form) */
    private long targetDifficulty;

    /** The nonce */
    private int nonce;

    /** The transactions contained in the block */
    private List<Transaction> transactions;

    /**
     * Create an empty block for use by subclasses
     */
    protected Block() {
    }

    /**
     * Create a block from a serialized byte array
     *
     * @param inBytes
     *            Byte array containing the serialized data
     * @param doVerify
     *            TRUE if the block structure should be verified
     * @throws EOFException
     *             End-of-data while processing byte stream
     * @throws VerificationException
     *             Block verification failed
     */
    public Block(byte[] inBytes, boolean doVerify) throws EOFException, VerificationException {
        this(inBytes, 0, inBytes.length, doVerify);
    }

    /**
     * Create a block from a serialized byte array
     *
     * @param inBytes
     *            Byte array containing the serialized data
     * @param inOffset
     *            Starting offset within the array
     * @param inLength
     *            Length of the serialized data
     * @param doVerify
     *            TRUE if the block structure should be verified
     * @throws EOFException
     *             Serialized byte stream is too short
     * @throws VerificationException
     *             Block verification failed
     */
    public Block(byte[] inBytes, int inOffset, int inLength, boolean doVerify)
            throws EOFException, VerificationException {
        this(new SerializedBuffer(inBytes, inOffset, inLength), doVerify);
    }

    /**
     * Create a block from a serialized buffer
     *
     * @param inBuffer
     *            Serialized buffer
     * @param doVerify
     *            TRUE if the block structure should be verified
     * @throws EOFException
     *             Serialized byte stream is too short
     * @throws VerificationException
     *             Block verification failed
     */
    public Block(SerializedBuffer inBuffer, boolean doVerify) throws EOFException, VerificationException {
        //
        // We must have at least 80 bytes
        //
        if (inBuffer.available() < BlockHeader.HEADER_SIZE)
            throw new EOFException("Block header truncated");
        //
        // Compute the block hash from the serialized block header
        // (double SHA-256 over the 80 header bytes, then byte-reversed)
        //
        int startPosition = inBuffer.getPosition();
        blockHash = new Sha256Hash(
                Helper.reverseBytes(Helper.doubleDigest(inBuffer.getBytes(BlockHeader.HEADER_SIZE))));
        inBuffer.setPosition(startPosition);
        //
        // Read the block header
        //
        readHeader(inBuffer);
        //
        // Read the transactions
        //
        readTransactions(inBuffer);
        //
        // Verify the block and its transactions. Note that transaction
        // signatures and connected outputs will be verified when the block
        // is added to the block chain.
        //
        if (doVerify)
            verifyBlock();
        //
        // Save a copy of the serialized byte stream
        //
        inBuffer.setSegmentStart(startPosition);
        blockData = inBuffer.getSegmentBytes();
    }

    /**
     * Write the serialized block data to the output buffer
     *
     * @param outBuffer
     *            Output buffer
     * @return Output buffer
     */
    @Override
    public SerializedBuffer getBytes(SerializedBuffer outBuffer) {
        outBuffer.putBytes(blockData);
        return outBuffer;
    }

    /**
     * Return the serialized block data
     *
     * @return Byte array containing the serialized block
     */
    @Override
    public byte[] getBytes() {
        return blockData;
    }

    /**
     * Write the serialized block header to the output buffer
     *
     * @param outBuffer
     *            Output buffer
     * @return Output buffer
     */
    public SerializedBuffer getHeaderBytes(SerializedBuffer outBuffer) {
        outBuffer.putBytes(blockData, 0, BlockHeader.HEADER_SIZE);
        return outBuffer;
    }

    /**
     * Return the serialized block header
     *
     * @return Byte array containing just the block header
     */
    public byte[] getHeaderBytes() {
        return Arrays.copyOfRange(blockData, 0, BlockHeader.HEADER_SIZE);
    }

    /**
     * <p>
     * Returns the block version. Only Version 1 and Version 2 blocks are
     * supported.
     * </p>
     * <ul>
     * <li>Blocks created before BIP 34 are Version 1 and do not contain the
     * chain height in the coinbase transaction input script</li>
     * <li>Blocks created after BIP 34 are Version 2 and contain the chain
     * height in the coinbase transaction input script</li>
     * </ul>
     *
     * @return Block version
     */
    public int getVersion() {
        return blockVersion;
    }

    /**
     * Returns the time the block was mined
     *
     * @return The block timestamp in seconds since the Unix epoch (Jan 1, 1970)
     */
    public long getTimeStamp() {
        return timeStamp;
    }

    /**
     * Returns the block hash calculated over the block header
     *
     * @return Block hash
     */
    public Sha256Hash getHash() {
        return blockHash;
    }

    /**
     * Returns the block hash as a formatted hex string
     *
     * @return Hex string
     */
    public String getHashAsString() {
        return blockHash.toString();
    }

    /**
     * Returns the hash of the previous block in the chain
     *
     * @return Previous block hash
     */
    public Sha256Hash getPrevBlockHash() {
        return prevBlockHash;
    }

    /**
     * Returns the Merkle root
     *
     * @return Merkle root
     */
    public Sha256Hash getMerkleRoot() {
        return merkleRoot;
    }

    /**
     * Returns the Merkle tree, building it on first use
     *
     * @return Merkle tree
     */
    public List<byte[]> getMerkleTree() {
        if (merkleTree == null)
            merkleTree = buildMerkleTree();
        return merkleTree;
    }

    /**
     * Returns the target difficulty in compact form
     *
     * @return Target difficulty
     */
    public long getTargetDifficulty() {
        return targetDifficulty;
    }

    /**
     * Returns the target difficulty as a 256-bit value that can be compared to
     * a SHA-256 hash. Inside a block, the target is represented using the
     * compact form.
     *
     * @return The difficulty target
     */
    public BigInteger getTargetDifficultyAsInteger() {
        return Helper.decodeCompactBits(targetDifficulty);
    }

    /**
     * Returns the work represented by this block
     *
     * Work is defined as the number of tries needed to solve a block in the
     * average case. As the target gets lower, the amount of work goes up.
     *
     * @return The work represented by this block
     */
    public BigInteger getWork() {
        BigInteger target = getTargetDifficultyAsInteger();
        return BlockHeader.LARGEST_HASH.divide(target.add(BigInteger.ONE));
    }

    /**
     * Returns the block nonce
     *
     * @return Block nonce
     */
    public int getNonce() {
        return nonce;
    }

    /**
     * Returns the transactions in this block
     *
     * @return Transaction list
     */
    public List<Transaction> getTransactions() {
        return transactions;
    }

    /**
     * Calculates the Merkle root from the block transactions.
     * The root is the last entry of the Merkle tree list.
     *
     * @return Merkle root
     */
    private Sha256Hash calculateMerkleRoot() {
        if (merkleTree == null)
            merkleTree = buildMerkleTree();
        return new Sha256Hash(merkleTree.get(merkleTree.size() - 1));
    }

    /**
     * Builds the Merkle tree from the block transactions
     *
     * @return List of byte arrays representing the nodes in the Merkle tree
     */
    private List<byte[]> buildMerkleTree() {
        //
        // The Merkle root is based on a tree of hashes calculated from the
        // transactions:
        //
        //     root
        //      / \
        //     A   B
        //    / \ / \
        //   t1 t2 t3 t4
        //
        // The tree is represented as a list: t1,t2,t3,t4,A,B,root where each
        // entry is a hash
        //
        // The hashing algorithm is double SHA-256. The leaves are a hash of the
        // serialized contents of the transaction. The interior nodes are hashes
        // of the concatenation of the two child hashes.
        //
        // This structure allows the creation of proof that a transaction was
        // included into a block without having to provide the full block
        // contents. Instead, you can provide only a Merkle branch. For example
        // to prove tx2 was in a block you can just provide tx2, the hash(tx1)
        // and B. Now the other party has everything they need to derive the
        // root, which can be checked against the block header. These proofs
        // are useful when we want to download partial block contents.
        //
        // Note that if the number of transactions is not even, the last tx is
        // repeated to make it so. A tree with 5 transactions would look like
        // this:
        //
        //         root
        //        /    \
        //       4      5
        //      / \    / \
        //     1   2  3   3
        //    / \ / \ / \
        //   t1 t2 t3 t4 t5 t5
        //
        ArrayList<byte[]> tree = new ArrayList<>();
        for (Transaction tx : transactions) {
            tree.add(tx.getHash().getBytes());
        }
        //
        // The tree is generated starting at the leaves and moving down to the
        // root
        //
        int levelOffset = 0;
        //
        // Step through each level, stopping when we reach the root (levelSize
        // == 1).
        //
        for (int levelSize = transactions.size(); levelSize > 1; levelSize = (levelSize + 1) / 2) {
            //
            // Process each pair of nodes on the current level
            //
            for (int left = 0; left < levelSize; left += 2) {
                //
                // The right hand node can be the same as the left hand in the
                // case where we have an odd number of nodes for the level
                //
                int right = Math.min(left + 1, levelSize - 1);
                byte[] leftBytes = Helper.reverseBytes(tree.get(levelOffset + left));
                byte[] rightBytes = Helper.reverseBytes(tree.get(levelOffset + right));
                byte[] nodeHash = Helper.doubleDigestTwoBuffers(leftBytes, 0, 32, rightBytes, 0, 32);
                tree.add(Helper.reverseBytes(nodeHash));
            }
            //
            // Move to the next level.
            //
            levelOffset += levelSize;
        }
        return tree;
    }

    /**
     * Reads the block header from the input stream
     *
     * @param inBuffer
     *            Input buffer
     * @throws EOFException
     *             Serialized input stream is too short
     * @throws VerificationException
     *             Block structure is incorrect
     */
    private void readHeader(SerializedBuffer inBuffer) throws EOFException, VerificationException {
        blockVersion = inBuffer.getInt();
        // Versions 1-3 are accepted (despite the class javadoc mentioning only 1 and 2)
        if (blockVersion < 1 || blockVersion > 3)
            throw new VerificationException(String.format("Block version %d is not supported", blockVersion));
        prevBlockHash = new Sha256Hash(Helper.reverseBytes(inBuffer.getBytes(32)));
        merkleRoot = new Sha256Hash(Helper.reverseBytes(inBuffer.getBytes(32)));
        timeStamp = inBuffer.getUnsignedInt();
        targetDifficulty = inBuffer.getUnsignedInt();
        nonce = inBuffer.getInt();
    }

    /**
     * Reads the transactions from the serialized stream
     *
     * @param inBuffer
     *            Serialized buffer
     * @throws EOFException
     *             Serialized input stream is too short
     * @throws VerificationException
     *             Transaction verification failed
     */
    private void readTransactions(SerializedBuffer inBuffer) throws EOFException, VerificationException {
        int count = inBuffer.getVarInt();
        // Sanity bound: a serialized transaction is at least ~60 bytes, so the
        // count can never exceed MAX_BLOCK_SIZE / 60
        if (count < 1 || count > NetParams.MAX_BLOCK_SIZE / 60)
            throw new VerificationException(String.format("Transaction count %d is not valid", count));
        transactions = new ArrayList<>(count);
        for (int i = 0; i < count; i++)
            transactions.add(new Transaction(inBuffer));
    }

    /**
     * <p>
     * Checks the block to ensure it follows the rules laid out in the network
     * parameters.
     * </p>
     * <p>
     * The following checks are performed:
     * </p>
     * <ul>
     * <li>Check the proof of work by comparing the block hash to the target
     * difficulty</li>
     * <li>Check the timestamp against the current time</li>
     * <li>Verify that there is a single coinbase transaction and it is the
     * first transaction in the block</li>
     * <li>Verify the merkle root</li>
     * <li>Verify the transaction structure</li>
     * <li>Verify the transaction lock time</li>
     * </ul>
     *
     * @throws VerificationException
     *             Block verification failed
     */
    private void verifyBlock() throws VerificationException {
        //
        // Ensure this block does in fact represent real work done. If the
        // difficulty is high enough, we can be fairly certain the work was
        // done by the network.
        //
        // The block hash must be less than or equal to the target difficulty
        // (the difficulty increases by requiring an increasing number of
        // leading zeroes in the block hash)
        //
        BigInteger target = getTargetDifficultyAsInteger();
        if (target.signum() <= 0 || target.compareTo(NetParams.PROOF_OF_WORK_LIMIT) > 0)
            throw new VerificationException("Target difficulty is not valid", RejectMessage.REJECT_INVALID,
                    blockHash);
        BigInteger hash = getHash().toBigInteger();
        if (hash.compareTo(target) > 0)
            throw new VerificationException("Block hash is higher than target difficulty",
                    RejectMessage.REJECT_INVALID, blockHash);
        //
        // Verify the block timestamp
        //
        long currentTime = System.currentTimeMillis() / 1000;
        if (timeStamp > currentTime + NetParams.ALLOWED_TIME_DRIFT)
            throw new VerificationException("Block timestamp is too far in the future",
                    RejectMessage.REJECT_INVALID, blockHash);
        //
        // Check that there is just one coinbase transaction and it is the
        // first transaction in the block
        //
        boolean foundCoinBase = false;
        for (Transaction tx : transactions) {
            if (tx.isCoinBase()) {
                if (foundCoinBase)
                    throw new VerificationException("Block contains multiple coinbase transactions",
                            RejectMessage.REJECT_MALFORMED, blockHash);
                foundCoinBase = true;
            } else if (!foundCoinBase) {
                throw new VerificationException("First transaction in block is not the coinbase transaction",
                        RejectMessage.REJECT_MALFORMED, blockHash);
            }
        }
        //
        // Verify the Merkle root
        //
        Sha256Hash checkRoot = calculateMerkleRoot();
        if (!checkRoot.equals(merkleRoot))
            throw new VerificationException("Merkle root is not correct", RejectMessage.REJECT_INVALID,
                    blockHash);
        //
        // Verify the transactions in the block
        //
        for (Transaction tx : transactions) {
            //
            // Verify the transaction structure
            //
            tx.verify(false);
            //
            // A transaction is locked if the lock time is greater than the
            // block time (we allow a 10-minute leeway)
            //
            if (tx.getLockTime() > timeStamp + (10 * 60)) {
                //
                // A transaction is unlocked if all of the input sequences are
                // -1 even though the lock time has not been reached
                //
                List<TransactionInput> txInputs = tx.getInputs();
                for (TransactionInput txInput : txInputs) {
                    if (txInput.getSeqNumber() != -1)
                        throw new VerificationException("Transaction lock time greater than block time",
                                RejectMessage.REJECT_INVALID, tx.getHash());
                }
            }
        }
    }

    /**
     * Determines if this block is equal to another block
     *
     * @param obj
     *            The block to compare
     * @return TRUE if the blocks are equal
     */
    @Override
    public boolean equals(Object obj) {
        // instanceof already yields false for null, so no separate null check
        return (obj instanceof Block) && blockHash.equals(((Block) obj).blockHash);
    }

    /**
     * Returns the hash code for this object. The returned value is based on the
     * block hash but is not the same value.
     *
     * @return Hash code
     */
    @Override
    public int hashCode() {
        return blockHash.hashCode();
    }

    /**
     * Returns a string representation for this block
     *
     * @return Formatted string
     */
    @Override
    public String toString() {
        return String.format("Block hash: %s\n  Previous block hash %s\n  Merkle root: %s\n  Target difficulty %d",
                getHashAsString(), getPrevBlockHash().toString(), getMerkleRoot().toString(), targetDifficulty);
    }
}
/* Derby - Class com.pivotal.gemfirexd.internal.impl.sql.compile.TableElementList Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ /* * Changes for GemFireXD distributed data platform (some marked by "GemStone changes") * * Portions Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); you * may not use this file except in compliance with the License. You * may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or * implied. See the License for the specific language governing * permissions and limitations under the License. See accompanying * LICENSE file. 
*/ package com.pivotal.gemfirexd.internal.impl.sql.compile; import com.pivotal.gemfirexd.internal.catalog.UUID; import com.pivotal.gemfirexd.internal.engine.GfxdConstants; import com.pivotal.gemfirexd.internal.engine.sql.catalog.DistributionDescriptor; import com.pivotal.gemfirexd.internal.engine.sql.compile.DistributionDefinitionNode; import com.pivotal.gemfirexd.internal.iapi.error.StandardException; import com.pivotal.gemfirexd.internal.iapi.reference.SQLState; import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager; import com.pivotal.gemfirexd.internal.iapi.sql.compile.C_NodeTypes; import com.pivotal.gemfirexd.internal.iapi.sql.compile.CompilerContext; import com.pivotal.gemfirexd.internal.iapi.sql.conn.ConnectionUtil; import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext; import com.pivotal.gemfirexd.internal.iapi.sql.depend.DependencyManager; import com.pivotal.gemfirexd.internal.iapi.sql.depend.ProviderInfo; import com.pivotal.gemfirexd.internal.iapi.sql.depend.ProviderList; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ColumnDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ConstraintDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.ConstraintDescriptorList; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.DataDictionary; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.SchemaDescriptor; import com.pivotal.gemfirexd.internal.iapi.sql.dictionary.TableDescriptor; import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor; import com.pivotal.gemfirexd.internal.iapi.types.TypeId; import com.pivotal.gemfirexd.internal.impl.sql.execute.ColumnInfo; import com.pivotal.gemfirexd.internal.impl.sql.execute.ConstraintConstantAction; import com.pivotal.gemfirexd.internal.impl.sql.execute.ConstraintInfo; import com.pivotal.gemfirexd.internal.impl.sql.execute.IndexConstantAction; import java.sql.SQLException; import java.util.ArrayList; import 
java.util.Hashtable;
import java.util.List;
import java.util.Properties;
import java.util.Vector;

/**
 * A TableElementList represents the list of columns and other table elements
 * such as constraints in a CREATE TABLE or ALTER TABLE statement.
 *
 */

public class TableElementList extends QueryTreeNodeVector
{
	// Number of column elements (definitions and drops) in this list
	private int				numColumns;
	// Table descriptor for the existing table (non-null only for ALTER TABLE)
	private TableDescriptor td;

	/**
	 * Add a TableElementNode to this TableElementList
	 *
	 * @param tableElement	The TableElementNode to add to this list
	 */
	public void addTableElement(TableElementNode tableElement)
	{
		addElement(tableElement);
		// Column definitions and DROP COLUMN elements both count as columns
		if ((tableElement instanceof ColumnDefinitionNode) ||
			tableElement.getElementType() == TableElementNode.AT_DROP_COLUMN)
		{
			numColumns++;
		}
	}

	/**
	 * Convert this object to a String.  See comments in QueryTreeNode.java
	 * for how this should be done for tree printing.
	 *
	 * @return	This object as a String
	 */
	public String toString()
	{
		if (SanityManager.DEBUG)
		{
			StringBuilder	buffer = new StringBuilder("");

			for (int index = 0; index < size(); index++)
			{
				buffer.append(elementAt(index).toString()).append("\n");
			}

			return buffer.toString();
		}
		else
		{
			// Tree printing is a debug-only facility
			return "";
		}
	}

	/**
	 * Use the passed schema descriptor's collation type to set the collation
	 * of the character string types in create table node
	 * @param sd
	 */
	void setCollationTypesOnCharacterStringColumns(SchemaDescriptor sd)
	{
		int			size = size();
		int collationType = sd.getCollationType();
		for (int index = 0; index < size; index++)
		{
			TableElementNode tableElement = (TableElementNode) elementAt(index);

			if (tableElement instanceof ColumnDefinitionNode)
			{
				ColumnDefinitionNode cdn = (ColumnDefinitionNode) elementAt(index);
				// Only character string columns carry a collation type
				if (cdn.getType().getTypeId().isStringTypeId())
				{
					cdn.setCollationType(collationType);
				}
			}
		}
	}

	/**
	 * Validate this TableElementList.  This includes checking for
	 * duplicate columns names, and checking that user types really exist.
	 *
	 * @param ddlStmt	DDLStatementNode which contains this list
	 * @param dd		DataDictionary to use
	 * @param td		TableDescriptor for table, if existing table.
	 *
	 * @exception StandardException		Thrown on error
	 */
	void validate(DDLStatementNode ddlStmt,
					 DataDictionary dd,
					 TableDescriptor td)
					throws StandardException
	{
		this.td = td;
		int numAutoCols = 0;
		// list of table elements(fk constraints) to be removed
		List<Integer> elementsToBeRemoved = new ArrayList<Integer>();

		int			size = size();
		Hashtable	columnHT = new Hashtable(size + 2, (float) .999);
		Hashtable	constraintHT = new Hashtable(size + 2, (float) .999);
		//all the primary key/unique key constraints for this table
		Vector constraintsVector = new Vector();

		//special case for alter table (td is not null in case of alter table)
		if (td != null)
		{
			//In case of alter table, get the already existing primary key and unique
			//key constraints for this table. And then we will compare them with new
			//primary key/unique key constraint column lists.
			ConstraintDescriptorList cdl = dd.getConstraintDescriptors(td);
			ConstraintDescriptor cd;

			if (cdl != null) //table does have some pre-existing constraints defined on it
			{
				for (int i=0; i<cdl.size();i++)
				{
					cd = cdl.elementAt(i);
					//if the constraint type is not primary key or unique key, ignore it.
					if (cd.getConstraintType() == DataDictionary.PRIMARYKEY_CONSTRAINT ||
					cd.getConstraintType() == DataDictionary.UNIQUE_CONSTRAINT)
						constraintsVector.addElement(cd);
				}
			}
		}

		int tableType = TableDescriptor.BASE_TABLE_TYPE;
		if (ddlStmt instanceof CreateTableNode)
			tableType = ((CreateTableNode)ddlStmt).tableType;

		for (int index = 0; index < size; index++)
		{
			TableElementNode tableElement = (TableElementNode) elementAt(index);

			if (tableElement instanceof ColumnDefinitionNode)
			{
				ColumnDefinitionNode cdn = (ColumnDefinitionNode) elementAt(index);
				// Temporary tables may not contain long or user-defined types
				if (tableType == TableDescriptor.GLOBAL_TEMPORARY_TABLE_TYPE &&
					(cdn.getType().getTypeId().isLongConcatableTypeId() ||
					cdn.getType().getTypeId().isUserDefinedTypeId()))
				{
					throw StandardException.newException(SQLState.LANG_LONG_DATA_TYPE_NOT_ALLOWED, cdn.getColumnName());
				}
				checkForDuplicateColumns(ddlStmt, columnHT, cdn.getColumnName());
				cdn.checkUserType(td);
				cdn.bindAndValidateDefault(dd, td);

				cdn.validateAutoincrement(dd, td, tableType);

				if (tableElement instanceof ModifyColumnNode)
				{
					ModifyColumnNode mcdn = (ModifyColumnNode)cdn;
					mcdn.checkExistingConstraints(td);
					mcdn.useExistingCollation(td);
				} else if (cdn.isAutoincrementColumn())
				{
					numAutoCols ++;
				}
			}
			else if (tableElement.getElementType() == TableElementNode.AT_DROP_COLUMN)
			{
				String colName = tableElement.getName();
				if (td.getColumnDescriptor(colName) == null)
				{
					throw StandardException.newException(
												SQLState.LANG_COLUMN_NOT_FOUND_IN_TABLE,
												colName,
												td.getQualifiedName());
				}
				// Only a single DROP COLUMN element is processed per statement
				break;
			}

			/* The rest of this method deals with validating constraints */
			if (! (tableElement.hasConstraint()))
			{
				continue;
			}

			ConstraintDefinitionNode cdn = (ConstraintDefinitionNode) tableElement;

			try {
				cdn.bind(ddlStmt, dd);
			} catch (StandardException se) {
				//#50116: ignore error during DDL replay as reference table
				//on which FK is defined may have been dropped
				if (se.getMessageId().equals(SQLState.LANG_INVALID_FK_NO_REF_TAB)) {
					LanguageConnectionContext currentLcc = null;
					try {
						currentLcc = ConnectionUtil.getCurrentLCC();
					} catch (SQLException e) {
						throw StandardException.newException(e.getSQLState(), e);
					}
					if (currentLcc.getDroppedFKConstraints() != null
							&& currentLcc.getDroppedFKConstraints().contains(
									cdn.getConstraintMoniker())) {
						// Defer removal so indices used below stay valid
						elementsToBeRemoved.add(index);
						continue;
					} else {
						throw se;
					}
				} else {
					throw se;
				}
			}

			//if constraint is primary key or unique key, add it to the vector
			if (cdn.getConstraintType() == DataDictionary.PRIMARYKEY_CONSTRAINT ||
			cdn.getConstraintType() == DataDictionary.UNIQUE_CONSTRAINT)
			{
				/* In case of create table, the vector can have only ConstraintDefinitionNode
				* elements. In case of alter table, it can have both ConstraintDefinitionNode
				* (for new constraints) and ConstraintDescriptor(for pre-existing constraints).
				*/
				Object destConstraint;
				String destName = null;
				String[] destColumnNames = null;

				for (int i=0; i<constraintsVector.size();i++)
				{
					destConstraint = constraintsVector.elementAt(i);
					if (destConstraint instanceof ConstraintDefinitionNode)
					{
						ConstraintDefinitionNode destCDN = (ConstraintDefinitionNode)destConstraint;
						destName = destCDN.getConstraintMoniker();
						destColumnNames = destCDN.getColumnList().getColumnNames();
					}
					else if (destConstraint instanceof ConstraintDescriptor)
					{
						//will come here only for pre-existing constraints in case of alter table
						ConstraintDescriptor destCD = (ConstraintDescriptor)destConstraint;
						destName = destCD.getConstraintName();
						destColumnNames = destCD.getColumnDescriptors().getColumnNames();
					}
					//check if there are multiple constraints with same set of columns
					if (columnsMatch(cdn.getColumnList().getColumnNames(), destColumnNames))
						throw StandardException.newException(SQLState.LANG_MULTIPLE_CONSTRAINTS_WITH_SAME_COLUMNS,
						cdn.getConstraintMoniker(), destName);
				}
				constraintsVector.addElement(cdn);
			}

			/* Make sure that there are no duplicate constraint names in the list */
			if (cdn instanceof ConstraintDefinitionNode)
				checkForDuplicateConstraintNames(ddlStmt, constraintHT, cdn.getConstraintMoniker());

			/* Make sure that the constraint we are trying to drop exists */
			if (cdn.getConstraintType() == DataDictionary.DROP_CONSTRAINT)
			{
				/*
				** If no schema descriptor, then must be an invalid
				** schema name.
				*/

				String dropConstraintName = cdn.getConstraintMoniker();

				if (dropConstraintName != null) {

					String dropSchemaName = cdn.getDropSchemaName();

					SchemaDescriptor sd = dropSchemaName == null ? td.getSchemaDescriptor() :
											getSchemaDescriptor(dropSchemaName);

					ConstraintDescriptor cd =
								dd.getConstraintDescriptorByName(
										td, sd, dropConstraintName,
										false);
					if (cd == null)
					{
						throw StandardException.newException(SQLState.LANG_DROP_NON_EXISTENT_CONSTRAINT,
								(sd.getSchemaName() + "."+ dropConstraintName),
								td.getQualifiedName());
					}
					/* Statement is dependent on the ConstraintDescriptor */
					getCompilerContext().createDependency(cd);
				}
			}

			if (cdn.hasPrimaryKeyConstraint())
			{
				// for PRIMARY KEY, check that columns are unique
				verifyUniqueColumnList(ddlStmt, cdn);

				if (td == null)
				{
					// in CREATE TABLE so set PRIMARY KEY columns to NOT NULL
					setColumnListToNotNull(cdn);
				}
				else
				{
					// in ALTER TABLE so raise error if any columns are nullable
					checkForNullColumns(cdn, td);
				}
// GemStone changes BEGIN
				checkForLOBOrUDTColumns(cdn, td);
// GemStone changes END
			}
			else if (cdn.hasUniqueKeyConstraint())
			{
				// for UNIQUE, check that columns are unique
				verifyUniqueColumnList(ddlStmt, cdn);

				// unique constraints on nullable columns added in 10.4,
				// disallow until database hard upgraded at least to 10.4.
				if (!dd.checkVersion(
						DataDictionary.DD_VERSION_DERBY_10_4, null))
				{
					checkForNullColumns(cdn, td);
				}
// GemStone changes BEGIN
				checkForLOBOrUDTColumns(cdn, td);
// GemStone changes END
			}
			else if (cdn.hasForeignKeyConstraint())
			{
				// for FOREIGN KEY, check that columns are unique
				verifyUniqueColumnList(ddlStmt, cdn);
			}
		}

		/* Can have only one autoincrement column in DB2 mode */
		if (numAutoCols > 1)
			throw StandardException.newException(SQLState.LANG_MULTIPLE_AUTOINCREMENT_COLUMNS);

		// Remove deferred FK elements; adjust each index for earlier removals
		int numElementsRemoved = 0;
		for (Integer i : elementsToBeRemoved) {
			removeElementAt(i - numElementsRemoved);
			numElementsRemoved++;
		}
	}

	/**
	 * Count the number of constraints of the specified type.
	 *
	 * @param constraintType	The constraint type to search for.
	 *
	 * @return int	The number of constraints of the specified type.
	 */
	public int countConstraints(int constraintType)
	{
		int	numConstraints = 0;
		int size = size();

		for (int index = 0; index < size; index++)
		{
			ConstraintDefinitionNode cdn;
			TableElementNode element = (TableElementNode) elementAt(index);

			if (! (element instanceof ConstraintDefinitionNode))
			{
				continue;
			}

			cdn = (ConstraintDefinitionNode) element;

			if (constraintType == cdn.getConstraintType())
			{
				numConstraints++;
			}
		}

		return numConstraints;
	}

	/**
	 * Count the number of columns.
	 *
	 * @return int	The number of columns.
	 */
	public int countNumberOfColumns()
	{
		return numColumns;
	}

	/**
	 * Fill in the ColumnInfo[] for this table element list.
	 *
	 * @param colInfos	The ColumnInfo[] to be filled in.
	 *
	 * @return int		The number of constraints in the create table.
	 */
	public int genColumnInfos(ColumnInfo[] colInfos)
	{
		int	numConstraints = 0;
		int size = size();

		for (int index = 0; index < size; index++)
		{
			if (((TableElementNode) elementAt(index)).getElementType() == TableElementNode.AT_DROP_COLUMN)
			{
				String columnName = ((TableElementNode) elementAt(index)).getName();

				colInfos[index] = new ColumnInfo(
								columnName,
								td.getColumnDescriptor( columnName ).getType(),
								null, null, null, null,
// GemStone changes BEGIN
								ColumnInfo.DROP, 0, false,false, 0, 0);
								/* (original code)
								ColumnInfo.DROP, 0, 0, 0);
								*/
// GemStone changes END
				// Only a single DROP COLUMN element is processed per statement
				break;
			}

			if (! (elementAt(index) instanceof ColumnDefinitionNode))
			{
				if (SanityManager.DEBUG)
				{
					SanityManager.ASSERT( elementAt(index) instanceof ConstraintDefinitionNode,
						"elementAt(index) expected to be instanceof " +
						"ConstraintDefinitionNode");
				}

				/* Remember how many constraints that we've seen */
				numConstraints++;
				continue;
			}

			ColumnDefinitionNode coldef = (ColumnDefinitionNode) elementAt(index);

			// Columns are placed at (index - numConstraints) so that colInfos
			// is densely packed with column entries only
			colInfos[index - numConstraints] = new ColumnInfo(coldef.getColumnName(),
											   coldef.getType(),
											   coldef.getDefaultValue(),
											   coldef.getDefaultInfo(),
											   (UUID) null,
											   coldef.getOldDefaultUUID(),
											   coldef.getAction(),
											   (coldef.isAutoincrementColumn() ?
												coldef.getAutoincrementStart() : 0),
// GemStone changes BEGIN
											   (coldef.isAutoincrementColumn() ?
												   coldef.hasAutoIncrementInc : false),
											   (coldef.isGeneratedByDefault() ?
												   coldef.isGeneratedByDefault : false),
// GemStone changes END
											   (coldef.isAutoincrementColumn() ?
												coldef.getAutoincrementIncrement() : 0),
											   (coldef.isAutoincrementColumn() ?
												coldef.getAutoinc_create_or_modify_Start_Increment() : -1));

			/* Remember how many constraints that we've seen */
			if (coldef.hasConstraint())
			{
				numConstraints++;
			}
		}

		return numConstraints;
	}

	/**
	 * Append goobered up ResultColumns to the table's RCL.
	 * This is useful for binding check constraints for CREATE and ALTER TABLE.
	 *
	 * @param table		The table in question.
	 *
	 * @exception StandardException		Thrown on error
	 */
	public void appendNewColumnsToRCL(FromBaseTable table)
		throws StandardException
	{
		int				 size = size();
		ResultColumnList rcl = table.getResultColumns();
		TableName		 exposedName = table.getTableName();

		for (int index = 0; index < size; index++)
		{
			if (elementAt(index) instanceof ColumnDefinitionNode)
			{
				ColumnDefinitionNode cdn = (ColumnDefinitionNode) elementAt(index);
				ResultColumn	resultColumn;
				ValueNode		valueNode;

				/* Build a ResultColumn/BaseColumnNode pair for the column */
				valueNode = (ValueNode) getNodeFactory().getNode(
											C_NodeTypes.BASE_COLUMN_NODE,
											cdn.getColumnName(),
									  		exposedName,
											cdn.getType(),
											getContextManager());

				resultColumn = (ResultColumn) getNodeFactory().getNode(
												C_NodeTypes.RESULT_COLUMN,
												cdn.getType(),
												valueNode,
												getContextManager());
				resultColumn.setName(cdn.getColumnName());
				rcl.addElement(resultColumn);
			}
		}
	}

	/**
	 * Bind and validate all of the check constraints in this list against
	 * the specified FromList.
	 *
	 * @param fromList		The FromList in question.
* * @exception StandardException Thrown on error */ void bindAndValidateCheckConstraints(FromList fromList) throws StandardException { CompilerContext cc; FromBaseTable table = (FromBaseTable) fromList.elementAt(0); int size = size(); cc = getCompilerContext(); Vector aggregateVector = new Vector(); for (int index = 0; index < size; index++) { ConstraintDefinitionNode cdn; TableElementNode element = (TableElementNode) elementAt(index); ValueNode checkTree; if (! (element instanceof ConstraintDefinitionNode)) { continue; } cdn = (ConstraintDefinitionNode) element; if (cdn.getConstraintType() != DataDictionary.CHECK_CONSTRAINT) { continue; } checkTree = cdn.getCheckCondition(); // bind the check condition // verify that it evaluates to a boolean final int previousReliability = cc.getReliability(); try { /* Each check constraint can have its own set of dependencies. * These dependencies need to be shared with the prepared * statement as well. We create a new auxiliary provider list * for the check constraint, "push" it on the compiler context * by swapping it with the current auxiliary provider list * and the "pop" it when we're done by restoring the old * auxiliary provider list. 
*/ ProviderList apl = new ProviderList(); ProviderList prevAPL = cc.getCurrentAuxiliaryProviderList(); cc.setCurrentAuxiliaryProviderList(apl); // Tell the compiler context to only allow deterministic nodes cc.setReliability( CompilerContext.CHECK_CONSTRAINT ); checkTree = checkTree.bindExpression(fromList, (SubqueryList) null, aggregateVector); // no aggregates, please if (aggregateVector.size() != 0) { throw StandardException.newException(SQLState.LANG_INVALID_CHECK_CONSTRAINT, cdn.getConstraintText()); } checkTree = checkTree.checkIsBoolean(); cdn.setCheckCondition(checkTree); /* Save the APL off in the constraint node */ if (apl.size() > 0) { cdn.setAuxiliaryProviderList(apl); } // Restore the previous AuxiliaryProviderList cc.setCurrentAuxiliaryProviderList(prevAPL); } finally { cc.setReliability(previousReliability); } /* We have a valid check constraint, now build an array of * 1-based columnIds that the constraint references. */ ResultColumnList rcl = table.getResultColumns(); int numReferenced = rcl.countReferencedColumns(); int[] checkColumnReferences = new int[numReferenced]; rcl.recordColumnReferences(checkColumnReferences, 1); cdn.setCheckColumnReferences(checkColumnReferences); /* Now we build a list with only the referenced columns and * copy it to the cdn. Thus we can build the array of * column names for the referenced columns during generate(). */ ResultColumnList refRCL = (ResultColumnList) getNodeFactory().getNode( C_NodeTypes.RESULT_COLUMN_LIST, getContextManager()); rcl.copyReferencedColumnsToNewList(refRCL); /* A column check constraint can only refer to that column. 
If this is a * column constraint, we should have an RCL with that column */ if (cdn.getColumnList() != null) { String colName = ((ResultColumn)(cdn.getColumnList().elementAt(0))).getName(); if (numReferenced > 1 || !colName.equals(((ResultColumn)(refRCL.elementAt(0))).getName())) throw StandardException.newException(SQLState.LANG_DB2_INVALID_CHECK_CONSTRAINT, colName); } cdn.setColumnList(refRCL); /* Clear the column references in the RCL so each check constraint * starts with a clean list. */ rcl.clearColumnReferences(); } } /** * Fill in the ConstraintConstantAction[] for this create/alter table. * * @param forCreateTable ConstraintConstantAction is for a create table. * @param conActions The ConstraintConstantAction[] to be filled in. * @param tableName The name of the Table being created. * @param tableSd The schema for that table. * @param dd The DataDictionary * * @exception StandardException Thrown on failure */ void genConstraintActions(boolean forCreateTable, ConstraintConstantAction[] conActions, String tableName, SchemaDescriptor tableSd, DataDictionary dd) throws StandardException { int size = size(); int conActionIndex = 0; for (int index = 0; index < size; index++) { String[] columnNames = null; TableElementNode ten = (TableElementNode) elementAt(index); IndexConstantAction indexAction = null; if (! ten.hasConstraint()) { continue; } if (ten instanceof ColumnDefinitionNode) { continue; } ConstraintDefinitionNode constraintDN = (ConstraintDefinitionNode) ten; if (constraintDN.getColumnList() != null) { columnNames = new String[constraintDN.getColumnList().size()]; constraintDN.getColumnList().exportNames(columnNames); } int constraintType = constraintDN.getConstraintType(); String constraintText = constraintDN.getConstraintText(); /* ** If the constraint is not named (e.g. ** create table x (x int primary key)), then ** the constraintSd is the same as the table. 
*/ String constraintName = constraintDN.getConstraintMoniker(); /* At execution time, we will generate a unique name for the backing * index (for CREATE CONSTRAINT) and we will look up the conglomerate * name (for DROP CONSTRAINT). */ if (constraintDN.requiresBackingIndex()) { // implement unique constraints using a unique backing index // unless it is soft upgrade in version before 10.4, or if // constraint contains no nullable columns. In 10.4 use // "unique with duplicate null" backing index for constraints // that contain at least one nullable column. if (constraintDN.constraintType == DataDictionary.UNIQUE_CONSTRAINT && (dd.checkVersion( DataDictionary.DD_VERSION_DERBY_10_4, null))) { boolean contains_nullable_columns = areColumnsNullable(constraintDN, td); // if all the columns are non nullable, continue to use // a unique backing index. boolean unique = !contains_nullable_columns; // Only use a "unique with duplicate nulls" backing index // for constraints with nullable columns. boolean uniqueWithDuplicateNulls = contains_nullable_columns; indexAction = genIndexAction( forCreateTable, unique, uniqueWithDuplicateNulls, null, constraintDN, columnNames, true, tableSd, tableName, constraintType, dd); } else { indexAction = genIndexAction( forCreateTable, constraintDN.requiresUniqueIndex(), false, null, constraintDN, columnNames, true, tableSd, tableName, constraintType, dd); } } if (constraintType == DataDictionary.DROP_CONSTRAINT) { if (SanityManager.DEBUG) { // Can't drop constraints on a create table. SanityManager.ASSERT(!forCreateTable); } conActions[conActionIndex] = getGenericConstantActionFactory(). 
getDropConstraintConstantAction( constraintName, constraintDN.getDropSchemaName(), /// FiX tableName, td.getUUID(), tableSd.getSchemaName(), indexAction, constraintDN.getDropBehavior(), constraintDN.getVerifyType()); } else { ProviderList apl = constraintDN.getAuxiliaryProviderList(); ConstraintInfo refInfo = null; ProviderInfo[] providerInfos = null; if (constraintDN instanceof FKConstraintDefinitionNode) { refInfo = ((FKConstraintDefinitionNode)constraintDN).getReferencedConstraintInfo(); } /* Create the ProviderInfos, if the constraint is dependent on any Providers */ if (apl != null && apl.size() > 0) { /* Get all the dependencies for the current statement and transfer * them to this view. */ DependencyManager dm = dd.getDependencyManager(); providerInfos = dm.getPersistentProviderInfos(apl); } else { providerInfos = new ProviderInfo[0]; // System.out.println("TABLE ELEMENT LIST EMPTY"); } conActions[conActionIndex++] = getGenericConstantActionFactory(). getCreateConstraintConstantAction( constraintName, constraintType, forCreateTable, tableName, ((td != null) ? td.getUUID() : (UUID) null), tableSd.getSchemaName(), columnNames, indexAction, constraintText, true, // enabled refInfo, providerInfos); } } } //check if one array is same as another private boolean columnsMatch(String[] columnNames1, String[] columnNames2) { int srcCount, srcSize, destCount,destSize; boolean match = true; if (columnNames1.length != columnNames2.length) return false; srcSize = columnNames1.length; destSize = columnNames2.length; for (srcCount = 0; srcCount < srcSize; srcCount++) { match = false; for (destCount = 0; destCount < destSize; destCount++) { if (columnNames1[srcCount].equals(columnNames2[destCount])) { match = true; break; } } if (match == false) return false; } return true; } /** * utility to generated the call to create the index. 
* <p> * * * @param forCreateTable Executed as part of a CREATE TABLE * @param isUnique True means it will be a unique index * @param isUniqueWithDuplicateNulls True means index check and disallow * any duplicate key if key has no * column with a null value. If any * column in the key has a null value, * no checking is done and insert will * always succeed. * @param indexName The type of index (BTREE, for * example) * @param cdn * @param columnNames Names of the columns in the index, * in order. * @param isConstraint TRUE if index is backing up a * constraint, else FALSE. * @param sd * @param tableName Name of table the index will be on * @param constraintType * @param dd **/ private IndexConstantAction genIndexAction( boolean forCreateTable, boolean isUnique, boolean isUniqueWithDuplicateNulls, String indexName, ConstraintDefinitionNode cdn, String[] columnNames, boolean isConstraint, SchemaDescriptor sd, String tableName, int constraintType, DataDictionary dd) throws StandardException { if (indexName == null) { //Gemstone changes Begin // indexName = cdn.getBackingIndexName(dd); StringBuilder sb = new StringBuilder(); sb.append(constraintType).append(GfxdConstants.INDEX_NAME_DELIMITER).append(tableName); for(int cols = 0; cols < columnNames.length; cols++) sb.append(GfxdConstants.INDEX_NAME_DELIMITER + columnNames[cols]); indexName=sb.toString(); //Gemstone changes ends } if (constraintType == DataDictionary.DROP_CONSTRAINT) { if (SanityManager.DEBUG) { if (forCreateTable) SanityManager.THROWASSERT( "DROP INDEX with forCreateTable true"); } return getGenericConstantActionFactory().getDropIndexConstantAction( null, indexName, tableName, sd.getSchemaName(), td.getUUID(), td.getHeapConglomerateId() // GemStone changes BEGIN ,false // GemStone changes END ); } else { boolean[] isAscending = new boolean[columnNames.length]; for (int i = 0; i < isAscending.length; i++) isAscending[i] = true; Properties properties=null; //GemStone Changes BEGIN 
//if(cdn.constraintType==DataDictionary.PRIMARYKEY_CONSTRAINT) { properties=cdn.getProperties(); if(properties==null) { properties=new Properties(); } properties.setProperty(GfxdConstants.PROPERTY_CONSTRAINT_TYPE, Integer.toString(constraintType)); //} //GemStone Changes END return getGenericConstantActionFactory().getCreateIndexConstantAction( forCreateTable, isUnique, isUniqueWithDuplicateNulls, //Gemstone changes Begin // "BTREE", // indexType GfxdConstants.LOCAL_SORTEDMAP_INDEX_TYPE, //Gemstone changes End sd.getSchemaName(), indexName, tableName, ((td != null) ? td.getUUID() : (UUID) null), columnNames, isAscending, isConstraint, cdn.getBackingIndexUUID(), //GemStone Changes BEGIN //cdn.getProperties()); properties); //GemStone Changes END } } /** * Check to make sure that there are no duplicate column names * in the list. (The comparison here is case sensitive. * The work of converting column names that are not quoted * identifiers to upper case is handled by the parser.) * RESOLVE: This check will also be performed by alter table. * * @param ddlStmt DDLStatementNode which contains this list * @param ht Hashtable for enforcing uniqueness. * @param colName Column name to check for. * * @exception StandardException Thrown on error */ private void checkForDuplicateColumns(DDLStatementNode ddlStmt, Hashtable ht, String colName) throws StandardException { Object object = ht.put(colName, colName); if (object != null) { /* RESOLVE - different error messages for create and alter table */ if (ddlStmt instanceof CreateTableNode) { throw StandardException.newException(SQLState.LANG_DUPLICATE_COLUMN_NAME_CREATE, colName); } } } /** * Check to make sure that there are no duplicate constraint names * in the list. (The comparison here is case sensitive. * The work of converting column names that are not quoted * identifiers to upper case is handled by the parser.) * RESOLVE: This check will also be performed by alter table. 
* * @param ddlStmt DDLStatementNode which contains this list * * @exception StandardException Thrown on error */ private void checkForDuplicateConstraintNames(DDLStatementNode ddlStmt, Hashtable ht, String constraintName) throws StandardException { if (constraintName == null) return; Object object = ht.put(constraintName, constraintName); if (object != null) { /* RESOLVE - different error messages for create and alter table */ if (ddlStmt instanceof CreateTableNode) { /* RESOLVE - new error message */ throw StandardException.newException(SQLState.LANG_DUPLICATE_CONSTRAINT_NAME_CREATE, constraintName); } } } /** * Verify that a primary/unique table constraint has a valid column list. * (All columns in table and no duplicates.) * * @param ddlStmt The outer DDLStatementNode * @param cdn The ConstraintDefinitionNode * * @exception StandardException Thrown if the column list is invalid */ private void verifyUniqueColumnList(DDLStatementNode ddlStmt, ConstraintDefinitionNode cdn) throws StandardException { String invalidColName; /* Verify that every column in the list appears in the table's list of columns */ if (ddlStmt instanceof CreateTableNode) { invalidColName = cdn.getColumnList().verifyCreateConstraintColumnList(this); if (invalidColName != null) { throw StandardException.newException(SQLState.LANG_INVALID_CREATE_CONSTRAINT_COLUMN_LIST, ddlStmt.getRelativeName(), invalidColName); } } else { /* RESOLVE - alter table will need to get table descriptor */ } /* Check the uniqueness of the column names within the list */ invalidColName = cdn.getColumnList().verifyUniqueNames(false); if (invalidColName != null) { throw StandardException.newException(SQLState.LANG_DUPLICATE_CONSTRAINT_COLUMN_NAME, invalidColName); } } /** * Set all columns in that appear in a PRIMARY KEY constraint in a CREATE TABLE statement to NOT NULL. 
* * @param cdn The ConstraintDefinitionNode for a PRIMARY KEY constraint */ private void setColumnListToNotNull(ConstraintDefinitionNode cdn) { ResultColumnList rcl = cdn.getColumnList(); int rclSize = rcl.size(); for (int index = 0; index < rclSize; index++) { String colName = ((ResultColumn) rcl.elementAt(index)).getName(); findColumnDefinition(colName).setNullability(false); } } /** * Checks if any of the columns in the constraint can be null. * * @param cdn Constraint node * @param td tabe descriptor of the target table * * @return true if any of the column can be null false other wise */ private boolean areColumnsNullable ( ConstraintDefinitionNode cdn, TableDescriptor td) { ResultColumnList rcl = cdn.getColumnList(); int rclSize = rcl.size(); for (int index = 0; index < rclSize; index++) { String colName = ((ResultColumn) rcl.elementAt(index)).getName(); DataTypeDescriptor dtd; if (td == null) { dtd = getColumnDataTypeDescriptor(colName); } else { dtd = getColumnDataTypeDescriptor(colName, td); } // todo dtd may be null if the column does not exist, we should check that first if (dtd != null && dtd.isNullable()) { return true; } } return false; } private void checkForNullColumns(ConstraintDefinitionNode cdn, TableDescriptor td) throws StandardException { ResultColumnList rcl = cdn.getColumnList(); int rclSize = rcl.size(); for (int index = 0; index < rclSize; index++) { String colName = ((ResultColumn) rcl.elementAt(index)).getName(); DataTypeDescriptor dtd; if (td == null) { dtd = getColumnDataTypeDescriptor(colName); } else { dtd = getColumnDataTypeDescriptor(colName, td); } // todo dtd may be null if the column does not exist, we should check that first if (dtd != null && dtd.isNullable()) { throw StandardException.newException(SQLState.LANG_DB2_ADD_UNIQUE_OR_PRIMARY_KEY_ON_NULL_COLS, colName); } } } // GemStone changes BEGIN private void checkForLOBOrUDTColumns(ConstraintDefinitionNode cdn, TableDescriptor td) throws StandardException { ResultColumnList 
rcl = cdn.getColumnList(); int rclSize = rcl.size(); for (int index = 0; index < rclSize; index++) { String colName = ((ResultColumn)rcl.elementAt(index)).getName(); DataTypeDescriptor dtd; if (td == null) { dtd = getColumnDataTypeDescriptor(colName); } else { dtd = getColumnDataTypeDescriptor(colName, td); } // don't allow LOB/UDT types as primary/unique keys if (dtd != null) { TypeId typeId = dtd.getTypeId(); if (typeId.isLOBTypeId() || typeId.isXMLTypeId() || typeId.isUserDefinedTypeId()) { throw StandardException.newException( SQLState.LANG_ADD_PRIMARY_KEY_OR_INDEX_ON_LOB_UDT, colName, typeId.getSQLTypeName()); } } } } // GemStone changes END private DataTypeDescriptor getColumnDataTypeDescriptor(String colName) { ColumnDefinitionNode col = findColumnDefinition(colName); if (col != null) return col.getType(); return null; } private DataTypeDescriptor getColumnDataTypeDescriptor(String colName, TableDescriptor td) { // check existing columns ColumnDescriptor cd = td.getColumnDescriptor(colName); if (cd != null) { return cd.getType(); } // check for new columns return getColumnDataTypeDescriptor(colName); } /** * Find the column definition node in this list that matches * the passed in column name. * @param colName * @return Reference to column definition node or null if the column is * not in the list. 
*/ //Gemstone changes BEGIN (increasing the scope to public) public ColumnDefinitionNode findColumnDefinition(String colName) { //Gemstone changes END int size = size(); for (int index = 0; index < size; index++) { TableElementNode tableElement = (TableElementNode) elementAt(index); if (tableElement instanceof ColumnDefinitionNode) { ColumnDefinitionNode cdn = (ColumnDefinitionNode) tableElement; if (colName.equals(cdn.getName())) { return cdn; } } } return null; } // GemStone changes - BEGIN /** * Returns the first columns which is not nullable or null * @return */ public ColumnDefinitionNode getFirstNonNullableColumn() { int size = size(); for (int index = 0; index < size; index++) { TableElementNode tableElement = (TableElementNode) elementAt(index); if (tableElement instanceof ColumnDefinitionNode) { ColumnDefinitionNode cdn = (ColumnDefinitionNode) tableElement; if (! cdn.getType().isNullable()) { return cdn; } } } return null; } // GemStone Changes - END. /** * Determine whether or not the parameter matches a column name in this * list. * * @param colName * The column name to search for. * * @return boolean Whether or not a match is found. */ public boolean containsColumnName(String colName) { return findColumnDefinition(colName) != null; } // GemStone changes BEGIN public DistributionDescriptor validateAndResolveDistributionPolicy() throws StandardException { int size = size(); for (int index = 0; index < size; index++) { TableElementNode tableElement = (TableElementNode)elementAt(index); if (tableElement instanceof DistributionDefinitionNode) { remove(index); return ((DistributionDefinitionNode)tableElement).bind(this, this.getDataDictionary()); } } SanityManager.THROWASSERT("No partition definition node generated!"); // never reached return null; } // GemStone changes END }
/* * Copyright 2005 The Closure Compiler Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.javascript.jscomp; import static com.google.common.truth.Truth.assertThat; import static com.google.javascript.jscomp.VarCheck.VAR_MULTIPLY_DECLARED_ERROR; import com.google.javascript.jscomp.CompilerOptions.LanguageMode; import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback; import com.google.javascript.rhino.Node; public final class VarCheckTest extends Es6CompilerTestCase { private static final String EXTERNS = "var window; function alert() {}"; private CheckLevel strictModuleDepErrorLevel; private boolean sanityCheck = false; private CheckLevel externValidationErrorLevel; private boolean declarationCheck; public VarCheckTest() { super(EXTERNS); } @Override protected void setUp() throws Exception { super.setUp(); // Setup value set by individual tests to the appropriate defaults. 
super.allowExternsChanges(true); super.enableAstValidation(true); strictModuleDepErrorLevel = CheckLevel.OFF; externValidationErrorLevel = null; sanityCheck = false; declarationCheck = false; compareJsDoc = false; } @Override protected CompilerOptions getOptions() { CompilerOptions options = super.getOptions(); options.setWarningLevel(DiagnosticGroups.STRICT_MODULE_DEP_CHECK, strictModuleDepErrorLevel); if (externValidationErrorLevel != null) { options.setWarningLevel(DiagnosticGroups.EXTERNS_VALIDATION, externValidationErrorLevel); } return options; } @Override protected CompilerPass getProcessor(final Compiler compiler) { return new CompilerPass() { @Override public void process(Node externs, Node root) { new VarCheck(compiler, sanityCheck).process(externs, root); if (!sanityCheck && !compiler.hasErrors()) { // If the original test turned off sanity check, make sure our synthesized // code passes it. new VarCheck(compiler, true).process(externs, root); } if (declarationCheck) { new VariableTestCheck(compiler).process(externs, root); } } }; } @Override protected int getNumRepetitions() { // Because we synthesize externs, the second pass won't emit a warning. 
return 1; } public void testBreak() { testSame("a: while(1) break a;"); } public void testContinue() { testSame("a: while(1) continue a;"); } public void testReferencedVarNotDefined() { testError("x = 0;", VarCheck.UNDEFINED_VAR_ERROR); } public void testReferencedLetNotDefined() { testErrorEs6("{ let x = 1; } var y = x;", VarCheck.UNDEFINED_VAR_ERROR); } public void testReferencedLetDefined1() { testSameEs6("let x; x = 1;"); } public void testReferencedLetDefined2() { testSameEs6("let x; function y() {x = 1;}"); } public void testReferencedConstDefined2() { testSameEs6("const x = 1; var y = x + 1;"); } public void testReferencedVarDefined1() { testSame("var x, y; x=1;"); } public void testReferencedVarDefined2() { testSame("var x; function y() {x=1;}"); } public void testReferencedVarsExternallyDefined() { testSame("var x = window; alert(x);"); } public void testMultiplyDeclaredVars1() { testError("var x = 1; var x = 2;", VarCheck.VAR_MULTIPLY_DECLARED_ERROR); } public void testMultiplyDeclaredVars2() { testSame("var y; try { y=1 } catch (x) {}" + "try { y=1 } catch (x) {}"); } public void testMultiplyDeclaredVars3() { testSame("try { var x = 1; x *=2; } catch (x) {}"); } public void testMultiplyDeclaredVars4() { testSame("x;", "var x = 1; var x = 2;", VarCheck.VAR_MULTIPLY_DECLARED_ERROR, true); } public void testMultiplyDeclaredLets() { testErrorEs6("let x = 1; let x = 2;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); testErrorEs6("let x = 1; var x = 2;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); testErrorEs6("var x = 1; let x = 2;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); } public void testMultiplyDeclaredConsts() { testErrorEs6("const x = 1; const x = 2;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); testErrorEs6("const x = 1; var x = 2;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); testErrorEs6("var x = 1; const x = 2;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); } public void testMultiplyDeclareLetsInDifferentScope() { testSameEs6("let x = 1; if 
(123) {let x = 2;}"); testSameEs6("try {let x = 1;} catch(x){}"); } public void testReferencedVarDefinedClass() { testErrorEs6("var x; class x{ }", VarCheck.VAR_MULTIPLY_DECLARED_ERROR); testErrorEs6("let x; class x{ }", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); testErrorEs6("const x = 1; class x{ }", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); testErrorEs6("class x{ } let x;", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); } public void testNamedClass() { testSameEs6("class x {}"); testSameEs6("var x = class x {};"); testSameEs6("var y = class x {};"); } public void testVarReferenceInExterns() { testSame("asdf;", "var asdf;", VarCheck.NAME_REFERENCE_IN_EXTERNS_ERROR); } public void testCallInExterns() { testSame("yz();", "function yz() {}", VarCheck.NAME_REFERENCE_IN_EXTERNS_ERROR); } public void testVarDeclarationInExterns() { testSame("var asdf;", "asdf;", null); } public void testVarAssignmentInExterns() { testSame("/** @type{{foo:string}} */ var foo; var asdf = foo;", "asdf.foo;", null); } public void testDuplicateNamespaceInExterns() { this.compareJsDoc = true; testExternChanges( "/** @const */ var ns = {}; /** @const */ var ns = {};", "", "/** @const */ var ns = {};"); this.compareJsDoc = false; } public void testLetDeclarationInExterns() { testSameEs6("let asdf;", "asdf;", null); } public void testConstDeclarationInExterns() { testSameEs6("const asdf = 1;", "asdf;", null); } public void testNewInExterns() { // Class is not hoisted. 
testSameEs6("x = new Klass();", "class Klass{}", VarCheck.UNDEFINED_VAR_ERROR, true); } public void testPropReferenceInExterns1() { testSame("asdf.foo;", "var asdf;", VarCheck.UNDEFINED_EXTERN_VAR_ERROR); } public void testPropReferenceInExterns2() { testSame("asdf.foo;", "", VarCheck.UNDEFINED_VAR_ERROR, true); } public void testPropReferenceInExterns3() { testSame("asdf.foo;", "var asdf;", VarCheck.UNDEFINED_EXTERN_VAR_ERROR); externValidationErrorLevel = CheckLevel.ERROR; testSame( "asdf.foo;", "var asdf;", VarCheck.UNDEFINED_EXTERN_VAR_ERROR, true); externValidationErrorLevel = CheckLevel.OFF; test("asdf.foo;", "var asdf;", "var asdf;", null, null); } public void testPropReferenceInExterns4() { testSameEs6("asdf.foo;", "let asdf;", VarCheck.UNDEFINED_EXTERN_VAR_ERROR); } public void testPropReferenceInExterns5() { testSameEs6("asdf.foo;", "class asdf {}", VarCheck.UNDEFINED_EXTERN_VAR_ERROR); } public void testVarInWithBlock() { testError("var a = {b:5}; with (a){b;}", VarCheck.UNDEFINED_VAR_ERROR); } public void testFunctionDeclaredInBlock() { testError("if (true) {function foo() {}} foo();", VarCheck.UNDEFINED_VAR_ERROR); testError("foo(); if (true) {function foo() {}}", VarCheck.UNDEFINED_VAR_ERROR); testSameEs6("if (true) {var foo = ()=>{}} foo();"); testErrorEs6("if (true) {let foo = ()=>{}} foo();", VarCheck.UNDEFINED_VAR_ERROR); testErrorEs6("if (true) {const foo = ()=>{}} foo();", VarCheck.UNDEFINED_VAR_ERROR); testSameEs6("foo(); if (true) {var foo = ()=>{}}"); testErrorEs6("foo(); if (true) {let foo = ()=>{}}", VarCheck.UNDEFINED_VAR_ERROR); testErrorEs6("foo(); if (true) {const foo = ()=>{}}", VarCheck.UNDEFINED_VAR_ERROR); } public void testValidFunctionExpr() { testSame("(function() {});"); } public void testRecursiveFunction() { testSame("(function a() { return a(); })();"); } public void testRecursiveFunction2() { testSame("var a = 3; (function a() { return a(); })();"); } public void testParam() { testSame("function fn(a){ var b = a; }"); 
testSame("function fn(a){ var a = 2; }"); testError("function fn(){ var b = a; }", VarCheck.UNDEFINED_VAR_ERROR); testSameEs6("function fn(a = 2){ var b = a; }"); testSameEs6("function fn(a = 2){ var a = 3; }"); testSameEs6("function fn({a, b}){ var c = a; }"); testSameEs6("function fn({a, b}){ var a = 3; }"); } public void testLegalVarReferenceBetweenModules() { testDependentModules("var x = 10;", "var y = x++;", null); } public void testLegalLetReferenceBetweenModules() { setAcceptedLanguage(LanguageMode.ECMASCRIPT6); testDependentModules("let x = 10;", "let y = x++;", null); } public void testLegalConstReferenceBetweenModules() { setAcceptedLanguage(LanguageMode.ECMASCRIPT6); testDependentModules("const x = 10;", "const y = x + 1;", null); } public void testMissingModuleDependencyDefault() { testIndependentModules("var x = 10;", "var y = x++;", null, VarCheck.MISSING_MODULE_DEP_ERROR); } public void testMissingModuleDependencyLetAndConst() { setAcceptedLanguage(LanguageMode.ECMASCRIPT6); testIndependentModules("let x = 10;", "let y = x++;", null, VarCheck.MISSING_MODULE_DEP_ERROR); testIndependentModules("const x = 10;", "const y = x + 1;", null, VarCheck.MISSING_MODULE_DEP_ERROR); } public void testViolatedModuleDependencyDefault() { testDependentModules("var y = x++;", "var x = 10;", VarCheck.VIOLATED_MODULE_DEP_ERROR); } public void testViolatedModuleDependencyLetAndConst() { setAcceptedLanguage(LanguageMode.ECMASCRIPT6); testDependentModules("let y = x++;", "let x = 10;", VarCheck.VIOLATED_MODULE_DEP_ERROR); testDependentModules("const y = x + 1;", "const x = 10;", VarCheck.VIOLATED_MODULE_DEP_ERROR); } public void testMissingModuleDependencySkipNonStrict() { sanityCheck = true; testIndependentModules("var x = 10;", "var y = x++;", null, null); } public void testViolatedModuleDependencySkipNonStrict() { sanityCheck = true; testDependentModules("var y = x++;", "var x = 10;", null); } public void testMissingModuleDependencySkipNonStrictNotPromoted() { 
sanityCheck = true; strictModuleDepErrorLevel = CheckLevel.ERROR; testIndependentModules("var x = 10;", "var y = x++;", null, null); } public void testViolatedModuleDependencyNonStrictNotPromoted() { sanityCheck = true; strictModuleDepErrorLevel = CheckLevel.ERROR; testDependentModules("var y = x++;", "var x = 10;", null); } public void testDependentStrictModuleDependencyCheck() { strictModuleDepErrorLevel = CheckLevel.ERROR; testDependentModules("var f = function() {return new B();};", "var B = function() {}", VarCheck.STRICT_MODULE_DEP_ERROR); } public void testIndependentStrictModuleDependencyCheck() { strictModuleDepErrorLevel = CheckLevel.ERROR; testIndependentModules("var f = function() {return new B();};", "var B = function() {}", VarCheck.STRICT_MODULE_DEP_ERROR, null); } public void testStarStrictModuleDependencyCheck() { strictModuleDepErrorLevel = CheckLevel.WARNING; testSame(createModuleStar("function a() {}", "function b() { a(); c(); }", "function c() { a(); }"), VarCheck.STRICT_MODULE_DEP_ERROR); } public void testForwardVarReferenceInLocalScope1() { testDependentModules("var x = 10; function a() {y++;}", "var y = 11; a();", null); } public void testForwardVarReferenceInLocalScope2() { // It would be nice if this pass could use a call graph to flag this case // as an error, but it currently doesn't. 
testDependentModules("var x = 10; function a() {y++;} a();", "var y = 11;", null); } private void testDependentModules(String code1, String code2, DiagnosticType error) { testDependentModules(code1, code2, error, null); } private void testDependentModules(String code1, String code2, DiagnosticType error, DiagnosticType warning) { testTwoModules(code1, code2, true, error, warning); } private void testIndependentModules(String code1, String code2, DiagnosticType error, DiagnosticType warning) { testTwoModules(code1, code2, false, error, warning); } private void testTwoModules(String code1, String code2, boolean m2DependsOnm1, DiagnosticType error, DiagnosticType warning) { JSModule m1 = new JSModule("m1"); m1.add(SourceFile.fromCode("input1", code1)); JSModule m2 = new JSModule("m2"); m2.add(SourceFile.fromCode("input2", code2)); if (m2DependsOnm1) { m2.addDependency(m1); } if (error == null) { test(new JSModule[] { m1, m2 }, new String[] { code1, code2 }, null, warning); } else { test(new JSModule[] { m1, m2 }, null, error, warning); } } ////////////////////////////////////////////////////////////////////////////// // Test synthesis of externs public void testSimple() { checkSynthesizedExtern("x", "var x;"); checkSynthesizedExtern("var x", ""); } public void testSimpleSanityCheck() { sanityCheck = true; try { checkSynthesizedExtern("x", ""); fail("Expected RuntimeException"); } catch (RuntimeException e) { assertThat(e.getMessage()).contains("Unexpected variable x"); } } public void testParameter() { checkSynthesizedExtern("function f(x){}", ""); } public void testLocalVar() { checkSynthesizedExtern("function f(){x}", "var x"); } public void testTwoLocalVars() { checkSynthesizedExtern("function f(){x}function g() {x}", "var x"); } public void testInnerFunctionLocalVar() { checkSynthesizedExtern("function f(){function g() {x}}", "var x"); } public void testNoCreateVarsForLabels() { checkSynthesizedExtern("x:var y", ""); } public void 
testVariableInNormalCodeUsedInExterns1() { checkSynthesizedExtern( "x.foo;", "var x;", "var x; x.foo;"); } public void testVariableInNormalCodeUsedInExterns2() { checkSynthesizedExtern( "x;", "var x;", "var x; x;"); } public void testVariableInNormalCodeUsedInExterns3() { checkSynthesizedExtern( "x.foo;", "function x() {}", "var x; x.foo; "); } public void testVariableInNormalCodeUsedInExterns4() { checkSynthesizedExtern( "x;", "function x() {}", "var x; x; "); } public void testRedeclaration1() { String js = "var a; var a;"; testError(js, VarCheck.VAR_MULTIPLY_DECLARED_ERROR); } public void testRedeclaration2() { String js = "var a; /** @suppress {duplicate} */ var a;"; testSame(js); } public void testRedeclaration3() { String js = " /** @suppress {duplicate} */ var a; var a; "; testSame(js); } public void testSuppressionWithInlineJsDoc() { testSame("/** @suppress {duplicate} */ var /** number */ a; var a;"); } public void testDuplicateVar() { testError("/** @define {boolean} */ var DEF = false; var DEF = true;", VAR_MULTIPLY_DECLARED_ERROR); } public void testDontAllowSuppressDupeOnLet() { testErrorEs6("let a; /** @suppress {duplicate} */ let a; ", VarCheck.LET_CONST_MULTIPLY_DECLARED_ERROR); } public void testFunctionScopeArguments() { // A var declaration doesn't mask arguments testSame("function f() {var arguments}"); testError("var f = function arguments() {}", VarCheck.VAR_ARGUMENTS_SHADOWED_ERROR); testError("var f = function (arguments) {}", VarCheck.VAR_ARGUMENTS_SHADOWED_ERROR); testError("function f() {try {} catch(arguments) {}}", VarCheck.VAR_ARGUMENTS_SHADOWED_ERROR); sanityCheck = true; testSame("function f() {var arguments}"); } public void testNoUndeclaredVarWhenUsingClosurePass() { enableClosurePass(); // We don't want to get goog as an undeclared var here. 
testError("goog.require('namespace.Class1');\n", ProcessClosurePrimitives.MISSING_PROVIDE_ERROR); } private static final class VariableTestCheck implements CompilerPass { final AbstractCompiler compiler; VariableTestCheck(AbstractCompiler compiler) { this.compiler = compiler; } @Override public void process(Node externs, Node root) { NodeTraversal.traverseRootsEs6(compiler, new AbstractPostOrderCallback() { @Override public void visit(NodeTraversal t, Node n, Node parent) { if (n.isName() && !parent.isFunction() && !parent.isLabel()) { assertTrue("Variable " + n.getString() + " should have be declared", t.getScope().isDeclared(n.getString(), true)); } } }, externs, root); } } public void checkSynthesizedExtern( String input, String expectedExtern) { checkSynthesizedExtern("", input, expectedExtern); } public void checkSynthesizedExtern( String extern, String input, String expectedExtern) { declarationCheck = !sanityCheck; this.enableCompareAsTree(false); testExternChanges(extern, input, expectedExtern); } }
// Copyright (C) 2008 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.client.admin; import static com.google.gerrit.common.PageLinks.ADMIN_PROJECTS; import com.google.gerrit.client.Dispatcher; import com.google.gerrit.client.Gerrit; import com.google.gerrit.client.GitwebLink; import com.google.gerrit.client.WebLinkInfo; import com.google.gerrit.client.projects.ProjectInfo; import com.google.gerrit.client.projects.ProjectMap; import com.google.gerrit.client.rpc.GerritCallback; import com.google.gerrit.client.rpc.Natives; import com.google.gerrit.client.ui.HighlightingInlineHyperlink; import com.google.gerrit.client.ui.Hyperlink; import com.google.gerrit.client.ui.ProjectSearchLink; import com.google.gerrit.client.ui.ProjectsTable; import com.google.gerrit.client.ui.Screen; import com.google.gerrit.common.PageLinks; import com.google.gerrit.reviewdb.client.AccountGeneralPreferences; import com.google.gwt.event.dom.client.KeyCodes; import com.google.gwt.event.dom.client.KeyUpEvent; import com.google.gwt.event.dom.client.KeyUpHandler; import com.google.gwt.http.client.URL; import com.google.gwt.user.client.History; import com.google.gwt.user.client.ui.Anchor; import com.google.gwt.user.client.ui.FlowPanel; import com.google.gwt.user.client.ui.HorizontalPanel; import com.google.gwt.user.client.ui.Image; import com.google.gwt.user.client.ui.Label; import com.google.gwtexpui.globalkey.client.NpTextBox; import 
java.util.List;

/**
 * Admin screen listing projects, with a substring filter box and
 * prev/next paging links.
 */
public class ProjectListScreen extends Screen {
  private Hyperlink prev;
  private Hyperlink next;
  private ProjectsTable projects;
  private NpTextBox filterTxt;
  private int pageSize;       // rows per page: user preference or default
  private String match = "";  // currently applied filter substring
  private int start;          // paging offset of the first row shown
  private Query query;        // in-flight query; null when idle

  public ProjectListScreen() {
    configurePageSize();
  }

  /**
   * Parses screen parameters of the form "filter=...&skip=N" (pairs may be
   * separated by ',', ';' or '&') into the initial filter and paging offset.
   * Malformed pairs and non-numeric skip values are silently ignored.
   */
  public ProjectListScreen(String params) {
    for (String kvPair : params.split("[,;&]")) {
      String[] kv = kvPair.split("=", 2);
      if (kv.length != 2 || kv[0].isEmpty()) {
        continue;
      }
      if ("filter".equals(kv[0])) {
        match = URL.decodeQueryString(kv[1]);
      }
      if ("skip".equals(kv[0])
          && URL.decodeQueryString(kv[1]).matches("^[\\d]+")) {
        start = Integer.parseInt(URL.decodeQueryString(kv[1]));
      }
    }
    configurePageSize();
  }

  /** Uses the signed-in user's page-size preference, else the default. */
  private void configurePageSize() {
    if (Gerrit.isSignedIn()) {
      final AccountGeneralPreferences p =
          Gerrit.getUserAccount().getGeneralPreferences();
      final short m = p.getMaximumPageSize();
      pageSize = 0 < m ? m : AccountGeneralPreferences.DEFAULT_PAGESIZE;
    } else {
      pageSize = AccountGeneralPreferences.DEFAULT_PAGESIZE;
    }
  }

  @Override
  protected void onLoad() {
    super.onLoad();
    // Kick off the initial query with the filter/offset parsed from params.
    query = new Query(match).start(start).run();
  }

  private void setupNavigationLink(Hyperlink link, String filter, int skip) {
    link.setTargetHistoryToken(getTokenForScreen(filter, skip));
    link.setVisible(true);
  }

  /** Builds the history token for this screen with the given filter/offset. */
  private String getTokenForScreen(String filter, int skip) {
    String token = ADMIN_PROJECTS;
    if (filter != null && !filter.isEmpty()) {
      token += "?filter=" + URL.encodeQueryString(filter);
    }
    if (skip > 0) {
      // ',' joins skip onto an existing filter clause; '?' starts the query.
      if (token.contains("?filter=")) {
        token += ",";
      } else {
        token += "?";
      }
      token += "skip=" + skip;
    }
    return token;
  }

  @Override
  protected void onInitUI() {
    super.onInitUI();
    setPageTitle(Util.C.projectListTitle());
    initPageHeader();
    prev = new Hyperlink(Util.C.pagedListPrev(), true, "");
    prev.setVisible(false);
    next = new Hyperlink(Util.C.pagedListNext(), true, "");
    next.setVisible(false);
    projects = new ProjectsTable() {
      @Override
      protected void initColumnHeaders() {
        super.initColumnHeaders();
        // Extra column (vs. the base table): repository browser links.
        table.setText(0, ProjectsTable.C_REPO_BROWSER,
            Util.C.projectRepoBrowser());
        table.getFlexCellFormatter().
          addStyleName(0, ProjectsTable.C_REPO_BROWSER,
              Gerrit.RESOURCES.css().dataHeader());
      }

      @Override
      protected void onOpenRow(final int row) {
        History.newItem(link(getRowItem(row)));
      }

      private String link(final ProjectInfo item) {
        return Dispatcher.toProject(item.name_key());
      }

      @Override
      protected void insert(int row, ProjectInfo k) {
        super.insert(row, k);
        table.getFlexCellFormatter().addStyleName(row,
            ProjectsTable.C_REPO_BROWSER, Gerrit.RESOURCES.css().dataCell());
      }

      @Override
      protected void populate(final int row, final ProjectInfo k) {
        // State column: an icon for hidden/read-only projects only.
        Image state = new Image();
        switch (k.state()) {
          case HIDDEN:
            state.setResource(Gerrit.RESOURCES.redNot());
            state.setTitle(Util.toLongString(k.state()));
            table.setWidget(row, ProjectsTable.C_STATE, state);
            break;
          case READ_ONLY:
            state.setResource(Gerrit.RESOURCES.readOnly());
            state.setTitle(Util.toLongString(k.state()));
            table.setWidget(row, ProjectsTable.C_STATE, state);
            break;
          default:
            // Intentionally left blank, do not show an icon when active.
            break;
        }
        // Name column: search link + name hyperlink with the filter substring
        // highlighted.
        FlowPanel fp = new FlowPanel();
        fp.add(new ProjectSearchLink(k.name_key()));
        fp.add(new HighlightingInlineHyperlink(k.name(), link(k), match));
        table.setWidget(row, ProjectsTable.C_NAME, fp);
        table.setText(row, ProjectsTable.C_DESCRIPTION, k.description());
        addWebLinks(row, k);
        setRowItem(row, k);
      }

      /** Fills the repo-browser column with gitweb and per-project web links. */
      private void addWebLinks(int row, ProjectInfo k) {
        GitwebLink gitWebLink = Gerrit.getGitwebLink();
        List<WebLinkInfo> webLinks = Natives.asList(k.web_links());
        if (gitWebLink != null || (webLinks != null && !webLinks.isEmpty())) {
          FlowPanel p = new FlowPanel();
          table.setWidget(row, ProjectsTable.C_REPO_BROWSER, p);
          if (gitWebLink != null) {
            Anchor a = new Anchor();
            a.setText(gitWebLink.getLinkName());
            a.setHref(gitWebLink.toProject(k.name_key()));
            p.add(a);
          }
          if (webLinks != null) {
            for (WebLinkInfo weblink : webLinks) {
              p.add(weblink.toAnchor());
            }
          }
        }
      }
    };
    projects.setSavePointerId(PageLinks.ADMIN_PROJECTS);
    add(projects);
    final HorizontalPanel buttons = new HorizontalPanel();
    buttons.setStyleName(Gerrit.RESOURCES.css().changeTablePrevNextLinks());
    buttons.add(prev);
    buttons.add(next);
    add(buttons);
  }

  /** Builds the filter label + text box row above the table. */
  private void initPageHeader() {
    final HorizontalPanel hp = new HorizontalPanel();
    hp.setStyleName(Gerrit.RESOURCES.css().projectFilterPanel());
    final Label filterLabel = new Label(Util.C.projectFilter());
    filterLabel.setStyleName(Gerrit.RESOURCES.css().projectFilterLabel());
    hp.add(filterLabel);
    filterTxt = new NpTextBox();
    filterTxt.setValue(match);
    filterTxt.addKeyUpHandler(new KeyUpHandler() {
      @Override
      public void onKeyUp(KeyUpEvent event) {
        // ENTER requests "open" mode: jump straight to the first match
        // (see Query.showMap).
        Query q = new Query(filterTxt.getValue())
            .open(event.getNativeEvent().getKeyCode() == KeyCodes.KEY_ENTER);
        if (match.equals(q.qMatch)) {
          // Filter unchanged: keep the current paging offset.
          q.start(start);
        }
        if (q.open || !match.equals(q.qMatch)) {
          // At most one query runs at a time. If one is in flight, record
          // this one as pending; Query.run's callback will start it.
          if (query == null) {
            q.run();
          }
          query = q;
        }
      }
    });
    hp.add(filterTxt);
    add(hp);
  }

  @Override
  public void onShowView() {
    super.onShowView();
    // Put the caret at the end of any pre-filled filter text.
    if (match != null) {
      filterTxt.setCursorPos(match.length());
    }
    filterTxt.setFocus(true);
  }

  @Override
  public void registerKeys() {
    super.registerKeys();
    projects.setRegisterKeys(true);
  }

  /** One asynchronous project-list request; at most one runs at a time. */
  private class Query {
    private final String qMatch;
    private int qStart;
    private boolean open;  // when set, navigate directly to the first match

    Query(String match) {
      this.qMatch = match;
    }

    Query start(int start) {
      this.qStart = start;
      return this;
    }

    Query open(boolean open) {
      this.open = open;
      return this;
    }

    Query run() {
      // Fetch one row beyond the page so we know whether "next" exists;
      // in open mode a single row suffices.
      int limit = open ? 1 : pageSize + 1;
      ProjectMap.match(qMatch, limit, qStart,
          new GerritCallback<ProjectMap>() {
            @Override
            public void onSuccess(ProjectMap result) {
              if (!isAttached()) {
                // View has been disposed.
              } else if (query == Query.this) {
                // Still the latest query: show its result.
                query = null;
                showMap(result);
              } else {
                // A newer query was queued while this one was in flight;
                // drop this result and run the pending one.
                query.run();
              }
            }
          });
      return this;
    }

    private void showMap(ProjectMap result) {
      if (open && !result.isEmpty()) {
        Gerrit.display(PageLinks.toProject(
            result.values().get(0).name_key()));
        return;
      }
      // Commit this query's filter/offset as the screen's current state.
      setToken(getTokenForScreen(qMatch, qStart));
      ProjectListScreen.this.match = qMatch;
      ProjectListScreen.this.start = qStart;
      if (result.size() <= pageSize) {
        projects.display(result);
        next.setVisible(false);
      } else {
        // We fetched pageSize + 1 rows: show pageSize, enable "next".
        projects.displaySubset(result, 0, result.size() - 1);
        setupNavigationLink(next, qMatch, qStart + pageSize);
      }
      if (qStart > 0) {
        setupNavigationLink(prev, qMatch, qStart - pageSize);
      } else {
        prev.setVisible(false);
      }
      if (!isCurrentView()) {
        display();
      }
    }
  }
}
/* * TouchGraph LLC. Apache-Style Software License * * * Copyright (c) 2001-2002 Alexander Shapiro. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by * TouchGraph LLC (http://www.touchgraph.com/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "TouchGraph" or "TouchGraph LLC" must not be used to endorse * or promote products derived from this software without prior written * permission. For written permission, please contact * alex@touchgraph.com * * 5. Products derived from this software may not be called "TouchGraph", * nor may "TouchGraph" appear in their name, without prior written * permission of alex@touchgraph.com. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL TOUCHGRAPH OR ITS CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. * ==================================================================== * */ package com.touchgraph.graphlayout; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Cursor; import java.awt.Dimension; import java.awt.Font; import java.awt.FontMetrics; import java.awt.Frame; import java.awt.Graphics; import java.awt.Graphics2D; import java.awt.Image; import java.awt.Point; import java.awt.RenderingHints; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseMotionListener; import java.awt.event.WindowAdapter; import java.awt.event.WindowEvent; import java.util.Vector; import javax.swing.JPanel; import com.emental.mindraider.core.MindRaider; import com.emental.mindraider.ui.graph.spiders.SpidersGraph; import com.emental.mindraider.ui.outline.OutlineJPanel; import com.touchgraph.graphlayout.graphelements.GraphEltSet; import com.touchgraph.graphlayout.graphelements.ImmutableGraphEltSet; import com.touchgraph.graphlayout.graphelements.TGForEachEdge; import com.touchgraph.graphlayout.graphelements.TGForEachNode; import com.touchgraph.graphlayout.graphelements.VisibleLocality; import com.touchgraph.graphlayout.interaction.GLEditUI; import com.touchgraph.graphlayout.interaction.TGAbstractClickUI; /* Java 1.5 Deadlock hunting (by MartinDvorak <mindraider@users.sourceforge.net> Problem description: Synchronization of paint() and repaintAfterMove() - check also stacktraces below. 
Solution o repaintAfterMove() method doesn't have to be sychronized (since all the methods called from there (and need synchronization) are already synchronized themselves). An only one that is repaint() (which in fact doesn't need synchronization) and it participates in deadlock o I have also unfolded content of paint() method (in order to isolate the problem) --- Full thread dump Java HotSpot(TM) Client VM (1.5.0_04-b05 mixed mode, sharing): "DestroyJavaVM" prio=5 tid=0x053bc9d8 nid=0xc74 waiting on condition [0x00000000..0x0007fae8] "TimerQueue" daemon prio=5 tid=0x054e0310 nid=0xbfc in Object.wait() [0x0561f000..0x0561fb68] at java.lang.Object.wait(Native Method) - waiting on <0x233302b8> (a javax.swing.TimerQueue) at javax.swing.TimerQueue.run(TimerQueue.java:233) - locked <0x233302b8> (a javax.swing.TimerQueue) at java.lang.Thread.run(Thread.java:595) "Thread-2" prio=5 tid=0x02eacae8 nid=0xe90 waiting for monitor entry [0x055cf000..0x055cfc68] at java.awt.Component.reshape(Component.java:1858) - waiting to lock <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.reshape(JComponent.java:3940) at java.awt.Component.setBounds(Component.java:1847) at javax.swing.plaf.basic.BasicScrollBarUI.layoutHScrollbar(BasicScrollBarUI.java:732) at javax.swing.plaf.basic.BasicScrollBarUI.layoutContainer(BasicScrollBarUI.java:775) at javax.swing.plaf.basic.BasicScrollBarUI$ModelListener.stateChanged(BasicScrollBarUI.java:935) at javax.swing.DefaultBoundedRangeModel.fireStateChanged(DefaultBoundedRangeModel.java:348) at javax.swing.DefaultBoundedRangeModel.setRangeProperties(DefaultBoundedRangeModel.java:285) at javax.swing.DefaultBoundedRangeModel.setValue(DefaultBoundedRangeModel.java:151) at javax.swing.JScrollBar.setValue(JScrollBar.java:441) at com.touchgraph.graphlayout.interaction.HVScroll$DScrollbar.setIValue(HVScroll.java:339) at com.touchgraph.graphlayout.interaction.HVScroll$DScrollbar.setDValue(HVScroll.java:349) at 
com.touchgraph.graphlayout.interaction.HVScroll.graphMoved(HVScroll.java:240) at com.touchgraph.graphlayout.TGPanel.fireMovedEvent(TGPanel.java:681) at com.touchgraph.graphlayout.TGPanel.repaintAfterMove(TGPanel.java:1078) - locked <0x231ceb20> (a com.touchgraph.graphlayout.TGPanel) at com.touchgraph.graphlayout.TGLayout.relax(TGLayout.java:473) - locked <0x231cead0> (a com.touchgraph.graphlayout.TGLayout) at com.touchgraph.graphlayout.TGLayout.run(TGLayout.java:496) at java.lang.Thread.run(Thread.java:595) "AWT-EventQueue-0" prio=7 tid=0x02e64d88 nid=0xf40 waiting for monitor entry [0x0513e000..0x0513fd68] at com.touchgraph.graphlayout.TGPanel.paint(TGPanel.java:1139) - waiting to lock <0x231ceb20> (a com.touchgraph.graphlayout.TGPanel) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JSplitPane.paintChildren(JSplitPane.java:1021) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JSplitPane.paintChildren(JSplitPane.java:1021) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a 
java.awt.Component$AWTTreeLock) at javax.swing.JSplitPane.paintChildren(JSplitPane.java:1021) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JLayeredPane.paint(JLayeredPane.java:559) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paintWithOffscreenBuffer(JComponent.java:4970) at javax.swing.JComponent.paintDoubleBuffered(JComponent.java:4916) at javax.swing.JComponent.paint(JComponent.java:995) at java.awt.GraphicsCallback$PaintCallback.run(GraphicsCallback.java:21) at sun.awt.SunGraphicsCallback.runOneComponent(SunGraphicsCallback.java:60) at sun.awt.SunGraphicsCallback.runComponents(SunGraphicsCallback.java:97) at java.awt.Container.paint(Container.java:1709) at sun.awt.RepaintArea.paintComponent(RepaintArea.java:248) at sun.awt.RepaintArea.paint(RepaintArea.java:224) at sun.awt.windows.WComponentPeer.handleEvent(WComponentPeer.java:254) at java.awt.Component.dispatchEventImpl(Component.java:4031) at java.awt.Container.dispatchEventImpl(Container.java:2024) at java.awt.Window.dispatchEventImpl(Window.java:1774) at java.awt.Component.dispatchEvent(Component.java:3803) at java.awt.EventQueue.dispatchEvent(EventQueue.java:463) at java.awt.EventDispatchThread.pumpOneEventForHierarchy(EventDispatchThread.java:242) at java.awt.EventDispatchThread.pumpEventsForHierarchy(EventDispatchThread.java:163) at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:157) at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:149) at java.awt.EventDispatchThread.run(EventDispatchThread.java:110) 
"AWT-Windows" daemon prio=7 tid=0x02e3cfc0 nid=0x3ac runnable [0x0500f000..0x0500fae8] at sun.awt.windows.WToolkit.eventLoop(Native Method) at sun.awt.windows.WToolkit.run(WToolkit.java:269) at java.lang.Thread.run(Thread.java:595) "AWT-Shutdown" prio=5 tid=0x02e476b8 nid=0xf34 in Object.wait() [0x04fcf000..0x04fcfb68] at java.lang.Object.wait(Native Method) - waiting on <0x23004ad0> (a java.lang.Object) at java.lang.Object.wait(Object.java:474) at sun.awt.AWTAutoShutdown.run(AWTAutoShutdown.java:259) - locked <0x23004ad0> (a java.lang.Object) at java.lang.Thread.run(Thread.java:595) "Java2D Disposer" daemon prio=10 tid=0x02e5ba50 nid=0xf68 in Object.wait() [0x04f8f000..0x04f8fbe8] at java.lang.Object.wait(Native Method) - waiting on <0x23004b58> (a java.lang.ref.ReferenceQueue$Lock) at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:116) - locked <0x23004b58> (a java.lang.ref.ReferenceQueue$Lock) at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:132) at sun.java2d.Disposer.run(Disposer.java:107) at java.lang.Thread.run(Thread.java:595) "Low Memory Detector" daemon prio=5 tid=0x00a7f498 nid=0x944 runnable [0x00000000..0x00000000] "CompilerThread0" daemon prio=10 tid=0x00a7e070 nid=0xe74 waiting on condition [0x00000000..0x02c2f6cc] "Signal Dispatcher" daemon prio=10 tid=0x00a7d448 nid=0xe24 waiting on condition [0x00000000..0x00000000] "Finalizer" daemon prio=9 tid=0x00a782f8 nid=0xef4 in Object.wait() [0x02baf000..0x02bafa68] at java.lang.Object.wait(Native Method) - waiting on <0x22fd0f00> (a java.lang.ref.ReferenceQueue$Lock) at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:116) - locked <0x22fd0f00> (a java.lang.ref.ReferenceQueue$Lock) at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:132) at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:159) "Reference Handler" daemon prio=10 tid=0x00a76e18 nid=0x420 in Object.wait() [0x02b6f000..0x02b6fae8] at java.lang.Object.wait(Native Method) - waiting on <0x22fd0f80> (a 
java.lang.ref.Reference$Lock) at java.lang.Object.wait(Object.java:474) at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:116) - locked <0x22fd0f80> (a java.lang.ref.Reference$Lock) "VM Thread" prio=10 tid=0x00a74578 nid=0xe94 runnable "VM Periodic Task Thread" prio=10 tid=0x00a806e0 nid=0xce4 waiting on condition Found one Java-level deadlock: ============================= "Thread-2": waiting to lock monitor 0x00a77bcc (object 0x22fd2290, a java.awt.Component$AWTTreeLock), which is held by "AWT-EventQueue-0" "AWT-EventQueue-0": waiting to lock monitor 0x00a77b0c (object 0x231ceb20, a com.touchgraph.graphlayout.TGPanel), which is held by "Thread-2" Java stack information for the threads listed above: =================================================== "Thread-2": at java.awt.Component.reshape(Component.java:1858) - waiting to lock <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.reshape(JComponent.java:3940) at java.awt.Component.setBounds(Component.java:1847) at javax.swing.plaf.basic.BasicScrollBarUI.layoutHScrollbar(BasicScrollBarUI.java:732) at javax.swing.plaf.basic.BasicScrollBarUI.layoutContainer(BasicScrollBarUI.java:775) at javax.swing.plaf.basic.BasicScrollBarUI$ModelListener.stateChanged(BasicScrollBarUI.java:935) at javax.swing.DefaultBoundedRangeModel.fireStateChanged(DefaultBoundedRangeModel.java:348) at javax.swing.DefaultBoundedRangeModel.setRangeProperties(DefaultBoundedRangeModel.java:285) at javax.swing.DefaultBoundedRangeModel.setValue(DefaultBoundedRangeModel.java:151) at javax.swing.JScrollBar.setValue(JScrollBar.java:441) at com.touchgraph.graphlayout.interaction.HVScroll$DScrollbar.setIValue(HVScroll.java:339) at com.touchgraph.graphlayout.interaction.HVScroll$DScrollbar.setDValue(HVScroll.java:349) at com.touchgraph.graphlayout.interaction.HVScroll.graphMoved(HVScroll.java:240) at com.touchgraph.graphlayout.TGPanel.fireMovedEvent(TGPanel.java:681) at 
com.touchgraph.graphlayout.TGPanel.repaintAfterMove(TGPanel.java:1078) - locked <0x231ceb20> (a com.touchgraph.graphlayout.TGPanel) at com.touchgraph.graphlayout.TGLayout.relax(TGLayout.java:473) - locked <0x231cead0> (a com.touchgraph.graphlayout.TGLayout) at com.touchgraph.graphlayout.TGLayout.run(TGLayout.java:496) at java.lang.Thread.run(Thread.java:595) "AWT-EventQueue-0": at com.touchgraph.graphlayout.TGPanel.paint(TGPanel.java:1139) - waiting to lock <0x231ceb20> (a com.touchgraph.graphlayout.TGPanel) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JSplitPane.paintChildren(JSplitPane.java:1021) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JSplitPane.paintChildren(JSplitPane.java:1021) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JSplitPane.paintChildren(JSplitPane.java:1021) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a 
java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paint(JComponent.java:1014) at javax.swing.JLayeredPane.paint(JLayeredPane.java:559) at javax.swing.JComponent.paintChildren(JComponent.java:842) - locked <0x22fd2290> (a java.awt.Component$AWTTreeLock) at javax.swing.JComponent.paintWithOffscreenBuffer(JComponent.java:4970) at javax.swing.JComponent.paintDoubleBuffered(JComponent.java:4916) at javax.swing.JComponent.paint(JComponent.java:995) at java.awt.GraphicsCallback$PaintCallback.run(GraphicsCallback.java:21) at sun.awt.SunGraphicsCallback.runOneComponent(SunGraphicsCallback.java:60) at sun.awt.SunGraphicsCallback.runComponents(SunGraphicsCallback.java:97) at java.awt.Container.paint(Container.java:1709) at sun.awt.RepaintArea.paintComponent(RepaintArea.java:248) at sun.awt.RepaintArea.paint(RepaintArea.java:224) at sun.awt.windows.WComponentPeer.handleEvent(WComponentPeer.java:254) at java.awt.Component.dispatchEventImpl(Component.java:4031) at java.awt.Container.dispatchEventImpl(Container.java:2024) at java.awt.Window.dispatchEventImpl(Window.java:1774) at java.awt.Component.dispatchEvent(Component.java:3803) at java.awt.EventQueue.dispatchEvent(EventQueue.java:463) at java.awt.EventDispatchThread.pumpOneEventForHierarchy(EventDispatchThread.java:242) at java.awt.EventDispatchThread.pumpEventsForHierarchy(EventDispatchThread.java:163) at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:157) at java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:149) at java.awt.EventDispatchThread.run(EventDispatchThread.java:110) Found 1 deadlock. */ /** * TGPanel contains code for drawing the graph, and storing which nodes are * selected, and which ones the mouse is over. It houses methods to activate * TGLayout, which performs dynamic layout. 
Whenever the graph is moved, or * repainted, TGPanel fires listner methods on associated objects. * <p> * <b> Parts of this code build upon Sun's Graph Layout example. * http://java.sun.com/applets/jdk/1.1/demo/GraphLayout/Graph.java </b> * </p> * * @author Alexander Shapiro * @author Murray Altheim (2001-11-06; 2002-01-14 cleanup) */ public class TGPanel extends JPanel { /** * The back color constant. */ public static Color BACK_COLOR = Color.white; /** * The serial version uid for serialization constant. */ private static final long serialVersionUID = 1L; /** * The TGLayout property. */ public TGLayout tgLayout; /** * The basic mouse motion listener property. */ protected BasicMouseMotionListener basicMML; /** * The mouseOverE is the edge the mouse is over. */ protected Edge mouseOverE; /** * The mouseOverN is the node the mouse is over. */ protected Node mouseOverN; /** * The maintain mouse over. If <code>true</code>, then don't change * mouseOverN or mouseOverE */ protected boolean maintainMouseOver; protected Node select; Node dragNode; // Node currently being dragged protected Point mousePos; // Mouse location, updated in the // mouseMotionListener /** * The complete graph elt set property. */ private GraphEltSet completeEltSet; /** * The visible locality property. */ private VisibleLocality visibleLocality; /** * The locality utils property. */ private LocalityUtils localityUtils; /** * The offscreen Image property. */ Image offscreen; /** * The offscreen dimension property. */ Dimension offscreensize; /** * The off graphics property. */ Graphics offgraphics; /** * The graph listeners vector. */ private Vector graphListeners; /** * The paint listeners vector. */ private Vector paintListeners; /** * Converts between a nodes visual position (drawx, drawy). */ TGLensSet tgLensSet; /** * Converts between the visual position and absolute position (x,y). */ AdjustOriginLens adjustOriginLens; /** * The switch select ui property. 
*/ SwitchSelectUI switchSelectUI; /** * Default constructor. */ public TGPanel() { setLayout(null); setGraphEltSet(new GraphEltSet()); addMouseListener(new BasicMouseListener()); basicMML = new BasicMouseMotionListener(); addMouseMotionListener(basicMML); graphListeners = new Vector(); paintListeners = new Vector(); adjustOriginLens = new AdjustOriginLens(); switchSelectUI = new SwitchSelectUI(); TGLayout tgLayout = new TGLayout(this); setTGLayout(tgLayout); tgLayout.start(); setGraphEltSet(new GraphEltSet()); } /** * Setter for TGLensSet. * * @param lensSet * the lensSet to set. */ public void setLensSet(TGLensSet lensSet) { tgLensSet = lensSet; } /** * Setter for TGLayout. * * @param tgl * the TGLayout to set. */ public void setTGLayout(TGLayout tgl) { tgLayout = tgl; } /** * Setter for GraphEltSet. * * @param ges * the GraphEltSet to set. */ public void setGraphEltSet(GraphEltSet ges) { completeEltSet = ges; visibleLocality = new VisibleLocality(completeEltSet); localityUtils = new LocalityUtils(visibleLocality, this); } /** * Getter for adjustOriginLens. * * @return Returns the set AdjustOriginLens. */ public AdjustOriginLens getAdjustOriginLens() { return adjustOriginLens; } /** * Getter for switchSelectUI. * * @return Returns the switchSelectUI. */ public SwitchSelectUI getSwitchSelectUI() { return switchSelectUI; } /** * Setter for color. * * @param color * The Color to set. */ public void setBackColor(Color color) { BACK_COLOR = color; } /** * Returns an Iterator over all nodes in the complete graph. public Iterator * getAllNodes() { return completeEltSet.getNodes(); } */ /** * Return the current visible locality. * * @return ImmutableGraphEltSet visible locality. */ public ImmutableGraphEltSet getGES() { return visibleLocality; } /** * Returns the current node count. * * @return the node count. */ public int getNodeCount() { return completeEltSet.nodeCount(); } /** * Returns the current node count within the VisibleLocality. 
* * @return the number of node of visibileLocality. * @deprecated this method has been replaced by the * <tt>visibleNodeCount()</tt> method. */ public int nodeNum() { return visibleLocality.nodeCount(); } /** * Returns the current node count within the VisibleLocality. * * @return the number of visible nodes. */ public int visibleNodeCount() { return visibleLocality.nodeCount(); } /** * Return the Node whose ID matches the String <tt>id</tt>, null if no * match is found. * * @param id * The ID identifier used as a query. * @return The Node whose ID matches the provided 'id', null if no match is * found. */ public Node findNode(String id) { if (id == null) { return null; } return completeEltSet.findNode(id); } /** * Return the Node whose URL matches the String <tt>strURL</tt>, null if * no match is found. * * @param strURL * The URL identifier used as a query. * @return The Node whose URL matches the provided 'URL', null if no match * is found. */ public Node findNodeByUri(String strURL) { if (strURL == null) { return null; } return completeEltSet.findNodeByURL(strURL); } /** * Return a Collection of all Nodes whose label matches the String * <tt>label</tt>, null if no match is found. */ /* * public Collection findNodesByLabel( String label ) { if ( label == null ) * return null; // ignore return completeEltSet.findNodesByLabel(label); } */ /** * Return the first Nodes whose label contains the String <tt>substring</tt>, * null if no match is found. * * @param substring * The Substring used as a query. * @return the first node. */ public Node findNodeLabelContaining(String substring) { if (substring == null) { return null; } return completeEltSet.findNodeLabelContaining(substring); } /** * Adds a Node, with its ID and label being the current node count plus 1. * * @see com.touchgraph.graphlayout.Node */ public Node addNode() throws TGException { String id = String.valueOf(getNodeCount() + 1); return addNode(id, null); } /** * Adds a Node, provided its label. 
     * The node is assigned a unique ID.
     *
     * @param label the label for the new Node (may be null).
     * @return the newly created Node.
     * @throws TGException propagated from {@link #addNode(Node)} — presumably on a
     *             duplicate ID; TODO confirm in GraphEltSet.
     * @see com.touchgraph.graphlayout.graphelements.GraphEltSet
     */
    public Node addNode(String label) throws TGException {
        // Delegate with a null ID so the Node picks/receives its own identifier.
        return addNode(null, label);
    }

    /**
     * Adds a Node, provided its ID and label.
     *
     * @param id the identifier for the new Node (may be null).
     * @param label the label for the new Node; when null, a Node is built from the ID alone.
     * @return the newly created Node.
     * @throws TGException propagated from {@link #addNode(Node)}.
     * @see com.touchgraph.graphlayout.Node
     */
    public Node addNode(String id, String label) throws TGException {
        Node node;
        if (label == null) {
            node = new Node(id);
        } else {
            node = new Node(id, label);
        }
        updateDrawPos(node); // The addNode() call should probably take a
        // position; this just sets it at 0,0
        addNode(node);
        return node;
    }

    /**
     * Add the Node <tt>node</tt> to the visibleLocality, checking for ID
     * uniqueness.
     *
     * @param node the Node to add.
     * @throws TGException presumably when the ID is already taken — TODO confirm
     *             against VisibleLocality.addNode.
     */
    public void addNode(final Node node) throws TGException {
        // Lock on localityUtils so the add cannot interleave with locale animations.
        synchronized (localityUtils) {
            visibleLocality.addNode(node);
            resetDamper(); // wake the layout so the new node settles into place
        }
    }

    /**
     * Remove the Node object matching the ID <code>id</code>, returning true
     * if the deletion occurred, false if a Node matching the ID does not exist
     * (or if the ID value was null).
     *
     * @param id The ID identifier used as a query.
     * @return true if the deletion occurred.
     */
    public boolean deleteNodeById(String id) {
        if (id == null) {
            return false; // ignore null IDs rather than fail
        }
        Node node = findNode(id);
        if (node == null) {
            return false;
        }
        return deleteNode(node);
    }

    /**
     * Delete a node.
     *
     * @param node the node to delete.
     * @return Returns <code>true</code> if node is deleted, otherwise
     *         <code>false</code>.
     */
    public boolean deleteNode(Node node) {
        synchronized (localityUtils) {
            if (visibleLocality.deleteNode(node)) { // delete from
                // visibleLocality, *AND
                // completeEltSet
                if (node == select) {
                    clearSelect(); // never leave the selection pointing at a removed node
                }
                resetDamper();
                return true;
            }
            return false;
        }
    }

    /**
     * Clear all elements from the visible locality.
     */
    public void clearAll() {
        synchronized (localityUtils) {
            visibleLocality.clearAll();
        }
    }

    /**
     * Return the selected node.
     *
     * @return Returns the node.
     */
    public Node getSelect() {
        return select;
    }

    /**
     * Return the node the mouse is currently hovering over.
     *
     * @return Returns node (null when the cursor is over no node).
     */
    public Node getMouseOverN() {
        return mouseOverN;
    }

    /**
     * Set the node where mouse is over. Ignored while a node is being dragged
     * or while mouse-over is pinned via {@code setMaintainMouseOver}.
     *
     * @param node The node to select
     */
    // TODO FIXME synchronized removed (deadlocks)
    public void setMouseOverN(Node node) {
        if (dragNode != null || maintainMouseOver) {
            return; // So you don't accidentally switch nodes while dragging
        }
        if (mouseOverN != node) {
            // Node oldMouseOverN = mouseOverN;
            mouseOverN = node;
        }
        // Cursor doubles as hover feedback: hand over a node, move cursor elsewhere.
        if (mouseOverN == null) {
            setCursor(new Cursor(Cursor.MOVE_CURSOR));
        } else {
            setCursor(new Cursor(Cursor.HAND_CURSOR));
        }
    }

    /*
     * Returns an Iterator over all edges in the complete graph public Iterator
     * getAllEdges() { return completeEltSet.getEdges(); }
     */

    /**
     * Delete an edge.
     *
     * @param edge the Edge to delete.
     */
    public void deleteEdge(Edge edge) {
        synchronized (localityUtils) {
            visibleLocality.deleteEdge(edge);
            resetDamper();
        }
    }

    /**
     * Delete the edge connecting one Node to another.
     *
     * @param from the start node.
     * @param to the end node.
     */
    public void deleteEdge(Node from, Node to) {
        synchronized (localityUtils) {
            visibleLocality.deleteEdge(from, to);
        }
    }

    /**
     * Returns the current edge count in the complete graph.
     *
     * @return Returns the edge count.
     */
    public int getEdgeCount() {
        return completeEltSet.edgeCount();
    }

    /**
     * Return the number of Edges in the Locality.
     *
     * @return the edge number.
     * @deprecated this method has been replaced by the
     *             <tt>visibleEdgeCount()</tt> method.
     */
    public int edgeNum() {
        return visibleLocality.edgeCount();
    }

    /**
     * Return the number of Edges in the Locality.
     *
     * @return the number of Edges.
     */
    public int visibleEdgeCount() {
        return visibleLocality.edgeCount();
    }

    /**
     * Find the edge connecting node <tt>f</tt> to node <tt>t</tt>.
     *
     * @param f the from-Node
     * @param t the to-Node
     * @return the Edge, or null if the visible locality holds no such edge.
     */
    public Edge findEdge(Node f, Node t) {
        return visibleLocality.findEdge(f, t);
    }

    /**
     * Add an Edge.
     *
     * @param e the Edge to add.
*/ public void addEdge(Edge e) { synchronized (localityUtils) { visibleLocality.addEdge(e); resetDamper(); } } /** * Add a Edge from node to another. * * @param file * a start Node * @param t * a end Node * @param tens * @return */ public Edge addEdge(Node f, Node t, int tens) { synchronized (localityUtils) { return visibleLocality.addEdge(f, t, tens); } } public Edge getMouseOverE() { return mouseOverE; } // TODO FIXME synchronized removed (deadlocks) public void setMouseOverE(Edge edge) { if (dragNode != null || maintainMouseOver) { return; // No funny business while dragging } if (mouseOverE != edge) { // Edge oldMouseOverE = mouseOverE; mouseOverE = edge; } } // miscellany .................................. protected class AdjustOriginLens extends TGAbstractLens { protected void applyLens(TGPoint2D p) { p.x = p.x + TGPanel.this.getSize().width / 2; p.y = p.y + TGPanel.this.getSize().height / 2; } protected void undoLens(TGPoint2D p) { p.x = p.x - TGPanel.this.getSize().width / 2; p.y = p.y - TGPanel.this.getSize().height / 2; } } public class SwitchSelectUI extends TGAbstractClickUI { public void mouseClicked(MouseEvent e) { if (mouseOverN != null) { if (mouseOverN != select) { setSelect(mouseOverN); } else { clearSelect(); } } } } void fireMovedEvent() { Vector listeners; // TODO FIXME deadlocks: this -> graphlisteners synchronized (graphListeners) { listeners = (Vector) graphListeners.clone(); } for (int i = 0; i < listeners.size(); i++) { GraphListener gl = (GraphListener) listeners.elementAt(i); gl.graphMoved(); } } public void fireResetEvent() { Vector listeners; // TODO FIXME deadlocks: this -> graphlisteners synchronized (graphListeners) { listeners = (Vector) graphListeners.clone(); } for (int i = 0; i < listeners.size(); i++) { GraphListener gl = (GraphListener) listeners.elementAt(i); gl.graphReset(); } } public synchronized void addGraphListener(GraphListener gl) { graphListeners.addElement(gl); } public synchronized void 
removeGraphListener(GraphListener gl) { graphListeners.removeElement(gl); } public synchronized void addPaintListener(TGPaintListener pl) { paintListeners.addElement(pl); } public synchronized void removePaintListener(TGPaintListener pl) { paintListeners.removeElement(pl); } // private void redraw() { // resetDamper(); // } public void setMaintainMouseOver(boolean maintain) { maintainMouseOver = maintain; } public void clearSelect() { if (select != null) { select = null; repaint(); } } /** * A convenience method that selects the first node of a graph, so that * hiding works. */ public void selectFirstNode() { setSelect(getGES().getFirstNode()); } public void setSelect(Node node) { if (node != null) { // try to select concept in the table tree OutlineJPanel.getInstance().setSelectedTreeNodeConcept(node.getURL()); select = node; repaint(); } else { clearSelect(); } } /** * Node was double clicked - node is going to be launched. (added by * MindRaider@users.sourceforge.net). * * @param node */ public void setDoubleSelect(Node node) { MindRaider.spidersGraph.handleDoubleSelect(node); } public void multiSelect(TGPoint2D from, TGPoint2D to) { final double minX, minY, maxX, maxY; if (from.x > to.x) { maxX = from.x; minX = to.x; } else { minX = from.x; maxX = to.x; } if (from.y > to.y) { maxY = from.y; minY = to.y; } else { minY = from.y; maxY = to.y; } final Vector selectedNodes = new Vector(); TGForEachNode fen = new TGForEachNode() { public void forEachNode(Node node) { double x = node.drawx; double y = node.drawy; if (x > minX && x < maxX && y > minY && y < maxY) { selectedNodes.addElement(node); } } }; visibleLocality.forAllNodes(fen); if (selectedNodes.size() > 0) { int r = (int) (Math.random() * selectedNodes.size()); setSelect((Node) selectedNodes.elementAt(r)); } else { clearSelect(); } } public void updateLocalityFromVisibility() throws TGException { visibleLocality.updateLocalityFromVisibility(); } public void setLocale(Node node, int radius, int 
maxAddEdgeCount, int maxExpandEdgeCount, boolean unidirectional) throws TGException { localityUtils.setLocale(node, radius, maxAddEdgeCount, maxExpandEdgeCount, unidirectional); } public void fastFinishAnimation() { // Quickly wraps up the add node // animation localityUtils.fastFinishAnimation(); } public void setLocale(Node node, int radius) throws TGException { localityUtils.setLocale(node, radius); } public void expandNode(Node node) { localityUtils.expandNode(node); } public void hideNode(Node hideNode) { localityUtils.hideNode(hideNode); } public void collapseNode(Node collapseNode) { localityUtils.collapseNode(collapseNode); } public void hideEdge(Edge hideEdge) { visibleLocality.removeEdge(hideEdge); if (mouseOverE == hideEdge) { setMouseOverE(null); } resetDamper(); } public void setDragNode(Node node) { dragNode = node; tgLayout.setDragNode(node); } public Node getDragNode() { return dragNode; } void setMousePos(Point p) { mousePos = p; } public Point getMousePos() { return mousePos; } /** Start and stop the damper. Should be placed in the TGPanel too. */ public void startDamper() { if (tgLayout != null) { tgLayout.startDamper(); } } public void stopDamper() { if (tgLayout != null) { tgLayout.stopDamper(); } } /** Makes the graph mobile, and slowly slows it down. 
*/ public void resetDamper() { if (tgLayout != null) { tgLayout.resetDamper(); } } /** Gently stops the graph from moving */ public void stopMotion() { if (tgLayout != null) { tgLayout.stopMotion(); } } class BasicMouseListener extends MouseAdapter { public void mouseEntered(MouseEvent e) { addMouseMotionListener(basicMML); } public void mouseExited(MouseEvent e) { removeMouseMotionListener(basicMML); mousePos = null; setMouseOverN(null); setMouseOverE(null); repaint(); } } class BasicMouseMotionListener implements MouseMotionListener { public void mouseDragged(MouseEvent e) { mousePos = e.getPoint(); findMouseOver(); try { Thread.sleep(6); // An attempt to make the cursor flicker // less } catch (InterruptedException ex) { // break; } } public void mouseMoved(MouseEvent e) { mousePos = e.getPoint(); synchronized (this) { Edge oldMouseOverE = mouseOverE; Node oldMouseOverN = mouseOverN; findMouseOver(); if (oldMouseOverE != mouseOverE || oldMouseOverN != mouseOverN) { repaint(); } // Replace the above lines with the commented portion below to // prevent whole graph // from being repainted simply to highlight a node On mouseOver. // This causes some annoying flickering though. /* * if(oldMouseOverE!=mouseOverE) { if (oldMouseOverE!=null) { * synchronized(oldMouseOverE) { * oldMouseOverE.paint(TGPanel.this.getGraphics(),TGPanel.this); * oldMouseOverE.from.paint(TGPanel.this.getGraphics(),TGPanel.this); * oldMouseOverE.to.paint(TGPanel.this.getGraphics(),TGPanel.this); } } * if (mouseOverE!=null) { synchronized(mouseOverE) { * mouseOverE.paint(TGPanel.this.getGraphics(),TGPanel.this); * mouseOverE.from.paint(TGPanel.this.getGraphics(),TGPanel.this); * mouseOverE.to.paint(TGPanel.this.getGraphics(),TGPanel.this); } } } * if(oldMouseOverN!=mouseOverN) { if (oldMouseOverN!=null) * oldMouseOverN.paint(TGPanel.this.getGraphics(),TGPanel.this); * if (mouseOverN!=null) * mouseOverN.paint(TGPanel.this.getGraphics(),TGPanel.this); } */ } } } /** * Find mouse over. 
* */ // TODO FIXME synchronized removed protected void findMouseOver() { if (mousePos == null) { setMouseOverN(null); setMouseOverE(null); return; } final int mpx = mousePos.x; final int mpy = mousePos.y; final Node[] monA = new Node[1]; final Edge[] moeA = new Edge[1]; TGForEachNode fen = new TGForEachNode() { double minoverdist = 100; // Kind of a hack (see second if // statement) // Nodes can be as wide as 200 (=2*100) public void forEachNode(Node node) { double x = node.drawx; double y = node.drawy; double dist = Math.sqrt((mpx - x) * (mpx - x) + (mpy - y) * (mpy - y)); if ((dist < minoverdist) && node.containsPoint(mpx, mpy)) { minoverdist = dist; monA[0] = node; } } }; visibleLocality.forAllNodes(fen); TGForEachEdge fee = new TGForEachEdge() { double minDist = 8; // Tangential distance to the edge double minFromDist = 1000; // Distance to the edge's "from" node public void forEachEdge(Edge edge) { double x = edge.getFrom().drawx; double y = edge.getFrom().drawy; double dist = edge.distFromPoint(mpx, mpy); if (dist < minDist) { // Set the over edge to the edge with // the minimun tangential distance minDist = dist; minFromDist = Math.sqrt((mpx - x) * (mpx - x) + (mpy - y) * (mpy - y)); moeA[0] = edge; } else if (dist == minDist) { // If tangential distances are // identical, chose // the edge whose "from" node is closest. 
double fromDist = Math.sqrt((mpx - x) * (mpx - x) + (mpy - y) * (mpy - y)); if (fromDist < minFromDist) { minFromDist = fromDist; moeA[0] = edge; } } } }; visibleLocality.forAllEdges(fee); setMouseOverN(monA[0]); if (monA[0] == null) { setMouseOverE(moeA[0]); } else { setMouseOverE(null); } } TGPoint2D topLeftDraw; TGPoint2D bottomRightDraw; public TGPoint2D getTopLeftDraw() { return new TGPoint2D(topLeftDraw); } public TGPoint2D getBottomRightDraw() { return new TGPoint2D(bottomRightDraw); } public TGPoint2D getCenter() { return tgLensSet.convDrawToReal(getSize().width / 2, getSize().height / 2); } public TGPoint2D getDrawCenter() { return new TGPoint2D(getSize().width / 2, getSize().height / 2); } public void updateGraphSize() { if (topLeftDraw == null) { topLeftDraw = new TGPoint2D(0, 0); } if (bottomRightDraw == null) { bottomRightDraw = new TGPoint2D(0, 0); } TGForEachNode fen = new TGForEachNode() { boolean firstNode = true; public void forEachNode(Node node) { if (firstNode) { // initialize topRight + bottomLeft topLeftDraw.setLocation(node.drawx, node.drawy); bottomRightDraw.setLocation(node.drawx, node.drawy); firstNode = false; } else { // Standard max and min finding topLeftDraw.setLocation(Math.min(node.drawx, topLeftDraw.x), Math.min(node.drawy, topLeftDraw.y)); bottomRightDraw.setLocation(Math.max(node.drawx, bottomRightDraw.x), Math.max(node.drawy, bottomRightDraw.y)); } } }; visibleLocality.forAllNodes(fen); } /** * Process graph move. */ // TODO FIXME synchronized removed (deadlocks) public void processGraphMove() { updateDrawPositions(); updateGraphSize(); } /** * Update draw position of a node. * * @param node * the node position to repaint. */ public void updateDrawPos(Node node) { TGPoint2D p = tgLensSet.convRealToDraw(node.x, node.y); node.drawx = p.x; node.drawy = p.y; } /** * Update position from draw. * * @param node * the node position to repaint. 
*/ public void updatePosFromDraw(Node node) { TGPoint2D p = tgLensSet.convDrawToReal(node.drawx, node.drawy); node.x = p.x; node.y = p.y; } /** * Update draw positions. */ public void updateDrawPositions() { TGForEachNode fen = new TGForEachNode() { public void forEachNode(Node node) { updateDrawPos(node); } }; visibleLocality.forAllNodes(fen); } /** * Returns the brighter <code>Color</code> of given color. * * @param c * the color to process * @return Returns the brighter color. */ Color myBrighter(Color c) { int r = c.getRed(); int g = c.getGreen(); int b = c.getBlue(); r = Math.min(r + 96, 255); g = Math.min(g + 96, 255); b = Math.min(b + 96, 255); return new Color(r, g, b); } /** * Repaint after move. */ public void repaintAfterMove() { processGraphMove(); findMouseOver(); fireMovedEvent(); repaint(); } /* * (non-Javadoc) * @see java.awt.Component#paint(java.awt.Graphics) */ // TODO FIXME synchronized removed (vector operation encapsulate -> change to sync vector and remove also that lock) public void paint(Graphics g) { long start = System.currentTimeMillis(); Dimension d = getSize(); if ((offscreen == null) || (d.width != offscreensize.width) || (d.height != offscreensize.height)) { offscreen = createImage(d.width, d.height); offscreensize = d; offgraphics = offscreen.getGraphics(); processGraphMove(); findMouseOver(); fireMovedEvent(); } offgraphics.setColor(BACK_COLOR); offgraphics.fillRect(0, 0, d.width, d.height); // TODO FIXME synchronized changed: this -> paintListeners synchronized (paintListeners) { paintListeners = (Vector) paintListeners.clone(); } for (int i = 0; i < paintListeners.size(); i++) { TGPaintListener pl = (TGPaintListener) paintListeners.elementAt(i); pl.paintFirst(offgraphics); } TGForEachEdge fee = new TGForEachEdge() { public void forEachEdge(Edge edge) { edge.paint(offgraphics, TGPanel.this); } }; visibleLocality.forAllEdges(fee); for (int i = 0; i < paintListeners.size(); i++) { TGPaintListener pl = (TGPaintListener) 
paintListeners.elementAt(i); pl.paintAfterEdges(offgraphics); } TGForEachNode fen = new TGForEachNode() { public void forEachNode(Node node) { node.paint(offgraphics, TGPanel.this); } }; visibleLocality.forAllNodes(fen); if (mouseOverE != null) { // Make the edge the mouse is over appear on // top. mouseOverE.paint(offgraphics, this); mouseOverE.getFrom().paint(offgraphics, this); mouseOverE.getTo().paint(offgraphics, this); } if (select != null) { // Make the selected node appear on top. select.paint(offgraphics, this); } if (mouseOverN != null) { // Make the node the mouse is over appear on // top. mouseOverN.paint(offgraphics, this); } for (int i = 0; i < paintListeners.size(); i++) { TGPaintListener pl = (TGPaintListener) paintListeners.elementAt(i); pl.paintLast(offgraphics); } Graphics2D g2 = (Graphics2D) offgraphics; int x, y, w; // animate warp if (warpEnabled) { float ac; alphaChannel += 0.05f; if (alphaChannel > 1f) { if (alphaChannel > 2f) { warpEnabled = false; return; } ac = 2f - alphaChannel; } else { ac = alphaChannel; } // TODO get the right color (white/black profile) g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, ac)); if (SpidersGraph.antialiased) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); } else { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } g2.setFont(new Font("Times New Roman", Font.PLAIN, 75)); FontMetrics fontMetrics = g2.getFontMetrics(); w = fontMetrics.stringWidth(warpMessage); x = d.width / 2 - (w / 2); y = fontMetrics.getHeight(); g2.setPaint(Color.LIGHT_GRAY); g2.fillRoundRect(x - 20, 10, w + 40, fontMetrics.getHeight() + 20, 30, 30); g2.setPaint(Color.BLACK); g2.drawString(warpMessage, x, y); g2.setFont(new Font("Times New Roman", Font.ITALIC, 24)); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 1f)); } if (MindRaider.profile.getActiveOutlineUri() != null && MindRaider.outlineCustodian != null && 
MindRaider.outlineCustodian.getActiveOutlineResource() != null) { String notebookLabel = MindRaider.outlineCustodian.getActiveOutlineResource().getLabel(); g2.setFont(new Font("Times New Roman", Font.ITALIC, 24)); FontMetrics fontMetrics = g2.getFontMetrics(); x = 15; y = d.height - 50; g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, .6f)); if (SpidersGraph.antialiased) { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); } else { g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF); } g2.setPaint(Color.LIGHT_GRAY); g2.fillRoundRect(x - 35, y, x + 35 + 20 + fontMetrics.stringWidth("Notebook: " + notebookLabel), fontMetrics.getHeight() + 10, 30, 30); g2.setPaint(Color.BLACK); y += fontMetrics.getHeight(); g2.drawString("Notebook: ", x + 10, y); g2.setFont(new Font("Times New Roman", Font.ITALIC, 18)); g2.drawString(notebookLabel, x + 10 + fontMetrics.stringWidth("Notebook: "), y); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 1f)); } /* * y+=50; g2.drawString("Model:", x, y); g2.setFont(new Font("Times New * Roman", Font.PLAIN, 12)); y+=22; * g2.drawString(MindRaider.profile.activeModel, 50, y); */ paintComponents(offgraphics); // Paint any components that have been // added to this panel /* * FPS */ if (SpidersGraph.fps) { long delta = System.currentTimeMillis() - start; g2.setFont(new Font("Verdana", Font.BOLD, 10)); g2.setPaint(Color.YELLOW); g2.drawString("" + (delta == 0 ? 1000 : 1000l / delta) + " FPS/" + visibleLocality.nodeCount() + " nodes", 10, 15); } g.drawImage(offscreen, 0, 0, null); } /** * The warp enabled flag. */ public boolean warpEnabled; /** * The alpha channel value. */ float alphaChannel; /** * The warp message string. */ String warpMessage; /** * Start warp. * * @param message * the message. 
*/ public synchronized void warpStart(String message) { warpEnabled = true; alphaChannel = 0.0f; warpMessage = message; } public static void main(String[] args) { Frame frame; frame = new Frame("TGPanel"); TGPanel tgPanel = new TGPanel(); frame.addWindowListener(new WindowAdapter() { public void windowClosing(WindowEvent e) { System.exit(0); } }); TGLensSet tgls = new TGLensSet(); tgls.addLens(tgPanel.getAdjustOriginLens()); tgPanel.setLensSet(tgls); try { tgPanel.addNode(); // Add a starting node. } catch (TGException tge) { System.err.println(tge.getMessage()); } tgPanel.setVisible(true); new GLEditUI(tgPanel).activate(); frame.add("Center", tgPanel); frame.setSize(500, 500); frame.setVisible(true); } }
package continuum.rest.client; import continuum.Continuum; import continuum.atom.Atom; import continuum.atom.AtomID; import continuum.control.Controller; import continuum.slab.Iterator; import continuum.slab.Slab; import continuum.slab.Translator; import continuum.slice.Scan; import continuum.slice.Scanner; import continuum.slice.Slice; import continuum.rest.http.HTTP; import continuum.util.datetime.Interval; import java.util.HashMap; import java.util.Map; import java.util.stream.Stream; import static continuum.Continuum.*; /** * REST client wrapper * Created by zack on 2/23/16. */ public class Client implements Controller, Translator<Atom> { private final String baseUrl; public Client() { this("localhost"); } public Client(String host) { this(host, 1337); } //TODO: HttpSlab? public Client(String host, int port) { baseUrl = "http://" + host + ":" + port + "/api/1.0"; } /** * {@inheritDoc} */ @Override public Continuum.AtomBuilder atom() { return new AtomBuilder(); } /** * {@inheritDoc} */ @Override public Continuum.AtomBuilder atom(String name) { return atom().name(name); } /** * {@inheritDoc} */ @Override public ScanBuilder scan(String name) { return Continuum.scan().name(name); } /** * {@inheritDoc} */ @Override public Translator<Atom> translator() { return this; } /** * {@inheritDoc} */ @Override public void write(Atom atom) throws Exception { Map<String, Object> data = new HashMap<>(); Map<String, Object> fields = new HashMap<>(); data.put("name", atom.name()); data.put("value", atom.values().value()); data.put("timestamp", atom.timestamp()); if (atom.particles() != null) for (String key : atom.particles().keySet()) data.put(key, atom.particles().get(key)); if (atom.fields() != null) for (String key : atom.fields().keySet()) fields.put(key, atom.fields().get(key)); if (fields.size() > 0) data.put("fields", fields); HTTP.postJSON(baseUrl + "/write", data); } /** * {@inheritDoc} */ @Override public Atom get(AtomID atomID) throws Exception { return read(atomID); } 
/** * {@inheritDoc} */ @Override public void delete(AtomID atomId) throws Exception { throw new UnsupportedOperationException("Deleting via REST not allowed"); } /** * {@inheritDoc} */ @Override public void delete(Atom atom) throws Exception { throw new UnsupportedOperationException("Deleting via REST not allowed"); } /** * {@inheritDoc} */ @Override public long count() throws Exception { String url = baseUrl + "/count"; Map<String, Object> data = HTTP.getJSON(url); return (long)data.get("count"); } @Override public void delete(Interval interval) throws Exception { throw new UnsupportedOperationException("Deleting via REST not allowed"); } @Override public void delete(String name, Interval interval) throws Exception { throw new UnsupportedOperationException("Deleting via REST not allowed"); } /** * {@inheritDoc} */ @Override public Atom read(AtomID atomID) throws Exception { throw new UnsupportedOperationException("Mer"); } /** * {@inheritDoc} */ @Override public Slice slice(Scan scan) throws Exception { String url = baseUrl + "/read?name=" + scan.name(); url += "&start=" + scan.start(); url += "&end=" + scan.end(); if (scan.function() != null) url += "&fn=" + scan.function().name(); if (scan.interval() != null) url += "&interval=" + scan.interval().toString(); if (scan.particles() != null) { for (String name : scan.particles().keySet()) { Object value = scan.particles().get(name); url += "&" + name + "=" + value; } } if (scan.fields() != null) { String fields = ""; for (String name : scan.fields().keySet()) { Object value = scan.fields().get(name); fields += name + ":" + value; } url += "&fields=" + fields; } if (scan.groups() != null) { url += "&group=" + String.join(",", scan.groups()); } return HTTP.getJSONObject(url, CSlice.class); } @Override public Stream<Slice> stream(Scan scan) throws Exception { throw new Exception("HTTP Streaming not yet supported"); } /** * {@inheritDoc} */ @Override public Scanner scanner() { System.err.println("Client.scanner() not 
implemented!!!"); return null; } /** * {@inheritDoc} */ @Override public Iterator<Atom> iterator() { throw new UnsupportedOperationException("Can not iterate via HTTP"); } @Override public Iterator<Atom> iterator(boolean b) { return null; } /** * {@inheritDoc} */ @Override public Slab slab() { return null; } public class AtomBuilder extends Continuum.AtomBuilder { private AtomBuilder() { } public AtomBuilder name(String name) { super.name(name); return this; } @Override public Atom build() { return new CAtom(name, particles, timestamp, fields, values); } } }
/* * Copyright 2002-2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.springframework.web.servlet.view.json; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import com.fasterxml.jackson.annotation.JsonView; import com.fasterxml.jackson.core.JsonGenerator; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.ser.FilterProvider; import org.springframework.http.converter.json.Jackson2ObjectMapperBuilder; import org.springframework.http.converter.json.MappingJacksonValue; import org.springframework.util.CollectionUtils; import org.springframework.util.StringUtils; import org.springframework.validation.BindingResult; import org.springframework.web.servlet.View; /** * Spring MVC {@link View} that renders JSON content by serializing the model for the current request * using <a href="http://wiki.fasterxml.com/JacksonHome">Jackson 2's</a> {@link ObjectMapper}. * * <p>By default, the entire contents of the model map (with the exception of framework-specific classes) * will be encoded as JSON. If the model contains only one key, you can have it extracted encoded as JSON * alone via {@link #setExtractValueFromSingleKeyModel}. 
* * <p>The default constructor uses the default configuration provided by {@link Jackson2ObjectMapperBuilder}. * * <p>Compatible with Jackson 2.6 and higher, as of Spring 4.3. * * @author Jeremy Grelle * @author Arjen Poutsma * @author Rossen Stoyanchev * @author Juergen Hoeller * @author Sebastien Deleuze * @since 3.1.2 */ public class MappingJackson2JsonView extends AbstractJackson2View { /** * Default content type: "application/json". * Overridable through {@link #setContentType}. */ public static final String DEFAULT_CONTENT_TYPE = "application/json"; /** * Default content type for JSONP: "application/javascript". */ public static final String DEFAULT_JSONP_CONTENT_TYPE = "application/javascript"; /** * Pattern for validating jsonp callback parameter values. */ private static final Pattern CALLBACK_PARAM_PATTERN = Pattern.compile("[0-9A-Za-z_\\.]*"); private String jsonPrefix; private Set<String> modelKeys; private boolean extractValueFromSingleKeyModel = false; private Set<String> jsonpParameterNames = new LinkedHashSet<>(Arrays.asList("jsonp", "callback")); /** * Construct a new {@code MappingJackson2JsonView} using default configuration * provided by {@link Jackson2ObjectMapperBuilder} and setting the content type * to {@code application/json}. */ public MappingJackson2JsonView() { super(Jackson2ObjectMapperBuilder.json().build(), DEFAULT_CONTENT_TYPE); } /** * Construct a new {@code MappingJackson2JsonView} using the provided * {@link ObjectMapper} and setting the content type to {@code application/json}. * @since 4.2.1 */ public MappingJackson2JsonView(ObjectMapper objectMapper) { super(objectMapper, DEFAULT_CONTENT_TYPE); } /** * Specify a custom prefix to use for this view's JSON output. * Default is none. * @see #setPrefixJson */ public void setJsonPrefix(String jsonPrefix) { this.jsonPrefix = jsonPrefix; } /** * Indicates whether the JSON output by this view should be prefixed with <tt>")]}', "</tt>. * Default is {@code false}. 
* <p>Prefixing the JSON string in this manner is used to help prevent JSON Hijacking. * The prefix renders the string syntactically invalid as a script so that it cannot be hijacked. * This prefix should be stripped before parsing the string as JSON. * @see #setJsonPrefix */ public void setPrefixJson(boolean prefixJson) { this.jsonPrefix = (prefixJson ? ")]}', " : null); } /** * {@inheritDoc} */ @Override public void setModelKey(String modelKey) { this.modelKeys = Collections.singleton(modelKey); } /** * Set the attributes in the model that should be rendered by this view. * When set, all other model attributes will be ignored. */ public void setModelKeys(Set<String> modelKeys) { this.modelKeys = modelKeys; } /** * Return the attributes in the model that should be rendered by this view. */ public final Set<String> getModelKeys() { return this.modelKeys; } /** * Set whether to serialize models containing a single attribute as a map or * whether to extract the single value from the model and serialize it directly. * <p>The effect of setting this flag is similar to using * {@code MappingJackson2HttpMessageConverter} with an {@code @ResponseBody} * request-handling method. * <p>Default is {@code false}. */ public void setExtractValueFromSingleKeyModel(boolean extractValueFromSingleKeyModel) { this.extractValueFromSingleKeyModel = extractValueFromSingleKeyModel; } /** * Set JSONP request parameter names. Each time a request has one of those * parameters, the resulting JSON will be wrapped into a function named as * specified by the JSONP request parameter value. * <p>The parameter names configured by default are "jsonp" and "callback". 
* @since 4.1 * @see <a href="http://en.wikipedia.org/wiki/JSONP">JSONP Wikipedia article</a> */ public void setJsonpParameterNames(Set<String> jsonpParameterNames) { this.jsonpParameterNames = jsonpParameterNames; } private String getJsonpParameterValue(HttpServletRequest request) { if (this.jsonpParameterNames != null) { for (String name : this.jsonpParameterNames) { String value = request.getParameter(name); if (StringUtils.isEmpty(value)) { continue; } if (!isValidJsonpQueryParam(value)) { if (logger.isDebugEnabled()) { logger.debug("Ignoring invalid jsonp parameter value: " + value); } continue; } return value; } } return null; } /** * Validate the jsonp query parameter value. The default implementation * returns true if it consists of digits, letters, or "_" and ".". * Invalid parameter values are ignored. * @param value the query param value, never {@code null} * @since 4.1.8 */ protected boolean isValidJsonpQueryParam(String value) { return CALLBACK_PARAM_PATTERN.matcher(value).matches(); } /** * Filter out undesired attributes from the given model. * The return value can be either another {@link Map} or a single value object. * <p>The default implementation removes {@link BindingResult} instances and entries * not included in the {@link #setModelKeys renderedAttributes} property. * @param model the model, as passed on to {@link #renderMergedOutputModel} * @return the value to be rendered */ @Override protected Object filterModel(Map<String, Object> model) { Map<String, Object> result = new HashMap<>(model.size()); Set<String> modelKeys = (!CollectionUtils.isEmpty(this.modelKeys) ? 
this.modelKeys : model.keySet()); for (Map.Entry<String, Object> entry : model.entrySet()) { if (!(entry.getValue() instanceof BindingResult) && modelKeys.contains(entry.getKey()) && !entry.getKey().equals(JsonView.class.getName()) && !entry.getKey().equals(FilterProvider.class.getName())) { result.put(entry.getKey(), entry.getValue()); } } return (this.extractValueFromSingleKeyModel && result.size() == 1 ? result.values().iterator().next() : result); } @Override protected Object filterAndWrapModel(Map<String, Object> model, HttpServletRequest request) { Object value = super.filterAndWrapModel(model, request); String jsonpParameterValue = getJsonpParameterValue(request); if (jsonpParameterValue != null) { if (value instanceof MappingJacksonValue) { ((MappingJacksonValue) value).setJsonpFunction(jsonpParameterValue); } else { MappingJacksonValue container = new MappingJacksonValue(value); container.setJsonpFunction(jsonpParameterValue); value = container; } } return value; } @Override protected void writePrefix(JsonGenerator generator, Object object) throws IOException { if (this.jsonPrefix != null) { generator.writeRaw(this.jsonPrefix); } String jsonpFunction = null; if (object instanceof MappingJacksonValue) { jsonpFunction = ((MappingJacksonValue) object).getJsonpFunction(); } if (jsonpFunction != null) { generator.writeRaw("/**/"); generator.writeRaw(jsonpFunction + "(" ); } } @Override protected void writeSuffix(JsonGenerator generator, Object object) throws IOException { String jsonpFunction = null; if (object instanceof MappingJacksonValue) { jsonpFunction = ((MappingJacksonValue) object).getJsonpFunction(); } if (jsonpFunction != null) { generator.writeRaw(");"); } } @Override protected void setResponseContentType(HttpServletRequest request, HttpServletResponse response) { if (getJsonpParameterValue(request) != null) { response.setContentType(DEFAULT_JSONP_CONTENT_TYPE); } else { super.setResponseContentType(request, response); } } }
package com.redhat.ceylon.compiler.js; import java.util.List; import java.util.Map; import com.redhat.ceylon.compiler.js.util.TypeUtils; import com.redhat.ceylon.compiler.typechecker.tree.Node; import com.redhat.ceylon.compiler.typechecker.tree.Tree; import com.redhat.ceylon.compiler.typechecker.tree.Tree.PositionalArgument; import com.redhat.ceylon.compiler.typechecker.tree.Tree.SequencedArgument; import com.redhat.ceylon.model.typechecker.model.Functional; import com.redhat.ceylon.model.typechecker.model.Generic; import com.redhat.ceylon.model.typechecker.model.Type; import com.redhat.ceylon.model.typechecker.model.TypeParameter; import com.redhat.ceylon.model.typechecker.model.ModelUtil; public class SequenceGenerator { static void lazyEnumeration(final List<Tree.PositionalArgument> args, final Node node, final Type seqType, final boolean spread, final GenerateJsVisitor gen) { Tree.PositionalArgument seqarg = spread ? args.get(args.size()-1) : null; if (args.size() == 1 && seqarg instanceof Tree.Comprehension) { //Shortcut: just do the comprehension seqarg.visit(gen); return; } final String idxvar = gen.getNames().createTempVariable(); gen.out(gen.getClAlias(), "sarg$(function(", idxvar,"){switch(",idxvar,"){"); int count=0; for (Tree.PositionalArgument expr : args) { if (expr == seqarg) { gen.out("}return ", gen.getClAlias(), "finished();},function(){return "); if (gen.isInDynamicBlock() && expr instanceof Tree.SpreadArgument && ModelUtil.isTypeUnknown(expr.getTypeModel())) { TypeUtils.spreadArrayCheck(((Tree.SpreadArgument)expr).getExpression(), gen); } else { expr.visit(gen); } gen.out(";},"); } else { gen.out("case ", Integer.toString(count), ":return "); expr.visit(gen); gen.out(";"); } count++; } if (seqarg == null) { gen.out("}return ", gen.getClAlias(), "finished();},undefined,"); } TypeUtils.printTypeArguments(node, seqType.getTypeArguments(), gen, false, seqType.getVarianceOverrides()); gen.out(")"); } static void sequenceEnumeration(final 
Tree.SequenceEnumeration that, final GenerateJsVisitor gen) { final Tree.SequencedArgument sarg = that.getSequencedArgument(); if (sarg == null) { gen.out(gen.getClAlias(), "empty()"); } else { final List<Tree.PositionalArgument> positionalArguments = sarg.getPositionalArguments(); final boolean spread = isSpread(positionalArguments); final boolean canBeEager = allLiterals(positionalArguments); if (spread || !canBeEager) { lazyEnumeration(positionalArguments, that, that.getTypeModel(), spread, gen); return; } else { gen.out("["); } int count=0; for (Tree.PositionalArgument expr : positionalArguments) { if (count > 0) { gen.out(","); } if (gen.isInDynamicBlock() && expr instanceof Tree.ListedArgument && ModelUtil.isTypeUnknown(expr.getTypeModel()) && expr.getParameter() != null && !ModelUtil.isTypeUnknown(expr.getParameter().getType())) { //TODO find out how to test this, if at all possible TypeUtils.generateDynamicCheck(((Tree.ListedArgument)expr).getExpression(), expr.getParameter().getType(), gen, false, that.getTypeModel().getTypeArguments()); } else { expr.visit(gen); } count++; } closeSequenceWithReifiedType(that, that.getTypeModel().getTypeArguments(), gen, true); } } static void sequencedArgument(final Tree.SequencedArgument that, final GenerateJsVisitor gen) { final List<Tree.PositionalArgument> positionalArguments = that.getPositionalArguments(); final boolean spread = isSpread(positionalArguments); if (!spread) { gen.out("["); } boolean first=true; for (Tree.PositionalArgument arg: positionalArguments) { if (!first) { gen.out(","); } if (arg instanceof Tree.ListedArgument) { ((Tree.ListedArgument) arg).getExpression().visit(gen); } else if(arg instanceof Tree.SpreadArgument) { ((Tree.SpreadArgument) arg).getExpression().visit(gen); } else {// comprehension arg.visit(gen); } first = false; } if (!spread) { gen.out("]"); } } /** SpreadOp cannot be a simple function call because we need to reference the object methods directly, so it's a function */ static 
void generateSpread(final Tree.QualifiedMemberOrTypeExpression that, final GenerateJsVisitor gen) { //Determine if it's a method or attribute boolean isMethod = that.getDeclaration() instanceof Functional; if (isMethod) { gen.out(gen.getClAlias(), "JsCallableList("); gen.supervisit(that); gen.out(",function(e,a){return ", gen.memberAccess(that, "e"), ".apply(e,a);}"); if (that.getTypeArguments() != null && that.getTypeArguments().getTypeModels()!=null && !that.getTypeArguments().getTypeModels().isEmpty()) { gen.out(","); TypeUtils.printTypeArguments(that, TypeUtils.matchTypeParametersWithArguments( ((Generic)that.getDeclaration()).getTypeParameters(), that.getTypeArguments().getTypeModels()), gen, true, null); } gen.out(")"); } else { gen.supervisit(that); gen.out(".collect(function(e){return ", gen.memberAccess(that, "e"), ";},{Result$collect:"); TypeUtils.typeNameOrList(that, that.getTypeModel().getTypeArgumentList().get(0), gen, false); gen.out("})"); } } static boolean isSpread(List<Tree.PositionalArgument> args) { return !args.isEmpty() && args.get(args.size()-1) instanceof Tree.ListedArgument == false; } static boolean allLiterals(List<Tree.PositionalArgument> args) { for (Tree.PositionalArgument a : args) { if (a instanceof Tree.ListedArgument) { if (((Tree.ListedArgument) a).getExpression().getTerm() instanceof Tree.Literal == false) { return false; } } else { return false; } } return true; } /** Closes a native array and invokes reifyCeylonType (rt$) with the specified type parameters. 
*/ static void closeSequenceWithReifiedType(final Node that, final Map<TypeParameter,Type> types, final GenerateJsVisitor gen, boolean wantsIterable) { if(wantsIterable) gen.out("].rt$("); else gen.out("].$sa$("); boolean nonempty=false; Type elem = null; for (Map.Entry<TypeParameter,Type> e : types.entrySet()) { if (e.getKey().getName().equals("Element")) { elem = e.getValue(); } else if (e.getKey().equals(that.getUnit().getIterableDeclaration().getTypeParameters().get(1))) { //If it's Nothing, it's nonempty nonempty = "ceylon.language::Nothing".equals(e.getValue().asQualifiedString()); } } if (elem == null) { gen.out("/*WARNING no Element found* /"); elem = that.getUnit().getAnythingType(); } TypeUtils.typeNameOrList(that, elem, gen, false); if (nonempty) { gen.out(",1"); } gen.out(")"); } static void tuple(final Tree.Tuple that, final GenerateJsVisitor gen) { SequencedArgument sarg = that.getSequencedArgument(); if (sarg == null) { gen.out(gen.getClAlias(), "empty()"); } else { final List<PositionalArgument> positionalArguments = sarg.getPositionalArguments(); final boolean spread = SequenceGenerator.isSpread(positionalArguments); int lim = positionalArguments.size()-1; gen.out(gen.getClAlias(), "tpl$(["); int count = 0; for (PositionalArgument expr : positionalArguments) { if (!(count==lim && spread)) { if (count > 0) { gen.out(","); } expr.visit(gen); } count++; } gen.out("]"); if (spread) { gen.out(","); positionalArguments.get(lim).visit(gen); } gen.out(")"); } } }
package AST;

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.io.File;
import java.util.*;
import beaver.*;
import java.util.ArrayList;
import java.util.zip.*;
import java.io.*;
import java.util.Stack;
import java.util.regex.Pattern;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.Transformer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Element;
import org.w3c.dom.Document;
import java.util.HashMap;
import java.util.Map.Entry;
import javax.xml.transform.TransformerException;
import javax.xml.parsers.ParserConfigurationException;
import java.util.Collection;

/**
 * AST node for a single-expression annotation element value (JastAddJ generated
 * code; do not hand-edit the generated members — change the .ast/.jrag sources).
 * @ast node
 * @declaredat Annotations.ast:11
 */
public class ElementConstantValue extends ElementValue implements Cloneable {
  /**
   * Flushes cached attribute values on this node (delegates to the superclass).
   * @apilvl low-level
   */
  public void flushCache() {
    super.flushCache();
  }
  /**
   * Flushes cached collection-attribute values (delegates to the superclass).
   * @apilvl internal
   */
  public void flushCollectionCache() {
    super.flushCollectionCache();
  }
  /**
   * Shallow clone; resets the circularity and finalization flags so the clone
   * re-evaluates its attributes independently.
   * @apilvl internal
   */
  @SuppressWarnings({"unchecked", "cast"})
  public ElementConstantValue clone() throws CloneNotSupportedException {
    ElementConstantValue node = (ElementConstantValue)super.clone();
    node.in$Circle(false);
    node.is$Final(false);
    return node;
  }
  /**
   * Copy of this node sharing child references (children array is cloned, the
   * child nodes themselves are not). Returns null if cloning fails.
   * @apilvl internal
   */
  @SuppressWarnings({"unchecked", "cast"})
  public ElementConstantValue copy() {
    try {
      ElementConstantValue node = (ElementConstantValue)clone();
      if(children != null)
        node.children = (ASTNode[])children.clone();
      return node;
    } catch (CloneNotSupportedException e) {
      // Unreachable in practice since this class implements Cloneable;
      // the error message below is printed as a last resort.
    }
    System.err.println("Error: Could not clone node of type " + getClass().getName() + "!");
    return null;
  }
  /**
   * Deep copy: recursively copies this node and all its children.
   * @apilvl low-level
   */
  @SuppressWarnings({"unchecked", "cast"})
  public ElementConstantValue fullCopy() {
    ElementConstantValue res = (ElementConstantValue)copy();
    for(int i = 0; i < getNumChildNoTransform(); i++) {
      ASTNode node = getChildNoTransform(i);
      if(node != null)
        node = node.fullCopy();
      res.setChild(node, i);
    }
    return res;
  }
  /**
   * Inside a java.lang.annotation.Target annotation, reports an error when the
   * same ElementType constant is listed more than once.
   * @ast method
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:169
   */
  public void nameCheck() {
    if(enclosingAnnotationDecl().fullName().equals("java.lang.annotation.Target")) {
      Variable v = getExpr().varDecl();
      if(v != null && v.hostType().fullName().equals("java.lang.annotation.ElementType"))
        // lookupElementTypeValue finds the FIRST value for this name; if that is
        // not this node, this node is a repeat.
        if(lookupElementTypeValue(v.name()) != this)
          error("repeated annotation target");
    }
  }
  /**
   * Pretty-prints this element value as its underlying expression.
   * @ast method
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:593
   */
  public void toString(StringBuffer s) {
    getExpr().toString(s);
  }
  /**
   * Emits this element value into a class-file annotation attribute using the
   * element_value encoding (JVMS 4.7.16.1): a constant tag + constant-pool index,
   * 'c' + class index for class literals, or 'e' + type/name indices for enum
   * constants.
   * @ast method
   * @aspect AnnotationsCodegen
   * @declaredat D:\zhh\JastAddJ\Java1.5Backend\AnnotationsCodegen.jrag:197
   */
  public void appendAsAttributeTo(Attribute buf) {
    if(getExpr().isConstant() && !getExpr().type().isEnumDecl()) {
      // Strings use tag 's'; other primitives use the first char of their descriptor.
      char tag = getExpr().type().isString() ? 's' : getExpr().type().typeDescriptor().charAt(0);
      int const_value_index = getExpr().type().addAnnotConstant(hostType().constantPool(), getExpr().constant());
      buf.u1(tag);
      buf.u2(const_value_index);
    }
    else if(getExpr().isClassAccess()) {
      int const_class_index = hostType().constantPool().addUtf8(getExpr().type().typeDescriptor());
      buf.u1('c');
      buf.u2(const_class_index);
    }
    else {
      Variable v = getExpr().varDecl();
      if(v == null) throw new Error("Expected Enumeration constant");
      int type_name_index = hostType().constantPool().addUtf8(v.type().typeDescriptor());
      int const_name_index = hostType().constantPool().addUtf8(v.name());
      buf.u1('e');
      buf.u2(type_name_index);
      buf.u2(const_name_index);
    }
  }
  /**
   * @ast method
   * @declaredat Annotations.ast:1
   */
  public ElementConstantValue() {
    super();
  }
  /**
   * Constructs the node with its single Expr child.
   * @ast method
   * @declaredat Annotations.ast:7
   */
  public ElementConstantValue(Expr p0) {
    setChild(p0, 0);
  }
  /**
   * This node type has exactly one child (the Expr).
   * @apilvl low-level
   * @ast method
   * @declaredat Annotations.ast:13
   */
  protected int numChildren() {
    return 1;
  }
  /**
   * @apilvl internal
   * @ast method
   * @declaredat Annotations.ast:19
   */
  public boolean mayHaveRewrite() {
    return false;
  }
  /**
   * Setter for Expr
   * @apilvl high-level
   * @ast method
   * @declaredat Annotations.ast:5
   */
  public void setExpr(Expr node) {
    setChild(node, 0);
  }
  /**
   * Getter for Expr
   * @apilvl high-level
   * @ast method
   * @declaredat Annotations.ast:12
   */
  public Expr getExpr() {
    return (Expr)getChild(0);
  }
  /**
   * Getter for Expr that does not trigger rewrites.
   * @apilvl low-level
   * @ast method
   * @declaredat Annotations.ast:18
   */
  public Expr getExprNoTransform() {
    return (Expr)getChildNoTransform(0);
  }
  /**
   * Synthesized attribute: whether this value is a legal target for annotation a.
   * @attribute syn
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:58
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean validTarget(Annotation a) {
    ASTNode$State state = state();
    boolean validTarget_Annotation_value = validTarget_compute(a);
    return validTarget_Annotation_value;
  }
  /**
   * @apilvl internal
   */
  private boolean validTarget_compute(Annotation a) {
    Variable v = getExpr().varDecl();
    // Non-variable expressions are conservatively treated as valid.
    if(v == null) return true;
    return v.hostType().fullName().equals("java.lang.annotation.ElementType") && a.mayUseAnnotationTarget(v.name());
  }
  /**
   * Synthesized attribute: returns this node if it names the given ElementType
   * constant, otherwise null. Used by nameCheck for duplicate detection.
   * @attribute syn
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:182
   */
  @SuppressWarnings({"unchecked", "cast"})
  public ElementValue definesElementTypeValue(String name) {
    ASTNode$State state = state();
    ElementValue definesElementTypeValue_String_value = definesElementTypeValue_compute(name);
    return definesElementTypeValue_String_value;
  }
  /**
   * @apilvl internal
   */
  private ElementValue definesElementTypeValue_compute(String name) {
    Variable v = getExpr().varDecl();
    if(v != null && v.hostType().fullName().equals("java.lang.annotation.ElementType") && v.name().equals(name))
      return this;
    return null;
  }
  /**
   * Synthesized attribute: whether this value is the constant string s.
   * @attribute syn
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:296
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean hasValue(String s) {
    ASTNode$State state = state();
    boolean hasValue_String_value = hasValue_compute(s);
    return hasValue_String_value;
  }
  /**
   * @apilvl internal
   */
  private boolean hasValue_compute(String s) {
    return getExpr().type().isString() && getExpr().isConstant() && getExpr().constant().stringValue().equals(s);
  }
  /**
   * Synthesized attribute: whether this value is assignable to the declared type
   * of the annotation element (JLS commensurability rules).
   * @attribute syn
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:474
   */
  @SuppressWarnings({"unchecked", "cast"})
  public boolean commensurateWithTypeDecl(TypeDecl type) {
    ASTNode$State state = state();
    boolean commensurateWithTypeDecl_TypeDecl_value = commensurateWithTypeDecl_compute(type);
    return commensurateWithTypeDecl_TypeDecl_value;
  }
  /**
   * @apilvl internal
   */
  private boolean commensurateWithTypeDecl_compute(TypeDecl type) {
    Expr v = getExpr();
    if(!v.type().assignConversionTo(type, v))
      return false;
    // Primitive and String element values must be compile-time constants.
    if((type.isPrimitive() || type.isString()) && !v.isConstant())
      return false;
    if(v.type().isNull())
      return false;
    if(type.fullName().equals("java.lang.Class") && !v.isClassAccess())
      return false;
    if(type.isEnumDecl() && (v.varDecl() == null || !(v.varDecl() instanceof EnumConstant)))
      return false;
    return true;
  }
  /**
   * Synthesized attribute: the type of this element value is its expression's type.
   * @attribute syn
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:507
   */
  @SuppressWarnings({"unchecked", "cast"})
  public TypeDecl type() {
    ASTNode$State state = state();
    TypeDecl type_value = type_compute();
    return type_value;
  }
  /**
   * @apilvl internal
   */
  private TypeDecl type_compute() {
    return getExpr().type();
  }
  /**
   * Inherited attribute: looks up the element value for an ElementType name in
   * the enclosing context (delegates to the parent's Define equation).
   * @attribute inh
   * @aspect Annotations
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:177
   */
  @SuppressWarnings({"unchecked", "cast"})
  public ElementValue lookupElementTypeValue(String name) {
    ASTNode$State state = state();
    ElementValue lookupElementTypeValue_String_value = getParent().Define_ElementValue_lookupElementTypeValue(this, null, name);
    return lookupElementTypeValue_String_value;
  }
  /**
   * Defines the name-classification context for the child expression: names in
   * element values are ambiguous until resolved.
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:546
   * @apilvl internal
   */
  public NameType Define_NameType_nameType(ASTNode caller, ASTNode child) {
    if(caller == getExprNoTransform()) {
      return NameType.AMBIGUOUS_NAME;
    }
    return getParent().Define_NameType_nameType(this, caller);
  }
  /**
   * Defines the method-host context for the child expression as the enclosing
   * annotation declaration's type name.
   * @declaredat D:\zhh\JastAddJ\Java1.5Frontend\Annotations.jrag:551
   * @apilvl internal
   */
  public String Define_String_methodHost(ASTNode caller, ASTNode child) {
    if(caller == getExprNoTransform()) {
      return enclosingAnnotationDecl().typeName();
    }
    return getParent().Define_String_methodHost(this, caller);
  }
  /**
   * No rewrites are defined for this node type.
   * @apilvl internal
   */
  public ASTNode rewriteTo() {
    return super.rewriteTo();
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.camel.component.aws.msk.springboot;

import javax.annotation.Generated;
import com.amazonaws.services.kafka.AWSKafka;
import org.apache.camel.component.aws.msk.MSKOperations;
import org.apache.camel.spring.boot.ComponentConfigurationPropertiesCommon;
import org.springframework.boot.context.properties.ConfigurationProperties;

/**
 * The aws-msk component is used for managing Amazon MSK.
 *
 * Generated by camel-package-maven-plugin - do not edit this file!
 */
@Generated("org.apache.camel.maven.packaging.SpringBootAutoConfigurationMojo")
@ConfigurationProperties(prefix = "camel.component.aws-msk")
public class MSKComponentConfiguration
        extends
            ComponentConfigurationPropertiesCommon {

    /**
     * Whether to enable auto configuration of the aws-msk component. This is
     * enabled by default.
     */
    // NOTE(review): no getter/setter is generated for this field here; presumably
    // it exists only for configuration-metadata generation — confirm against the
    // camel-package-maven-plugin output for other components.
    private Boolean enabled;
    /**
     * The AWS MSK default configuration
     */
    private MSKConfigurationNestedConfiguration configuration;
    /**
     * Amazon AWS Access Key
     */
    private String accessKey;
    /**
     * Amazon AWS Secret Key
     */
    private String secretKey;
    /**
     * The region in which MSK client needs to work
     */
    private String region;
    /**
     * Whether the component should resolve property placeholders on itself when
     * starting. Only properties which are of String type can use property
     * placeholders.
     */
    private Boolean resolvePropertyPlaceholders = true;
    /**
     * Whether the component should use basic property binding (Camel 2.x) or
     * the newer property binding with additional capabilities
     */
    private Boolean basicPropertyBinding = false;

    public MSKConfigurationNestedConfiguration getConfiguration() {
        return configuration;
    }

    public void setConfiguration(
            MSKConfigurationNestedConfiguration configuration) {
        this.configuration = configuration;
    }

    public String getAccessKey() {
        return accessKey;
    }

    public void setAccessKey(String accessKey) {
        this.accessKey = accessKey;
    }

    public String getSecretKey() {
        return secretKey;
    }

    public void setSecretKey(String secretKey) {
        this.secretKey = secretKey;
    }

    public String getRegion() {
        return region;
    }

    public void setRegion(String region) {
        this.region = region;
    }

    public Boolean getResolvePropertyPlaceholders() {
        return resolvePropertyPlaceholders;
    }

    public void setResolvePropertyPlaceholders(
            Boolean resolvePropertyPlaceholders) {
        this.resolvePropertyPlaceholders = resolvePropertyPlaceholders;
    }

    public Boolean getBasicPropertyBinding() {
        return basicPropertyBinding;
    }

    public void setBasicPropertyBinding(Boolean basicPropertyBinding) {
        this.basicPropertyBinding = basicPropertyBinding;
    }

    /**
     * Mirror of org.apache.camel.component.aws.msk.MSKConfiguration so that its
     * options can be bound from camel.component.aws-msk.configuration.* properties.
     */
    public static class MSKConfigurationNestedConfiguration {
        public static final Class CAMEL_NESTED_CLASS = org.apache.camel.component.aws.msk.MSKConfiguration.class;
        /**
         * To use a existing configured AWS MSK as client
         */
        private AWSKafka mskClient;
        /**
         * Amazon AWS Access Key
         */
        private String accessKey;
        /**
         * Amazon AWS Secret Key
         */
        private String secretKey;
        /**
         * The operation to perform
         */
        private MSKOperations operation;
        /**
         * To define a proxy host when instantiating the MSK client
         */
        private String proxyHost;
        /**
         * To define a proxy port when instantiating the MSK client
         */
        private Integer proxyPort;
        /**
         * The region in which MSK client needs to work. When using this
         * parameter, the configuration will expect the capitalized name of the
         * region (for example AP_EAST_1) You'll need to use the name
         * Regions.EU_WEST_1.name()
         */
        private String region;

        public AWSKafka getMskClient() {
            return mskClient;
        }

        public void setMskClient(AWSKafka mskClient) {
            this.mskClient = mskClient;
        }

        public String getAccessKey() {
            return accessKey;
        }

        public void setAccessKey(String accessKey) {
            this.accessKey = accessKey;
        }

        public String getSecretKey() {
            return secretKey;
        }

        public void setSecretKey(String secretKey) {
            this.secretKey = secretKey;
        }

        public MSKOperations getOperation() {
            return operation;
        }

        public void setOperation(MSKOperations operation) {
            this.operation = operation;
        }

        public String getProxyHost() {
            return proxyHost;
        }

        public void setProxyHost(String proxyHost) {
            this.proxyHost = proxyHost;
        }

        public Integer getProxyPort() {
            return proxyPort;
        }

        public void setProxyPort(Integer proxyPort) {
            this.proxyPort = proxyPort;
        }

        public String getRegion() {
            return region;
        }

        public void setRegion(String region) {
            this.region = region;
        }
    }
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.tribe; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Supplier; import org.apache.lucene.util.BytesRef; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterChangedEvent; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ClusterStateListener; import org.elasticsearch.cluster.ClusterStateTaskConfig; import org.elasticsearch.cluster.ClusterStateTaskExecutor; import org.elasticsearch.cluster.block.ClusterBlock; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.UUIDs; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.regex.Regex; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.env.Environment; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.transport.TransportSettings; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Function; import java.util.stream.Collectors; import static java.util.Collections.unmodifiableMap; /** * The tribe service holds a list of node clients connected to a list of tribe members, and uses their * cluster state events to update this local node cluster state with the merged view of it. * <p> * The {@link #processSettings(org.elasticsearch.common.settings.Settings)} method should be called before * starting the node, so it will make sure to configure this current node properly with the relevant tribe node * settings. * <p> * The tribe node settings make sure the discovery used is "local", but with no master elected. 
This means no * write level master node operations will work ({@link org.elasticsearch.discovery.MasterNotDiscoveredException} * will be thrown), and state level metadata operations with automatically use the local flag. * <p> * The state merged from different clusters include the list of nodes, metadata, and routing table. Each node merged * will have in its tribe which tribe member it came from. Each index merged will have in its settings which tribe * member it came from. In case an index has already been merged from one cluster, and the same name index is discovered * in another cluster, the conflict one will be discarded. This happens because we need to have the correct index name * to propagate to the relevant cluster. */ public class TribeService extends AbstractLifecycleComponent { public static final ClusterBlock TRIBE_METADATA_BLOCK = new ClusterBlock(10, "tribe node, metadata not allowed", false, false, RestStatus.BAD_REQUEST, EnumSet.of(ClusterBlockLevel.METADATA_READ, ClusterBlockLevel.METADATA_WRITE)); public static final ClusterBlock TRIBE_WRITE_BLOCK = new ClusterBlock(11, "tribe node, write not allowed", false, false, RestStatus.BAD_REQUEST, EnumSet.of(ClusterBlockLevel.WRITE)); public static Settings processSettings(Settings settings) { if (TRIBE_NAME_SETTING.exists(settings)) { // if its a node client started by this service as tribe, remove any tribe group setting // to avoid recursive configuration Settings.Builder sb = Settings.builder().put(settings); for (String s : settings.getAsMap().keySet()) { if (s.startsWith("tribe.") && !s.equals(TRIBE_NAME_SETTING.getKey())) { sb.remove(s); } } return sb.build(); } Map<String, Settings> nodesSettings = settings.getGroups("tribe", true); if (nodesSettings.isEmpty()) { return settings; } // its a tribe configured node..., force settings Settings.Builder sb = Settings.builder().put(settings); sb.put(Node.NODE_MASTER_SETTING.getKey(), false); sb.put(Node.NODE_DATA_SETTING.getKey(), false); 
sb.put(Node.NODE_INGEST_SETTING.getKey(), false); if (!NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.exists(settings)) { sb.put(NodeEnvironment.MAX_LOCAL_STORAGE_NODES_SETTING.getKey(), nodesSettings.size()); } sb.put(DiscoveryModule.DISCOVERY_TYPE_SETTING.getKey(), "none"); // a tribe node should not use zen discovery // nothing is going to be discovered, since no master will be elected sb.put(DiscoverySettings.INITIAL_STATE_TIMEOUT_SETTING.getKey(), 0); if (sb.get("cluster.name") == null) { sb.put("cluster.name", "tribe_" + UUIDs.randomBase64UUID()); // make sure it won't join other tribe nodes in the same JVM } sb.put(TransportMasterNodeReadAction.FORCE_LOCAL_SETTING.getKey(), true); return sb.build(); } /** * Interface to allow merging {@link org.elasticsearch.cluster.metadata.MetaData.Custom} in tribe node * When multiple Mergable Custom metadata of the same type is found (from underlying clusters), the * Custom metadata will be merged using {@link #merge(MetaData.Custom)} and the result will be stored * in the tribe cluster state * * @param <T> type of custom meta data */ public interface MergableCustomMetaData<T extends MetaData.Custom> { /** * Merges this custom metadata with other, returning either this or <code>other</code> custom metadata * for tribe cluster state. This method should not mutate either <code>this</code> or the * <code>other</code> custom metadata. * * @param other custom meta data * @return the same instance or <code>other</code> custom metadata based on implementation * if both the instances are considered equal, implementations should return this * instance to avoid redundant cluster state changes. 
 */
        T merge(T other);
    }

    // internal settings only

    /** Name of the tribe; set on each tribe client node and stamped on nodes/indices merged in from that tribe. */
    public static final Setting<String> TRIBE_NAME_SETTING = Setting.simpleString("tribe.name", Property.NodeScope);

    private final ClusterService clusterService;
    // index-name patterns (simple wildcards) for which per-index blocks are added on merged indices
    private final String[] blockIndicesWrite;
    private final String[] blockIndicesRead;
    private final String[] blockIndicesMetadata;

    // legal on_conflict modes: keep whichever tribe came first, drop the index, or prefer a named tribe ("prefer_<name>")
    private static final String ON_CONFLICT_ANY = "any", ON_CONFLICT_DROP = "drop", ON_CONFLICT_PREFER = "prefer_";

    /** How to resolve two tribes exposing an index with the same name: "any", "drop", or "prefer_&lt;tribe&gt;". */
    public static final Setting<String> ON_CONFLICT_SETTING = new Setting<>("tribe.on_conflict", ON_CONFLICT_ANY, (s) -> {
        switch (s) {
            case ON_CONFLICT_ANY:
            case ON_CONFLICT_DROP:
                return s;
            default:
                // anything else must be "prefer_" followed by a non-empty tribe name
                if (s.startsWith(ON_CONFLICT_PREFER) && s.length() > ON_CONFLICT_PREFER.length()) {
                    return s;
                }
                throw new IllegalArgumentException(
                        "Invalid value for [tribe.on_conflict] must be either [any, drop or start with prefer_] but was: [" + s + "]");
        }
    }, Property.NodeScope);

    /** When true (and at least one tribe is configured), a cluster-wide metadata block is installed at startup. */
    public static final Setting<Boolean> BLOCKS_METADATA_SETTING = Setting.boolSetting("tribe.blocks.metadata", false,
            Property.NodeScope);
    /** When true (and at least one tribe is configured), a cluster-wide write block is installed at startup. */
    public static final Setting<Boolean> BLOCKS_WRITE_SETTING = Setting.boolSetting("tribe.blocks.write", false,
            Property.NodeScope);
    // per-index block patterns; matched by Regex.simpleMatch against merged index names in addNewIndex()
    public static final Setting<List<String>> BLOCKS_WRITE_INDICES_SETTING = Setting.listSetting("tribe.blocks.write.indices",
            Collections.emptyList(), Function.identity(), Property.NodeScope);
    public static final Setting<List<String>> BLOCKS_READ_INDICES_SETTING = Setting.listSetting("tribe.blocks.read.indices",
            Collections.emptyList(), Function.identity(), Property.NodeScope);
    public static final Setting<List<String>> BLOCKS_METADATA_INDICES_SETTING = Setting.listSetting("tribe.blocks.metadata.indices",
            Collections.emptyList(), Function.identity(), Property.NodeScope);

    /** All tribe-specific setting keys, e.g. so they can be filtered out elsewhere. */
    public static final Set<String> TRIBE_SETTING_KEYS = Sets.newHashSet(TRIBE_NAME_SETTING.getKey(), ON_CONFLICT_SETTING.getKey(),
            BLOCKS_METADATA_INDICES_SETTING.getKey(), BLOCKS_METADATA_SETTING.getKey(), BLOCKS_READ_INDICES_SETTING.getKey(),
            BLOCKS_WRITE_INDICES_SETTING.getKey(), BLOCKS_WRITE_SETTING.getKey());

    // these settings should be passed through to each tribe client, if they are not set explicitly
    private static final List<Setting<?>> PASS_THROUGH_SETTINGS = Arrays.asList(
        NetworkService.GLOBAL_NETWORK_HOST_SETTING,
        NetworkService.GLOBAL_NETWORK_BINDHOST_SETTING,
        NetworkService.GLOBAL_NETWORK_PUBLISHHOST_SETTING,
        TransportSettings.HOST,
        TransportSettings.BIND_HOST,
        TransportSettings.PUBLISH_HOST
    );
    // resolved value of ON_CONFLICT_SETTING, consulted on index-name conflicts between tribes
    private final String onConflict;

    // names of indices dropped because of an on_conflict=drop decision; such an index is never re-added
    private final Set<String> droppedIndices = ConcurrentCollections.newConcurrentSet();

    // one internal client node per configured tribe
    private final List<Node> nodes = new CopyOnWriteArrayList<>();

    /**
     * Creates the tribe service: builds one client node per "tribe.*" settings group and,
     * if any tribe is configured, installs the configured cluster-wide write/metadata blocks.
     *
     * @param settings          the tribe node's global settings
     * @param clusterService    cluster service of the tribe node itself
     * @param tribeNodeId       node id of the tribe node; used to derive deterministic client node ids
     * @param clientNodeBuilder factory turning the derived client settings into a {@code Node}
     */
    public TribeService(Settings settings, ClusterService clusterService, final String tribeNodeId,
                        Function<Settings, Node> clientNodeBuilder) {
        super(settings);
        this.clusterService = clusterService;
        Map<String, Settings> nodesSettings = new HashMap<>(settings.getGroups("tribe", true));
        nodesSettings.remove("blocks"); // remove prefix settings that don't indicate a client
        nodesSettings.remove("on_conflict"); // remove prefix settings that don't indicate a client
        for (Map.Entry<String, Settings> entry : nodesSettings.entrySet()) {
            Settings clientSettings = buildClientSettings(entry.getKey(), tribeNodeId, settings, entry.getValue());
            nodes.add(clientNodeBuilder.apply(clientSettings));
        }

        this.blockIndicesMetadata = BLOCKS_METADATA_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
        this.blockIndicesRead = BLOCKS_READ_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);
        this.blockIndicesWrite = BLOCKS_WRITE_INDICES_SETTING.get(settings).toArray(Strings.EMPTY_ARRAY);

        if (!nodes.isEmpty()) {
            if (BLOCKS_WRITE_SETTING.get(settings)) {
                clusterService.addInitialStateBlock(TRIBE_WRITE_BLOCK);
            }
            if (BLOCKS_METADATA_SETTING.get(settings)) {
                clusterService.addInitialStateBlock(TRIBE_METADATA_BLOCK);
            }
        }

        this.onConflict = ON_CONFLICT_SETTING.get(settings);
    }

    // pkg private for testing
    /**
     * Builds node settings for a tribe client node from the tribe node's global settings,
     * combined with tribe specific settings.
     *
     * <p>The client is forced to be a coordinating-only node (no data/master/ingest roles, no
     * local storage) and its node id is derived deterministically from the parent node id and
     * the tribe name. Rejects any "path.*" setting inside the tribe group.
     */
    static Settings buildClientSettings(String tribeName, String parentNodeId, Settings globalSettings, Settings tribeSettings) {
        for (String tribeKey : tribeSettings.getAsMap().keySet()) {
            if (tribeKey.startsWith("path.")) {
                throw new IllegalArgumentException("Setting [" + tribeKey + "] not allowed in tribe client [" + tribeName + "]");
            }
        }
        Settings.Builder sb = Settings.builder().put(tribeSettings);
        sb.put(Node.NODE_NAME_SETTING.getKey(), Node.NODE_NAME_SETTING.get(globalSettings) + "/" + tribeName);
        sb.put(Environment.PATH_HOME_SETTING.getKey(), Environment.PATH_HOME_SETTING.get(globalSettings)); // pass through ES home dir
        if (Environment.PATH_CONF_SETTING.exists(globalSettings)) {
            sb.put(Environment.PATH_CONF_SETTING.getKey(), Environment.PATH_CONF_SETTING.get(globalSettings));
        }
        if (Environment.PATH_LOGS_SETTING.exists(globalSettings)) {
            sb.put(Environment.PATH_LOGS_SETTING.getKey(), Environment.PATH_LOGS_SETTING.get(globalSettings));
        }
        if (Environment.PATH_SCRIPTS_SETTING.exists(globalSettings)) {
            sb.put(Environment.PATH_SCRIPTS_SETTING.getKey(), Environment.PATH_SCRIPTS_SETTING.get(globalSettings));
        }
        // network/transport settings inherit from the parent unless the tribe group overrides them
        for (Setting<?> passthrough : PASS_THROUGH_SETTINGS) {
            if (passthrough.exists(tribeSettings) == false && passthrough.exists(globalSettings)) {
                sb.put(passthrough.getKey(), globalSettings.get(passthrough.getKey()));
            }
        }
        sb.put(TRIBE_NAME_SETTING.getKey(), tribeName);
        if (sb.get(NetworkModule.HTTP_ENABLED.getKey()) == null) {
            sb.put(NetworkModule.HTTP_ENABLED.getKey(), false);
        }
        sb.put(Node.NODE_DATA_SETTING.getKey(), false);
        sb.put(Node.NODE_MASTER_SETTING.getKey(), false);
        sb.put(Node.NODE_INGEST_SETTING.getKey(), false);

        // node id of a tribe client node is determined by node id of parent node and tribe name
        final BytesRef seedAsString = new BytesRef(parentNodeId + "/" + tribeName);
        long nodeIdSeed = MurmurHash3.hash128(seedAsString.bytes, seedAsString.offset, seedAsString.length, 0,
                new MurmurHash3.Hash128()).h1;
        sb.put(NodeEnvironment.NODE_ID_SEED_SETTING.getKey(), nodeIdSeed);
        sb.put(Node.NODE_LOCAL_STORAGE_SETTING.getKey(), false);
        return sb.build();
    }

    @Override
    protected void doStart() {
        if (nodes.isEmpty() == false) {
            // remove the initial election / recovery blocks since we are not going to have a
            // master elected in this single tribe node local "cluster"
            clusterService.removeInitialStateBlock(DiscoverySettings.NO_MASTER_BLOCK_ID);
            clusterService.removeInitialStateBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK);
        }
    }

    /**
     * Starts all tribe client nodes, registering a cluster-state listener on each one first.
     * If any node fails to start, every client node is closed before the failure is rethrown.
     */
    public void startNodes() {
        for (Node node : nodes) {
            try {
                getClusterService(node).addListener(new TribeClusterStateListener(node));
                node.start();
            } catch (Exception e) {
                // calling close is safe for non started nodes, we can just iterate over all
                for (Node otherNode : nodes) {
                    try {
                        otherNode.close();
                    } catch (Exception inner) {
                        inner.addSuppressed(e);
                        logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to close node {} on failed start",
                                otherNode), inner);
                    }
                }
                if (e instanceof RuntimeException) {
                    throw (RuntimeException) e;
                }
                throw new ElasticsearchException(e);
            }
        }
    }

    @Override
    protected void doStop() {
        doClose();
    }

    @Override
    protected void doClose() {
        // best-effort close of every client node; failures are logged, not propagated
        for (Node node : nodes) {
            try {
                node.close();
            } catch (Exception e) {
                logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to close node {}", node), e);
            }
        }
    }

    /**
     * Listens to cluster-state changes on one tribe client node and forwards each event
     * to the tribe node's cluster service as a state-update task.
     */
    class TribeClusterStateListener implements ClusterStateListener {
        private final String tribeName;
        private final TribeNodeClusterStateTaskExecutor executor;

        TribeClusterStateListener(Node tribeNode) {
            String tribeName = TRIBE_NAME_SETTING.get(tribeNode.settings());
            this.tribeName = tribeName;
            executor = new TribeNodeClusterStateTaskExecutor(tribeName);
        }

        @Override
        public void clusterChanged(final ClusterChangedEvent event) {
            logger.debug("[{}] received cluster event, [{}]", tribeName, event.source());
            clusterService.submitStateUpdateTask(
                "cluster event from " + tribeName,
                event,
                ClusterStateTaskConfig.build(Priority.NORMAL),
                executor,
                (source, e) -> logger.warn((Supplier<?>) () -> new ParameterizedMessage("failed to process [{}]", source), e));
        }
    }

    /**
     * Merges the cluster state of a single tribe (nodes, indices/metadata, custom metadata)
     * into the tribe node's own cluster state. Tasks from the same tribe are batched and only
     * the latest event in a batch is applied.
     */
    class TribeNodeClusterStateTaskExecutor implements ClusterStateTaskExecutor<ClusterChangedEvent> {
        private final String tribeName;

        TribeNodeClusterStateTaskExecutor(String tribeName) {
            this.tribeName = tribeName;
        }

        @Override
        public boolean runOnlyOnMaster() {
            return false;
        }

        @Override
        public String describeTasks(List<ClusterChangedEvent> tasks) {
            return tasks.stream().map(ClusterChangedEvent::source).reduce((s1, s2) -> s1 + ", " + s2).orElse("");
        }

        @Override
        public BatchResult<ClusterChangedEvent> execute(ClusterState currentState, List<ClusterChangedEvent> tasks) throws Exception {
            BatchResult.Builder<ClusterChangedEvent> builder = BatchResult.builder();
            ClusterState.Builder newState = ClusterState.builder(currentState).incrementVersion();
            boolean clusterStateChanged = updateNodes(currentState, tasks, newState);
            clusterStateChanged |= updateIndicesAndMetaData(currentState, tasks, newState);
            builder.successes(tasks);
            // only publish a new state if something actually changed
            return builder.build(clusterStateChanged ? newState.build() : currentState);
        }

        /**
         * Syncs the discovery nodes of this tribe into {@code newState}: removes nodes that
         * disappeared from the tribe and adds new ones, tagging each with the tribe name attribute.
         *
         * @return true if the node set was modified
         */
        private boolean updateNodes(ClusterState currentState, List<ClusterChangedEvent> tasks, ClusterState.Builder newState) {
            boolean clusterStateChanged = false;
            // we only need to apply the latest cluster state update
            ClusterChangedEvent latestTask = tasks.get(tasks.size() - 1);
            ClusterState tribeState = latestTask.state();
            DiscoveryNodes.Builder nodes = DiscoveryNodes.builder(currentState.nodes());
            // -- merge nodes
            // go over existing nodes, and see if they need to be removed
            for (DiscoveryNode discoNode : currentState.nodes()) {
                String markedTribeName = discoNode.getAttributes().get(TRIBE_NAME_SETTING.getKey());
                if (markedTribeName != null && markedTribeName.equals(tribeName)) {
                    if (tribeState.nodes().get(discoNode.getId()) == null) {
                        clusterStateChanged = true;
                        logger.info("[{}] removing node [{}]", tribeName, discoNode);
                        nodes.remove(discoNode.getId());
                    }
                }
            }
            // go over tribe nodes, and see if they need to be added
            for (DiscoveryNode tribe : tribeState.nodes()) {
                if (currentState.nodes().nodeExists(tribe) == false) {
                    // a new node, add it, but also add the tribe name to the attributes
                    Map<String, String> tribeAttr = new HashMap<>(tribe.getAttributes());
                    tribeAttr.put(TRIBE_NAME_SETTING.getKey(), tribeName);
                    DiscoveryNode discoNode = new DiscoveryNode(tribe.getName(), tribe.getId(), tribe.getEphemeralId(),
                            tribe.getHostName(), tribe.getHostAddress(), tribe.getAddress(), unmodifiableMap(tribeAttr),
                            tribe.getRoles(), tribe.getVersion());
                    clusterStateChanged = true;
                    logger.info("[{}] adding node [{}]", tribeName, discoNode);
                    nodes.remove(tribe.getId()); // remove any existing node with the same id but different ephemeral id
                    nodes.add(discoNode);
                }
            }
            if (clusterStateChanged) {
                newState.nodes(nodes);
            }
            return clusterStateChanged;
        }

        /**
         * Syncs indices, routing table, index blocks and custom metadata of this tribe into
         * {@code newState}, applying the configured on_conflict policy for index-name clashes.
         *
         * @return true if blocks/metadata/routing were modified
         */
        private boolean updateIndicesAndMetaData(ClusterState currentState, List<ClusterChangedEvent> tasks,
                                                 ClusterState.Builder newState) {
            // we only need to apply the latest cluster state update
            ClusterChangedEvent latestTask = tasks.get(tasks.size() - 1);
            ClusterState tribeState = latestTask.state();
            boolean clusterStateChanged = false;
            ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks());
            MetaData.Builder metaData = MetaData.builder(currentState.metaData());
            RoutingTable.Builder routingTable = RoutingTable.builder(currentState.routingTable());
            // go over existing indices, and see if they need to be removed
            for (IndexMetaData index : currentState.metaData()) {
                String markedTribeName = TRIBE_NAME_SETTING.get(index.getSettings());
                if (markedTribeName != null && markedTribeName.equals(tribeName)) {
                    IndexMetaData tribeIndex = tribeState.metaData().index(index.getIndex());
                    clusterStateChanged = true;
                    if (tribeIndex == null || tribeIndex.getState() == IndexMetaData.State.CLOSE) {
                        logger.info("[{}] removing index {}", tribeName, index.getIndex());
                        removeIndex(blocks, metaData, routingTable, index);
                    } else {
                        // always make sure to update the metadata and routing table, in case
                        // there are changes in them (new mapping, shards moving from initializing to started)
                        routingTable.add(tribeState.routingTable().index(index.getIndex()));
                        Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings())
                                .put(TRIBE_NAME_SETTING.getKey(), tribeName).build();
                        metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
                    }
                }
            }
            // go over the tribe's indices, and see if they need to be added
            for (IndexMetaData tribeIndex : tribeState.metaData()) {
                // if there is no routing table yet, do nothing with it...
                IndexRoutingTable table = tribeState.routingTable().index(tribeIndex.getIndex());
                if (table == null) {
                    continue;
                }
                //NOTE: we have to use the index name here since UUID are different even if the name is the same
                final String indexName = tribeIndex.getIndex().getName();
                final IndexMetaData indexMetaData = currentState.metaData().index(indexName);
                if (indexMetaData == null) {
                    if (!droppedIndices.contains(indexName)) {
                        // a new index, add it, and add the tribe name as a setting
                        clusterStateChanged = true;
                        logger.info("[{}] adding index {}", tribeName, tribeIndex.getIndex());
                        addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex);
                    }
                } else {
                    String existingFromTribe = TRIBE_NAME_SETTING.get(indexMetaData.getSettings());
                    if (!tribeName.equals(existingFromTribe)) {
                        // we have a potential conflict on index names, decide what to do...
                        if (ON_CONFLICT_ANY.equals(onConflict)) {
                            // we chose any tribe, carry on
                        } else if (ON_CONFLICT_DROP.equals(onConflict)) {
                            // drop the indices, there is a conflict
                            clusterStateChanged = true;
                            logger.info("[{}] dropping index {} due to conflict with [{}]", tribeName, tribeIndex.getIndex(),
                                    existingFromTribe);
                            removeIndex(blocks, metaData, routingTable, tribeIndex);
                            droppedIndices.add(indexName);
                        } else if (onConflict.startsWith(ON_CONFLICT_PREFER)) {
                            // on conflict, prefer a tribe...
                            String preferredTribeName = onConflict.substring(ON_CONFLICT_PREFER.length());
                            if (tribeName.equals(preferredTribeName)) {
                                // the new one is the preferred one, replace...
                                clusterStateChanged = true;
                                logger.info("[{}] adding index {}, preferred over [{}]", tribeName, tribeIndex.getIndex(),
                                        existingFromTribe);
                                removeIndex(blocks, metaData, routingTable, tribeIndex);
                                addNewIndex(tribeState, blocks, metaData, routingTable, tribeIndex);
                            } // else: either the existing one is the preferred one, or we haven't seen one, carry on
                        }
                    }
                }
            }
            clusterStateChanged |= updateCustoms(currentState, tasks, metaData);
            if (clusterStateChanged) {
                newState.blocks(blocks);
                newState.metaData(metaData);
                newState.routingTable(routingTable.build());
            }
            return clusterStateChanged;
        }

        /**
         * Re-merges every custom metadata type touched by {@code tasks} across all tribe clients,
         * updating or removing it in {@code metaData}. Only customs implementing
         * {@link MergableCustomMetaData} participate in merging.
         *
         * @return true if any custom metadata entry changed
         */
        private boolean updateCustoms(ClusterState currentState, List<ClusterChangedEvent> tasks, MetaData.Builder metaData) {
            boolean clusterStateChanged = false;
            Set<String> changedCustomMetaDataTypeSet = tasks.stream()
                    .map(ClusterChangedEvent::changedCustomMetaDataSet)
                    .flatMap(Collection::stream)
                    .collect(Collectors.toSet());
            final List<Node> tribeClientNodes = TribeService.this.nodes;
            Map<String, MetaData.Custom> mergedCustomMetaDataMap = mergeChangedCustomMetaData(changedCustomMetaDataTypeSet,
                    customMetaDataType -> tribeClientNodes.stream()
                            .map(TribeService::getClusterService).map(ClusterService::state)
                            .map(ClusterState::metaData)
                            .map(clusterMetaData -> ((MetaData.Custom) clusterMetaData.custom(customMetaDataType)))
                            .filter(custom1 -> custom1 != null && custom1 instanceof MergableCustomMetaData)
                            .map(custom2 -> (MergableCustomMetaData) custom2)
                            .collect(Collectors.toList())
            );
            for (String changedCustomMetaDataType : changedCustomMetaDataTypeSet) {
                MetaData.Custom mergedCustomMetaData = mergedCustomMetaDataMap.get(changedCustomMetaDataType);
                if (mergedCustomMetaData == null) {
                    // we ignore merging custom md which doesn't implement MergableCustomMetaData interface
                    if (currentState.metaData().custom(changedCustomMetaDataType) instanceof MergableCustomMetaData) {
                        // custom md has been removed
                        clusterStateChanged = true;
                        logger.info("[{}] removing custom meta data type [{}]", tribeName, changedCustomMetaDataType);
                        metaData.removeCustom(changedCustomMetaDataType);
                    }
                } else {
                    // custom md has been changed
                    clusterStateChanged = true;
                    logger.info("[{}] updating custom meta data type [{}] data [{}]", tribeName, changedCustomMetaDataType,
                            mergedCustomMetaData);
                    metaData.putCustom(changedCustomMetaDataType, mergedCustomMetaData);
                }
            }
            return clusterStateChanged;
        }

        // drops an index from metadata, routing and blocks in one step
        private void removeIndex(ClusterBlocks.Builder blocks, MetaData.Builder metaData, RoutingTable.Builder routingTable,
                                 IndexMetaData index) {
            metaData.remove(index.getIndex().getName());
            routingTable.remove(index.getIndex().getName());
            blocks.removeIndexBlocks(index.getIndex().getName());
        }

        // adds a tribe index (tagged with the tribe name) plus its routing and any configured per-index blocks
        private void addNewIndex(ClusterState tribeState, ClusterBlocks.Builder blocks, MetaData.Builder metaData,
                                 RoutingTable.Builder routingTable, IndexMetaData tribeIndex) {
            Settings tribeSettings = Settings.builder().put(tribeIndex.getSettings())
                    .put(TRIBE_NAME_SETTING.getKey(), tribeName).build();
            metaData.put(IndexMetaData.builder(tribeIndex).settings(tribeSettings));
            routingTable.add(tribeState.routingTable().index(tribeIndex.getIndex()));
            if (Regex.simpleMatch(blockIndicesMetadata, tribeIndex.getIndex().getName())) {
                blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_METADATA_BLOCK);
            }
            if (Regex.simpleMatch(blockIndicesRead, tribeIndex.getIndex().getName())) {
                blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_READ_BLOCK);
            }
            if (Regex.simpleMatch(blockIndicesWrite, tribeIndex.getIndex().getName())) {
                blocks.addIndexBlock(tribeIndex.getIndex().getName(), IndexMetaData.INDEX_WRITE_BLOCK);
            }
        }
    }

    private static ClusterService getClusterService(Node node) {
        return node.injector().getInstance(ClusterService.class);
    }

    // pkg-private for testing
    /**
     * For each changed custom metadata type, reduces the per-tribe values supplied by
     * {@code customMetaDataByTribeNode} via {@link MergableCustomMetaData#merge}. Types for which
     * no tribe supplies a mergable value are absent from the returned map.
     */
    static Map<String, MetaData.Custom> mergeChangedCustomMetaData(Set<String> changedCustomMetaDataTypeSet,
            Function<String, List<MergableCustomMetaData>> customMetaDataByTribeNode) {
        Map<String, MetaData.Custom> changedCustomMetaDataMap = new HashMap<>(changedCustomMetaDataTypeSet.size());
        for (String customMetaDataType : changedCustomMetaDataTypeSet) {
            customMetaDataByTribeNode.apply(customMetaDataType).stream()
                    .reduce((mergableCustomMD, mergableCustomMD2) ->
                            ((MergableCustomMetaData) mergableCustomMD.merge((MetaData.Custom) mergableCustomMD2)))
                    .ifPresent(mergedCustomMetaData ->
                            changedCustomMetaDataMap.put(customMetaDataType, ((MetaData.Custom) mergedCustomMetaData)));
        }
        return changedCustomMetaDataMap;
    }
}
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.openapi.vcs.changes.committed;

import com.intellij.ide.util.treeView.TreeState;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ModuleRootManager;
import com.intellij.openapi.util.BooleanGetter;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsBundle;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vcs.changes.ChangesUtil;
import com.intellij.openapi.vcs.changes.ui.ChangesBrowserNode;
import com.intellij.openapi.vcs.changes.ui.ChangesBrowserNodeRenderer;
import com.intellij.openapi.vcs.changes.ui.DirectoryChangesGroupingPolicy;
import com.intellij.openapi.vcs.changes.ui.TreeModelBuilder;
import com.intellij.openapi.vcs.versionBrowser.CommittedChangeList;
import com.intellij.ui.ScrollPaneFactory;
import com.intellij.ui.treeStructure.Tree;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import javax.swing.*;
import javax.swing.event.ChangeEvent;
import javax.swing.event.ChangeListener;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreePath;
import java.util.*;

/**
 * Filters committed change lists by directory structure: the user selects directories (or modules)
 * in a tree, and only change lists touching files under the selected paths are kept.
 *
 * @author yole
 */
public class StructureFilteringStrategy implements ChangeListFilteringStrategy {
  // notified whenever the tree selection (and therefore the filter result) changes
  private final List<ChangeListener> myListeners = ContainerUtil.createLockFreeCopyOnWriteList();

  private MyUI myUI;
  private final Project myProject;
  // currently selected file paths; a change list passes the filter if any of its files is under one of these
  private final List<FilePath> mySelection = new ArrayList<>();

  public StructureFilteringStrategy(final Project project) {
    myProject = project;
  }

  @NotNull
  @Override
  public CommittedChangesFilterKey getKey() {
    return new CommittedChangesFilterKey(toString(), CommittedChangesFilterPriority.STRUCTURE);
  }

  public String toString() {
    return VcsBundle.message("filter.structure.name");
  }

  @Override
  @Nullable
  public JComponent getFilterUI() {
    // UI is created lazily on first request
    if (myUI == null) {
      myUI = new MyUI();
    }
    return myUI.getComponent();
  }

  @Override
  public void setFilterBase(@NotNull List<? extends CommittedChangeList> changeLists) {
    // todo cycle here
    if (myUI == null) {
      myUI = new MyUI();
    }
    myUI.reset();
    myUI.append(changeLists);
  }

  @Override
  public void addChangeListener(@NotNull ChangeListener listener) {
    myListeners.add(listener);
  }

  @Override
  public void removeChangeListener(@NotNull ChangeListener listener) {
    myListeners.remove(listener);
  }

  @Override
  public void resetFilterBase() {
    myUI.reset();
  }

  @Override
  public void appendFilterBase(@NotNull List<? extends CommittedChangeList> changeLists) {
    myUI.append(changeLists);
  }

  /**
   * Returns the change lists that touch at least one file under a selected path.
   * With an empty selection, everything passes.
   */
  @Override
  @NotNull
  public List<CommittedChangeList> filterChangeLists(@NotNull List<? extends CommittedChangeList> changeLists) {
    if (mySelection.size() == 0) {
      return new ArrayList<>(changeLists);
    }
    final ArrayList<CommittedChangeList> result = new ArrayList<>();
    for (CommittedChangeList list : changeLists) {
      if (listMatchesSelection(list)) {
        result.add(list);
      }
    }
    return result;
  }

  // true if any change in the list lies under any of the selected paths (non-strict descent)
  private boolean listMatchesSelection(final CommittedChangeList list) {
    for (Change change : list.getChanges()) {
      FilePath path = ChangesUtil.getFilePath(change);
      for (FilePath selPath : mySelection) {
        if (path.isUnder(selPath, false)) {
          return true;
        }
      }
    }
    return false;
  }

  /**
   * The filter's Swing UI: a directory tree built from the parent paths of all changed files.
   * Selecting nodes updates {@link #mySelection} and notifies the registered listeners.
   */
  private class MyUI {
    private final JComponent myScrollPane;
    private final Tree myStructureTree;
    private boolean myRendererInitialized;
    // parent directories of all changed files seen so far; the tree model is rebuilt from this set
    private final Set<FilePath> myFilePaths = new HashSet<>();
    // expansion/selection state captured in reset() so it can be restored after the model is rebuilt
    private TreeState myState;

    MyUI() {
      myStructureTree = new Tree();
      myStructureTree.setRootVisible(false);
      myStructureTree.setShowsRootHandles(true);
      myStructureTree.getSelectionModel().addTreeSelectionListener(new TreeSelectionListener() {
        @Override
        public void valueChanged(final TreeSelectionEvent e) {
          // snapshot the old selection so we only fire listeners when it actually changed
          final List<FilePath> filePaths = new ArrayList<>(mySelection);

          mySelection.clear();
          final TreePath[] selectionPaths = myStructureTree.getSelectionPaths();
          if (selectionPaths != null) {
            for (TreePath selectionPath : selectionPaths) {
              mySelection.addAll(getFilePathsUnder((ChangesBrowserNode<?>)selectionPath.getLastPathComponent()));
            }
          }

          if (Comparing.haveEqualElements(filePaths, mySelection)) return;
          for (ChangeListener listener : myListeners) {
            listener.stateChanged(new ChangeEvent(this));
          }
        }
      });
      myScrollPane = ScrollPaneFactory.createScrollPane(myStructureTree);
    }

    // resolves a tree node to the file paths it represents: a FilePath node maps to itself,
    // a Module node maps to the module's content roots, anything else to nothing
    @NotNull
    private List<FilePath> getFilePathsUnder(@NotNull ChangesBrowserNode<?> node) {
      List<FilePath> result = Collections.emptyList();
      Object userObject = node.getUserObject();

      if (userObject instanceof FilePath) {
        result = Collections.singletonList(((FilePath)userObject));
      }
      else if (userObject instanceof Module) {
        result = ContainerUtil.map(ModuleRootManager.getInstance((Module)userObject).getContentRoots(), VcsUtil::getFilePath);
      }

      return result;
    }

    public void initRenderer() {
      if (!myRendererInitialized) {
        myRendererInitialized = true;
        myStructureTree.setCellRenderer(new ChangesBrowserNodeRenderer(myProject, BooleanGetter.FALSE, false));
      }
    }

    public JComponent getComponent() {
      return myScrollPane;
    }

    /** Clears the accumulated paths, remembering the tree state so append() can restore it. */
    public void reset() {
      myFilePaths.clear();
      myState = TreeState.createOn(myStructureTree, (DefaultMutableTreeNode)myStructureTree.getModel().getRoot());
      myStructureTree.setModel(TreeModelBuilder.buildEmpty());
    }

    /** Adds the parent directories of all changed files and rebuilds the tree model. */
    public void append(final List<? extends CommittedChangeList> changeLists) {
      // first append after reset(): restore the state captured there; otherwise capture the current one
      final TreeState localState = myState != null && myFilePaths.isEmpty()
                                   ? myState
                                   : TreeState.createOn(myStructureTree, (DefaultMutableTreeNode)myStructureTree.getModel().getRoot());

      for (CommittedChangeList changeList : changeLists) {
        for (Change change : changeList.getChanges()) {
          final FilePath path = ChangesUtil.getFilePath(change);
          if (path.getParentPath() != null) {
            myFilePaths.add(path.getParentPath());
          }
        }
      }

      myStructureTree.setModel(TreeModelBuilder.buildFromFilePaths(myProject, new DirectoryChangesGroupingPolicy.Factory(), myFilePaths));
      localState.applyTo(myStructureTree, myStructureTree.getModel().getRoot());
      myStructureTree.revalidate();
      myStructureTree.repaint();
      initRenderer();
    }
  }
}
/*
 * Copyright (C) 2017 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.android.exoplayer2.source.hls.offline;

import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.ENC_MEDIA_PLAYLIST_DATA;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.ENC_MEDIA_PLAYLIST_URI;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MASTER_MEDIA_PLAYLIST_1_INDEX;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MASTER_MEDIA_PLAYLIST_2_INDEX;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MASTER_PLAYLIST_DATA;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MASTER_PLAYLIST_URI;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_0_DIR;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_0_URI;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_1_DIR;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_1_URI;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_2_DIR;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_2_URI;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_3_DIR;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_3_URI;
import static com.google.android.exoplayer2.source.hls.offline.HlsDownloadTestData.MEDIA_PLAYLIST_DATA;
import static com.google.android.exoplayer2.testutil.CacheAsserts.assertCacheEmpty;
import static com.google.android.exoplayer2.testutil.CacheAsserts.assertCachedData;
import static com.google.common.truth.Truth.assertThat;

import android.net.Uri;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.android.exoplayer2.MediaItem;
import com.google.android.exoplayer2.offline.DefaultDownloaderFactory;
import com.google.android.exoplayer2.offline.DownloadRequest;
import com.google.android.exoplayer2.offline.Downloader;
import com.google.android.exoplayer2.offline.DownloaderFactory;
import com.google.android.exoplayer2.offline.StreamKey;
import com.google.android.exoplayer2.source.hls.playlist.HlsMasterPlaylist;
import com.google.android.exoplayer2.testutil.CacheAsserts.RequestSet;
import com.google.android.exoplayer2.testutil.FakeDataSet;
import com.google.android.exoplayer2.testutil.FakeDataSource;
import com.google.android.exoplayer2.upstream.DummyDataSource;
import com.google.android.exoplayer2.upstream.cache.Cache;
import com.google.android.exoplayer2.upstream.cache.CacheDataSource;
import com.google.android.exoplayer2.upstream.cache.NoOpCacheEvictor;
import com.google.android.exoplayer2.upstream.cache.SimpleCache;
import com.google.android.exoplayer2.util.Util;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;

/** Unit tests for {@link HlsDownloader}. */
@RunWith(AndroidJUnit4.class)
public class HlsDownloaderTest {

  private SimpleCache cache;
  private File tempFolder;
  private ProgressListener progressListener;
  private FakeDataSet fakeDataSet;

  /** Creates a fresh temp-dir backed cache and a fake data set covering the master playlist and variants 1 and 2. */
  @Before
  public void setUp() throws Exception {
    tempFolder =
        Util.createTempDirectory(ApplicationProvider.getApplicationContext(), "ExoPlayerTest");
    cache = new SimpleCache(tempFolder, new NoOpCacheEvictor());
    progressListener = new ProgressListener();
    FakeDataSet dataSet = new FakeDataSet();
    dataSet.setData(MASTER_PLAYLIST_URI, MASTER_PLAYLIST_DATA);
    dataSet.setData(MEDIA_PLAYLIST_1_URI, MEDIA_PLAYLIST_DATA);
    dataSet.setRandomData(MEDIA_PLAYLIST_1_DIR + "fileSequence0.ts", 10);
    dataSet.setRandomData(MEDIA_PLAYLIST_1_DIR + "fileSequence1.ts", 11);
    dataSet.setRandomData(MEDIA_PLAYLIST_1_DIR + "fileSequence2.ts", 12);
    dataSet.setData(MEDIA_PLAYLIST_2_URI, MEDIA_PLAYLIST_DATA);
    dataSet.setRandomData(MEDIA_PLAYLIST_2_DIR + "fileSequence0.ts", 13);
    dataSet.setRandomData(MEDIA_PLAYLIST_2_DIR + "fileSequence1.ts", 14);
    dataSet.setRandomData(MEDIA_PLAYLIST_2_DIR + "fileSequence2.ts", 15);
    fakeDataSet = dataSet;
  }

  @After
  public void tearDown() {
    Util.recursiveDelete(tempFolder);
  }

  @Test
  public void createWithDefaultDownloaderFactory() {
    // The concrete downloader type should be selected from the request type.
    CacheDataSource.Factory upstreamFactory =
        new CacheDataSource.Factory()
            .setCache(Mockito.mock(Cache.class))
            .setUpstreamDataSourceFactory(DummyDataSource.FACTORY);
    DownloaderFactory downloaderFactory = new DefaultDownloaderFactory(upstreamFactory);
    DownloadRequest request =
        new DownloadRequest(
            "id",
            DownloadRequest.TYPE_HLS,
            Uri.parse("https://www.test.com/download"),
            Collections.singletonList(new StreamKey(/* groupIndex= */ 0, /* trackIndex= */ 0)),
            /* customCacheKey= */ null,
            /* data= */ null);
    Downloader created = downloaderFactory.createDownloader(request);
    assertThat(created).isInstanceOf(HlsDownloader.class);
  }

  @Test
  public void counterMethods() throws Exception {
    // Variant 1: playlist bytes plus three segments of 10, 11 and 12 bytes.
    HlsDownloader downloader =
        getHlsDownloader(MASTER_PLAYLIST_URI, getKeys(MASTER_MEDIA_PLAYLIST_1_INDEX));
    downloader.download(progressListener);
    progressListener.assertBytesDownloaded(MEDIA_PLAYLIST_DATA.length + 10 + 11 + 12);
  }

  @Test
  public void downloadRepresentation() throws Exception {
    HlsDownloader downloader =
        getHlsDownloader(MASTER_PLAYLIST_URI, getKeys(MASTER_MEDIA_PLAYLIST_1_INDEX));
    downloader.download(progressListener);
    // Only the master playlist plus variant 1's playlist and segments may be cached.
    RequestSet expected =
        new RequestSet(fakeDataSet)
            .subset(
                MASTER_PLAYLIST_URI,
                MEDIA_PLAYLIST_1_URI,
                MEDIA_PLAYLIST_1_DIR + "fileSequence0.ts",
                MEDIA_PLAYLIST_1_DIR + "fileSequence1.ts",
                MEDIA_PLAYLIST_1_DIR + "fileSequence2.ts");
    assertCachedData(cache, expected);
  }

  @Test
  public void downloadMultipleRepresentations() throws Exception {
    HlsDownloader downloader =
        getHlsDownloader(
            MASTER_PLAYLIST_URI, getKeys(MASTER_MEDIA_PLAYLIST_1_INDEX, MASTER_MEDIA_PLAYLIST_2_INDEX));
    downloader.download(progressListener);
    assertCachedData(cache, fakeDataSet);
  }

  @Test
  public void downloadAllRepresentations() throws Exception {
    // Add data for the rest of the playlists
    fakeDataSet
        .setData(MEDIA_PLAYLIST_0_URI, MEDIA_PLAYLIST_DATA)
        .setRandomData(MEDIA_PLAYLIST_0_DIR + "fileSequence0.ts", 10)
        .setRandomData(MEDIA_PLAYLIST_0_DIR + "fileSequence1.ts", 11)
        .setRandomData(MEDIA_PLAYLIST_0_DIR + "fileSequence2.ts", 12)
        .setData(MEDIA_PLAYLIST_3_URI, MEDIA_PLAYLIST_DATA)
        .setRandomData(MEDIA_PLAYLIST_3_DIR + "fileSequence0.ts", 13)
        .setRandomData(MEDIA_PLAYLIST_3_DIR + "fileSequence1.ts", 14)
        .setRandomData(MEDIA_PLAYLIST_3_DIR + "fileSequence2.ts", 15);

    // No stream keys selects every variant.
    HlsDownloader downloader = getHlsDownloader(MASTER_PLAYLIST_URI, getKeys());
    downloader.download(progressListener);
    assertCachedData(cache, fakeDataSet);
  }

  @Test
  public void remove() throws Exception {
    HlsDownloader downloader =
        getHlsDownloader(
            MASTER_PLAYLIST_URI, getKeys(MASTER_MEDIA_PLAYLIST_1_INDEX, MASTER_MEDIA_PLAYLIST_2_INDEX));
    downloader.download(progressListener);
    downloader.remove();
    assertCacheEmpty(cache);
  }

  @Test
  public void downloadMediaPlaylist() throws Exception {
    // Downloading a media playlist directly should not touch the master playlist.
    HlsDownloader downloader = getHlsDownloader(MEDIA_PLAYLIST_1_URI, getKeys());
    downloader.download(progressListener);
    RequestSet expected =
        new RequestSet(fakeDataSet)
            .subset(
                MEDIA_PLAYLIST_1_URI,
                MEDIA_PLAYLIST_1_DIR + "fileSequence0.ts",
                MEDIA_PLAYLIST_1_DIR + "fileSequence1.ts",
                MEDIA_PLAYLIST_1_DIR + "fileSequence2.ts");
    assertCachedData(cache, expected);
  }

  @Test
  public void downloadEncMediaPlaylist() throws Exception {
    // Replace the fixture with an encrypted playlist: keys must be downloaded alongside segments.
    FakeDataSet encDataSet = new FakeDataSet();
    encDataSet.setData(ENC_MEDIA_PLAYLIST_URI, ENC_MEDIA_PLAYLIST_DATA);
    encDataSet.setRandomData("enc.key", 8);
    encDataSet.setRandomData("enc2.key", 9);
    encDataSet.setRandomData("fileSequence0.ts", 10);
    encDataSet.setRandomData("fileSequence1.ts", 11);
    encDataSet.setRandomData("fileSequence2.ts", 12);
    fakeDataSet = encDataSet;

    HlsDownloader downloader = getHlsDownloader(ENC_MEDIA_PLAYLIST_URI, getKeys());
    downloader.download(progressListener);
    assertCachedData(cache, fakeDataSet);
  }

  /** Builds an {@link HlsDownloader} for the given playlist URI and stream keys, reading from {@link #fakeDataSet}. */
  private HlsDownloader getHlsDownloader(String mediaPlaylistUri, List<StreamKey> keys) {
    FakeDataSource.Factory upstream = new FakeDataSource.Factory().setFakeDataSet(fakeDataSet);
    CacheDataSource.Factory cacheDataSourceFactory =
        new CacheDataSource.Factory().setCache(cache).setUpstreamDataSourceFactory(upstream);
    MediaItem mediaItem =
        new MediaItem.Builder().setUri(mediaPlaylistUri).setStreamKeys(keys).build();
    return new HlsDownloader(mediaItem, cacheDataSourceFactory);
  }

  /** Wraps variant indices as {@link StreamKey}s in the variant track group. */
  private static ArrayList<StreamKey> getKeys(int... variantIndices) {
    ArrayList<StreamKey> streamKeys = new ArrayList<>();
    for (int index : variantIndices) {
      streamKeys.add(new StreamKey(HlsMasterPlaylist.GROUP_INDEX_VARIANT, index));
    }
    return streamKeys;
  }

  /** Records the most recent byte count reported by the downloader. */
  private static final class ProgressListener implements Downloader.ProgressListener {

    private long lastBytesDownloaded;

    @Override
    public void onProgress(long contentLength, long bytesDownloaded, float percentDownloaded) {
      lastBytesDownloaded = bytesDownloaded;
    }

    public void assertBytesDownloaded(long bytesDownloaded) {
      assertThat(lastBytesDownloaded).isEqualTo(bytesDownloaded);
    }
  }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.index.shard;

import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.SortedNumericDocValuesField;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.SegmentCommitInfo;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.SortedNumericSortField;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.core.internal.io.IOUtils;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.cluster.routing.OperationRouting;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.indices.recovery.RecoveryState;
import org.elasticsearch.test.ESTestCase;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.attribute.BasicFileAttributes;
import java.security.AccessControlException;
import java.util.Arrays;
import java.util.Map;
import java.util.function.Predicate;

import static org.hamcrest.CoreMatchers.equalTo;

/**
 * Unit tests for {@link StoreRecovery}: merging multiple source directories into
 * a recovery target, shard-splitting via {@code addIndices}, and the directory
 * wrapper that records per-file recovery stats.
 */
public class StoreRecoveryTests extends ESTestCase {

    /**
     * Writes the same docs across 1-10 source directories, runs
     * {@code StoreRecovery.addIndices} into a fresh target and verifies file
     * counts, recovery stats, commit user data and segment diagnostics.
     */
    public void testAddIndices() throws IOException {
        Directory[] dirs = new Directory[randomIntBetween(1, 10)];
        final int numDocs = randomIntBetween(50, 100);
        // Randomly exercise both the sorted-index and unsorted code paths.
        final Sort indexSort;
        if (randomBoolean()) {
            indexSort = new Sort(new SortedNumericSortField("num", SortField.Type.LONG, true));
        } else {
            indexSort = null;
        }
        int id = 0;
        for (int i = 0; i < dirs.length; i++) {
            dirs[i] = newFSDirectory(createTempDir());
            // NoMergePolicy keeps one segment per flush so the "source: flush"
            // diagnostic assertion below can detect any unexpected merging.
            IndexWriterConfig iwc = newIndexWriterConfig()
                .setMergePolicy(NoMergePolicy.INSTANCE)
                .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
            if (indexSort != null) {
                iwc.setIndexSort(indexSort);
            }
            IndexWriter writer = new IndexWriter(dirs[i], iwc);
            for (int j = 0; j < numDocs; j++) {
                // ids are unique across all source directories
                writer.addDocument(Arrays.asList(
                    new StringField("id", Integer.toString(id++), Field.Store.YES),
                    new SortedNumericDocValuesField("num", randomLong())
                ));
            }
            writer.commit();
            writer.close();
        }
        StoreRecovery storeRecovery = new StoreRecovery(new ShardId("foo", "bar", 1), logger);
        RecoveryState.Index indexStats = new RecoveryState.Index();
        Directory target = newFSDirectory(createTempDir());
        final long maxSeqNo = randomNonNegativeLong();
        final long maxUnsafeAutoIdTimestamp = randomNonNegativeLong();
        // null metadata / shardId 0 / split=false: plain multi-directory add, no shard filtering.
        storeRecovery.addIndices(indexStats, target, indexSort, dirs, maxSeqNo,
            maxUnsafeAutoIdTimestamp, null, 0, false, false);
        int numFiles = 0;
        // Ignore bookkeeping files that are not copied as-is into the target.
        Predicate<String> filesFilter = (f) -> f.startsWith("segments") == false
            && f.equals("write.lock") == false && f.startsWith("extra") == false;
        for (Directory d : dirs) {
            numFiles += Arrays.asList(d.listAll()).stream().filter(filesFilter).count();
        }
        final long targetNumFiles = Arrays.asList(target.listAll()).stream().filter(filesFilter).count();
        assertEquals(numFiles, targetNumFiles);
        assertEquals(indexStats.totalFileCount(), targetNumFiles);
        // When the filesystem supports hard links, every file should be "reused"
        // (hard-linked) rather than byte-copied.
        if (hardLinksSupported(createTempDir())) {
            assertEquals(targetNumFiles, indexStats.reusedFileCount());
        } else {
            assertEquals(0, indexStats.reusedFileCount(), 0);
        }
        DirectoryReader reader = DirectoryReader.open(target);
        SegmentInfos segmentCommitInfos = SegmentInfos.readLatestCommit(target);
        final Map<String, String> userData = segmentCommitInfos.getUserData();
        // addIndices must stamp sequence-number bookkeeping into the commit user data.
        assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(maxSeqNo)));
        assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(maxSeqNo)));
        assertThat(userData.get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID),
            equalTo(Long.toString(maxUnsafeAutoIdTimestamp)));
        for (SegmentCommitInfo info : segmentCommitInfos) { // check that we didn't merge
            assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush");
            if (indexSort != null) {
                assertEquals(indexSort, info.info.getIndexSort());
            }
        }
        assertEquals(reader.numDeletedDocs(), 0);
        assertEquals(reader.numDocs(), id);
        reader.close();
        target.close();
        IOUtils.close(dirs);
    }

    /**
     * Indexes docs with real _id terms, then calls {@code addIndices} with split
     * metadata targeting one shard of a multi-shard index and verifies only the
     * docs routed to that shard survive in the target directory.
     */
    public void testSplitShard() throws IOException {
        Directory dir = newFSDirectory(createTempDir());
        final int numDocs = randomIntBetween(50, 100);
        final Sort indexSort;
        if (randomBoolean()) {
            indexSort = new Sort(new SortedNumericSortField("num", SortField.Type.LONG, true));
        } else {
            indexSort = null;
        }
        int id = 0;
        IndexWriterConfig iwc = newIndexWriterConfig()
            .setMergePolicy(NoMergePolicy.INSTANCE)
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        if (indexSort != null) {
            iwc.setIndexSort(indexSort);
        }
        IndexWriter writer = new IndexWriter(dir, iwc);
        for (int j = 0; j < numDocs; j++) {
            // Use the real _id field encoding so OperationRouting can route by id below.
            writer.addDocument(Arrays.asList(
                new StringField(IdFieldMapper.NAME, Uid.encodeId(Integer.toString(j)), Field.Store.YES),
                new SortedNumericDocValuesField("num", randomLong())
            ));
        }
        writer.commit();
        writer.close();
        StoreRecovery storeRecovery = new StoreRecovery(new ShardId("foo", "bar", 1), logger);
        RecoveryState.Index indexStats = new RecoveryState.Index();
        Directory target = newFSDirectory(createTempDir());
        final long maxSeqNo = randomNonNegativeLong();
        final long maxUnsafeAutoIdTimestamp = randomNonNegativeLong();
        int numShards = randomIntBetween(2, 10);
        int targetShardId = randomIntBetween(0, numShards-1);
        IndexMetaData metaData = IndexMetaData.builder("test")
            .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
            .numberOfShards(numShards)
            .setRoutingNumShards(numShards * 1000000)
            .numberOfReplicas(0).build();
        // split=true: docs not routed to targetShardId must be filtered out.
        storeRecovery.addIndices(indexStats, target, indexSort, new Directory[] {dir}, maxSeqNo,
            maxUnsafeAutoIdTimestamp, metaData, targetShardId, true, false);
        SegmentInfos segmentCommitInfos = SegmentInfos.readLatestCommit(target);
        final Map<String, String> userData = segmentCommitInfos.getUserData();
        assertThat(userData.get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(maxSeqNo)));
        assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(maxSeqNo)));
        assertThat(userData.get(Engine.MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID),
            equalTo(Long.toString(maxUnsafeAutoIdTimestamp)));
        for (SegmentCommitInfo info : segmentCommitInfos) { // check that we didn't merge
            assertEquals("all sources must be flush", info.info.getDiagnostics().get("source"), "flush");
            if (indexSort != null) {
                assertEquals(indexSort, info.info.getIndexSort());
            }
        }
        // Force-merge down to one segment so split-filter deletes are applied
        // before the _id terms are inspected.
        iwc = newIndexWriterConfig()
            .setMergePolicy(NoMergePolicy.INSTANCE)
            .setOpenMode(IndexWriterConfig.OpenMode.CREATE);
        if (indexSort != null) {
            iwc.setIndexSort(indexSort);
        }
        writer = new IndexWriter(target, iwc);
        writer.forceMerge(1, true);
        writer.commit();
        writer.close();
        DirectoryReader reader = DirectoryReader.open(target);
        for (LeafReaderContext ctx : reader.leaves()) {
            LeafReader leafReader = ctx.reader();
            Terms terms = leafReader.terms(IdFieldMapper.NAME);
            TermsEnum iterator = terms.iterator();
            BytesRef ref;
            // Every surviving _id term must route to the target shard...
            while((ref = iterator.next()) != null) {
                String value = ref.utf8ToString();
                assertEquals("value has wrong shards: " + value, targetShardId,
                    OperationRouting.generateShardId(metaData, value, null));
            }
            // ...and presence/absence of each original id must match its routing.
            for (int i = 0; i < numDocs; i++) {
                ref = new BytesRef(Integer.toString(i));
                int shardId = OperationRouting.generateShardId(metaData, ref.utf8ToString(), null);
                if (shardId == targetShardId) {
                    assertTrue(ref.utf8ToString() + " is missing", terms.iterator().seekExact(ref));
                } else {
                    assertFalse(ref.utf8ToString() + " was found but shouldn't", terms.iterator().seekExact(ref));
                }
            }
        }
        reader.close();
        target.close();
        IOUtils.close(dir);
    }

    /**
     * Verifies that StatsDirectoryWrapper.copyFrom records the copied file under
     * its target name (and only that name) with correct length/recovered bytes.
     */
    public void testStatsDirWrapper() throws IOException {
        Directory dir = newDirectory();
        Directory target = newDirectory();
        RecoveryState.Index indexStats = new RecoveryState.Index();
        StoreRecovery.StatsDirectoryWrapper wrapper =
            new StoreRecovery.StatsDirectoryWrapper(target, indexStats);
        try (IndexOutput output = dir.createOutput("foo.bar", IOContext.DEFAULT)) {
            CodecUtil.writeHeader(output, "foo", 0);
            int numBytes = randomIntBetween(100, 20000);
            for (int i = 0; i < numBytes; i++) {
                output.writeByte((byte)i);
            }
            CodecUtil.writeFooter(output);
        }
        // Copy under a different destination name; stats must be keyed by "bar.foo".
        wrapper.copyFrom(dir, "foo.bar", "bar.foo", IOContext.DEFAULT);
        assertNotNull(indexStats.getFileDetails("bar.foo"));
        assertNull(indexStats.getFileDetails("foo.bar"));
        assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").length());
        assertEquals(dir.fileLength("foo.bar"), indexStats.getFileDetails("bar.foo").recovered());
        assertFalse(indexStats.getFileDetails("bar.foo").reused());
        IOUtils.close(dir, target);
    }

    /**
     * Probes whether the filesystem under {@code path} supports hard links by
     * creating one and comparing file keys. AccessControlException (security
     * manager forbids createLink) is treated as "supported".
     */
    public boolean hardLinksSupported(Path path) throws IOException {
        try {
            Files.createFile(path.resolve("foo.bar"));
            Files.createLink(path.resolve("test"), path.resolve("foo.bar"));
            BasicFileAttributes destAttr = Files.readAttributes(path.resolve("test"), BasicFileAttributes.class);
            BasicFileAttributes sourceAttr = Files.readAttributes(path.resolve("foo.bar"), BasicFileAttributes.class);
            // we won't get here - no permission ;)
            return destAttr.fileKey() != null && destAttr.fileKey().equals(sourceAttr.fileKey());
        } catch (AccessControlException ex) {
            return true; // if we run into that situation we know it's supported.
        } catch (UnsupportedOperationException ex) {
            return false;
        }
    }
}
/**
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for
 * license information.
 */
package com.microsoft.azure.management.servicebus;

import com.microsoft.azure.management.apigeneration.Fluent;
import com.microsoft.azure.management.resources.fluentcore.arm.models.IndependentChildResource;
import com.microsoft.azure.management.resources.fluentcore.model.Appliable;
import com.microsoft.azure.management.resources.fluentcore.model.Creatable;
import com.microsoft.azure.management.resources.fluentcore.model.Refreshable;
import com.microsoft.azure.management.resources.fluentcore.model.Updatable;
import com.microsoft.azure.management.servicebus.implementation.QueueInner;
import com.microsoft.azure.management.servicebus.implementation.ServiceBusManager;
import org.joda.time.DateTime;
import org.joda.time.Period;

/**
 * Type representing a Service Bus queue.
 */
@Fluent
public interface Queue extends
        IndependentChildResource<ServiceBusManager, QueueInner>,
        Refreshable<Queue>,
        Updatable<Queue.Update> {
    /**
     * @return the exact time the queue was created
     */
    DateTime createdAt();
    /**
     * @return last time a message was sent, or the last time there was a receive request to this queue
     */
    DateTime accessedAt();
    /**
     * @return the exact time the queue was updated
     */
    DateTime updatedAt();
    /**
     * @return the maximum size of memory allocated for the queue in megabytes
     */
    long maxSizeInMB();
    /**
     * @return current size of the queue, in bytes
     */
    long currentSizeInBytes();
    /**
     * @return indicates whether server-side batched operations are enabled
     */
    boolean isBatchedOperationsEnabled();
    /**
     * @return indicates whether this queue has dead letter support when a message expires
     */
    boolean isDeadLetteringEnabledForExpiredMessages();
    /**
     * @return indicates whether express entities are enabled
     */
    boolean isExpressEnabled();
    /**
     * @return indicates whether the queue is to be partitioned across multiple message brokers
     */
    boolean isPartitioningEnabled();
    /**
     * @return indicates whether the queue supports sessions
     */
    boolean isSessionEnabled();
    /**
     * @return indicates if this queue requires duplicate detection
     */
    boolean isDuplicateDetectionEnabled();
    /**
     * @return the duration of peek-lock, which is the amount of time that a message is locked from other receivers
     */
    long lockDurationInSeconds();
    /**
     * @return the idle duration after which the queue is automatically deleted
     */
    long deleteOnIdleDurationInMinutes();
    /**
     * @return the duration after which the message expires, starting from when the message is sent to queue
     */
    Period defaultMessageTtlDuration();
    /**
     * @return the duration of the duplicate detection history
     */
    Period duplicateMessageDetectionHistoryDuration();
    /**
     * @return the maximum number of delivery attempts for a message before it is marked as dead-lettered
     */
    int maxDeliveryCountBeforeDeadLetteringMessage();
    /**
     * @return the number of messages in the queue
     */
    long messageCount();
    /**
     * @return number of active messages in the queue
     */
    long activeMessageCount();
    /**
     * @return number of messages in the dead-letter queue
     */
    long deadLetterMessageCount();
    /**
     * @return number of messages sent to the queue that are yet to be released
     * for consumption
     */
    long scheduledMessageCount();
    /**
     * @return number of messages transferred into dead letters
     */
    long transferDeadLetterMessageCount();
    /**
     * @return number of messages transferred to another queue, topic, or subscription
     */
    long transferMessageCount();
    /**
     * @return the current status of the queue
     */
    EntityStatus status();
    /**
     * @return entry point to manage authorization rules for the Service Bus queue
     */
    QueueAuthorizationRules authorizationRules();

    /**
     * The entirety of the Service Bus queue definition.
     */
    interface Definition extends
            Queue.DefinitionStages.Blank,
            Queue.DefinitionStages.WithCreate {
    }

    /**
     * Grouping of Service Bus queue definition stages.
     */
    interface DefinitionStages {
        /**
         * The first stage of a queue definition.
         */
        interface Blank extends WithCreate {
        }

        /**
         * The stage of the queue definition allowing to specify size.
         */
        interface WithSize {
            /**
             * Specifies the maximum size of memory allocated for the queue.
             *
             * @param sizeInMB size in MB
             * @return the next stage of queue definition
             */
            WithCreate withSizeInMB(long sizeInMB);
        }

        /**
         * The stage of the queue definition allowing to specify partitioning behaviour.
         */
        interface WithPartitioning {
            /**
             * Specifies that partitioning should be enabled on this queue.
             *
             * @return the next stage of queue definition
             */
            WithCreate withPartitioning();
            /**
             * Specifies that the default partitioning should be disabled on this queue.
             * Note: if the parent Service Bus is Premium SKU then partitioning cannot be
             * disabled.
             *
             * @return the next stage of queue definition
             */
            WithCreate withoutPartitioning();
        }

        /**
         * The stage of the queue definition allowing to define auto delete behaviour.
         */
        interface WithDeleteOnIdle {
            /**
             * The idle interval after which the queue is automatically deleted.
             * Note: unless it is explicitly overridden the default delete on idle duration
             * is infinite (TimeSpan.Max).
             *
             * @param durationInMinutes idle duration in minutes
             * @return the next stage of queue definition
             */
            WithCreate withDeleteOnIdleDurationInMinutes(int durationInMinutes);
        }

        /**
         * The stage of the queue definition allowing to define duration for message lock.
         */
        interface WithMessageLockDuration {
            /**
             * Specifies the amount of time that the message is locked for other receivers.
             * Note: unless it is explicitly overridden the default lock duration is 60 seconds,
             * the maximum allowed value is 300 seconds.
             *
             * @param durationInSeconds duration of a lock in seconds
             * @return the next stage of queue definition
             */
            WithCreate withMessageLockDurationInSeconds(int durationInSeconds);
        }

        /**
         * The stage of the queue definition allowing to define default TTL for messages.
         */
        interface WithDefaultMessageTTL {
            /**
             * Specifies the duration after which the message expires.
             * Note: unless it is explicitly overridden the default ttl is infinite (TimeSpan.Max).
             *
             * @param ttl time to live duration
             * @return the next stage of queue definition
             */
            WithCreate withDefaultMessageTTL(Period ttl);
        }

        /**
         * The stage of the queue definition allowing to enable session support.
         */
        interface WithSession {
            /**
             * Specifies that session support should be enabled for the queue.
             *
             * @return the next stage of queue definition
             */
            WithCreate withSession();
        }

        /**
         * The stage of the queue definition allowing to mark messages as express messages.
         */
        interface WithExpressMessage {
            /**
             * Specifies that messages in this queue are express, hence they can be cached in memory
             * for some time before being stored in the messaging store.
             * Note: by default a queue is not express.
             *
             * @return the next stage of queue definition
             */
            WithCreate withExpressMessage();
        }

        /**
         * The stage of the queue definition allowing to specify batching behaviour.
         */
        interface WithMessageBatching {
            /**
             * Specifies that the default batching should be disabled on this queue.
             * With batching, Service Bus can batch multiple messages when it writes to or
             * deletes messages from its internal store.
             *
             * @return the next stage of queue definition
             */
            WithCreate withoutMessageBatching();
        }

        /**
         * The stage of the queue definition allowing to specify duration of the duplicate message
         * detection history.
         */
        interface WithDuplicateMessageDetection {
            /**
             * Specifies the duration of the duplicate message detection history.
             *
             * @param duplicateDetectionHistoryDuration duration of the history
             * @return the next stage of queue definition
             */
            WithCreate withDuplicateMessageDetection(Period duplicateDetectionHistoryDuration);
        }

        /**
         * The stage of the queue definition allowing to specify whether an expired message can be moved
         * to the secondary dead-letter queue.
         */
        interface WithExpiredMessageMovedToDeadLetterQueue {
            /**
             * Specifies that expired message must be moved to dead-letter queue.
             *
             * @return the next stage of queue definition
             */
            WithCreate withExpiredMessageMovedToDeadLetterQueue();
        }

        /**
         * The stage of the queue definition allowing to specify maximum delivery count of message before
         * moving it to dead-letter queue.
         */
        interface WithMessageMovedToDeadLetterQueueOnMaxDeliveryCount {
            /**
             * Specifies maximum number of times a message can be delivered. Once this count has been exceeded,
             * the message will be moved to the dead-letter queue.
             *
             * @param deliveryCount maximum delivery count
             * @return the next stage of queue definition
             */
            WithCreate withMessageMovedToDeadLetterQueueOnMaxDeliveryCount(int deliveryCount);
        }

        /**
         * The stage of the queue definition allowing to add an authorization rule for accessing
         * the queue.
         */
        interface WithAuthorizationRule {
            /**
             * Creates a send authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue definition
             */
            WithCreate withNewSendRule(String name);
            /**
             * Creates a listen authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue definition
             */
            WithCreate withNewListenRule(String name);
            /**
             * Creates a manage authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue definition
             */
            WithCreate withNewManageRule(String name);
        }

        /**
         * The stage of the definition which contains all the minimum required inputs for
         * the resource to be created (via {@link WithCreate#create()}), but also allows
         * for any other optional settings to be specified.
         */
        interface WithCreate extends
                Creatable<Queue>,
                Queue.DefinitionStages.WithSize,
                Queue.DefinitionStages.WithPartitioning,
                Queue.DefinitionStages.WithDeleteOnIdle,
                Queue.DefinitionStages.WithMessageLockDuration,
                Queue.DefinitionStages.WithDefaultMessageTTL,
                Queue.DefinitionStages.WithSession,
                Queue.DefinitionStages.WithExpressMessage,
                Queue.DefinitionStages.WithMessageBatching,
                Queue.DefinitionStages.WithDuplicateMessageDetection,
                Queue.DefinitionStages.WithExpiredMessageMovedToDeadLetterQueue,
                Queue.DefinitionStages.WithMessageMovedToDeadLetterQueueOnMaxDeliveryCount,
                Queue.DefinitionStages.WithAuthorizationRule {
        }
    }

    /**
     * The template for Service Bus queue update operation, containing all the settings that can be modified.
     */
    interface Update extends
            Appliable<Queue>,
            Queue.UpdateStages.WithSize,
            Queue.UpdateStages.WithDeleteOnIdle,
            Queue.UpdateStages.WithMessageLockDuration,
            Queue.UpdateStages.WithDefaultMessageTTL,
            Queue.UpdateStages.WithSession,
            Queue.UpdateStages.WithExpressMessage,
            Queue.UpdateStages.WithMessageBatching,
            Queue.UpdateStages.WithDuplicateMessageDetection,
            Queue.UpdateStages.WithExpiredMessageMovedToDeadLetterQueue,
            Queue.UpdateStages.WithMessageMovedToDeadLetterQueueOnMaxDeliveryCount,
            Queue.UpdateStages.WithAuthorizationRule {
    }

    /**
     * Grouping of Service Bus queue update stages.
     */
    interface UpdateStages {
        /**
         * The stage of the queue update allowing to specify size.
         */
        interface WithSize {
            /**
             * Specifies the maximum size of memory allocated for the queue.
             *
             * @param sizeInMB size in MB
             * @return the next stage of queue update
             */
            Update withSizeInMB(long sizeInMB);
        }

        /**
         * The stage of the queue update allowing to define auto delete behaviour.
         */
        interface WithDeleteOnIdle {
            /**
             * The idle interval after which the queue is automatically deleted.
             *
             * @param durationInMinutes idle duration in minutes
             * @return the next stage of queue update
             */
            Update withDeleteOnIdleDurationInMinutes(int durationInMinutes);
        }

        /**
         * The stage of the queue update allowing to define duration for message lock.
         */
        interface WithMessageLockDuration {
            /**
             * Specifies the amount of time that the message is locked for other receivers.
             *
             * @param durationInSeconds duration of a lock in seconds
             * @return the next stage of queue update
             */
            Update withMessageLockDurationInSeconds(int durationInSeconds);
        }

        /**
         * The stage of the queue update allowing to define default TTL for messages.
         */
        interface WithDefaultMessageTTL {
            /**
             * Specifies the duration after which the message expires.
             *
             * @param ttl time to live duration
             * @return the next stage of queue update
             */
            Update withDefaultMessageTTL(Period ttl);
        }

        /**
         * The stage of the queue update allowing to enable or disable session support.
         */
        interface WithSession {
            /**
             * Specifies that session support should be enabled for the queue.
             *
             * @return the next stage of queue update
             */
            Update withSession();
            /**
             * Specifies that session support should be disabled for the queue.
             *
             * @return the next stage of queue update
             */
            Update withoutSession();
        }

        /**
         * The stage of the queue update allowing to mark it as either holding regular or express
         * messages.
         */
        interface WithExpressMessage {
            /**
             * Specifies that messages in this queue are express, hence they can be cached in memory
             * for some time before being stored in the messaging store.
             *
             * @return the next stage of queue update
             */
            Update withExpressMessage();
            /**
             * Specifies that messages in this queue are not express, hence they are stored
             * directly in the messaging store without being cached in memory first.
             *
             * @return the next stage of queue update
             */
            Update withoutExpressMessage();
        }

        /**
         * The stage of the queue update allowing to configure message batching behaviour.
         */
        interface WithMessageBatching {
            /**
             * Specifies that Service Bus can batch multiple messages when it writes messages to or
             * deletes messages from its internal store. This increases the throughput.
             *
             * @return the next stage of queue update
             */
            Update withMessageBatching();
            /**
             * Specifies that batching of messages should be disabled when Service Bus writes messages to
             * or deletes messages from its internal store.
             *
             * @return the next stage of queue update
             */
            Update withoutMessageBatching();
        }

        /**
         * The stage of the queue update allowing to specify duration of the duplicate message
         * detection history.
         */
        interface WithDuplicateMessageDetection {
            /**
             * Specifies the duration of the duplicate message detection history.
             *
             * @param duration duration of the history
             * @return the next stage of queue update
             */
            Update withDuplicateMessageDetectionHistoryDuration(Period duration);
            /**
             * Specifies that duplicate message detection needs to be disabled.
             *
             * @return the next stage of queue update
             */
            Update withoutDuplicateMessageDetection();
        }

        /**
         * The stage of the queue update allowing to specify whether an expired message can be moved
         * to the secondary dead-letter queue.
         */
        interface WithExpiredMessageMovedToDeadLetterQueue {
            /**
             * Specifies that expired message must be moved to dead-letter queue.
             *
             * @return the next stage of queue update
             */
            Update withExpiredMessageMovedToDeadLetterQueue();
            /**
             * Specifies that expired message should not be moved to dead-letter queue.
             *
             * @return the next stage of queue update
             */
            Update withoutExpiredMessageMovedToDeadLetterQueue();
        }

        /**
         * The stage of the queue update allowing to specify maximum delivery count of message before
         * moving it to dead-letter queue.
         */
        interface WithMessageMovedToDeadLetterQueueOnMaxDeliveryCount {
            /**
             * Specifies maximum number of times a message can be delivered. Once this count has been exceeded,
             * the message will be moved to the dead-letter queue.
             *
             * @param deliveryCount maximum delivery count
             * @return the next stage of queue update
             */
            Update withMessageMovedToDeadLetterQueueOnMaxDeliveryCount(int deliveryCount);
        }

        /**
         * The stage of the queue update allowing to add an authorization rule for accessing
         * the queue.
         */
        interface WithAuthorizationRule {
            /**
             * Creates a send authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue update
             */
            Update withNewSendRule(String name);
            /**
             * Creates a listen authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue update
             */
            Update withNewListenRule(String name);
            /**
             * Creates a manage authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue update
             */
            Update withNewManageRule(String name);
            /**
             * Removes an authorization rule for the queue.
             *
             * @param name rule name
             * @return next stage of the queue update
             */
            Update withoutAuthorizationRule(String name);
        }
    }
}
package io.github.paulszefer; import io.github.paulszefer.sim.Creature; import io.github.paulszefer.sim.Guppy; import io.github.paulszefer.sim.Pool; import org.junit.After; import org.junit.Before; import org.junit.Test; import java.util.ArrayList; import java.util.List; import java.util.Random; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; /** * Tests the constructors and methods of the Pool class. * * @author Paul Szefer * @version 1.0 */ public class PoolTest { public static final String NAME_VALID = "Poolname"; public static final String NAME_WHITESPACE_UNFORMATTED = "p O o L n A m E"; public static final String NAME_NULL = null; public static final double VOLUME_LITRES_BELOW_LOWER_BOUND = -3.6; public static final double VOLUME_LITRES_LOWER_BOUND = 0.0; public static final double VOLUME_LITRES_VALID = 203.2; public static final double TEMP_CELSIUS_BELOW_LOWER_BOUND = Pool.MINIMUM_WATER_TEMP_CELSIUS - 1.0; public static final double TEMP_CELSIUS_LOWER_BOUND = Pool.MINIMUM_WATER_TEMP_CELSIUS; public static final double TEMP_CELSIUS_VALID_1 = Pool.MINIMUM_WATER_TEMP_CELSIUS + 1.0; public static final double TEMP_CELSIUS_VALID_2 = Pool.MAXIMUM_WATER_TEMP_CELSIUS - 1.0; public static final double TEMP_CELSIUS_UPPER_BOUND = Pool.MAXIMUM_WATER_TEMP_CELSIUS; public static final double TEMP_CELSIUS_ABOVE_UPPER_BOUND = Pool.MAXIMUM_WATER_TEMP_CELSIUS + 1.0; public static final double PH_BELOW_LOWER_BOUND = Pool.MINIMUM_PH - 1.0; public static final double PH_LOWER_BOUND = Pool.MINIMUM_PH; public static final double PH_VALID_1 = Pool.MINIMUM_PH + 1.0; public static final double PH_VALID_2 = Pool.MAXIMUM_PH - 
1.0; public static final double PH_UPPER_BOUND = Pool.MAXIMUM_PH; public static final double PH_ABOVE_UPPER_BOUND = Pool.MAXIMUM_PH + 1.0; public static final double NUTRIENT_COEFFICIENT_BELOW_LOWER_BOUND = Pool.MINIMUM_NUTRIENT_COEFFICIENT - 0.1; public static final double NUTRIENT_COEFFICIENT_LOWER_BOUND = Pool.MINIMUM_NUTRIENT_COEFFICIENT; public static final double NUTRIENT_COEFFICIENT_VALID_1 = Pool.MINIMUM_NUTRIENT_COEFFICIENT + 0.1; public static final double NUTRIENT_COEFFICIENT_VALID_2 = Pool.MAXIMUM_NUTRIENT_COEFFICIENT - 0.1; public static final double NUTRIENT_COEFFICIENT_UPPER_BOUND = Pool.MAXIMUM_NUTRIENT_COEFFICIENT; public static final double NUTRIENT_COEFFICIENT_ABOVE_UPPER_BOUND = Pool.MAXIMUM_NUTRIENT_COEFFICIENT + 0.1; public static final double TOLERANCE = 0.000001; private static final Random GENERATOR = new Random(); private Pool pool; private Pool pool0ParameterConstructor; private Pool pool5ParameterConstructorValidParameterValues; private Pool pool5ParameterConstructorInvalidParameterValuesBelowBound; private Pool pool5ParameterConstructorInvalidParameterValuesAboveBound; private Pool pool5ParameterConstructor5; private List<Creature> testCreatures; @Before public void setUp() throws Exception { pool = new Pool(NAME_VALID, VOLUME_LITRES_VALID, TEMP_CELSIUS_VALID_2, PH_VALID_2, NUTRIENT_COEFFICIENT_VALID_2); pool0ParameterConstructor = new Pool(); pool5ParameterConstructorValidParameterValues = new Pool(NAME_VALID, VOLUME_LITRES_VALID, TEMP_CELSIUS_VALID_1, PH_VALID_1, NUTRIENT_COEFFICIENT_VALID_1); pool5ParameterConstructorInvalidParameterValuesBelowBound = new Pool( NAME_WHITESPACE_UNFORMATTED, VOLUME_LITRES_BELOW_LOWER_BOUND, TEMP_CELSIUS_BELOW_LOWER_BOUND, PH_BELOW_LOWER_BOUND, NUTRIENT_COEFFICIENT_BELOW_LOWER_BOUND); pool5ParameterConstructorInvalidParameterValuesAboveBound = new Pool(NAME_NULL, VOLUME_LITRES_LOWER_BOUND, TEMP_CELSIUS_ABOVE_UPPER_BOUND, PH_ABOVE_UPPER_BOUND, NUTRIENT_COEFFICIENT_ABOVE_UPPER_BOUND); testCreatures = new 
ArrayList<>(); // must be even
        final int numberOfTestCreatures = 100;
        for (int i = 0; i < numberOfTestCreatures; i++) {
            testCreatures.add(new Guppy());
        }
        pool.setCreatures(testCreatures);
    }

    @After
    public void tearDown() throws Exception {
    }

    // ---- zero-argument constructor ----

    @Test
    public void testPoolName() {
        assertThat(pool0ParameterConstructor.getName(), is(Pool.DEFAULT_WATER_BODY_NAME));
    }

    @Test
    public void testPoolVolumeLitres() {
        assertThat(pool0ParameterConstructor.getVolumeLitres(), is(0.0));
    }

    @Test
    public void testPoolTemperature() {
        assertThat(pool0ParameterConstructor.getTemperature(), is(Pool.DEFAULT_WATER_TEMP_CELSIUS));
    }

    @Test
    public void testPoolpH() {
        assertThat(pool0ParameterConstructor.getpH(), is(Pool.NEUTRAL_PH));
    }

    @Test
    public void testPoolNutrientCoefficient() {
        assertThat(pool0ParameterConstructor.getNutrientCoefficient(), is(Pool.DEFAULT_NUTRIENT_COEFFICIENT));
    }

    @Test
    public void testPoolIdentificationNumber() {
        pool0ParameterConstructor = new Pool();
        assertThat(pool0ParameterConstructor.getIdentificationNumber(), is(Pool.getNumberCreated()));
    }

    @Test
    public void testPoolCreaturesInPoolCreatesList() {
        assertThat(testCreatures, is(instanceOf(List.class)));
    }

    @Test
    public void testPoolCreaturesInPoolIsEmptyList() {
        assertThat(pool0ParameterConstructor.getCreatures().size(), is(0));
    }

    @Test
    public void testPoolRandomNumberGeneratorGeneratesInteger() {
        int identificationNumber = pool0ParameterConstructor.getIdentificationNumber();
        assertThat(identificationNumber, is(instanceOf(Integer.class)));
    }

    @Test
    public void testPoolRandomNumberGeneratorGeneratesPositiveNumber() {
        int identificationNumber = pool0ParameterConstructor.getIdentificationNumber();
        assertThat(identificationNumber, is(greaterThanOrEqualTo(0)));
    }

    @Test
    public void testPoolIncrementsNumberOfPools() {
        int initialNumberOfPools = Pool.getNumberCreated();
        new Pool();
        int incrementedNumberOfPools = initialNumberOfPools + 1;
        int result = Pool.getNumberCreated();
        assertThat(result, is(equalTo(incrementedNumberOfPools)));
    }

    // ---- five-argument constructor ----

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleName() {
        assertThat(pool5ParameterConstructorValidParameterValues.getName(), is(NAME_VALID));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleVolumeLitres() {
        assertThat(pool5ParameterConstructorValidParameterValues.getVolumeLitres(), is(VOLUME_LITRES_VALID));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleTemperature() {
        assertThat(pool5ParameterConstructorValidParameterValues.getTemperature(), is(TEMP_CELSIUS_VALID_1));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoublePH() {
        assertThat(pool5ParameterConstructorValidParameterValues.getpH(), is(PH_VALID_1));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleNutrientCoefficient() {
        assertThat(pool5ParameterConstructorValidParameterValues.getNutrientCoefficient(),
                is(NUTRIENT_COEFFICIENT_VALID_1));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleIdentificationNumber() {
        pool5ParameterConstructorValidParameterValues = new Pool(NAME_VALID, VOLUME_LITRES_VALID,
                TEMP_CELSIUS_VALID_1, PH_VALID_1, NUTRIENT_COEFFICIENT_VALID_1);
        int numberOfPools = Pool.getNumberCreated();
        assertThat(pool5ParameterConstructorValidParameterValues.getIdentificationNumber(), is(numberOfPools));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleCreaturesInPoolCreatesList() {
        assertThat(pool5ParameterConstructorValidParameterValues.getCreatures(), is(instanceOf(List.class)));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleCreaturesInPoolCreatesEmptyList() {
        assertThat(pool5ParameterConstructorValidParameterValues.getCreatures().size(), is(0));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleRandomNumberGeneratorGeneratesInteger() {
        assertThat(pool5ParameterConstructorValidParameterValues.getIdentificationNumber(),
                is(instanceOf(Integer.class)));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleRandomNumberGeneratorGeneratesPositiveNumber() {
        assertThat(pool5ParameterConstructorValidParameterValues.getIdentificationNumber(),
                is(greaterThanOrEqualTo(0)));
    }

    @Test
    public void testPoolStringDoubleDoubleDoubleDoubleIncrementsNumberOfPools() {
        int initial = Pool.getNumberCreated();
        new Pool(NAME_VALID, VOLUME_LITRES_VALID, TEMP_CELSIUS_VALID_1, PH_VALID_1, NUTRIENT_COEFFICIENT_VALID_1);
        assertThat(initial + 1, is(equalTo(Pool.getNumberCreated())));
    }

    // ---- accessors ----

    @Test
    public void testGetNumberCreated() {
        Pool testPool = new Pool();
        assertThat(Pool.getNumberCreated(), is(testPool.getIdentificationNumber()));
    }

    @Test
    public void testGetName() {
        assertThat(pool5ParameterConstructorValidParameterValues.getName(), is(NAME_VALID));
    }

    @Test
    public void testGetVolumeLitres() {
        assertThat(pool5ParameterConstructorValidParameterValues.getVolumeLitres(), is(VOLUME_LITRES_VALID));
    }

    @Test
    public void testGetTemperature() {
        assertThat(pool5ParameterConstructorValidParameterValues.getTemperature(), is(TEMP_CELSIUS_VALID_1));
    }

    @Test
    public void testGetpH() {
        assertThat(pool5ParameterConstructorValidParameterValues.getpH(), is(PH_VALID_1));
    }

    @Test
    public void testGetNutrientCoefficient() {
        assertThat(pool5ParameterConstructorValidParameterValues.getNutrientCoefficient(),
                is(NUTRIENT_COEFFICIENT_VALID_1));
    }

    // FIX: previously asserted getName() — a copy-paste slip that never exercised getCreatures().
    @Test
    public void testGetCreaturesInPool() {
        assertThat(pool.getCreatures(), is(testCreatures));
    }

    // FIX: previously asserted getName() — now actually exercises getIdentificationNumber().
    @Test
    public void testGetIdentificationNumber() {
        assertThat(pool5ParameterConstructorValidParameterValues.getIdentificationNumber(),
                is(greaterThanOrEqualTo(0)));
    }

    // ---- mutators ----

    @Test
    public void testSetNameValid() {
        pool.setName(NAME_VALID);
        assertThat(pool.getName(), is(NAME_VALID));
    }

    @Test
    public void testSetNameWhitespaceUnformatted() {
        pool.setName(NAME_WHITESPACE_UNFORMATTED);
        String nameNoSpaces = NAME_WHITESPACE_UNFORMATTED.replace(" ", "");
        String nameFormatted = nameNoSpaces.substring(0, 1).toUpperCase()
                + nameNoSpaces.substring(1).toLowerCase();
        assertThat(pool.getName(), is(nameFormatted));
    }

    @Test
    public void testSetNameNull() {
        pool.setName(null);
        assertThat(pool.getName(), is(Pool.DEFAULT_WATER_BODY_NAME));
    }

    @Test
    public void testSetVolumeLitresBelowLowerBound() {
        pool.setVolumeLitres(VOLUME_LITRES_BELOW_LOWER_BOUND);
        assertThat(pool.getVolumeLitres(), is(0.0));
    }

    @Test
    public void testSetVolumeLitresLowerBound() {
        pool.setVolumeLitres(VOLUME_LITRES_LOWER_BOUND);
        assertThat(pool.getVolumeLitres(), is(0.0));
    }

    @Test
    public void testSetVolumeLitresValid() {
        pool.setVolumeLitres(VOLUME_LITRES_VALID);
        assertThat(pool.getVolumeLitres(), is(VOLUME_LITRES_VALID));
    }

    @Test
    public void testSetTemperatureBelowLowerBound() {
        pool.setTemperature(TEMP_CELSIUS_BELOW_LOWER_BOUND);
        assertThat(pool.getTemperature(), is(Pool.DEFAULT_WATER_TEMP_CELSIUS));
    }

    @Test
    public void testSetTemperatureLowerBound() {
        pool.setTemperature(TEMP_CELSIUS_LOWER_BOUND);
        assertThat(pool.getTemperature(), is(TEMP_CELSIUS_LOWER_BOUND));
    }

    @Test
    public void testSetTemperatureValid1() {
        pool.setTemperature(TEMP_CELSIUS_VALID_1);
        assertThat(pool.getTemperature(), is(TEMP_CELSIUS_VALID_1));
    }

    @Test
    public void testSetTemperatureValid2() {
        pool.setTemperature(TEMP_CELSIUS_VALID_2);
        assertThat(pool.getTemperature(), is(TEMP_CELSIUS_VALID_2));
    }

    @Test
    public void testSetTemperatureUpperBound() {
        pool.setTemperature(TEMP_CELSIUS_UPPER_BOUND);
        assertThat(pool.getTemperature(), is(TEMP_CELSIUS_UPPER_BOUND));
    }

    @Test
    public void testSetTemperatureAboveUpperBound() {
        pool.setTemperature(TEMP_CELSIUS_ABOVE_UPPER_BOUND);
        assertThat(pool.getTemperature(), is(Pool.DEFAULT_WATER_TEMP_CELSIUS));
    }

    @Test
    public void testSetpHBelowLowerBound() {
        pool.setpH(PH_BELOW_LOWER_BOUND);
        assertThat(pool.getpH(), is(Pool.NEUTRAL_PH));
    }

    @Test
    public void testSetpHLowerBound() {
        pool.setpH(PH_LOWER_BOUND);
        assertThat(pool.getpH(), is(PH_LOWER_BOUND));
    }

    @Test
    public void testSetpHValid1() {
        pool.setpH(PH_VALID_1);
        assertThat(pool.getpH(), is(PH_VALID_1));
    }

    @Test
    public void testSetpHValid2() {
        pool.setpH(PH_VALID_2);
        assertThat(pool.getpH(), is(PH_VALID_2));
    }

    @Test
    public void testSetpHUpperBound() {
        pool.setpH(PH_UPPER_BOUND);
        assertThat(pool.getpH(), is(PH_UPPER_BOUND));
    }

    @Test
    public void testSetpHAboveUpperBound() {
        pool.setpH(PH_ABOVE_UPPER_BOUND);
        assertThat(pool.getpH(), is(Pool.NEUTRAL_PH));
    }

    @Test
    public void testSetNutrientCoefficientBelowLowerBound() {
        pool.setNutrientCoefficient(NUTRIENT_COEFFICIENT_BELOW_LOWER_BOUND);
        assertThat(pool.getNutrientCoefficient(), is(Pool.DEFAULT_NUTRIENT_COEFFICIENT));
    }

    @Test
    public void testSetNutrientCoefficientLowerBound() {
        pool.setNutrientCoefficient(NUTRIENT_COEFFICIENT_LOWER_BOUND);
        assertThat(pool.getNutrientCoefficient(), is(NUTRIENT_COEFFICIENT_LOWER_BOUND));
    }

    @Test
    public void testSetNutrientCoefficientValid1() {
        pool.setNutrientCoefficient(NUTRIENT_COEFFICIENT_VALID_1);
        assertThat(pool.getNutrientCoefficient(), is(NUTRIENT_COEFFICIENT_VALID_1));
    }

    @Test
    public void testSetNutrientCoefficientValid2() {
        pool.setNutrientCoefficient(NUTRIENT_COEFFICIENT_VALID_2);
        assertThat(pool.getNutrientCoefficient(), is(NUTRIENT_COEFFICIENT_VALID_2));
    }

    @Test
    public void testSetNutrientCoefficientUpperBound() {
        pool.setNutrientCoefficient(NUTRIENT_COEFFICIENT_UPPER_BOUND);
        assertThat(pool.getNutrientCoefficient(), is(NUTRIENT_COEFFICIENT_UPPER_BOUND));
    }

    @Test
    public void testSetNutrientCoefficientAboveUpperBound() {
        pool.setNutrientCoefficient(NUTRIENT_COEFFICIENT_ABOVE_UPPER_BOUND);
        assertThat(pool.getNutrientCoefficient(), is(Pool.DEFAULT_NUTRIENT_COEFFICIENT));
    }

    @Test
    public void testSetCreaturesInPoolSetsCorrectList() {
        assertThat(pool.getCreatures(), is(testCreatures));
    }

    @Test
    public void testSetCreaturesInPoolSetsListOfCreatures() {
        assertThat(pool.getCreatures(), is(instanceOf(List.class)));
    }

    @Test
    public void testSetCreaturesInPoolIgnoresNull() {
        pool.setCreatures(null);
        assertThat(pool.getCreatures(), is(testCreatures));
    }

    // ---- changeNutrientCoefficient clamps to [MINIMUM, MAXIMUM] ----

    @Test
    public void testChangeNutrientCoefficientDecreaseToBelowLowerBound() {
        final double delta = 0.1;
        pool.setNutrientCoefficient(Pool.MINIMUM_NUTRIENT_COEFFICIENT + delta);
        pool.changeNutrientCoefficient(-delta * 2);
        assertThat(pool.getNutrientCoefficient(), is(Pool.MINIMUM_NUTRIENT_COEFFICIENT));
    }

    @Test
    public void testChangeNutrientCoefficientDecreaseToLowerBound() {
        final double delta = 0.1;
        pool.setNutrientCoefficient(Pool.MINIMUM_NUTRIENT_COEFFICIENT + delta);
        pool.changeNutrientCoefficient(-delta);
        assertThat(pool.getNutrientCoefficient(), is(Pool.MINIMUM_NUTRIENT_COEFFICIENT));
    }

    @Test
    public void testChangeNutrientCoefficientValidDecrease() {
        final double delta = 0.1;
        double initial = delta * 2;
        double result = initial - delta;
        pool.setNutrientCoefficient(Pool.MINIMUM_NUTRIENT_COEFFICIENT + initial);
        pool.changeNutrientCoefficient(-delta);
        assertThat(pool.getNutrientCoefficient(), is(result));
    }

    @Test
    public void testChangeNutrientCoefficientNoChange() {
        final double delta = 0.1;
        double initial = delta * 2;
        pool.setNutrientCoefficient(Pool.MINIMUM_NUTRIENT_COEFFICIENT + initial);
        pool.changeNutrientCoefficient(0.0);
        assertThat(pool.getNutrientCoefficient(), is(initial));
    }

    @Test
    public void testChangeNutrientCoefficientValidIncrease() {
        final double delta = 0.1;
        double initial = delta * 2;
        double result = initial + delta;
        pool.setNutrientCoefficient(Pool.MINIMUM_NUTRIENT_COEFFICIENT + initial);
        pool.changeNutrientCoefficient(delta);
        assertThat(pool.getNutrientCoefficient(), is(result));
    }

    @Test
    public void testChangeNutrientCoefficientIncreaseToUpperBound() {
        final double delta = 0.5;
        double result = delta + delta;
        pool.setNutrientCoefficient(delta);
        pool.changeNutrientCoefficient(delta);
        assertThat(pool.getNutrientCoefficient(), is(result));
    }

    @Test
    public void testChangeNutrientCoefficientIncreaseToAboveUpperBound() {
        final double delta = 0.5;
        double result = Pool.MAXIMUM_NUTRIENT_COEFFICIENT;
        pool.setNutrientCoefficient(delta);
        pool.changeNutrientCoefficient(delta);
        assertThat(pool.getNutrientCoefficient(), is(result));
    }

    // ---- changeTemperature clamps to [MINIMUM, MAXIMUM] ----

    @Test
    public void testChangeTemperatureToBelowLowerBound() {
        double delta = -1;
        pool.setTemperature(Pool.MINIMUM_WATER_TEMP_CELSIUS);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MINIMUM_WATER_TEMP_CELSIUS));
    }

    @Test
    public void testChangeTemperatureToLowerBound() {
        double delta = -1;
        pool.setTemperature(Pool.MINIMUM_WATER_TEMP_CELSIUS + 1);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MINIMUM_WATER_TEMP_CELSIUS));
    }

    @Test
    public void testChangeTemperatureValidDecrease() {
        double delta = -1;
        pool.setTemperature(Pool.MINIMUM_WATER_TEMP_CELSIUS + 2);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MINIMUM_WATER_TEMP_CELSIUS + 1));
    }

    @Test
    public void testChangeTemperatureNoChange() {
        double delta = 0;
        pool.setTemperature(Pool.MINIMUM_WATER_TEMP_CELSIUS + 1);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MINIMUM_WATER_TEMP_CELSIUS + 1));
    }

    @Test
    public void testChangeTemperatureValidIncrease() {
        double delta = 1;
        pool.setTemperature(Pool.MAXIMUM_WATER_TEMP_CELSIUS - 2);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MAXIMUM_WATER_TEMP_CELSIUS - 1));
    }

    @Test
    public void testChangeTemperatureToUpperBound() {
        double delta = 1;
        pool.setTemperature(Pool.MAXIMUM_WATER_TEMP_CELSIUS - 1);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MAXIMUM_WATER_TEMP_CELSIUS));
    }

    @Test
    public void testChangeTemperatureToAboveUpperBound() {
        double delta = 2;
        pool.setTemperature(Pool.MAXIMUM_WATER_TEMP_CELSIUS);
        pool.changeTemperature(delta);
        assertThat(pool.getTemperature(), is(Pool.MAXIMUM_WATER_TEMP_CELSIUS));
    }

    // ---- adding creatures ----

    @Test
    public void testAddCreatureEmptyList() {
        List<Creature> emptyList = new ArrayList<>();
        pool.setCreatures(emptyList);
        Creature creature = new Guppy();
        pool.addCreature(creature);
        assertThat(pool.getCreatures().get(0), is(creature));
    }

    @Test
    public void testAddCreatureNotEmptyList() {
        List<Creature> emptyList = new ArrayList<>();
        pool.setCreatures(emptyList);
        pool.addCreature(new Guppy());
        Creature creature = new Guppy();
        pool.addCreature(creature);
        assertThat(pool.getCreatures().get(1), is(creature));
    }

    @Test
    public void testAddCreatureValidCreatureSucceeds() {
        assertTrue(pool.addCreature(new Guppy()));
    }

    @Test
    public void testAddCreatureNullReturnsFalse() {
        assertFalse(pool.addCreature(null));
    }

    @Test
    public void testAddCreaturesEmptyList() {
        List<Creature> emptyList = new ArrayList<>();
        pool.setCreatures(emptyList);
        pool.addCreatures(testCreatures);
        Creature result = pool.getCreatures().get(pool.getCreatures().size() - 1);
        Creature expected = testCreatures.get(testCreatures.size() - 1);
        assertThat(result, is(expected));
    }

    @Test
    public void testAddCreaturesNotEmptyList() {
        List<Creature> emptyList = new ArrayList<>();
        pool.setCreatures(emptyList);
        pool.addCreature(new Guppy());
        pool.addCreatures(testCreatures);
        Creature result = pool.getCreatures().get(pool.getCreatures().size() - 1);
        Creature expected = testCreatures.get(testCreatures.size() - 1);
        assertThat(result, is(expected));
    }

    // FIX: @Test was missing, so the JUnit 4 runner silently skipped this test.
    @Test
    public void testAddCreaturesNullReturnsFalse() {
        assertFalse(pool.addCreatures(null));
    }

    @Test
    public void testAddCreaturesValidSetSucceeds() {
        assertTrue(pool.addCreatures(testCreatures));
    }

    // ---- population and statistics ----

    @Test
    public void testGetPopulation() {
        assertThat(pool.getPopulation(), is(testCreatures.size()));
    }

    @Test
    public void testGetPopulationZero() {
        pool.setCreatures(new ArrayList<>());
        assertThat(pool.getPopulation(), is(0));
    }

    @Test
    public void testApplyNutrientCoefficient() {
        // applyNutrientCoefficient is stochastic, so allow a 10%-of-population tolerance.
        double expectedDead = Math.round((1 - pool.getNutrientCoefficient()) * pool.getPopulation());
        final double tolerance = pool.getPopulation() * 0.1;
        assertThat((double) pool.applyNutrientCoefficient(), is(closeTo(expectedDead, tolerance)));
    }

    @Test
    public void testRemoveDeadCreaturesNoDead() {
        int removed = pool.removeDeadCreatures();
        assertThat(removed, is(0));
    }

    @Test
    public void testRemoveDeadCreaturesSomeDead() {
        int countDead = 0;
        for (Creature creature : testCreatures) {
            if (GENERATOR.nextBoolean()) {
                creature.getHealth().setAlive(false);
                countDead++;
            }
        }
        int removed = pool.removeDeadCreatures();
        assertThat(removed, is(countDead));
    }

    @Test
    public void testRemoveDeadCreaturesAllDead() {
        int countDead = 0;
        for (Creature creature : testCreatures) {
            creature.getHealth().setAlive(false);
            countDead++;
        }
        int removed = pool.removeDeadCreatures();
        assertThat(removed, is(countDead));
    }

    @Test
    public void testGetCreatureVolumeRequirementInLitres() {
        final double mLPerL = 1000.0;
        double volume = 0.0;
        for (Creature creature : testCreatures) {
            volume += creature.getVolumeNeeded() / mLPerL;
        }
        assertThat(pool.getCreatureVolumeRequirementInLitres(), is(volume));
    }

    @Test
    public void testGetCreatureVolumeRequirementInLitresExcludeDeadCreatures() {
        final double mLPerL = 1000.0;
        double volume = 0.0;
        for (Creature creature : testCreatures) {
            if (GENERATOR.nextBoolean()) {
                volume += creature.getVolumeNeeded() / mLPerL;
            } else {
                creature.getHealth().setAlive(false);
            }
        }
        assertThat(pool.getCreatureVolumeRequirementInLitres(), is(volume));
    }

    @Test
    public void testGetAverageAgeInWeeks() {
        int totalAge = 0;
        int count = 0;
        for (Creature creature : testCreatures) {
            creature.getHealth().setAge(GENERATOR.nextInt(Guppy.MAXIMUM_AGE));
            totalAge += creature.getHealth().getAge();
            count++;
        }
        double average = count > 0 ? (double) totalAge / count : 0.0;
        assertThat(pool.getAverageAgeInWeeks(), is(average));
    }

    @Test
    public void testGetAverageAgeInWeeksExcludeDeadCreatures() {
        int totalAge = 0;
        int count = 0;
        for (Creature creature : testCreatures) {
            if (GENERATOR.nextBoolean()) {
                creature.getHealth().setAge(GENERATOR.nextInt(Guppy.MAXIMUM_AGE));
                totalAge += creature.getHealth().getAge();
                count++;
            } else {
                creature.getHealth().setAlive(false);
            }
        }
        double average = count > 0 ? (double) totalAge / count : 0.0;
        assertThat(pool.getAverageAgeInWeeks(), is(average));
    }

    @Test
    public void testGetAverageHealthCoefficient() {
        double totalHealthCoefficient = 0.0;
        int count = 0;
        for (Creature creature : testCreatures) {
            creature.getHealth().setCoefficient(GENERATOR.nextDouble());
            totalHealthCoefficient += creature.getHealth().getCoefficient();
            count++;
        }
        double average = count > 0 ? totalHealthCoefficient / count : 0.0;
        assertThat(pool.getAverageHealthCoefficient(), is(average));
    }

    @Test
    public void testGetAverageHealthCoefficientExcludeDeadCreatures() {
        double totalHealthCoefficient = 0.0;
        int count = 0;
        for (Creature creature : testCreatures) {
            if (GENERATOR.nextBoolean()) {
                creature.getHealth().setCoefficient(GENERATOR.nextDouble());
                totalHealthCoefficient += creature.getHealth().getCoefficient();
                count++;
            } else {
                creature.getHealth().setAlive(false);
            }
        }
        double average = count > 0 ? totalHealthCoefficient / count : 0.0;
        assertThat(pool.getAverageHealthCoefficient(), is(average));
    }

    @Test
    public void testGetFemalePercentage() {
        int countFemale = 0;
        int countAll = 0;
        for (Creature creature : testCreatures) {
            creature.setFemale(GENERATOR.nextBoolean());
            countFemale += creature.isFemale() ? 1 : 0;
            countAll++;
        }
        double average = countAll > 0 ? (double) countFemale / countAll : 0.0;
        assertThat(pool.getFemaleProportion(), is(average));
    }

    @Test
    public void testGetFemalePercentageExcludeDeadCreatures() {
        int countFemale = 0;
        int countAll = 0;
        for (Creature creature : testCreatures) {
            if (GENERATOR.nextBoolean()) {
                creature.setFemale(GENERATOR.nextBoolean());
                countFemale += creature.isFemale() ? 1 : 0;
                countAll++;
            } else {
                creature.getHealth().setAlive(false);
            }
        }
        double average = countAll > 0 ? (double) countFemale / countAll : 0.0;
        assertThat(pool.getFemaleProportion(), is(average));
    }

    @Test
    public void testGetMedianAgeEvenAmount() {
        for (Creature creature : testCreatures) {
            creature.getHealth().setAge(GENERATOR.nextInt(Guppy.MAXIMUM_AGE - 1));
        }
        List<Integer> ages = pool.sortLivingCreatureAges();
        if (ages.size() % 2 != 0) {
            ages.add(50);
            pool.addCreature(new Guppy(50, 0, true, 0));
        }
        double medianAge = (ages.get(ages.size() / 2 - 1) + ages.get(ages.size() / 2)) / 2.0;
        assertThat(pool.getMedianAge(), is(medianAge));
    }

    @Test
    public void testGetMedianAgeOddAmount() {
        for (Creature creature : testCreatures) {
            creature.getHealth().setAge(GENERATOR.nextInt(Guppy.MAXIMUM_AGE - 1));
        }
        List<Integer> ages = pool.sortLivingCreatureAges();
        if (ages.size() % 2 == 0) {
            ages.add(50);
            pool.addCreature(new Guppy(49, 0, true, 0));
        }
        // NOTE(review): for an odd-sized sorted list the median index is usually size/2, not
        // size/2 - 1, and the age appended to `ages` (50) differs from the Guppy age (49).
        // Kept as-is because the expected value must mirror Pool.getMedianAge() — verify
        // against the Pool implementation before changing.
        double medianAge = ages.get(ages.size() / 2 - 1);
        assertThat(pool.getMedianAge(), is(medianAge));
    }

    @Test
    public void testGetMedianAgeExcludeDeadCreatures() {
        for (Creature creature : testCreatures) {
            if (GENERATOR.nextBoolean()) {
                creature.getHealth().setAge(GENERATOR.nextInt(Guppy.MAXIMUM_AGE - 1));
            } else {
                creature.getHealth().setAlive(false);
            }
        }
        List<Integer> ages = pool.sortLivingCreatureAges();
        double medianAge;
        if (ages.size() % 2 == 0) {
            medianAge = (ages.get(ages.size() / 2 - 1) + ages.get(ages.size() / 2)) / 2.0;
        } else if (ages.size() == 1) {
            medianAge = ages.get(0);
        } else {
            medianAge = ages.get(ages.size() / 2 - 1);
        }
        assertThat(pool.getMedianAge(), is(medianAge));
    }

    // ---- crowding ----

    @Test
    public void testAdjustForCrowdingRemovesEnough() {
        pool.setVolumeLitres(1.4);
        pool.adjustForCrowding();
        assertThat(pool.getVolumeLitres(),
                is(greaterThanOrEqualTo(pool.getCreatureVolumeRequirementInLitres())));
    }

    @Test
    public void testAdjustForCrowdingRemovesJustEnough() {
        pool.setVolumeLitres(1.5);
        for (Creature creature : pool.getCreatures()) {
            creature.getHealth().setAge(0);
        }
        pool.adjustForCrowding();
        assertThat(pool.getVolumeLitres(), is(equalTo(pool.getCreatureVolumeRequirementInLitres())));
    }

    @Test
    public void testAdjustForCrowdingRemovesCreatures() {
        pool.setVolumeLitres(1.5);
        int initial = pool.getPopulation();
        int removedCreatures = pool.adjustForCrowding().size();
        assertThat(removedCreatures, is(equalTo(initial - pool.getPopulation())));
    }

    @Test
    public void testAdjustForCrowdingReturnsRemovedCreatures() {
        pool.setVolumeLitres(1.5);
        List<Creature> removedCreatures = pool.adjustForCrowding();
        assertThat(removedCreatures, is(instanceOf(List.class)));
    }

    @Test
    public void testPrintDetails() {
        // if toString works, this should work
        // how do you monitor the output to the console/isolate to only this
        fail("Not yet implemented"); // TODO
    }

    @Test
    public void testToString() {
        // Expected shape: [name=..., volumeLitres=..., temperatureCelsius=..., pH=...,
        // nutrientCoefficient=..., identificationNumber=..., creaturesInPool=..., randomNumberGenerator=...]
        // No getter for the random number generator, so it cannot be asserted from outside the
        // Pool class.
        fail("Not yet implemented"); // TODO
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.spi.security.authorization.principalbased.impl; import com.google.common.collect.ImmutableMap; import org.apache.jackrabbit.api.security.JackrabbitAccessControlManager; import org.apache.jackrabbit.api.security.principal.ItemBasedPrincipal; import org.apache.jackrabbit.oak.api.PropertyState; import org.apache.jackrabbit.oak.api.Tree; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.namepath.NamePathMapper; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.plugins.tree.TreeLocation; import org.apache.jackrabbit.oak.plugins.tree.TreeType; import org.apache.jackrabbit.oak.spi.security.authorization.permission.Permissions; import org.apache.jackrabbit.oak.spi.security.authorization.permission.TreePermission; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBits; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeBitsProvider; import org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants; import org.junit.Before; import org.junit.Test; import javax.jcr.PropertyType; import javax.jcr.Value; import java.security.Principal; import java.util.Map; 
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE; import static org.apache.jackrabbit.JcrConstants.NT_FOLDER; import static org.apache.jackrabbit.oak.spi.security.authorization.accesscontrol.AccessControlConstants.REP_NT_NAMES; import static org.apache.jackrabbit.oak.spi.security.authorization.principalbased.impl.Constants.REP_PRINCIPAL_POLICY; import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_READ; import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_REMOVE_CHILD_NODES; import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_REMOVE_NODE; import static org.apache.jackrabbit.oak.spi.security.privilege.PrivilegeConstants.JCR_VERSION_MANAGEMENT; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.mock; public class PrincipalBasedPermissionProviderTest extends AbstractPrincipalBasedTest { private Principal testPrincipal; private PrincipalBasedPermissionProvider permissionProvider; private String contentPath; private String childPath; @Before public void before() throws Exception { super.before(); contentPath = PathUtils.getAncestorPath(TEST_OAK_PATH, 3); childPath = PathUtils.getAncestorPath(TEST_OAK_PATH, 2); testPrincipal = getTestSystemUser().getPrincipal(); setupContentTrees(TEST_OAK_PATH); setupContentTrees(NT_FOLDER, childPath + "/folder", TEST_OAK_PATH + "/folder"); // setup permissions on childPath + TEST_OAK_PATH PrincipalPolicyImpl policy = setupPrincipalBasedAccessControl(testPrincipal, getNamePathMapper().getJcrPath(childPath), JCR_READ, JCR_REMOVE_CHILD_NODES); addPrincipalBasedEntry(policy, getNamePathMapper().getJcrPath(TEST_OAK_PATH), JCR_VERSION_MANAGEMENT); // add an entry with nt-name restriction at childPath JackrabbitAccessControlManager jacm = getAccessControlManager(root); policy 
= getPrincipalPolicyImpl(testPrincipal, jacm); Map<String, Value[]> restrictions = ImmutableMap.of(REP_NT_NAMES, new Value[] {getValueFactory(root).createValue(NT_FOLDER, PropertyType.NAME)}); policy.addEntry(childPath, privilegesFromNames(JCR_REMOVE_NODE), ImmutableMap.of(), restrictions); jacm.setPolicy(policy.getPath(), policy); root.commit(); permissionProvider = createPermissionProvider(root, getTestSystemUser().getPrincipal()); } @Override protected NamePathMapper getNamePathMapper() { return NamePathMapper.DEFAULT; } @Test public void testSupportedPrivileges() { for (PrivilegeBits bits : PrivilegeBits.BUILT_IN.values()) { assertEquals(bits, permissionProvider.supportedPrivileges(null, bits)); assertEquals(bits, permissionProvider.supportedPrivileges(mock(Tree.class), bits)); } } @Test public void testSupportedPrivilegesAllBits() { PrivilegeBits all = new PrivilegeBitsProvider(root).getBits(PrivilegeConstants.JCR_ALL); assertEquals(all, permissionProvider.supportedPrivileges(null, all)); assertEquals(all, permissionProvider.supportedPrivileges(mock(Tree.class), all)); } @Test public void testSupportedPrivilegesNullBits() { PrivilegeBits all = new PrivilegeBitsProvider(root).getBits(PrivilegeConstants.JCR_ALL); assertEquals(all, permissionProvider.supportedPrivileges(null, null)); assertEquals(all, permissionProvider.supportedPrivileges(mock(Tree.class), null)); } @Test public void testSupportedPermissions() { Tree tree = mock(Tree.class); PropertyState property = mock(PropertyState.class); for (long permission : Permissions.aggregates(Permissions.ALL)) { assertEquals(permission, permissionProvider.supportedPermissions(tree, property, permission)); assertEquals(permission, permissionProvider.supportedPermissions(tree, null, permission)); } assertEquals(Permissions.ALL, permissionProvider.supportedPermissions(tree, property, Permissions.ALL)); assertEquals(Permissions.ALL, permissionProvider.supportedPermissions(tree, null, Permissions.ALL)); } @Test public 
void testSupportedPermissionsTreeLocation() { TreeLocation location = mock(TreeLocation.class); for (long permission : Permissions.aggregates(Permissions.ALL)) { assertEquals(permission, permissionProvider.supportedPermissions(location, permission)); } assertEquals(Permissions.ALL, permissionProvider.supportedPermissions(location, Permissions.ALL)); } @Test public void testSupportedPermissionsTreePermission() { TreePermission tp = mock(TreePermission.class); PropertyState property = mock(PropertyState.class); for (long permission : Permissions.aggregates(Permissions.ALL)) { assertEquals(permission, permissionProvider.supportedPermissions(tp, property, permission)); assertEquals(permission, permissionProvider.supportedPermissions(tp, null, permission)); } assertEquals(Permissions.ALL, permissionProvider.supportedPermissions(tp, property, Permissions.ALL)); assertEquals(Permissions.ALL, permissionProvider.supportedPermissions(tp, null, Permissions.ALL)); } @Test public void testHasPrivileges() { assertTrue(permissionProvider.hasPrivileges(root.getTree(childPath), PrivilegeConstants.JCR_READ)); assertFalse(permissionProvider.hasPrivileges(root.getTree(childPath), PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_REMOVE_NODE)); assertTrue(permissionProvider.hasPrivileges(root.getTree(childPath + "/folder"), PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_REMOVE_NODE)); assertFalse(permissionProvider.hasPrivileges(root.getTree(childPath), PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_VERSION_MANAGEMENT)); assertTrue(permissionProvider.hasPrivileges(root.getTree(TEST_OAK_PATH), PrivilegeConstants.JCR_READ, PrivilegeConstants.JCR_VERSION_MANAGEMENT)); } @Test public void testGetTreePermission() { Tree tree = root.getTree(PathUtils.ROOT_PATH); TreePermission tp = permissionProvider.getTreePermission(tree, TreePermission.EMPTY); for (String elem : PathUtils.elements(TEST_OAK_PATH)) { tree = tree.getChild(elem); tp = permissionProvider.getTreePermission(tree, 
tp); assertTrue(tp instanceof AbstractTreePermission); assertSame(TreeType.DEFAULT, ((AbstractTreePermission) tp).getType()); } } @Test public void testIsGranted() { Tree t = root.getTree(childPath); assertTrue(permissionProvider.isGranted(t, null, Permissions.READ_NODE)); assertTrue(permissionProvider.isGranted(t, t.getProperty(JCR_PRIMARYTYPE), Permissions.READ_PROPERTY)); assertFalse(permissionProvider.isGranted(t, null, Permissions.READ_NODE|Permissions.VERSION_MANAGEMENT)); t = root.getTree(TEST_OAK_PATH); assertTrue(permissionProvider.isGranted(t, null, Permissions.READ_NODE|Permissions.VERSION_MANAGEMENT)); assertTrue(permissionProvider.isGranted(t, t.getProperty(JCR_PRIMARYTYPE), Permissions.READ_PROPERTY|Permissions.VERSION_MANAGEMENT)); } @Test public void testIsGrantedNonExistingTree() { Tree nonExisting = root.getTree(TEST_OAK_PATH).getChild("nonExisting"); assertTrue(permissionProvider.isGranted(nonExisting, null, Permissions.READ)); assertTrue(permissionProvider.isGranted(nonExisting, PropertyStates.createProperty("propName", "value"), Permissions.READ)); } @Test public void testIsGrantedWithRestriction() { assertFalse(permissionProvider.isGranted(root.getTree(TEST_OAK_PATH), null, Permissions.REMOVE_NODE)); assertFalse(permissionProvider.isGranted(root.getTree(childPath), null, Permissions.REMOVE_NODE)); assertTrue(permissionProvider.isGranted(root.getTree(TEST_OAK_PATH + "/folder"), null, Permissions.REMOVE_NODE)); assertTrue(permissionProvider.isGranted(root.getTree(childPath + "/folder"), null, Permissions.REMOVE_NODE)); } @Test public void testIsGrantedTreeLocation() { TreeLocation tl = TreeLocation.create(root, TEST_OAK_PATH); assertFalse(permissionProvider.isGranted(tl, Permissions.READ|Permissions.REMOVE_NODE)); } @Test public void testIsGrantedNonExistingTreeLocation() { TreeLocation tl = TreeLocation.create(root, childPath + "/nonExisting"); assertTrue(permissionProvider.isGranted(tl, Permissions.READ)); 
assertFalse(permissionProvider.isGranted(tl, Permissions.REMOVE_NODE)); } @Test public void testIsGrantedNonExistingParentTreeLocation() { TreeLocation tl = TreeLocation.create(root, childPath + "/nonExistingParent/nonExisting"); assertTrue(permissionProvider.isGranted(tl, Permissions.READ)); assertFalse(permissionProvider.isGranted(tl, Permissions.REMOVE_NODE)); } @Test public void testIsGrantedAccessControlTreeLocation() throws Exception{ TreeLocation tl = TreeLocation.create(root, PathUtils.concat(getTestSystemUser().getPath(), REP_PRINCIPAL_POLICY)); assertFalse(permissionProvider.isGranted(tl, Permissions.READ)); } }
/*
 * Copyright 2014 Twitter, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.twitter.hpack;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.Type;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.Set;

import com.google.gson.FieldNamingPolicy;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonDeserializationContext;
import com.google.gson.JsonDeserializer;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParseException;

/**
 * A single HPACK interop test case loaded from a JSON fixture.
 *
 * <p>A test case consists of encoder/decoder settings plus a sequence of
 * {@link HeaderBlock}s; each block pairs a header list with its expected
 * encoded bytes and the expected dynamic-table state after processing.
 * {@link #testCompress()} checks the encoder direction and
 * {@link #testDecompress()} checks the decoder direction.
 */
final class TestCase {

  // Fixture JSON uses snake_case keys (e.g. "max_header_table_size");
  // the naming policy maps them onto the camelCase fields below.
  // HeaderField entries are single-entry JSON objects, hence the adapter.
  private static final Gson GSON = new GsonBuilder()
      .setFieldNamingPolicy(FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES)
      .registerTypeAdapter(HeaderField.class, new HeaderFieldDeserializer())
      .create();

  // -1 means "not specified in the fixture"; see createEncoder()/createDecoder().
  int maxHeaderTableSize = -1;
  boolean useIndexing = true;
  boolean sensitiveHeaders;
  boolean forceHuffmanOn;
  boolean forceHuffmanOff;

  List<HeaderBlock> headerBlocks;

  // Instances are created only by Gson reflection via load().
  private TestCase() {}

  /**
   * Loads a test case from a JSON stream and pre-decodes each header block's
   * hex-encoded byte string.
   *
   * <p>The caller retains ownership of {@code is}; this method does not close it.
   *
   * @param is stream containing the JSON fixture (must be UTF-8 encoded)
   * @return the parsed test case
   * @throws IOException if the hex payload is malformed or decoding fails
   */
  static TestCase load(InputStream is) throws IOException {
    // Fix: read the fixture as UTF-8 explicitly instead of the platform default
    // charset, so test data decodes identically on every machine.
    // (UnsupportedEncodingException extends IOException, so the signature is unchanged.)
    InputStreamReader r = new InputStreamReader(is, "UTF-8");
    TestCase testCase = GSON.fromJson(r, TestCase.class);
    for (HeaderBlock headerBlock : testCase.headerBlocks) {
      headerBlock.encodedBytes = Hex.decodeHex(headerBlock.getEncodedStr().toCharArray());
    }
    return testCase;
  }

  /**
   * Encodes every header block and asserts that the produced bytes, the
   * encoder's dynamic table contents, and the table size all match the fixture.
   *
   * @throws Exception if any expectation fails (as {@link AssertionError}) or encoding errors occur
   */
  void testCompress() throws Exception {
    Encoder encoder = createEncoder();

    for (HeaderBlock headerBlock : headerBlocks) {
      byte[] actual =
          encode(encoder, headerBlock.getHeaders(), headerBlock.getMaxHeaderTableSize(),
              sensitiveHeaders);

      if (!Arrays.equals(actual, headerBlock.encodedBytes)) {
        throw new AssertionError(
            "\nEXPECTED:\n" + headerBlock.getEncodedStr() +
            "\nACTUAL:\n" + Hex.encodeHexString(actual));
      }

      // Compare the encoder's dynamic table, entry by entry, with the fixture.
      List<HeaderField> actualDynamicTable = new ArrayList<HeaderField>();
      for (int index = 0; index < encoder.length(); index++) {
        actualDynamicTable.add(encoder.getHeaderField(index));
      }

      List<HeaderField> expectedDynamicTable = headerBlock.getDynamicTable();

      if (!expectedDynamicTable.equals(actualDynamicTable)) {
        throw new AssertionError(
            "\nEXPECTED DYNAMIC TABLE:\n" + expectedDynamicTable +
            "\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable);
      }

      if (headerBlock.getTableSize() != encoder.size()) {
        throw new AssertionError(
            "\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() +
            "\n ACTUAL TABLE SIZE : " + encoder.size());
      }
    }
  }

  /**
   * Decodes every header block and asserts that the emitted headers, the
   * decoder's dynamic table contents, and the table size all match the fixture.
   *
   * @throws Exception if any expectation fails (as {@link AssertionError}) or decoding errors occur
   */
  void testDecompress() throws Exception {
    Decoder decoder = createDecoder();

    for (HeaderBlock headerBlock : headerBlocks) {
      List<HeaderField> actualHeaders = decode(decoder, headerBlock.encodedBytes);

      // Copy the expected headers so equality compares plain HeaderField values.
      List<HeaderField> expectedHeaders = new ArrayList<HeaderField>();
      for (HeaderField h : headerBlock.getHeaders()) {
        expectedHeaders.add(new HeaderField(h.name, h.value));
      }

      if (!expectedHeaders.equals(actualHeaders)) {
        throw new AssertionError(
            "\nEXPECTED:\n" + expectedHeaders +
            "\nACTUAL:\n" + actualHeaders);
      }

      List<HeaderField> actualDynamicTable = new ArrayList<HeaderField>();
      for (int index = 0; index < decoder.length(); index++) {
        actualDynamicTable.add(decoder.getHeaderField(index));
      }

      List<HeaderField> expectedDynamicTable = headerBlock.getDynamicTable();

      if (!expectedDynamicTable.equals(actualDynamicTable)) {
        throw new AssertionError(
            "\nEXPECTED DYNAMIC TABLE:\n" + expectedDynamicTable +
            "\nACTUAL DYNAMIC TABLE:\n" + actualDynamicTable);
      }

      if (headerBlock.getTableSize() != decoder.size()) {
        throw new AssertionError(
            "\nEXPECTED TABLE SIZE: " + headerBlock.getTableSize() +
            "\n ACTUAL TABLE SIZE : " + decoder.size());
      }
    }
  }

  /** Builds an encoder from this case's settings; -1 table size means "unbounded". */
  private Encoder createEncoder() {
    int maxHeaderTableSize = this.maxHeaderTableSize;
    if (maxHeaderTableSize == -1) {
      maxHeaderTableSize = Integer.MAX_VALUE;
    }
    return new Encoder(maxHeaderTableSize, useIndexing, forceHuffmanOn, forceHuffmanOff);
  }

  /** Builds a decoder from this case's settings; -1 table size means "unbounded". */
  private Decoder createDecoder() {
    int maxHeaderTableSize = this.maxHeaderTableSize;
    if (maxHeaderTableSize == -1) {
      maxHeaderTableSize = Integer.MAX_VALUE;
    }
    // 8192 is the maximum header size accepted by the decoder in these tests.
    return new Decoder(8192, maxHeaderTableSize);
  }

  /**
   * Encodes one header list into a byte array, optionally emitting a
   * dynamic-table-size-update first when the block overrides the table size.
   */
  private static byte[] encode(Encoder encoder, List<HeaderField> headers, int maxHeaderTableSize,
      boolean sensitive) throws IOException {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    if (maxHeaderTableSize != -1) {
      encoder.setMaxHeaderTableSize(baos, maxHeaderTableSize);
    }
    for (HeaderField e: headers) {
      encoder.encodeHeader(baos, e.name, e.value, sensitive);
    }
    return baos.toByteArray();
  }

  /** Decodes one encoded block, collecting emitted headers via a listener. */
  private static List<HeaderField> decode(Decoder decoder, byte[] expected) throws IOException {
    List<HeaderField> headers = new ArrayList<HeaderField>();
    TestHeaderListener listener = new TestHeaderListener(headers);
    decoder.decode(new ByteArrayInputStream(expected), listener);
    decoder.endHeaderBlock();
    return headers;
  }

  /** Concatenates the fixture's multi-line hex string fragments. */
  private static String concat(List<String> l) {
    StringBuilder ret = new StringBuilder();
    for (String s : l) {
      ret.append(s);
    }
    return ret.toString();
  }

  /** One step of a test case: headers, expected bytes, and expected table state. */
  static class HeaderBlock {
    // Optional per-block override of the encoder's max table size; -1 = no override.
    private int maxHeaderTableSize = -1;
    // Populated by load() from the hex string in 'encoded'.
    private byte[] encodedBytes;
    // Hex string, possibly split across multiple JSON array elements with spaces.
    private List<String> encoded;
    private List<HeaderField> headers;
    private List<HeaderField> dynamicTable;
    private int tableSize;

    private int getMaxHeaderTableSize() {
      return maxHeaderTableSize;
    }

    public String getEncodedStr() {
      // Strip the spacing used for readability in the fixtures.
      return concat(encoded).replaceAll(" ", "");
    }

    public List<HeaderField> getHeaders() {
      return headers;
    }

    public List<HeaderField> getDynamicTable() {
      return dynamicTable;
    }

    public int getTableSize() {
      return tableSize;
    }
  }

  /**
   * Deserializes a header field written as a single-entry JSON object,
   * e.g. {@code {":method": "GET"}} -&gt; name ":method", value "GET".
   */
  static class HeaderFieldDeserializer implements JsonDeserializer<HeaderField> {

    @Override
    public HeaderField deserialize(JsonElement json, Type typeOfT,
        JsonDeserializationContext context) throws JsonParseException {
      JsonObject jsonObject = json.getAsJsonObject();
      Set<Map.Entry<String, JsonElement>> entrySet = jsonObject.entrySet();
      if (entrySet.size() != 1) {
        throw new JsonParseException("JSON Object has multiple entries: " + entrySet);
      }
      Map.Entry<String, JsonElement> entry = entrySet.iterator().next();
      String name = entry.getKey();
      String value = entry.getValue().getAsString();
      return new HeaderField(name, value);
    }
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.nifi.web.dao.impl;

import org.apache.nifi.bundle.BundleCoordinate;
import org.apache.nifi.components.state.Scope;
import org.apache.nifi.components.state.StateMap;
import org.apache.nifi.controller.ConfiguredComponent;
import org.apache.nifi.controller.FlowController;
import org.apache.nifi.controller.ScheduledState;
import org.apache.nifi.controller.exception.ControllerServiceInstantiationException;
import org.apache.nifi.controller.exception.ValidationException;
import org.apache.nifi.controller.service.ControllerServiceNode;
import org.apache.nifi.controller.service.ControllerServiceProvider;
import org.apache.nifi.controller.service.ControllerServiceState;
import org.apache.nifi.groups.ProcessGroup;
import org.apache.nifi.util.BundleUtils;
import org.apache.nifi.web.NiFiCoreException;
import org.apache.nifi.web.ResourceNotFoundException;
import org.apache.nifi.web.api.dto.BundleDTO;
import org.apache.nifi.web.api.dto.ControllerServiceDTO;
import org.apache.nifi.web.dao.ComponentStateDAO;
import org.apache.nifi.web.dao.ControllerServiceDAO;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.apache.nifi.controller.FlowController.ROOT_GROUP_ID_ALIAS;

/**
 * DAO for creating, reading, updating, deleting, and verifying controller
 * services. Services may live either at the controller (root) level or inside
 * a process group; {@code null} group id selects the root-level services.
 *
 * <p>Collaborators are injected via the setters at the bottom of the class
 * (presumably by the Spring context — TODO confirm against the bean wiring).
 */
public class StandardControllerServiceDAO extends ComponentDAO implements ControllerServiceDAO {

    private ControllerServiceProvider serviceProvider;
    private ComponentStateDAO componentStateDAO;
    private FlowController flowController;

    /**
     * Looks up the controller service node by id, failing with a 404-style
     * exception when it does not exist.
     */
    private ControllerServiceNode locateControllerService(final String controllerServiceId) {
        // get the controller service
        final ControllerServiceNode controllerService = serviceProvider.getControllerServiceNode(controllerServiceId);

        // ensure the controller service exists
        if (controllerService == null) {
            throw new ResourceNotFoundException(String.format("Unable to locate controller service with id '%s'.", controllerServiceId));
        }

        return controllerService;
    }

    @Override
    public void verifyCreate(final ControllerServiceDTO controllerServiceDTO) {
        // delegates the type/bundle existence check to the base class
        verifyCreate(controllerServiceDTO.getType(), controllerServiceDTO.getBundle());
    }

    /**
     * Creates a controller service from the DTO, configures it, and attaches
     * it either to the controller root or to the specified process group.
     *
     * @throws IllegalArgumentException if no type is specified
     * @throws ResourceNotFoundException if the target group cannot be found
     * @throws NiFiCoreException if the service implementation cannot be instantiated
     */
    @Override
    public ControllerServiceNode createControllerService(final ControllerServiceDTO controllerServiceDTO) {
        // ensure the type is specified
        if (controllerServiceDTO.getType() == null) {
            throw new IllegalArgumentException("The controller service type must be specified.");
        }

        try {
            // create the controller service
            final ControllerServiceNode controllerService = serviceProvider.createControllerService(
                    controllerServiceDTO.getType(), controllerServiceDTO.getId(),
                    BundleUtils.getBundle(controllerServiceDTO.getType(), controllerServiceDTO.getBundle()), true);

            // ensure we can perform the update
            verifyUpdate(controllerService, controllerServiceDTO);

            // perform the update
            configureControllerService(controllerService, controllerServiceDTO);

            // null parent group means the service is a root-level (controller) service
            final String groupId = controllerServiceDTO.getParentGroupId();
            if (groupId == null) {
                flowController.addRootControllerService(controllerService);
            } else {
                final ProcessGroup group;
                if (groupId.equals(ROOT_GROUP_ID_ALIAS)) {
                    // the alias resolves to whatever the current root group id is
                    group = flowController.getGroup(flowController.getRootGroupId());
                } else {
                    group = flowController.getGroup(flowController.getRootGroupId()).findProcessGroup(groupId);
                }

                if (group == null) {
                    throw new ResourceNotFoundException(String.format("Unable to locate group with id '%s'.", groupId));
                }

                group.addControllerService(controllerService);
            }

            return controllerService;
        } catch (final ControllerServiceInstantiationException csie) {
            throw new NiFiCoreException(csie.getMessage(), csie);
        }
    }

    @Override
    public ControllerServiceNode getControllerService(final String controllerServiceId) {
        return locateControllerService(controllerServiceId);
    }

    @Override
    public boolean hasControllerService(final String controllerServiceId) {
        return serviceProvider.getControllerServiceNode(controllerServiceId) != null;
    }

    /**
     * Returns the services of the given group (including, per the {@code true}
     * flag, services visible from ancestor groups — TODO confirm the flag's
     * exact semantics against ProcessGroup#getControllerServices), or the
     * root-level services when {@code groupId} is null.
     */
    @Override
    public Set<ControllerServiceNode> getControllerServices(final String groupId) {
        if (groupId == null) {
            return flowController.getRootControllerServices();
        } else {
            final String searchId = groupId.equals(ROOT_GROUP_ID_ALIAS) ? flowController.getRootGroupId() : groupId;
            final ProcessGroup procGroup = flowController.getGroup(flowController.getRootGroupId()).findProcessGroup(searchId);
            if (procGroup == null) {
                throw new ResourceNotFoundException("Could not find Process Group with ID " + groupId);
            }

            return procGroup.getControllerServices(true);
        }
    }

    /**
     * Applies the DTO to an existing service: configuration, optional bundle
     * change, then an optional ENABLED/DISABLED state transition.
     */
    @Override
    public ControllerServiceNode updateControllerService(final ControllerServiceDTO controllerServiceDTO) {
        // get the controller service
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceDTO.getId());

        // ensure we can perform the update
        verifyUpdate(controllerService, controllerServiceDTO);

        // perform the update
        configureControllerService(controllerService, controllerServiceDTO);

        // attempt to change the underlying controller service if an updated bundle is specified
        updateBundle(controllerService, controllerServiceDTO);

        // enable or disable as appropriate
        if (isNotNull(controllerServiceDTO.getState())) {
            final ControllerServiceState purposedControllerServiceState = ControllerServiceState.valueOf(controllerServiceDTO.getState());

            // only attempt an action if it is changing
            if (!purposedControllerServiceState.equals(controllerService.getState())) {
                if (ControllerServiceState.ENABLED.equals(purposedControllerServiceState)) {
                    serviceProvider.enableControllerService(controllerService);
                } else if (ControllerServiceState.DISABLED.equals(purposedControllerServiceState)) {
                    serviceProvider.disableControllerService(controllerService);
                }
            }
        }

        return controllerService;
    }

    /**
     * Swaps the service implementation to the bundle named in the DTO, if any.
     * The coordinate is resolved against the service's canonical class name.
     */
    private void updateBundle(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
        BundleDTO bundleDTO = controllerServiceDTO.getBundle();
        if (bundleDTO != null) {
            final BundleCoordinate incomingCoordinate = BundleUtils.getBundle(controllerService.getCanonicalClassName(), bundleDTO);
            try {
                flowController.changeControllerServiceType(controllerService, controllerService.getCanonicalClassName(), incomingCoordinate);
            } catch (ControllerServiceInstantiationException e) {
                // wrap with full from/to coordinates for a meaningful client-facing message
                throw new NiFiCoreException(String.format("Unable to update controller service %s from %s to %s due to: %s",
                        controllerServiceDTO.getId(), controllerService.getBundleCoordinate().getCoordinate(), incomingCoordinate.getCoordinate(), e.getMessage()), e);
            }
        }
    }

    /**
     * Enables/disables referencing services or schedules/unschedules referencing
     * schedulable components. Exactly one of the two state arguments is expected
     * to be non-null; both null yields an empty result.
     *
     * @return the set of affected components
     */
    @Override
    public Set<ConfiguredComponent> updateControllerServiceReferencingComponents(
            final String controllerServiceId, final ScheduledState scheduledState, final ControllerServiceState controllerServiceState) {
        // get the controller service
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);

        // this request is either acting upon referencing services or schedulable components
        if (controllerServiceState != null) {
            if (ControllerServiceState.ENABLED.equals(controllerServiceState)) {
                return serviceProvider.enableReferencingServices(controllerService);
            } else {
                return serviceProvider.disableReferencingServices(controllerService);
            }
        } else if (scheduledState != null) {
            if (ScheduledState.RUNNING.equals(scheduledState)) {
                return serviceProvider.scheduleReferencingComponents(controllerService);
            } else {
                return serviceProvider.unscheduleReferencingComponents(controllerService);
            }
        }

        return Collections.emptySet();
    }

    // NOTE(review): currently performs no validation and always returns an
    // empty list — kept as an extension point; verifyUpdate checks it anyway.
    private List<String> validateProposedConfiguration(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
        final List<String> validationErrors = new ArrayList<>();
        return validationErrors;
    }

    @Override
    public void verifyDelete(final String controllerServiceId) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);
        controllerService.verifyCanDelete();
    }

    @Override
    public void verifyUpdate(final ControllerServiceDTO controllerServiceDTO) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceDTO.getId());
        verifyUpdate(controllerService, controllerServiceDTO);
    }

    /**
     * Verifies the requested referencing-component action is currently allowed,
     * without performing it. Mirrors updateControllerServiceReferencingComponents.
     */
    @Override
    public void verifyUpdateReferencingComponents(final String controllerServiceId, final ScheduledState scheduledState, final ControllerServiceState controllerServiceState) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);

        if (controllerServiceState != null) {
            if (ControllerServiceState.ENABLED.equals(controllerServiceState)) {
                serviceProvider.verifyCanEnableReferencingServices(controllerService);
            } else {
                serviceProvider.verifyCanDisableReferencingServices(controllerService);
            }
        } else if (scheduledState != null) {
            if (ScheduledState.RUNNING.equals(scheduledState)) {
                serviceProvider.verifyCanScheduleReferencingComponents(controllerService);
            } else {
                serviceProvider.verifyCanStopReferencingComponents(controllerService);
            }
        }
    }

    /**
     * Verifies a proposed update is legal: the requested state must be exactly
     * ENABLED or DISABLED, the transition must be permitted, any configuration
     * change must validate, and any bundle change must keep the same group/id.
     *
     * @throws IllegalArgumentException for an invalid state value
     * @throws ValidationException if the proposed configuration is invalid
     */
    private void verifyUpdate(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
        // validate the new controller service state if appropriate
        if (isNotNull(controllerServiceDTO.getState())) {
            try {
                // attempt to parse the service state
                final ControllerServiceState purposedControllerServiceState = ControllerServiceState.valueOf(controllerServiceDTO.getState());

                // ensure the state is valid; the deliberately bare IAE below is
                // caught by this method's own catch block to produce the
                // user-facing "must be one of [ENABLED, DISABLED]" message
                if (ControllerServiceState.ENABLING.equals(purposedControllerServiceState) || ControllerServiceState.DISABLING.equals(purposedControllerServiceState)) {
                    throw new IllegalArgumentException();
                }

                // only attempt an action if it is changing
                if (!purposedControllerServiceState.equals(controllerService.getState())) {
                    if (ControllerServiceState.ENABLED.equals(purposedControllerServiceState)) {
                        controllerService.verifyCanEnable();
                    } else if (ControllerServiceState.DISABLED.equals(purposedControllerServiceState)) {
                        controllerService.verifyCanDisable();
                    }
                }
            } catch (final IllegalArgumentException iae) {
                // covers both valueOf() failures and the ENABLING/DISABLING rejection above
                throw new IllegalArgumentException("Controller Service state: Value must be one of [ENABLED, DISABLED]");
            }
        }

        boolean modificationRequest = false;

        // validate the proposed configuration only when something configurable changed
        if (isAnyNotNull(controllerServiceDTO.getName(),
                controllerServiceDTO.getAnnotationData(),
                controllerServiceDTO.getComments(),
                controllerServiceDTO.getProperties(),
                controllerServiceDTO.getBundle())) {
            modificationRequest = true;

            // validate the request
            final List<String> requestValidation = validateProposedConfiguration(controllerService, controllerServiceDTO);

            // ensure there was no validation errors
            if (!requestValidation.isEmpty()) {
                throw new ValidationException(requestValidation);
            }
        }

        final BundleDTO bundleDTO = controllerServiceDTO.getBundle();
        if (bundleDTO != null) {
            // ensures all nodes in a cluster have the bundle, throws exception if bundle not found for the given type
            final BundleCoordinate bundleCoordinate = BundleUtils.getBundle(controllerService.getCanonicalClassName(), bundleDTO);
            // ensure we are only changing to a bundle with the same group and id, but different version
            controllerService.verifyCanUpdateBundle(bundleCoordinate);
        }

        if (modificationRequest) {
            controllerService.verifyCanUpdate();
        }
    }

    /** Copies the non-null DTO fields (name, annotation data, comments, properties) onto the node. */
    private void configureControllerService(final ControllerServiceNode controllerService, final ControllerServiceDTO controllerServiceDTO) {
        final String name = controllerServiceDTO.getName();
        final String annotationData = controllerServiceDTO.getAnnotationData();
        final String comments = controllerServiceDTO.getComments();
        final Map<String, String> properties = controllerServiceDTO.getProperties();

        if (isNotNull(name)) {
            controllerService.setName(name);
        }
        if (isNotNull(annotationData)) {
            controllerService.setAnnotationData(annotationData);
        }
        if (isNotNull(comments)) {
            controllerService.setComments(comments);
        }
        if (isNotNull(properties)) {
            controllerService.setProperties(properties);
        }
    }

    @Override
    public void deleteControllerService(final String controllerServiceId) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);
        serviceProvider.removeControllerService(controllerService);
    }

    @Override
    public StateMap getState(final String controllerServiceId, final Scope scope) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);
        return componentStateDAO.getState(controllerService, scope);
    }

    @Override
    public void verifyClearState(final String controllerServiceId) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);
        controllerService.verifyCanClearState();
    }

    @Override
    public void clearState(final String controllerServiceId) {
        final ControllerServiceNode controllerService = locateControllerService(controllerServiceId);
        componentStateDAO.clearState(controllerService);
    }

    /* setters */
    public void setServiceProvider(final ControllerServiceProvider serviceProvider) {
        this.serviceProvider = serviceProvider;
    }

    public void setComponentStateDAO(final ComponentStateDAO componentStateDAO) {
        this.componentStateDAO = componentStateDAO;
    }

    public void setFlowController(final FlowController flowController) {
        this.flowController = flowController;
    }
}
/*
 * Copyright 2015, Yahoo Inc.
 * Copyrights licensed under the Apache 2.0 License.
 * See the accompanying LICENSE file for terms.
 */
package com.yahoo.squidb.data;

import com.yahoo.squidb.reactive.ReactiveSquidDatabase;
import com.yahoo.squidb.sql.Query;
import com.yahoo.squidb.sql.Table;
import com.yahoo.squidb.test.Employee;
import com.yahoo.squidb.test.SquidTestCase;
import com.yahoo.squidb.test.TestDatabase;
import com.yahoo.squidb.test.TestModel;
import com.yahoo.squidb.test.TestReactiveDatabase;

import java.util.Arrays;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;

import rx.Observable;
import rx.Subscription;
import rx.functions.Action1;

/**
 * Tests for {@link ReactiveSquidDatabase}'s table-observation API: that
 * observables emit on writes to observed tables, respect transactions
 * (one emission per successful transaction, none on failure), ignore
 * unobserved tables, and stop after unsubscription.
 */
public class ReactiveSquidDatabaseTest extends SquidTestCase {

    private ReactiveSquidDatabase database;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        setupDatabase();
    }

    /**
     * Called during {@link #setUp()} to initialize the database. The base implementation creates a new
     * {@link TestDatabase}. Subclasses that want to insert test data should override and call super, then perform its
     * operations.
     */
    protected void setupDatabase() {
        database = new TestReactiveDatabase(getContext());
        // start each test from an empty database
        database.clear();
    }

    @Override
    protected void tearDown() throws Exception {
        super.tearDown();
        tearDownDatabase();
    }

    /**
     * Called during {@link #tearDown()} to clean up any databases. The base implementation tries to close the database
     * created in {@link #setupDatabase()}.
     */
    protected void tearDownDatabase() {
        if (database != null) {
            database.close();
        }
    }

    // With the initial-subscribe flag set, the observable emits immediately on
    // subscribe, before any write occurs.
    public void testObservableWithInitialSubscribeFlagEmitsOnFirstSubscribe() {
        final AtomicBoolean called = new AtomicBoolean(false);
        Observable<Table> observable = database.observeTable(TestModel.TABLE, true);
        observable.subscribe(new Action1<Table>() {
            @Override
            public void call(Table table) {
                called.set(true);
            }
        });
        assertTrue(called.get());
    }

    // The emitted object is the observed table itself.
    public void testSimpleObservableEmitsTable() {
        final AtomicBoolean tablesMatch = new AtomicBoolean(false);
        Observable<Table> observable = database.observeTable(TestModel.TABLE, true);
        observable.subscribe(new Action1<Table>() {
            @Override
            public void call(Table table) {
                tablesMatch.set(TestModel.TABLE.equals(table));
            }
        });
        assertTrue(tablesMatch.get());
    }

    // observeTableAndEmit passes through the caller-supplied object (same
    // reference, checked with ==) instead of emitting the table.
    public void testObservableEmitsCustomObject() {
        final AtomicBoolean objectsMatch = new AtomicBoolean(false);
        final Query originalQuery = Query.select().from(TestModel.TABLE);
        Observable<Query> observable = database.observeTableAndEmit(TestModel.TABLE, originalQuery, true);
        observable.subscribe(new Action1<Query>() {
            @Override
            public void call(Query query) {
                objectsMatch.set(originalQuery == query);
            }
        });
        assertTrue(objectsMatch.get());
    }

    public void testObservableEmitsOncePerTransaction() {
        testMultipleStatements(true, true);
    }

    public void testObservableEmitsNothingAfterFailedTransaction() {
        testMultipleStatements(true, false);
    }

    public void testObservableCalledForEachChange() {
        testMultipleStatements(false, false);
    }

    /**
     * Persists two rows and asserts the emission count: 2 without a transaction
     * (one per write), 1 for a successful transaction, 0 for a failed one.
     */
    private void testMultipleStatements(boolean useTransaction, boolean successfulTransaction) {
        final AtomicInteger callCount = new AtomicInteger();
        Observable<Table> observable = database.observeTable(TestModel.TABLE);
        observable.subscribe(new Action1<Table>() {
            @Override
            public void call(Table table) {
                callCount.incrementAndGet();
            }
        });
        // no initial-subscribe flag, so nothing is emitted until a write happens
        assertEquals(0, callCount.get());
        if (useTransaction) {
            database.beginTransaction();
        }
        try {
            database.persist(new TestModel().setFirstName("A").setLastName("B")
                    .setBirthday(System.currentTimeMillis() - 2));
            database.persist(new TestModel().setFirstName("C").setLastName("D")
                    .setBirthday(System.currentTimeMillis() - 1));
            if (useTransaction && successfulTransaction) {
                database.setTransactionSuccessful();
            }
        } finally {
            if (useTransaction) {
                database.endTransaction();
            }
        }

        int expectedCount;
        if (useTransaction) {
            expectedCount = successfulTransaction ? 1 : 0;
        } else {
            expectedCount = 2;
        }
        assertEquals(expectedCount, callCount.get());
    }

    public void testObserveMultipleTables() {
        testObserveMultipleTables(true);
        testObserveMultipleTables(false);
    }

    /**
     * Observes two tables and writes one row into each: a transaction collapses
     * the two notifications into one emission, otherwise each write emits.
     */
    private void testObserveMultipleTables(boolean useTransaction) {
        AtomicInteger callCount = new AtomicInteger();
        Observable<AtomicInteger> observable = database.observeTablesAndEmit(
                Arrays.asList(TestModel.TABLE, Employee.TABLE), callCount);
        observable.subscribe(new Action1<AtomicInteger>() {
            @Override
            public void call(AtomicInteger callCount) {
                // parameter is the same counter passed to observeTablesAndEmit above
                callCount.incrementAndGet();
            }
        });
        assertEquals(0, callCount.get());
        if (useTransaction) {
            database.beginTransaction();
        }
        try {
            database.persist(
                    new TestModel().setFirstName("A").setLastName("B").setBirthday(System.currentTimeMillis()));
            database.persist(new Employee().setName("ABC").setIsHappy(true).setManagerId(0L));
            if (useTransaction) {
                database.setTransactionSuccessful();
            }
        } finally {
            if (useTransaction) {
                database.endTransaction();
            }
        }
        assertEquals(useTransaction ? 1 : 2, callCount.get());
        // this helper runs twice per test; reset the table for the second pass
        database.deleteAll(TestModel.class);
    }

    // Writes to a table the observable is not watching must not trigger it.
    public void testObservableNotCalledForUnobservedTable() {
        AtomicInteger callCount = new AtomicInteger();
        Observable<AtomicInteger> observable = database.observeTableAndEmit(TestModel.TABLE, callCount);
        observable.subscribe(new Action1<AtomicInteger>() {
            @Override
            public void call(AtomicInteger callCount) {
                callCount.incrementAndGet();
            }
        });
        assertEquals(0, callCount.get());
        database.persist(new Employee().setName("ABC").setIsHappy(true).setManagerId(0L));
        assertEquals(0, callCount.get());
    }

    // After unsubscribe(), further writes no longer increment the counter.
    public void testUnsubscribeStopsNotifications() {
        AtomicInteger callCount = new AtomicInteger();
        Observable<AtomicInteger> observable = database.observeTableAndEmit(Employee.TABLE, callCount);
        Subscription s = observable.subscribe(new Action1<AtomicInteger>() {
            @Override
            public void call(AtomicInteger callCount) {
                callCount.incrementAndGet();
            }
        });
        assertEquals(0, callCount.get());
        database.persist(new Employee().setName("ABC").setIsHappy(true).setManagerId(0L));
        assertEquals(1, callCount.get());
        s.unsubscribe();
        database.persist(new Employee().setName("DEF").setIsHappy(true).setManagerId(0L));
        assertEquals(1, callCount.get());
    }

    // Subscribing mid-transaction: no emissions until the transaction commits,
    // then exactly one.
    public void testSubscribeDuringTransaction() {
        AtomicInteger callCount = new AtomicInteger();
        Observable<AtomicInteger> observable = database.observeTableAndEmit(Employee.TABLE, callCount);
        database.beginTransaction();
        try {
            database.persist(new Employee().setName("ABC").setIsHappy(true).setManagerId(0L));
            observable.subscribe(new Action1<AtomicInteger>() {
                @Override
                public void call(AtomicInteger atomicInteger) {
                    atomicInteger.incrementAndGet();
                }
            });
            assertEquals(0, callCount.get());
            database.persist(new Employee().setName("DEF").setIsHappy(true).setManagerId(0L));
            assertEquals(0, callCount.get());
            database.setTransactionSuccessful();
        } finally {
            database.endTransaction();
        }
        assertEquals(1, callCount.get());
    }
}
/**
 * Copyright 2011-2017 Asakusa Framework Team.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.asakusafw.compiler.flow.stage;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.io.NullWritable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.asakusafw.compiler.common.NameGenerator;
import com.asakusafw.compiler.common.Naming;
import com.asakusafw.compiler.common.Precondition;
import com.asakusafw.compiler.flow.FlowCompilingEnvironment;
import com.asakusafw.runtime.flow.Rendezvous;
import com.asakusafw.runtime.flow.SegmentedReducer;
import com.asakusafw.runtime.flow.SegmentedWritable;
import com.asakusafw.runtime.trace.TraceLocation;
import com.asakusafw.utils.java.model.syntax.CompilationUnit;
import com.asakusafw.utils.java.model.syntax.Expression;
import com.asakusafw.utils.java.model.syntax.Javadoc;
import com.asakusafw.utils.java.model.syntax.MethodDeclaration;
import com.asakusafw.utils.java.model.syntax.ModelFactory;
import com.asakusafw.utils.java.model.syntax.Name;
import com.asakusafw.utils.java.model.syntax.QualifiedName;
import com.asakusafw.utils.java.model.syntax.SimpleName;
import com.asakusafw.utils.java.model.syntax.Statement;
import com.asakusafw.utils.java.model.syntax.Type;
import com.asakusafw.utils.java.model.syntax.TypeBodyDeclaration;
import com.asakusafw.utils.java.model.syntax.TypeDeclaration;
import com.asakusafw.utils.java.model.util.AttributeBuilder;
import com.asakusafw.utils.java.model.util.ExpressionBuilder;
import com.asakusafw.utils.java.model.util.ImportBuilder;
import com.asakusafw.utils.java.model.util.JavadocBuilder;
import com.asakusafw.utils.java.model.util.Models;
import com.asakusafw.utils.java.model.util.TypeBuilder;
import com.asakusafw.vocabulary.flow.graph.FlowElement;

/**
 * An emitter for emitting Reducer classes.
 *
 * <p>Given a compiled {@link StageModel}, this generates a Hadoop reducer that
 * extends {@code SegmentedReducer} and dispatches each shuffle segment to the
 * {@code Rendezvous} fragment handling its originating flow element.
 */
public class ReducerEmitter {

    static final Logger LOG = LoggerFactory.getLogger(ReducerEmitter.class);

    private final FlowCompilingEnvironment environment;

    /**
     * Creates a new instance.
     * @param environment the current environment
     * @throws IllegalArgumentException if the parameter is {@code null}
     */
    public ReducerEmitter(FlowCompilingEnvironment environment) {
        Precondition.checkMustNotBeNull(environment, "environment"); //$NON-NLS-1$
        this.environment = environment;
    }

    /**
     * Creates a new Reducer class, and returns the qualified name of its class.
     * @param model the target stage
     * @return qualified name of the created class
     * @throws IOException if an error occurred while creating the class
     * @throws IllegalArgumentException if the parameter is {@code null}
     */
    public CompiledType emit(StageModel model) throws IOException {
        Precondition.checkMustNotBeNull(model, "model"); //$NON-NLS-1$
        LOG.debug("start generating reducer class: {}", model); //$NON-NLS-1$
        Engine engine = new Engine(environment, model);
        CompilationUnit source = engine.generate();
        environment.emit(source);
        // reconstruct the generated class's fully-qualified name from the
        // emitted compilation unit (package + first top-level type)
        Name packageName = source.getPackageDeclaration().getName();
        SimpleName simpleName = source.getTypeDeclarations().get(0).getName();
        QualifiedName name = environment
            .getModelFactory()
            .newQualifiedName(packageName, simpleName);
        LOG.debug("finish generating reducer class: {} ({})", model, name); //$NON-NLS-1$
        return new CompiledType(name);
    }

    /**
     * Builds the reducer's Java AST for a single stage. One Engine instance
     * corresponds to one generated compilation unit.
     */
    private static class Engine {

        private final FlowCompilingEnvironment environment;

        private final StageModel model;

        private final ShuffleModel shuffle;

        private final ModelFactory factory;

        private final ImportBuilder importer;

        private final NameGenerator names;

        // manages the per-fragment fields and the Rendezvous wiring for the reduce units
        private final FragmentFlow fragments;

        // the simple name used for the Hadoop "Context" parameter in setup/cleanup
        private final SimpleName context;

        Engine(FlowCompilingEnvironment environment, StageModel model) {
            assert environment != null;
            assert model != null;
            this.environment = environment;
            this.model = model;
            this.shuffle = model.getShuffleModel();
            this.factory = environment.getModelFactory();
            Name packageName = environment.getStagePackageName(
                    model.getStageBlock().getStageNumber());
            this.importer = new ImportBuilder(
                    factory,
                    factory.newPackageDeclaration(packageName),
                    ImportBuilder.Strategy.TOP_LEVEL);
            this.names = new NameGenerator(factory);
            this.fragments = new FragmentFlow(
                    environment,
                    importer,
                    names,
                    model,
                    model.getReduceUnits());
            this.context = names.create("context"); //$NON-NLS-1$
        }

        /** Assembles the full compilation unit: package, imports, and the reducer type. */
        public CompilationUnit generate() {
            TypeDeclaration type = createType();
            return factory.newCompilationUnit(
                    importer.getPackageDeclaration(),
                    importer.toImportDeclarations(),
                    Collections.singletonList(type));
        }

        /**
         * Builds the reducer class declaration: a public final class extending
         * {@code SegmentedReducer<K, V, NullWritable, NullWritable>} with
         * setup/cleanup overrides and the rendezvous dispatch method.
         */
        private TypeDeclaration createType() {
            SimpleName name = factory.newSimpleName(Naming.getReduceClass());
            importer.resolvePackageMember(name);
            List<TypeBodyDeclaration> members = new ArrayList<>();
            members.addAll(fragments.createFields());
            members.add(createSetup());
            members.add(createCleanup());
            members.add(createGetRendezvous());
            return factory.newClassDeclaration(
                    createJavadoc(),
                    new AttributeBuilder(factory)
                        .annotation(t(TraceLocation.class), createTraceLocationElements())
                        .annotation(t(SuppressWarnings.class), v("deprecation")) //$NON-NLS-1$
                        .Public()
                        .Final()
                        .toAttributes(),
                    name,
                    Collections.emptyList(),
                    importer.resolve(factory.newParameterizedType(
                            Models.toType(factory, SegmentedReducer.class),
                            Arrays.asList(
                                    fragments.getShuffleKeyType(),
                                    fragments.getShuffleValueType(),
                                    t(NullWritable.class),
                                    t(NullWritable.class)))),
                    Collections.emptyList(),
                    members);
        }

        /**
         * Builds the {@code @TraceLocation} annotation members identifying this
         * reducer (batch/flow/stage ids; "r" marks the reduce side of the stage).
         */
        private Map<String, Expression> createTraceLocationElements() {
            Map<String, Expression> results = new LinkedHashMap<>();
            results.put("batchId", Models.toLiteral(factory, environment.getBatchId())); //$NON-NLS-1$
            results.put("flowId", Models.toLiteral(factory, environment.getFlowId())); //$NON-NLS-1$
            results.put("stageId", //$NON-NLS-1$
                    Models.toLiteral(factory, Naming.getStageName(model.getStageBlock().getStageNumber())));
            results.put("stageUnitId", Models.toLiteral(factory, "r")); //$NON-NLS-1$ //$NON-NLS-2$
            return results;
        }

        /** Builds {@code @Override public void setup(Context)} delegating to the fragment setup code. */
        private MethodDeclaration createSetup() {
            return factory.newMethodDeclaration(
                    null,
                    new AttributeBuilder(factory)
                        .annotation(t(Override.class))
                        .Public()
                        .toAttributes(),
                    Collections.emptyList(),
                    t(void.class),
                    factory.newSimpleName("setup"), //$NON-NLS-1$
                    Collections.singletonList(factory.newFormalParameterDeclaration(
                            // "Context" resolves to the Reducer.Context nested type of the generated superclass
                            factory.newNamedType(factory.newSimpleName("Context")), //$NON-NLS-1$
                            context)),
                    0,
                    Arrays.asList(t(IOException.class), t(InterruptedException.class)),
                    factory.newBlock(fragments.createSetup(context)));
        }

        /** Builds {@code @Override public void cleanup(Context)} delegating to the fragment cleanup code. */
        private MethodDeclaration createCleanup() {
            return factory.newMethodDeclaration(
                    null,
                    new AttributeBuilder(factory)
                        .annotation(t(Override.class))
                        .Public()
                        .toAttributes(),
                    Collections.emptyList(),
                    t(void.class),
                    factory.newSimpleName("cleanup"), //$NON-NLS-1$
                    Collections.singletonList(factory.newFormalParameterDeclaration(
                            factory.newNamedType(factory.newSimpleName("Context")), //$NON-NLS-1$
                            context)),
                    0,
                    Arrays.asList(t(IOException.class), t(InterruptedException.class)),
                    factory.newBlock(fragments.createCleanup(context)));
        }

        /**
         * Builds the rendezvous dispatch method: a switch over the shuffle key's
         * segment (port) id returning the Rendezvous fragment for the flow element
         * that owns each segment group; unknown ids throw AssertionError.
         */
        private MethodDeclaration createGetRendezvous() {
            List<Statement> cases = new ArrayList<>();
            for (List<ShuffleModel.Segment> group : ShuffleEmiterUtil.groupByElement(shuffle)) {
                // all segments of the same flow element share one return statement (case fall-through)
                for (ShuffleModel.Segment segment : group) {
                    cases.add(factory.newSwitchCaseLabel(v(segment.getPortId())));
                }
                FlowElement element = group.get(0).getPort().getOwner();
                cases.add(new ExpressionBuilder(factory, fragments.getRendezvous(element))
                    .toReturnStatement());
            }
            cases.add(factory.newSwitchDefaultLabel());
            cases.add(new TypeBuilder(factory, t(AssertionError.class))
                .newObject()
                .toThrowStatement());

            SimpleName argument = names.create("nextKey"); //$NON-NLS-1$

            List<Statement> statements = new ArrayList<>();
            statements.add(factory.newSwitchStatement(
                    new ExpressionBuilder(factory, argument)
                        .method(SegmentedWritable.ID_GETTER)
                        .toExpression(),
                    cases));

            return factory.newMethodDeclaration(
                    null,
                    new AttributeBuilder(factory)
                        .annotation(t(Override.class))
                        .Protected()
                        .toAttributes(),
                    importer.resolve(factory.newParameterizedType(
                            Models.toType(factory, Rendezvous.class),
                            Arrays.asList(fragments.getShuffleValueType()))),
                    factory.newSimpleName(SegmentedReducer.GET_RENDEZVOUS),
                    Collections.singletonList(factory.newFormalParameterDeclaration(
                            fragments.getShuffleKeyType(),
                            argument)),
                    statements);
        }

        /** Builds the class javadoc for the generated reducer. */
        private Javadoc createJavadoc() {
            return new JavadocBuilder(factory)
                .inline("A reducer class for processing stage {0}.", //$NON-NLS-1$
                        d -> d.code(shuffle.getStageBlock().getStageNumber()))
                .toJavadoc();
        }

        // shorthand: resolve a reflective type into an importable AST type
        private Type t(java.lang.reflect.Type type) {
            return importer.resolve(Models.toType(factory, type));
        }

        // shorthand: literal expression for a constant value
        private Expression v(Object value) {
            return Models.toLiteral(factory, value);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.zeppelin.python; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteException; import org.apache.commons.exec.ExecuteResultHandler; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.LogOutputStream; import org.apache.commons.exec.PumpStreamHandler; import org.apache.commons.exec.environment.EnvironmentUtils; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.commons.lang.StringUtils; import org.apache.zeppelin.interpreter.Interpreter; import org.apache.zeppelin.interpreter.InterpreterContext; import org.apache.zeppelin.interpreter.InterpreterResult; import org.apache.zeppelin.interpreter.remote.RemoteInterpreterUtils; import org.apache.zeppelin.interpreter.thrift.InterpreterCompletion; import org.apache.zeppelin.interpreter.util.InterpreterOutputStream; import org.apache.zeppelin.python.proto.CancelRequest; import org.apache.zeppelin.python.proto.CompletionRequest; import org.apache.zeppelin.python.proto.CompletionResponse; import org.apache.zeppelin.python.proto.ExecuteRequest; import 
org.apache.zeppelin.python.proto.ExecuteResponse; import org.apache.zeppelin.python.proto.ExecuteStatus; import org.apache.zeppelin.python.proto.IPythonStatus; import org.apache.zeppelin.python.proto.StatusRequest; import org.apache.zeppelin.python.proto.StatusResponse; import org.apache.zeppelin.python.proto.StopRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import py4j.GatewayServer; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Properties; /** * IPython Interpreter for Zeppelin */ public class IPythonInterpreter extends Interpreter implements ExecuteResultHandler { private static final Logger LOGGER = LoggerFactory.getLogger(IPythonInterpreter.class); private ExecuteWatchdog watchDog; private IPythonClient ipythonClient; private GatewayServer gatewayServer; private PythonZeppelinContext zeppelinContext; private String pythonExecutable; private long ipythonLaunchTimeout; private String additionalPythonPath; private String additionalPythonInitFile; private InterpreterOutputStream interpreterOutput = new InterpreterOutputStream(LOGGER); public IPythonInterpreter(Properties properties) { super(properties); } /** * Sub class can customize the interpreter by adding more python packages under PYTHONPATH. * e.g. PySparkInterpreter * * @param additionalPythonPath */ public void setAdditionalPythonPath(String additionalPythonPath) { this.additionalPythonPath = additionalPythonPath; } /** * Sub class can customize the interpreter by running additional python init code. * e.g. 
PySparkInterpreter * * @param additionalPythonInitFile */ public void setAdditionalPythonInitFile(String additionalPythonInitFile) { this.additionalPythonInitFile = additionalPythonInitFile; } @Override public void open() { try { if (ipythonClient != null) { // IPythonInterpreter might already been opened by PythonInterpreter return; } pythonExecutable = getProperty().getProperty("zeppelin.python", "python"); ipythonLaunchTimeout = Long.parseLong( getProperty().getProperty("zeppelin.ipython.launch.timeout", "30000")); this.zeppelinContext = new PythonZeppelinContext( getInterpreterGroup().getInterpreterHookRegistry(), Integer.parseInt(getProperty().getProperty("zeppelin.python.maxResult", "1000"))); int ipythonPort = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces(); int jvmGatewayPort = RemoteInterpreterUtils.findRandomAvailablePortOnAllLocalInterfaces(); LOGGER.info("Launching IPython Kernel at port: " + ipythonPort); LOGGER.info("Launching JVM Gateway at port: " + jvmGatewayPort); ipythonClient = new IPythonClient("127.0.0.1", ipythonPort); launchIPythonKernel(ipythonPort); setupJVMGateway(jvmGatewayPort); } catch (Exception e) { throw new RuntimeException("Fail to open IPythonInterpreter", e); } } public boolean checkIPythonPrerequisite() { ProcessBuilder processBuilder = new ProcessBuilder("pip", "freeze"); try { File stderrFile = File.createTempFile("zeppelin", ".txt"); processBuilder.redirectError(stderrFile); File stdoutFile = File.createTempFile("zeppelin", ".txt"); processBuilder.redirectOutput(stdoutFile); Process proc = processBuilder.start(); int ret = proc.waitFor(); if (ret != 0) { LOGGER.warn("Fail to run pip freeze.\n" + IOUtils.toString(new FileInputStream(stderrFile))); return false; } String freezeOutput = IOUtils.toString(new FileInputStream(stdoutFile)); if (!freezeOutput.contains("jupyter-client=")) { InterpreterContext.get().out.write("jupyter-client is not installed\n".getBytes()); return false; } if 
(!freezeOutput.contains("ipykernel=")) { InterpreterContext.get().out.write("ipkernel is not installed\n".getBytes()); return false; } if (!freezeOutput.contains("ipython=")) { InterpreterContext.get().out.write("ipython is not installed\n".getBytes()); return false; } if (!freezeOutput.contains("grpcio=")) { InterpreterContext.get().out.write("grpcio is not installed\n".getBytes()); return false; } LOGGER.info("IPython prerequisite is meet"); return true; } catch (Exception e) { LOGGER.warn("Fail to checkIPythonPrerequisite", e); return false; } } private void setupJVMGateway(int jvmGatewayPort) throws IOException { gatewayServer = new GatewayServer(this, jvmGatewayPort); gatewayServer.start(); InputStream input = getClass().getClassLoader().getResourceAsStream("grpc/python/zeppelin_python.py"); List<String> lines = IOUtils.readLines(input); ExecuteResponse response = ipythonClient.block_execute(ExecuteRequest.newBuilder() .setCode(StringUtils.join(lines, System.lineSeparator()) .replace("${JVM_GATEWAY_PORT}", jvmGatewayPort + "")).build()); if (response.getStatus() == ExecuteStatus.ERROR) { throw new IOException("Fail to setup JVMGateway\n" + response.getOutput()); } if (additionalPythonInitFile != null) { input = getClass().getClassLoader().getResourceAsStream(additionalPythonInitFile); lines = IOUtils.readLines(input); response = ipythonClient.block_execute(ExecuteRequest.newBuilder() .setCode(StringUtils.join(lines, System.lineSeparator()) .replace("${JVM_GATEWAY_PORT}", jvmGatewayPort + "")).build()); if (response.getStatus() == ExecuteStatus.ERROR) { throw new IOException("Fail to run additional Python init file: " + additionalPythonInitFile + "\n" + response.getOutput()); } } } private void launchIPythonKernel(int ipythonPort) throws IOException, URISyntaxException { // copy the python scripts to a temp directory, then launch ipython kernel in that folder File tmpPythonScriptFolder = Files.createTempDirectory("zeppelin_ipython").toFile(); String[] 
ipythonScripts = {"ipython_server.py", "ipython_pb2.py", "ipython_pb2_grpc.py"}; for (String ipythonScript : ipythonScripts) { URL url = getClass().getClassLoader().getResource("grpc/python" + "/" + ipythonScript); FileUtils.copyURLToFile(url, new File(tmpPythonScriptFolder, ipythonScript)); } CommandLine cmd = CommandLine.parse(pythonExecutable); cmd.addArgument(tmpPythonScriptFolder.getAbsolutePath() + "/ipython_server.py"); cmd.addArgument(ipythonPort + ""); DefaultExecutor executor = new DefaultExecutor(); ProcessLogOutputStream processOutput = new ProcessLogOutputStream(LOGGER); executor.setStreamHandler(new PumpStreamHandler(processOutput)); watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT); executor.setWatchdog(watchDog); String py4jLibPath = null; if (System.getenv("ZEPPELIN_HOME") != null) { py4jLibPath = System.getenv("ZEPPELIN_HOME") + File.separator + PythonInterpreter.ZEPPELIN_PY4JPATH; } else { Path workingPath = Paths.get("..").toAbsolutePath(); py4jLibPath = workingPath + File.separator + PythonInterpreter.ZEPPELIN_PY4JPATH; } if (additionalPythonPath != null) { // put the py4j at the end, because additionalPythonPath may already contain py4j. // e.g. 
PySparkInterpreter additionalPythonPath = additionalPythonPath + ":" + py4jLibPath; } else { additionalPythonPath = py4jLibPath; } Map<String, String> envs = EnvironmentUtils.getProcEnvironment(); if (envs.containsKey("PYTHONPATH")) { envs.put("PYTHONPATH", additionalPythonPath + ":" + envs.get("PYTHONPATH")); } else { envs.put("PYTHONPATH", additionalPythonPath); } LOGGER.debug("PYTHONPATH: " + envs.get("PYTHONPATH")); executor.execute(cmd, envs, this); // wait until IPython kernel is started or timeout long startTime = System.currentTimeMillis(); while (true) { try { Thread.sleep(100); } catch (InterruptedException e) { LOGGER.error("Interrupted by something", e); } try { StatusResponse response = ipythonClient.status(StatusRequest.newBuilder().build()); if (response.getStatus() == IPythonStatus.RUNNING) { LOGGER.info("IPython Kernel is Running"); break; } else { LOGGER.info("Wait for IPython Kernel to be started"); } } catch (Exception e) { // ignore the exception, because is may happen when grpc server has not started yet. 
LOGGER.info("Wait for IPython Kernel to be started"); } if ((System.currentTimeMillis() - startTime) > ipythonLaunchTimeout) { throw new IOException("Fail to launch IPython Kernel in " + ipythonLaunchTimeout / 1000 + " seconds"); } } } @Override public void close() { if (watchDog != null) { LOGGER.debug("Kill IPython Process"); ipythonClient.stop(StopRequest.newBuilder().build()); watchDog.destroyProcess(); gatewayServer.shutdown(); } } @Override public InterpreterResult interpret(String st, InterpreterContext context) { zeppelinContext.setGui(context.getGui()); interpreterOutput.setInterpreterOutput(context.out); ExecuteResponse response = ipythonClient.stream_execute(ExecuteRequest.newBuilder().setCode(st).build(), interpreterOutput); try { interpreterOutput.getInterpreterOutput().flush(); } catch (IOException e) { throw new RuntimeException("Fail to write output", e); } InterpreterResult result = new InterpreterResult( InterpreterResult.Code.valueOf(response.getStatus().name())); return result; } @Override public void cancel(InterpreterContext context) { ipythonClient.cancel(CancelRequest.newBuilder().build()); } @Override public FormType getFormType() { return FormType.SIMPLE; } @Override public int getProgress(InterpreterContext context) { return 0; } @Override public List<InterpreterCompletion> completion(String buf, int cursor, InterpreterContext interpreterContext) { List<InterpreterCompletion> completions = new ArrayList<>(); CompletionResponse response = ipythonClient.complete( CompletionRequest.getDefaultInstance().newBuilder().setCode(buf) .setCursor(cursor).build()); for (int i = 0; i < response.getMatchesCount(); i++) { completions.add(new InterpreterCompletion( response.getMatches(i), response.getMatches(i), "")); } return completions; } public PythonZeppelinContext getZeppelinContext() { return zeppelinContext; } @Override public void onProcessComplete(int exitValue) { LOGGER.warn("Python Process is completed with exitValue: " + exitValue); } 
@Override public void onProcessFailed(ExecuteException e) { LOGGER.warn("Exception happens in Python Process", e); } private static class ProcessLogOutputStream extends LogOutputStream { private Logger logger; public ProcessLogOutputStream(Logger logger) { this.logger = logger; } @Override protected void processLine(String s, int i) { this.logger.debug("Process Output: " + s); } } }
package com.smockin.mockserver.service; import com.smockin.admin.dto.UserKeyValueDataDTO; import com.smockin.admin.service.SmockinUserService; import com.smockin.admin.service.UserKeyValueDataService; import com.smockin.mockserver.exception.InboundParamMatchException; import com.smockin.mockserver.service.enums.ParamMatchTypeEnum; import com.smockin.utils.GeneralUtils; import org.apache.commons.lang3.RandomUtils; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.tuple.Pair; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import spark.Request; import java.text.SimpleDateFormat; /** * Created by mgallina on 09/08/17. */ @Service public class InboundParamMatchServiceImpl implements InboundParamMatchService { private final Logger logger = LoggerFactory.getLogger(InboundParamMatchServiceImpl.class); @Autowired private SmockinUserService smockinUserService; @Autowired private UserKeyValueDataService userKeyValueDataService; private static final String GENERAL_ERROR = "Error processing inbound param matching. 
Please check your token syntax"; @Override public String enrichWithInboundParamMatches(final Request req, final String mockPath, final String responseBody, final String userCtxPath, final long mockOwnerUserId) throws InboundParamMatchException { if (responseBody == null) { return null; } final String sanitizedUserCtxInboundPath = GeneralUtils.sanitizeMultiUserPath(smockinUserService.getUserMode(), req.pathInfo(), userCtxPath); String enrichedResponseBody = responseBody; final int MAX = 10000; int index = 0; while (true) { if (index > MAX) { logger.error("Error MAX iterations reached in 'while loop', whilst trying to swap out inbound param tokens."); throw new InboundParamMatchException(GENERAL_ERROR); } final String r; try { r = processParamMatch(req, mockPath, enrichedResponseBody, sanitizedUserCtxInboundPath, mockOwnerUserId); } catch (Throwable ex) { logger.error(ex.getMessage()); throw new InboundParamMatchException(GENERAL_ERROR); } if (r == null) { break; } enrichedResponseBody = r; index++; } return enrichedResponseBody; } String processParamMatch(final Request req, final String mockPath, final String responseBody, final String sanitizedUserCtxInboundPath, final long mockOwnerUserId) { // Look up for any 'inbound param token' matches final Pair<ParamMatchTypeEnum, Integer> matchResult = findInboundParamMatch(responseBody); if (matchResult == null) { // No tokens found so do nothing. return null; } final ParamMatchTypeEnum paramMatchType = matchResult.getLeft(); final int matchStartingPosition = matchResult.getRight(); // Determine the matching token type, is it a requestHeader, requestParameter, pathVar, etc... 
if (ParamMatchTypeEnum.lookUpKvp.equals(paramMatchType)) { return processKvp(matchStartingPosition, sanitizedUserCtxInboundPath, mockPath, req, responseBody, mockOwnerUserId); } if (ParamMatchTypeEnum.requestHeader.equals(paramMatchType)) { return processRequestHeader(matchStartingPosition, req, responseBody); } if (ParamMatchTypeEnum.requestParameter.equals(paramMatchType)) { return processRequestParameter(matchStartingPosition, req, responseBody); } if (ParamMatchTypeEnum.pathVar.equals(paramMatchType)) { return processPathVariable(sanitizedUserCtxInboundPath, matchStartingPosition, mockPath, responseBody); } if (ParamMatchTypeEnum.requestBody.equals(paramMatchType)) { return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.requestBody, (req.body() != null) ? req.body() : "", 1); } if (ParamMatchTypeEnum.isoDate.equals(paramMatchType)) { return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.isoDate, new SimpleDateFormat(GeneralUtils.ISO_DATE_FORMAT).format(GeneralUtils.getCurrentDate()), 1); } if (ParamMatchTypeEnum.isoDatetime.equals(paramMatchType)) { return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.isoDatetime, new SimpleDateFormat(GeneralUtils.ISO_DATETIME_FORMAT).format(GeneralUtils.getCurrentDate()), 1); } if (ParamMatchTypeEnum.uuid.equals(paramMatchType)) { return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.uuid, GeneralUtils.generateUUID(), 1); } if (ParamMatchTypeEnum.randomNumber.equals(paramMatchType)) { return processRandomNumber(matchStartingPosition, responseBody); } throw new IllegalArgumentException("Unsupported token : " + matchResult); } Pair<ParamMatchTypeEnum, Integer> findInboundParamMatch(final String responseBody) { if (responseBody == null) { return null; } for (ParamMatchTypeEnum p : ParamMatchTypeEnum.values()) { final int pos = 
StringUtils.indexOf(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + p.name() + ((p.takesArg()) ? "(" : "")); if (pos > -1) { return Pair.of(p, pos + ((p.takesArg()) ? 1 : 0)); } } return null; } String extractArgName(final int matchStartPos, final ParamMatchTypeEnum paramMatchType, final String responseBody, final boolean isNested) { final int start = matchStartPos + (ParamMatchTypeEnum.PARAM_PREFIX + paramMatchType).length(); final int closingPos = StringUtils.indexOf(responseBody, (isNested) ? "))" : ")", start); return StringUtils.substring(responseBody, start, closingPos); } String sanitiseArgName(String argName) { argName = StringUtils.remove(argName, "'"); return StringUtils.remove(argName, "\""); } String processKvp(final int matchStartingPosition, final String sanitizedUserCtxInboundPath, final String mockPath, final Request req, final String responseBody, final long mockOwnerUserId) { // Determine the matching token type, is it a requestHeader, requestParameter, pathVar, etc... 
final String kvpKey = extractArgName(matchStartingPosition, ParamMatchTypeEnum.lookUpKvp, responseBody, false); String sanitisedKvpKey = sanitiseArgName(kvpKey); if (sanitisedKvpKey.contains("(") && !sanitisedKvpKey.contains(")")) { sanitisedKvpKey = sanitisedKvpKey.concat(")"); } if (logger.isDebugEnabled()) { logger.debug("RAW KVP: " + kvpKey); logger.debug("Cleaned KVP : " + sanitisedKvpKey); } // Check if kvpKey is a nested ParamMatchTypeEnum itself final Pair<ParamMatchTypeEnum, Integer> kvpMatchResult = findInboundParamMatch(sanitisedKvpKey); final boolean isNested = (kvpMatchResult != null); if (isNested) { if (logger.isDebugEnabled()) { logger.debug("Nested KVP request type: " + kvpMatchResult.getLeft()); } final String nestedRequestKey = extractArgName(kvpMatchResult.getRight(), kvpMatchResult.getLeft(), sanitisedKvpKey, isNested); if (logger.isDebugEnabled()) { logger.debug("Nested KVP request key: " + nestedRequestKey); } switch (kvpMatchResult.getLeft()) { case requestHeader: sanitisedKvpKey = GeneralUtils.findHeaderIgnoreCase(req, sanitiseArgName(nestedRequestKey)); break; case requestParameter: sanitisedKvpKey = GeneralUtils.extractRequestParamByName(req, sanitiseArgName(nestedRequestKey)); break; case pathVar: sanitisedKvpKey = GeneralUtils.findPathVarIgnoreCase(sanitizedUserCtxInboundPath, mockPath, sanitiseArgName(nestedRequestKey)); break; case requestBody: sanitisedKvpKey = req.body(); break; default: sanitisedKvpKey = null; break; } } final UserKeyValueDataDTO userKeyValueDataDTO = (sanitisedKvpKey != null) ? userKeyValueDataService.loadByKey(sanitisedKvpKey, mockOwnerUserId) : null; if (logger.isDebugEnabled()) { logger.debug("KVP value: " + ((userKeyValueDataDTO != null) ? userKeyValueDataDTO.getValue() : null)); } return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.lookUpKvp + "(" + kvpKey + ((kvpKey.contains("(")) ? "))" : ")"), (userKeyValueDataDTO != null) ? 
userKeyValueDataDTO.getValue() : "", 1); } String processRequestHeader(final int matchStartingPosition, final Request req, final String responseBody) { final String headerName = extractArgName(matchStartingPosition, ParamMatchTypeEnum.requestHeader, responseBody, false); final String headerValue = GeneralUtils.findHeaderIgnoreCase(req, sanitiseArgName(headerName)); if (logger.isDebugEnabled()) { logger.debug("raw header: " + headerName); logger.debug("cleaned header: " + sanitiseArgName(headerName)); logger.debug("header value: " + headerValue); } return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.requestHeader + "(" + headerName + ")", (headerValue != null) ? headerValue : "", 1); } String processRequestParameter(final int matchStartingPosition, final Request req, final String responseBody) { final String requestParamName = extractArgName(matchStartingPosition, ParamMatchTypeEnum.requestParameter, responseBody, false); final String requestParamValue = GeneralUtils.extractRequestParamByName(req, sanitiseArgName(requestParamName)); if (logger.isDebugEnabled()) { logger.debug("RAW request param: " + requestParamName); logger.debug("Cleaned request param: " + sanitiseArgName(requestParamName)); logger.debug("Request param value: " + requestParamValue); } return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.requestParameter + "(" + requestParamName + ")", (requestParamValue != null) ? 
requestParamValue : "", 1); } String processPathVariable(final String sanitizedUserCtxInboundPath, final int matchStartingPosition, final String mockPath, final String responseBody) { final String pathVariableName = extractArgName(matchStartingPosition, ParamMatchTypeEnum.pathVar, responseBody, false); final String pathVariableValue = GeneralUtils.findPathVarIgnoreCase(sanitizedUserCtxInboundPath, mockPath, sanitiseArgName(pathVariableName)); if (logger.isDebugEnabled()) { logger.debug("RAW path var: " + pathVariableName); logger.debug("Cleaned path var : " + sanitiseArgName(pathVariableName)); logger.debug("Path var value: " + pathVariableValue); } return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.pathVar + "(" + pathVariableName + ")", (pathVariableValue != null) ? pathVariableValue : "", 1); } String processRandomNumber(final int matchStartingPosition, final String responseBody) { final String randomNumberContent = extractArgName(matchStartingPosition, ParamMatchTypeEnum.randomNumber, responseBody, false); if (logger.isDebugEnabled()) { logger.debug("Random number params: " + randomNumberContent); } if (randomNumberContent == null) { throw new IllegalArgumentException(ParamMatchTypeEnum.randomNumber.name() + " is missing args"); } final String[] randomNumberContentParams = StringUtils.split(randomNumberContent, ","); if (randomNumberContentParams.length == 0) { throw new IllegalArgumentException(ParamMatchTypeEnum.randomNumber.name() + " is missing args"); } if (randomNumberContentParams.length > 2) { throw new IllegalArgumentException(ParamMatchTypeEnum.randomNumber.name() + " has too many args"); } final int startInc = (randomNumberContentParams.length == 2) ? Integer.parseInt(randomNumberContentParams[0].trim()) : 0; final int endExcl = (randomNumberContentParams.length == 2) ? 
Integer.parseInt(randomNumberContentParams[1].trim()) : Integer.parseInt(randomNumberContentParams[0].trim()); final int randomValue = RandomUtils.nextInt(startInc, endExcl); if (logger.isDebugEnabled()) { logger.debug("Random number value: " + randomValue); } return StringUtils.replaceIgnoreCase(responseBody, ParamMatchTypeEnum.PARAM_PREFIX + ParamMatchTypeEnum.randomNumber + "(" + randomNumberContent + ")", String.valueOf(randomValue), 1); } }
/*
 * $Id$
 *
 * SARL is an general-purpose agent programming language.
 * More details on http://www.sarl.io
 *
 * Copyright (C) 2014-2016 the original authors or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.sarl.eclipse.wizards.sreinstall;

import java.io.File;
import java.text.MessageFormat;

import com.google.common.base.Strings;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Path;
import org.eclipse.debug.internal.ui.SWTFactory;
import org.eclipse.jdt.internal.debug.ui.JavaDebugImages;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.FileDialog;
import org.eclipse.swt.widgets.Text;

import io.sarl.eclipse.runtime.ISREInstall;
import io.sarl.eclipse.runtime.SARLRuntime;
import io.sarl.eclipse.runtime.SREException;
import io.sarl.eclipse.runtime.StandardSREInstall;
import io.sarl.eclipse.util.Utilities;

/**
 * Standard implementation of a page for the SRE installation wizard.
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
public class StandardSREPage extends AbstractSREInstallPage {

	// Read-only field showing the path of the selected SRE jar.
	private Text sreLibraryTextField;

	// Editable field for the SRE display name.
	private Text sreNameTextField;

	// Editable field for the SRE bootstrap main class.
	private Text sreMainClassTextField;

	// Read-only field showing the SRE identifier.
	private Text sreIdTextField;

	// SRE being edited; committed to on performFinish().
	private StandardSREInstall originalSRE;

	// Scratch copy of originalSRE that the page fields mutate.
	private StandardSREInstall workingCopy;

	/** Construct a configuration page for the SREs.
	 */
	public StandardSREPage() {
		super(Utilities.EMPTY_STRING);
	}

	@Override
	public Image getImage() {
		return JavaDebugImages.get(JavaDebugImages.IMG_WIZBAN_LIBRARY);
	}

	@Override
	public void createControl(Composite parent) {
		// create a composite with standard margins and spacing
		final Composite composite = new Composite(parent, SWT.NONE);
		final GridLayout layout = new GridLayout();
		layout.numColumns = 3;
		composite.setLayout(layout);
		composite.setLayoutData(new GridData(GridData.FILL_BOTH));
		// SRE location
		SWTFactory.createLabel(composite, Messages.StandardSREPage_0, 1);
		this.sreLibraryTextField = SWTFactory.createSingleText(composite, 1);
		this.sreLibraryTextField.setEditable(false);
		final Button folders = SWTFactory.createPushButton(composite, Messages.StandardSREPage_1, null);
		final GridData data = (GridData) folders.getLayoutData();
		data.horizontalAlignment = GridData.END;
		//SRE name
		SWTFactory.createLabel(composite, Messages.StandardSREPage_2, 1);
		this.sreNameTextField = SWTFactory.createSingleText(composite, 2);
		//SRE main class
		SWTFactory.createLabel(composite, Messages.StandardSREPage_3, 1);
		this.sreMainClassTextField = SWTFactory.createSingleText(composite, 2);
		//SRE Id
		SWTFactory.createLabel(composite, Messages.StandardSREPage_8, 1);
		this.sreIdTextField = SWTFactory.createSingleText(composite, 2);
		this.sreIdTextField.setEditable(false);
		//add the listeners now to prevent them from monkeying with initialized settings
		this.sreNameTextField.addModifyListener(new ModifyListener() {
			@SuppressWarnings("synthetic-access")
			@Override
			public void modifyText(ModifyEvent event) {
				// push the edited name into the working copy and revalidate the page
				StandardSREPage.this.workingCopy.setName(
						StandardSREPage.this.sreNameTextField.getText());
				setPageStatus(validate());
				updatePageStatus();
			}
		});
		this.sreMainClassTextField.addModifyListener(new ModifyListener() {
			@SuppressWarnings("synthetic-access")
			@Override
			public void modifyText(ModifyEvent event) {
				// push the edited main class into the working copy and revalidate the page
				StandardSREPage.this.workingCopy.setMainClass(
						StandardSREPage.this.sreMainClassTextField.getText());
				setPageStatus(validate());
				updatePageStatus();
			}
		});
		folders.addSelectionListener(new SelectionAdapter() {
			@Override
			public void widgetSelected(SelectionEvent event) {
				selectSRE();
			}
		});
		Dialog.applyDialogFont(composite);
		setControl(composite);
		//PlatformUI.getWorkbench().getHelpSystem().setHelp(getControl(),
		//IJavaDebugHelpContextIds.EDIT_JRE_STD_VM_WIZARD_PAGE);
		setPageStatus(validate());
		updatePageStatus();
		initializeFields();
	}

	/** Ask to the user to selected the SRE.
	 */
	protected void selectSRE() {
		// pre-select the working copy's current jar, if any
		final File file;
		if (StandardSREPage.this.workingCopy.getJarFile() != null) {
			file = StandardSREPage.this.workingCopy.getJarFile().toFile();
		} else {
			file = null;
		}
		final FileDialog dialog = new FileDialog(getShell(), SWT.OPEN);
		dialog.setText(Messages.StandardSREPage_4);
		dialog.setFilterExtensions(new String[] {"*.jar"}); //$NON-NLS-1$
		if (file != null && file.exists()) {
			dialog.setFileName(file.getAbsolutePath());
		}
		final String selectedFile = dialog.open();
		if (selectedFile != null) {
			final IPath path = Path.fromOSString(selectedFile);
			//			IWorkspace workspace = ResourcesPlugin.getWorkspace();
			//			IPath workspaceLocation = workspace.getRoot().getLocation();
			//			SARLEclipsePlugin.logDebugMessage("Workspace (Path): " + workspaceLocation); //$NON-NLS-1$
			//			if (workspaceLocation.isPrefixOf(path)) {
			//				SARLEclipsePlugin.logDebugMessage("Make relative path"); //$NON-NLS-1$
			//				path = workspaceLocation.makeRelativeTo(workspaceLocation);
			//			}
			//			SARLEclipsePlugin.logDebugMessage("Resolved Path (Path): " + path); //$NON-NLS-1$
			//
			// a fresh working copy receives the newly selected jar
			createWorkingCopy();
			this.workingCopy.setJarFile(path);
			final IStatus status = validate();
			//			initializeFields();
			setPageStatus(status);
			updatePageStatus();
		}
	}

	@Override
	public boolean performFinish() {
		// commit: serialize the working copy and overwrite the original SRE from it
		try {
			final String xml = SARLRuntime.getSREAsXML(this.workingCopy);
			SARLRuntime.setSREFromXML(this.originalSRE, xml);
			return true;
		} catch (CoreException e) {
			setErrorMessage(e.getLocalizedMessage());
			return false;
		}
	}

	@Override
	public void initialize(ISREInstall sre) {
		// this page only edits StandardSREInstall instances
		if (!(sre instanceof StandardSREInstall)) {
			throw new SREException("Illegal SRE type: expecting StandardSREInstall."); //$NON-NLS-1$
		}
		setTitle(MessageFormat.format(Messages.StandardSREPage_7, sre.getName()));
		this.originalSRE = (StandardSREInstall) sre;
		createWorkingCopy();
	}

	/** Create a new instance of the working copy.
	 */
	protected void createWorkingCopy() {
		this.workingCopy = this.originalSRE.clone();
		// suppress change notifications while the page edits the copy
		this.workingCopy.setNotify(false);
	}

	@Override
	public ISREInstall createSelection(String id) {
		final StandardSREInstall sre = new StandardSREInstall(id);
		sre.revalidate();
		initialize(sre);
		return sre;
	}

	/**
	 * Initialize the dialogs fields.
*/ private void initializeFields() { final IPath path = this.workingCopy.getJarFile(); String tooltip = null; String basename = null; if (path != null) { tooltip = path.toOSString(); final IPath tmpPath = path.removeTrailingSeparator(); if (tmpPath != null) { basename = tmpPath.lastSegment(); } } this.sreLibraryTextField.setText(Strings.nullToEmpty(basename)); this.sreLibraryTextField.setToolTipText(Strings.nullToEmpty(tooltip)); // final String name = this.workingCopy.getNameNoDefault(); this.sreNameTextField.setText(Strings.nullToEmpty(name)); // final String mainClass = this.workingCopy.getMainClass(); this.sreMainClassTextField.setText(Strings.nullToEmpty(mainClass)); // this.sreIdTextField.setText(this.workingCopy.getId()); } private IStatus validate() { IStatus status = this.workingCopy.revalidate(); if (status.isOK()) { status = validateNameAgainstOtherSREs(this.workingCopy.getName()); } return status; } }
/*
 * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.appflow.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;

/**
 * <p>
 * Information about required authentication parameters.
 * </p>
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/appflow-2020-08-23/AuthParameter" target="_top">AWS API
 *      Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class AuthParameter implements Serializable, Cloneable, StructuredPojo {

    /** The authentication key required to authenticate with the connector. */
    private String key;
    /** Indicates whether this authentication parameter is required. */
    private Boolean isRequired;
    /** Label used for authentication parameter. */
    private String label;
    /** A description about the authentication parameter. */
    private String description;
    /** Indicates whether this authentication parameter is a sensitive field. */
    private Boolean isSensitiveField;
    /** Contains default values for this authentication parameter that are supplied by the connector. */
    private java.util.List<String> connectorSuppliedValues;

    /**
     * Sets the authentication key required to authenticate with the connector.
     *
     * @param key
     *        The authentication key required to authenticate with the connector.
     */
    public void setKey(String key) {
        this.key = key;
    }

    /**
     * @return The authentication key required to authenticate with the connector.
     */
    public String getKey() {
        return this.key;
    }

    /**
     * Fluent variant of {@link #setKey(String)}.
     *
     * @param key
     *        The authentication key required to authenticate with the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withKey(String key) {
        this.key = key;
        return this;
    }

    /**
     * Sets whether this authentication parameter is required.
     *
     * @param isRequired
     *        Indicates whether this authentication parameter is required.
     */
    public void setIsRequired(Boolean isRequired) {
        this.isRequired = isRequired;
    }

    /**
     * @return Indicates whether this authentication parameter is required.
     */
    public Boolean getIsRequired() {
        return this.isRequired;
    }

    /**
     * Fluent variant of {@link #setIsRequired(Boolean)}.
     *
     * @param isRequired
     *        Indicates whether this authentication parameter is required.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withIsRequired(Boolean isRequired) {
        this.isRequired = isRequired;
        return this;
    }

    /**
     * Alias of {@link #getIsRequired()} following the boolean-accessor convention.
     *
     * @return Indicates whether this authentication parameter is required.
     */
    public Boolean isRequired() {
        return this.isRequired;
    }

    /**
     * Sets the label used for the authentication parameter.
     *
     * @param label
     *        Label used for authentication parameter.
     */
    public void setLabel(String label) {
        this.label = label;
    }

    /**
     * @return Label used for authentication parameter.
     */
    public String getLabel() {
        return this.label;
    }

    /**
     * Fluent variant of {@link #setLabel(String)}.
     *
     * @param label
     *        Label used for authentication parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withLabel(String label) {
        this.label = label;
        return this;
    }

    /**
     * Sets the description of the authentication parameter.
     *
     * @param description
     *        A description about the authentication parameter.
     */
    public void setDescription(String description) {
        this.description = description;
    }

    /**
     * @return A description about the authentication parameter.
     */
    public String getDescription() {
        return this.description;
    }

    /**
     * Fluent variant of {@link #setDescription(String)}.
     *
     * @param description
     *        A description about the authentication parameter.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withDescription(String description) {
        this.description = description;
        return this;
    }

    /**
     * Sets whether this authentication parameter is a sensitive field.
     *
     * @param isSensitiveField
     *        Indicates whether this authentication parameter is a sensitive field.
     */
    public void setIsSensitiveField(Boolean isSensitiveField) {
        this.isSensitiveField = isSensitiveField;
    }

    /**
     * @return Indicates whether this authentication parameter is a sensitive field.
     */
    public Boolean getIsSensitiveField() {
        return this.isSensitiveField;
    }

    /**
     * Fluent variant of {@link #setIsSensitiveField(Boolean)}.
     *
     * @param isSensitiveField
     *        Indicates whether this authentication parameter is a sensitive field.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withIsSensitiveField(Boolean isSensitiveField) {
        this.isSensitiveField = isSensitiveField;
        return this;
    }

    /**
     * Alias of {@link #getIsSensitiveField()} following the boolean-accessor convention.
     *
     * @return Indicates whether this authentication parameter is a sensitive field.
     */
    public Boolean isSensitiveField() {
        return this.isSensitiveField;
    }

    /**
     * @return Contains default values for this authentication parameter that are supplied by the connector.
     */
    public java.util.List<String> getConnectorSuppliedValues() {
        return this.connectorSuppliedValues;
    }

    /**
     * Replaces the connector-supplied default values with a defensive copy of the given collection.
     *
     * @param connectorSuppliedValues
     *        Contains default values for this authentication parameter that are supplied by the connector.
     */
    public void setConnectorSuppliedValues(java.util.Collection<String> connectorSuppliedValues) {
        // A null collection clears the field; otherwise copy so later caller
        // mutations of the argument cannot affect this object.
        this.connectorSuppliedValues = (connectorSuppliedValues == null)
                ? null
                : new java.util.ArrayList<String>(connectorSuppliedValues);
    }

    /**
     * <p>
     * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
     * {@link #setConnectorSuppliedValues(java.util.Collection)} or
     * {@link #withConnectorSuppliedValues(java.util.Collection)} if you want to override the existing values.
     * </p>
     *
     * @param connectorSuppliedValues
     *        Contains default values for this authentication parameter that are supplied by the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withConnectorSuppliedValues(String... connectorSuppliedValues) {
        if (this.connectorSuppliedValues == null) {
            this.connectorSuppliedValues = new java.util.ArrayList<String>(connectorSuppliedValues.length);
        }
        java.util.Collections.addAll(this.connectorSuppliedValues, connectorSuppliedValues);
        return this;
    }

    /**
     * Fluent variant of {@link #setConnectorSuppliedValues(java.util.Collection)}; overrides any existing values.
     *
     * @param connectorSuppliedValues
     *        Contains default values for this authentication parameter that are supplied by the connector.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public AuthParameter withConnectorSuppliedValues(java.util.Collection<String> connectorSuppliedValues) {
        setConnectorSuppliedValues(connectorSuppliedValues);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        // Only non-null members are rendered; every member but the last one
        // carries a trailing comma (matching the generated-SDK format).
        StringBuilder buffer = new StringBuilder("{");
        if (getKey() != null) {
            buffer.append("Key: ").append(getKey()).append(",");
        }
        if (getIsRequired() != null) {
            buffer.append("IsRequired: ").append(getIsRequired()).append(",");
        }
        if (getLabel() != null) {
            buffer.append("Label: ").append(getLabel()).append(",");
        }
        if (getDescription() != null) {
            buffer.append("Description: ").append(getDescription()).append(",");
        }
        if (getIsSensitiveField() != null) {
            buffer.append("IsSensitiveField: ").append(getIsSensitiveField()).append(",");
        }
        if (getConnectorSuppliedValues() != null) {
            buffer.append("ConnectorSuppliedValues: ").append(getConnectorSuppliedValues());
        }
        return buffer.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the null check as well.
        if (!(obj instanceof AuthParameter)) {
            return false;
        }
        AuthParameter that = (AuthParameter) obj;
        return java.util.Objects.equals(getKey(), that.getKey())
                && java.util.Objects.equals(getIsRequired(), that.getIsRequired())
                && java.util.Objects.equals(getLabel(), that.getLabel())
                && java.util.Objects.equals(getDescription(), that.getDescription())
                && java.util.Objects.equals(getIsSensitiveField(), that.getIsSensitiveField())
                && java.util.Objects.equals(getConnectorSuppliedValues(), that.getConnectorSuppliedValues());
    }

    @Override
    public int hashCode() {
        // Objects.hash uses the same 31-based accumulation (seed 1, null -> 0)
        // as the hand-rolled generated code, so the value is unchanged.
        return java.util.Objects.hash(getKey(), getIsRequired(), getLabel(), getDescription(), getIsSensitiveField(),
                getConnectorSuppliedValues());
    }

    @Override
    public AuthParameter clone() {
        try {
            return (AuthParameter) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }

    @com.amazonaws.annotation.SdkInternalApi
    @Override
    public void marshall(ProtocolMarshaller protocolMarshaller) {
        com.amazonaws.services.appflow.model.transform.AuthParameterMarshaller.getInstance().marshall(this, protocolMarshaller);
    }
}
package io.variability.jhipster.service;

import io.variability.jhipster.domain.Authority;
import io.variability.jhipster.domain.User;
import io.variability.jhipster.repository.AuthorityRepository;
import io.variability.jhipster.repository.UserRepository;
import io.variability.jhipster.security.AuthoritiesConstants;
import io.variability.jhipster.security.SecurityUtils;
import io.variability.jhipster.service.util.RandomUtil;
import io.variability.jhipster.web.rest.vm.ManagedUserVM;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;

import java.time.ZonedDateTime;
import javax.inject.Inject;
import java.util.*;

/**
 * Service class for managing users.
 *
 * <p>NOTE(review): this service performs multi-step repository writes without
 * a {@code @Transactional} annotation — confirm whether transaction boundaries
 * are declared elsewhere (e.g. on the repository layer).
 */
@Service
public class UserService {

    private final Logger log = LoggerFactory.getLogger(UserService.class);

    @Inject
    private PasswordEncoder passwordEncoder;

    @Inject
    private UserRepository userRepository;

    @Inject
    private AuthorityRepository authorityRepository;

    /**
     * Activates the user registered under the given activation key.
     *
     * @param key the activation key that was e-mailed to the user.
     * @return the activated user, or an empty Optional when no user matches the key.
     */
    public Optional<User> activateRegistration(String key) {
        log.debug("Activating user for activation key {}", key);
        return userRepository.findOneByActivationKey(key)
            .map(user -> {
                // activate given user for the registration key.
                user.setActivated(true);
                // The key is single-use: clear it so it cannot be replayed.
                user.setActivationKey(null);
                userRepository.save(user);
                log.debug("Activated user: {}", user);
                return user;
            });
    }

    /**
     * Finishes a password reset: sets the new password if the reset key is
     * valid and no older than 24 hours.
     *
     * @param newPassword the new clear-text password (encoded before storage).
     * @param key the reset key that was e-mailed to the user.
     * @return the updated user, or an empty Optional when the key is unknown or expired.
     */
    public Optional<User> completePasswordReset(String newPassword, String key) {
        log.debug("Reset user password for reset key {}", key);
        return userRepository.findOneByResetKey(key)
            .filter(user -> {
                ZonedDateTime oneDayAgo = ZonedDateTime.now().minusHours(24);
                // Fix: guard against a missing reset date. The previous code
                // called isAfter() unconditionally and threw a
                // NullPointerException when a reset key existed without a date;
                // such users are now simply rejected by the filter.
                return user.getResetDate() != null && user.getResetDate().isAfter(oneDayAgo);
            })
            .map(user -> {
                user.setPassword(passwordEncoder.encode(newPassword));
                // The reset key/date are single-use: clear them after success.
                user.setResetKey(null);
                user.setResetDate(null);
                userRepository.save(user);
                return user;
            });
    }

    /**
     * Starts a password reset for the given e-mail address, if it belongs to
     * an activated user.
     *
     * @param mail the e-mail address of the account.
     * @return the user carrying the fresh reset key, or an empty Optional when
     *     the mail is unknown or the account is not activated.
     */
    public Optional<User> requestPasswordReset(String mail) {
        return userRepository.findOneByEmail(mail)
            .filter(User::getActivated)
            .map(user -> {
                user.setResetKey(RandomUtil.generateResetKey());
                user.setResetDate(ZonedDateTime.now());
                userRepository.save(user);
                return user;
            });
    }

    /**
     * Creates a self-registered user: not activated yet, with an activation
     * key, and carrying only the default {@code USER} authority.
     *
     * @param login the account login.
     * @param password the clear-text password (encoded before storage).
     * @param firstName the user's first name.
     * @param lastName the user's last name.
     * @param email the user's e-mail address.
     * @param langKey the user's language key.
     * @return the persisted user.
     */
    public User createUser(String login, String password, String firstName, String lastName, String email,
        String langKey) {

        User newUser = new User();
        Authority authority = authorityRepository.findOne(AuthoritiesConstants.USER);
        Set<Authority> authorities = new HashSet<>();
        String encryptedPassword = passwordEncoder.encode(password);
        newUser.setLogin(login);
        // new user gets initially a generated password
        newUser.setPassword(encryptedPassword);
        newUser.setFirstName(firstName);
        newUser.setLastName(lastName);
        newUser.setEmail(email);
        newUser.setLangKey(langKey);
        // new user is not active
        newUser.setActivated(false);
        // new user gets registration key
        newUser.setActivationKey(RandomUtil.generateActivationKey());
        authorities.add(authority);
        newUser.setAuthorities(authorities);
        userRepository.save(newUser);
        log.debug("Created Information for User: {}", newUser);
        return newUser;
    }

    /**
     * Creates an administrator-managed user from the given view model: the
     * account is activated immediately with a random password and a pending
     * reset key, so the user must choose a password on first access.
     *
     * @param managedUserVM the view model describing the user to create.
     * @return the persisted user.
     */
    public User createUser(ManagedUserVM managedUserVM) {
        User user = new User();
        user.setLogin(managedUserVM.getLogin());
        user.setFirstName(managedUserVM.getFirstName());
        user.setLastName(managedUserVM.getLastName());
        user.setEmail(managedUserVM.getEmail());
        if (managedUserVM.getLangKey() == null) {
            user.setLangKey("en"); // default language
        } else {
            user.setLangKey(managedUserVM.getLangKey());
        }
        if (managedUserVM.getAuthorities() != null) {
            Set<Authority> authorities = new HashSet<>();
            // Resolve each authority name against the repository.
            managedUserVM.getAuthorities().forEach(
                authority -> authorities.add(authorityRepository.findOne(authority))
            );
            user.setAuthorities(authorities);
        }
        String encryptedPassword = passwordEncoder.encode(RandomUtil.generatePassword());
        user.setPassword(encryptedPassword);
        user.setResetKey(RandomUtil.generateResetKey());
        user.setResetDate(ZonedDateTime.now());
        user.setActivated(true);
        userRepository.save(user);
        log.debug("Created Information for User: {}", user);
        return user;
    }

    /**
     * Updates the profile of the currently authenticated user.
     *
     * @param firstName the new first name.
     * @param lastName the new last name.
     * @param email the new e-mail address.
     * @param langKey the new language key.
     */
    public void updateUser(String firstName, String lastName, String email, String langKey) {
        userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(u -> {
            u.setFirstName(firstName);
            u.setLastName(lastName);
            u.setEmail(email);
            u.setLangKey(langKey);
            userRepository.save(u);
            log.debug("Changed Information for User: {}", u);
        });
    }

    /**
     * Updates an arbitrary user by id (administrator operation), replacing the
     * authority set with the given authority names.
     *
     * @param id the id of the user to update.
     * @param login the new login.
     * @param firstName the new first name.
     * @param lastName the new last name.
     * @param email the new e-mail address.
     * @param activated the new activation state.
     * @param langKey the new language key.
     * @param authorities the names of the authorities the user should end up with.
     */
    public void updateUser(String id, String login, String firstName, String lastName, String email,
        boolean activated, String langKey, Set<String> authorities) {

        userRepository
            .findOneById(id)
            .ifPresent(u -> {
                u.setLogin(login);
                u.setFirstName(firstName);
                u.setLastName(lastName);
                u.setEmail(email);
                u.setActivated(activated);
                u.setLangKey(langKey);
                // Mutate the managed collection in place so the persistence
                // provider tracks the change.
                Set<Authority> managedAuthorities = u.getAuthorities();
                managedAuthorities.clear();
                authorities.forEach(
                    authority -> managedAuthorities.add(authorityRepository.findOne(authority))
                );
                userRepository.save(u);
                log.debug("Changed Information for User: {}", u);
            });
    }

    /**
     * Deletes the user with the given login, if any.
     *
     * @param login the login of the user to delete.
     */
    public void deleteUser(String login) {
        userRepository.findOneByLogin(login).ifPresent(u -> {
            userRepository.delete(u);
            log.debug("Deleted User: {}", u);
        });
    }

    /**
     * Changes the password of the currently authenticated user.
     *
     * @param password the new clear-text password (encoded before storage).
     */
    public void changePassword(String password) {
        userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(u -> {
            String encryptedPassword = passwordEncoder.encode(password);
            u.setPassword(encryptedPassword);
            userRepository.save(u);
            log.debug("Changed password for User: {}", u);
        });
    }

    /**
     * Looks up a user by login.
     *
     * @param login the login to search for.
     * @return the user, or an empty Optional if the login is unknown.
     */
    public Optional<User> getUserWithAuthoritiesByLogin(String login) {
        return userRepository.findOneByLogin(login);
    }

    /**
     * Looks up a user by id.
     *
     * <p>NOTE(review): the name suggests authorities are fetched eagerly, but
     * this is a plain {@code findOne} — confirm the repository/mapping loads
     * the authority set.
     *
     * @param id the id of the user.
     * @return the user, or {@code null} if the id is unknown.
     */
    public User getUserWithAuthorities(String id) {
        return userRepository.findOne(id);
    }

    /**
     * Returns the currently authenticated user.
     *
     * <p>Calls {@code Optional.get()}, so it throws if no user matches the
     * current login (i.e. it assumes an authenticated context).
     *
     * @return the current user.
     */
    public User getUserWithAuthorities() {
        return userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).get();
    }

    /**
     * Not activated users should be automatically deleted after 3 days.
     * <p>
     * This is scheduled to get fired everyday, at 01:00 (am).
     * </p>
     */
    @Scheduled(cron = "0 0 1 * * ?")
    public void removeNotActivatedUsers() {
        ZonedDateTime now = ZonedDateTime.now();
        List<User> users = userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(now.minusDays(3));
        for (User user : users) {
            log.debug("Deleting not activated user {}", user.getLogin());
            userRepository.delete(user);
        }
    }
}
package ru.job4j.start; import ru.job4j.models.Comment; import ru.job4j.models.Item; /** * The class MenuTracker. * * @author Alexander Mezgin * @version 1.0 * @since 18.12.2016 */ public class MenuTracker { /** * private field input. */ private Input input; /** * private field tracker. */ private Tracker tracker; /** * private field arrayslength. */ private final int arrayslength = 8; /** * private field userAction. */ private UserAction[] userActions = new UserAction[arrayslength]; /** * private field position. */ private int position = 0; /** * Constructor for class MenuTracker. * * @param input input * @param tracker tracker */ public MenuTracker(Input input, Tracker tracker) { this.input = input; this.tracker = tracker; } /** * This method fill arrays of action. */ public void fillAction() { this.userActions[position++] = new AddItem("Add the new item."); this.userActions[position++] = new RemoveItem("Delete the item."); this.userActions[position++] = new EditItem("Edit the item."); this.userActions[position++] = new ShowItem("Show all item."); this.userActions[position++] = new FindByIdItem("Find item by id."); this.userActions[position++] = new FindByNameItem("Find item by name."); this.userActions[position++] = new FindByDescriptionItem("Find item by description."); this.userActions[position++] = new AddComment("Add comment in to item."); } /** * This method selected the action. * * @param key key */ public void select(int key) { this.userActions[key].execute(this.input, this.tracker); } /** * This method show the menu of programm. */ public void show() { for (UserAction action : userActions) { if (action != null) { System.out.println(action.info()); } } } /** * This method return a number of ranges users action. * * @return int[] range */ public int[] getRangeActions() { int[] range = new int[arrayslength]; for (int i = 0; i < arrayslength; i++) { range[i] = userActions[i].key(); } return range; } /** * The inner class AddItem. 
*/ private class AddItem extends BaseAction { /** * The Constructor. * @param name applies name */ AddItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 0; return k; } /** * This method performs the main action. * * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String name = input.ask("Input the name of the item: "); String description = input.ask("Input the description of the item: "); tracker.addItem(new Item(name, description)); } } /** * The inner class RemoveItem. */ private class RemoveItem extends BaseAction { /** * The Constructor. * @param name applies name */ RemoveItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 1; return k; } /** * This method performs the main action. * * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String id = input.ask("Enter the id of the deleted item: "); tracker.removeItem(tracker.findById(id)); } } /** * The inner class EditItem. */ private class EditItem extends BaseAction { /** * The Constructor. * @param name applies name */ EditItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 2; return k; } /** * This method performs the main action. 
* * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String id = input.ask("Enter the id of the edited item: "); String name = input.ask("Input the new name of the item: "); String description = input.ask("Input the new description of the item: "); Item item = new Item(name, description); item.setId(id); tracker.editItem(item); } } /** * The inner class ShowItem. */ private class ShowItem extends BaseAction { /** * The Constructor. * @param name applies name */ ShowItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 3; return k; } /** * This method performs the main action. * * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { tracker.getAllItem().forEach(System.out::println); } } /** * The inner class FindByIdItem. */ private class FindByIdItem extends BaseAction { /** * The Constructor. * @param name applies name */ FindByIdItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 4; return k; } /** * This method performs the main action. * * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String id = input.ask("Enter the id of the searched item: "); System.out.println(tracker.findById(id)); } } /** * The inner class FindByNameItem. */ private class FindByNameItem extends BaseAction { /** * The Constructor. * @param name applies name */ FindByNameItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 5; return k; } /** * This method performs the main action. 
* * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String name = input.ask("Enter the name of the searched item: "); System.out.println(tracker.findByName(name)); } } /** * The inner class FindByDescriptionItem. */ private class FindByDescriptionItem extends BaseAction { /** * The Constructor. * @param name applies name */ FindByDescriptionItem(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 6; return k; } /** * This method performs the main action. * * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String description = input.ask("Enter the description of the searched item: "); System.out.println(tracker.findByDescription(description)); } } /** * The inner class AddComment. */ private class AddComment extends BaseAction { /** * The Constructor. * @param name applies name */ AddComment(String name) { super(name); } /** * This method asks for the key on which the user performs an action. * * @return key */ @Override public int key() { final int k = 7; return k; } /** * This method performs the main action. * * @param input is input interface * @param tracker is base class */ @Override public void execute(Input input, Tracker tracker) { String id = input.ask("Enter the id of the item to which to add a comment: "); String comment = input.ask("Enter the comment: "); System.out.println(tracker.addComment(id, new Comment(comment))); } } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.asterix.metadata.utils; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.asterix.common.config.DatasetConfig.DatasetType; import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp; import org.apache.asterix.common.config.DatasetConfig.TransactionState; import org.apache.asterix.common.context.IStorageComponentProvider; import org.apache.asterix.common.exceptions.NoOpWarningCollector; import org.apache.asterix.external.api.ITypedAdapterFactory; import org.apache.asterix.external.indexing.ExternalFile; import org.apache.asterix.external.indexing.IndexingConstants; import org.apache.asterix.external.operators.ExternalDatasetIndexesAbortOperatorDescriptor; import org.apache.asterix.external.operators.ExternalDatasetIndexesCommitOperatorDescriptor; import org.apache.asterix.external.operators.ExternalDatasetIndexesRecoverOperatorDescriptor; import org.apache.asterix.external.operators.ExternalFilesIndexCreateOperatorDescriptor; import 
org.apache.asterix.external.operators.ExternalFilesIndexModificationOperatorDescriptor; import org.apache.asterix.external.operators.ExternalScanOperatorDescriptor; import org.apache.asterix.external.provider.AdapterFactoryProvider; import org.apache.asterix.external.util.ExternalDataConstants; import org.apache.asterix.metadata.MetadataManager; import org.apache.asterix.metadata.declared.MetadataProvider; import org.apache.asterix.metadata.entities.Dataset; import org.apache.asterix.metadata.entities.ExternalDatasetDetails; import org.apache.asterix.metadata.entities.Index; import org.apache.asterix.om.types.ARecordType; import org.apache.asterix.om.types.BuiltinType; import org.apache.asterix.om.types.IAType; import org.apache.asterix.runtime.utils.RuntimeUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint; import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper; import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException; import org.apache.hyracks.algebricks.common.utils.Pair; import org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy; import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory; import org.apache.hyracks.api.dataflow.value.RecordDescriptor; import org.apache.hyracks.api.exceptions.HyracksDataException; import org.apache.hyracks.api.exceptions.SourceLocation; import org.apache.hyracks.api.job.JobSpecification; import org.apache.hyracks.dataflow.std.file.IFileSplitProvider; import org.apache.hyracks.storage.am.common.api.IIndexBuilderFactory; import org.apache.hyracks.storage.am.common.build.IndexBuilderFactory; import org.apache.hyracks.storage.am.common.dataflow.IIndexDataflowHelperFactory; import 
org.apache.hyracks.storage.am.common.dataflow.IndexDataflowHelperFactory; import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor; import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory; import org.apache.hyracks.storage.common.IResourceFactory; import org.apache.hyracks.storage.common.IStorageManager; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; public class ExternalIndexingOperations { private static final Logger LOGGER = LogManager.getLogger(); public static final List<List<String>> FILE_INDEX_FIELD_NAMES = Collections.unmodifiableList(Collections.singletonList(Collections.singletonList(""))); public static final List<IAType> FILE_INDEX_FIELD_TYPES = Collections.unmodifiableList(Collections.singletonList(BuiltinType.ASTRING)); private ExternalIndexingOperations() { } public static boolean isIndexible(ExternalDatasetDetails ds) { String adapter = ds.getAdapter(); if (adapter.equalsIgnoreCase(ExternalDataConstants.ALIAS_HDFS_ADAPTER)) { return true; } return false; } public static boolean isRefereshActive(ExternalDatasetDetails ds) { return ds.getState() != TransactionState.COMMIT; } public static boolean isValidIndexName(String datasetName, String indexName) { return !datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX).equals(indexName); } public static int getRIDSize(Dataset dataset) { ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset.getDatasetDetails(); return IndexingConstants.getRIDSize(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)); } public static IBinaryComparatorFactory[] getComparatorFactories(Dataset dataset) { ExternalDatasetDetails dsd = (ExternalDatasetDetails) dataset.getDatasetDetails(); return IndexingConstants.getComparatorFactories(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)); } public static IBinaryComparatorFactory[] getBuddyBtreeComparatorFactories() { return 
IndexingConstants.getBuddyBtreeComparatorFactories(); } public static List<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset) throws AlgebricksException { ArrayList<ExternalFile> files = new ArrayList<>(); ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails(); try { // Create the file system object FileSystem fs = getFileSystemObject(datasetDetails.getProperties()); // Get paths of dataset String path = datasetDetails.getProperties().get(ExternalDataConstants.KEY_PATH); String[] paths = path.split(","); // Add fileStatuses to files for (String aPath : paths) { FileStatus[] fileStatuses = fs.listStatus(new Path(aPath)); for (int i = 0; i < fileStatuses.length; i++) { int nextFileNumber = files.size(); handleFile(dataset, files, fs, fileStatuses[i], nextFileNumber); } } // Close file system fs.close(); if (files.isEmpty()) { throw new AlgebricksException("File Snapshot retrieved from external file system is empty"); } return files; } catch (Exception e) { LOGGER.warn("Exception while trying to get snapshot from external system", e); throw new AlgebricksException("Unable to get list of HDFS files " + e); } } private static void handleFile(Dataset dataset, List<ExternalFile> files, FileSystem fs, FileStatus fileStatus, int nextFileNumber) throws IOException { if (fileStatus.isDirectory()) { listSubFiles(dataset, fs, fileStatus, files); } else { files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber, fileStatus.getPath().toUri().getPath(), new Date(fileStatus.getModificationTime()), fileStatus.getLen(), ExternalFilePendingOp.NO_OP)); } } /* list all files under the directory * src is expected to be a folder */ private static void listSubFiles(Dataset dataset, FileSystem srcFs, FileStatus src, List<ExternalFile> files) throws IOException { Path path = src.getPath(); FileStatus[] fileStatuses = srcFs.listStatus(path); for (int i = 0; i < fileStatuses.length; i++) { int 
nextFileNumber = files.size(); if (fileStatuses[i].isDirectory()) { listSubFiles(dataset, srcFs, fileStatuses[i], files); } else { files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber, fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i].getModificationTime()), fileStatuses[i].getLen(), ExternalFilePendingOp.NO_OP)); } } } public static FileSystem getFileSystemObject(Map<String, String> map) throws IOException { Configuration conf = new Configuration(); conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_URI, map.get(ExternalDataConstants.KEY_HDFS_URL).trim()); conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_CLASS, DistributedFileSystem.class.getName()); return FileSystem.get(conf); } public static JobSpecification buildFilesIndexCreateJobSpec(Dataset dataset, List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider) throws AlgebricksException { IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider(); JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtil.getMergePolicyFactory(dataset, metadataProvider.getMetadataTxnContext()); ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first; Map<String, String> mergePolicyProperties = compactionInfo.second; Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider .getSplitProviderAndConstraints(dataset, IndexingConstants.getFilesIndexName(dataset.getDatasetName())); IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first; String fileIndexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName()); Index fileIndex = MetadataManager.INSTANCE.getIndex(metadataProvider.getMetadataTxnContext(), dataset.getDataverseName(), dataset.getDatasetName(), fileIndexName); ARecordType recordType 
= (ARecordType) metadataProvider.findType(dataset.getItemTypeDataverseName(), dataset.getItemTypeName()); IResourceFactory resourceFactory = dataset.getResourceFactory(metadataProvider, fileIndex, recordType, null, mergePolicyFactory, mergePolicyProperties); IIndexBuilderFactory indexBuilderFactory = new IndexBuilderFactory(storageComponentProvider.getStorageManager(), secondaryFileSplitProvider, resourceFactory, true); IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory( storageComponentProvider.getStorageManager(), secondaryFileSplitProvider); ExternalFilesIndexCreateOperatorDescriptor externalFilesOp = new ExternalFilesIndexCreateOperatorDescriptor( spec, indexBuilderFactory, dataflowHelperFactory, externalFilesSnapshot); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp, secondarySplitsAndConstraint.second); spec.addRoot(externalFilesOp); spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy()); return spec; } public static JobSpecification buildFilesIndexUpdateJobSpec(Dataset dataset, List<ExternalFile> externalFilesSnapshot, MetadataProvider metadataProvider) throws AlgebricksException { IStorageComponentProvider storageComponentProvider = metadataProvider.getStorageComponentProvider(); JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider .getSplitProviderAndConstraints(dataset, IndexingConstants.getFilesIndexName(dataset.getDatasetName())); IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first; IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory( storageComponentProvider.getStorageManager(), secondaryFileSplitProvider); ExternalFilesIndexModificationOperatorDescriptor externalFilesOp = new ExternalFilesIndexModificationOperatorDescriptor(spec, 
dataflowHelperFactory, externalFilesSnapshot); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp, secondarySplitsAndConstraint.second); spec.addRoot(externalFilesOp); spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy()); return spec; } /** * This method create an indexing operator that index records in HDFS * * @param jobSpec * @param itemType * @param dataset * @param files * @param indexerDesc * @param sourceLoc * @return * @throws AlgebricksException * @throws HyracksDataException * @throws Exception */ private static Pair<ExternalScanOperatorDescriptor, AlgebricksPartitionConstraint> getIndexingOperator( MetadataProvider metadataProvider, JobSpecification jobSpec, IAType itemType, Dataset dataset, List<ExternalFile> files, RecordDescriptor indexerDesc, SourceLocation sourceLoc) throws HyracksDataException, AlgebricksException { ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails(); Map<String, String> configuration = externalDatasetDetails.getProperties(); ITypedAdapterFactory adapterFactory = AdapterFactoryProvider.getIndexingAdapterFactory( metadataProvider.getApplicationContext().getServiceContext(), externalDatasetDetails.getAdapter(), configuration, (ARecordType) itemType, files, true, null, NoOpWarningCollector.INSTANCE); ExternalScanOperatorDescriptor scanOp = new ExternalScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory); scanOp.setSourceLocation(sourceLoc); return new Pair<>(scanOp, adapterFactory.getPartitionConstraint()); } public static Pair<ExternalScanOperatorDescriptor, AlgebricksPartitionConstraint> createExternalIndexingOp( JobSpecification spec, MetadataProvider metadataProvider, Dataset dataset, ARecordType itemType, RecordDescriptor indexerDesc, List<ExternalFile> files, SourceLocation sourceLoc) throws HyracksDataException, AlgebricksException { return getIndexingOperator(metadataProvider, spec, itemType, dataset, files 
== null ? MetadataManager.INSTANCE .getDatasetExternalFiles(metadataProvider.getMetadataTxnContext(), dataset) : files, indexerDesc, sourceLoc); } /** * At the end of this method, we expect to have 4 sets as follows: * metadataFiles should contain only the files that are appended in their original state * addedFiles should contain new files that has number assigned starting after the max original file number * deletedFiles should contain files that are no longer there in the file system * appendedFiles should have the new file information of existing files * The method should return false in case of zero delta * * @param dataset * @param metadataFiles * @param addedFiles * @param deletedFiles * @param appendedFiles * @return * @throws AlgebricksException */ public static boolean isDatasetUptodate(Dataset dataset, List<ExternalFile> metadataFiles, List<ExternalFile> addedFiles, List<ExternalFile> deletedFiles, List<ExternalFile> appendedFiles) throws AlgebricksException { boolean uptodate = true; int newFileNumber = metadataFiles.get(metadataFiles.size() - 1).getFileNumber() + 1; List<ExternalFile> fileSystemFiles = getSnapshotFromExternalFileSystem(dataset); // Loop over file system files < taking care of added files > for (ExternalFile fileSystemFile : fileSystemFiles) { boolean fileFound = false; Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator(); while (mdFilesIterator.hasNext()) { ExternalFile metadataFile = mdFilesIterator.next(); if (!fileSystemFile.getFileName().equals(metadataFile.getFileName())) { continue; } // Same file name if (fileSystemFile.getLastModefiedTime().equals(metadataFile.getLastModefiedTime())) { // Same timestamp if (fileSystemFile.getSize() == metadataFile.getSize()) { // Same size -> no op mdFilesIterator.remove(); fileFound = true; } else { // Different size -> append op metadataFile.setPendingOp(ExternalFilePendingOp.APPEND_OP); fileSystemFile.setPendingOp(ExternalFilePendingOp.APPEND_OP); 
appendedFiles.add(fileSystemFile); fileFound = true; uptodate = false; } } else { // Same file name, Different file mod date -> delete and add metadataFile.setPendingOp(ExternalFilePendingOp.DROP_OP); deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), 0, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile.getSize(), ExternalFilePendingOp.DROP_OP)); fileSystemFile.setPendingOp(ExternalFilePendingOp.ADD_OP); fileSystemFile.setFileNumber(newFileNumber); addedFiles.add(fileSystemFile); newFileNumber++; fileFound = true; uptodate = false; } if (fileFound) { break; } } if (!fileFound) { // File not stored previously in metadata -> pending add op fileSystemFile.setPendingOp(ExternalFilePendingOp.ADD_OP); fileSystemFile.setFileNumber(newFileNumber); addedFiles.add(fileSystemFile); newFileNumber++; uptodate = false; } } // Done with files from external file system -> metadata files now contain both deleted files and appended ones // first, correct number assignment to deleted and updated files for (ExternalFile deletedFile : deletedFiles) { deletedFile.setFileNumber(newFileNumber); newFileNumber++; } for (ExternalFile appendedFile : appendedFiles) { appendedFile.setFileNumber(newFileNumber); newFileNumber++; } // include the remaining deleted files Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator(); while (mdFilesIterator.hasNext()) { ExternalFile metadataFile = mdFilesIterator.next(); if (metadataFile.getPendingOp() == ExternalFilePendingOp.NO_OP) { metadataFile.setPendingOp(ExternalFilePendingOp.DROP_OP); deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(), metadataFile.getSize(), metadataFile.getPendingOp())); newFileNumber++; uptodate = false; } } return uptodate; } public static Dataset createTransactionDataset(Dataset dataset) { ExternalDatasetDetails 
originalDsd = (ExternalDatasetDetails) dataset.getDatasetDetails(); ExternalDatasetDetails dsd = new ExternalDatasetDetails(originalDsd.getAdapter(), originalDsd.getProperties(), originalDsd.getTimestamp(), TransactionState.BEGIN); return new Dataset(dataset.getDataverseName(), dataset.getDatasetName(), dataset.getItemTypeDataverseName(), dataset.getItemTypeName(), dataset.getNodeGroupName(), dataset.getCompactionPolicy(), dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(), DatasetType.EXTERNAL, dataset.getDatasetId(), dataset.getPendingOp()); } public static JobSpecification buildDropFilesIndexJobSpec(MetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException { String indexName = IndexingConstants.getFilesIndexName(dataset.getDatasetName()); JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider.getSplitProviderAndConstraints(dataset, indexName); IIndexDataflowHelperFactory dataflowHelperFactory = new IndexDataflowHelperFactory( metadataProvider.getStorageComponentProvider().getStorageManager(), splitsAndConstraint.first); IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec, dataflowHelperFactory); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop, splitsAndConstraint.second); spec.addRoot(btreeDrop); return spec; } public static JobSpecification buildFilesIndexUpdateOp(Dataset ds, List<ExternalFile> metadataFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles, MetadataProvider metadataProvider) throws AlgebricksException { ArrayList<ExternalFile> files = new ArrayList<>(); for (ExternalFile file : metadataFiles) { if (file.getPendingOp() == ExternalFilePendingOp.DROP_OP) { files.add(file); } else if (file.getPendingOp() == ExternalFilePendingOp.APPEND_OP) { for (ExternalFile appendedFile : appendedFiles) { if 
(appendedFile.getFileName().equals(file.getFileName())) { files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(), file.getFileName(), file.getLastModefiedTime(), appendedFile.getSize(), ExternalFilePendingOp.NO_OP)); } } } } for (ExternalFile file : addedFiles) { files.add(file); } Collections.sort(files); return buildFilesIndexUpdateJobSpec(ds, files, metadataProvider); } public static JobSpecification buildIndexUpdateOp(Dataset ds, Index index, List<ExternalFile> metadataFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles, MetadataProvider metadataProvider, SourceLocation sourceLoc) throws AlgebricksException { // Create files list ArrayList<ExternalFile> files = new ArrayList<>(); for (ExternalFile metadataFile : metadataFiles) { if (metadataFile.getPendingOp() != ExternalFilePendingOp.APPEND_OP) { files.add(metadataFile); } else { metadataFile.setPendingOp(ExternalFilePendingOp.NO_OP); files.add(metadataFile); } } // add new files for (ExternalFile file : addedFiles) { files.add(file); } // add appended files for (ExternalFile file : appendedFiles) { files.add(file); } return IndexUtil.buildSecondaryIndexLoadingJobSpec(ds, index, metadataProvider, files, sourceLoc); } public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider) throws AlgebricksException { JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); IStorageManager storageMgr = metadataProvider.getStorageComponentProvider().getStorageManager(); ArrayList<IIndexDataflowHelperFactory> treeDataflowHelperFactories = new ArrayList<>(); AlgebricksPartitionConstraint constraints = null; for (Index index : indexes) { IFileSplitProvider indexSplitProvider; if (isValidIndexName(index.getDatasetName(), index.getIndexName())) { Pair<IFileSplitProvider, AlgebricksPartitionConstraint> sAndConstraints = metadataProvider.getSplitProviderAndConstraints(ds, 
index.getIndexName()); indexSplitProvider = sAndConstraints.first; constraints = sAndConstraints.second; } else { indexSplitProvider = metadataProvider.getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first; } IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(storageMgr, indexSplitProvider); treeDataflowHelperFactories.add(indexDataflowHelperFactory); } ExternalDatasetIndexesCommitOperatorDescriptor op = new ExternalDatasetIndexesCommitOperatorDescriptor(spec, treeDataflowHelperFactories); spec.addRoot(op); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op, constraints); spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy()); return spec; } public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider) throws AlgebricksException { JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); IStorageManager storageMgr = metadataProvider.getStorageComponentProvider().getStorageManager(); ArrayList<IIndexDataflowHelperFactory> treeDataflowHelperFactories = new ArrayList<>(); AlgebricksPartitionConstraint constraints = null; for (Index index : indexes) { IFileSplitProvider indexSplitProvider; if (isValidIndexName(index.getDatasetName(), index.getIndexName())) { Pair<IFileSplitProvider, AlgebricksPartitionConstraint> sAndConstraints = metadataProvider.getSplitProviderAndConstraints(ds, index.getIndexName()); indexSplitProvider = sAndConstraints.first; constraints = sAndConstraints.second; } else { indexSplitProvider = metadataProvider.getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first; } IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(storageMgr, indexSplitProvider); treeDataflowHelperFactories.add(indexDataflowHelperFactory); } 
ExternalDatasetIndexesAbortOperatorDescriptor op = new ExternalDatasetIndexesAbortOperatorDescriptor(spec, treeDataflowHelperFactories); spec.addRoot(op); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op, constraints); spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy()); return spec; } public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, MetadataProvider metadataProvider) throws AlgebricksException { JobSpecification spec = RuntimeUtils.createJobSpecification(metadataProvider.getApplicationContext()); IStorageManager storageMgr = metadataProvider.getStorageComponentProvider().getStorageManager(); ArrayList<IIndexDataflowHelperFactory> treeDataflowHelperFactories = new ArrayList<>(); AlgebricksPartitionConstraint constraints = null; for (Index index : indexes) { IFileSplitProvider indexSplitProvider; if (isValidIndexName(index.getDatasetName(), index.getIndexName())) { Pair<IFileSplitProvider, AlgebricksPartitionConstraint> sAndConstraints = metadataProvider.getSplitProviderAndConstraints(ds, index.getIndexName()); indexSplitProvider = sAndConstraints.first; constraints = sAndConstraints.second; } else { indexSplitProvider = metadataProvider.getSplitProviderAndConstraints(ds, IndexingConstants.getFilesIndexName(ds.getDatasetName())).first; } IIndexDataflowHelperFactory indexDataflowHelperFactory = new IndexDataflowHelperFactory(storageMgr, indexSplitProvider); treeDataflowHelperFactories.add(indexDataflowHelperFactory); } ExternalDatasetIndexesRecoverOperatorDescriptor op = new ExternalDatasetIndexesRecoverOperatorDescriptor(spec, treeDataflowHelperFactories); spec.addRoot(op); AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op, constraints); spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy()); return spec; } public static boolean isFileIndex(Index index) { return 
index.getIndexName().equals(IndexingConstants.getFilesIndexName(index.getDatasetName())); } }
/*
 * Copyright 2012 The Netty Project
 *
 * The Netty Project licenses this file to you under the Apache License,
 * version 2.0 (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */
package io.netty.channel.socket.oio;

import io.netty.channel.ChannelException;
import io.netty.channel.ChannelMetadata;
import io.netty.channel.ChannelOutboundBuffer;
import io.netty.channel.oio.AbstractOioMessageChannel;
import io.netty.channel.socket.ServerSocketChannel;
import io.netty.util.internal.logging.InternalLogger;
import io.netty.util.internal.logging.InternalLoggerFactory;

import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.ServerSocket;
import java.net.Socket;
import java.net.SocketAddress;
import java.net.SocketTimeoutException;
import java.util.List;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

/**
 * {@link ServerSocketChannel} which accepts new connections and creates the
 * {@link OioSocketChannel}s for them.
 *
 * This implementation uses Old-Blocking-IO: each accepted connection is backed by a
 * blocking {@link Socket}.
 */
public class OioServerSocketChannel extends AbstractOioMessageChannel
                                    implements ServerSocketChannel {

    private static final InternalLogger logger =
            InternalLoggerFactory.getInstance(OioServerSocketChannel.class);

    // hasDisconnect = false: a server socket channel cannot be disconnected
    private static final ChannelMetadata METADATA = new ChannelMetadata(false);

    // Creates the underlying unbound ServerSocket for the no-arg constructor,
    // converting the checked IOException into Netty's ChannelException.
    private static ServerSocket newServerSocket() {
        try {
            return new ServerSocket();
        } catch (IOException e) {
            throw new ChannelException("failed to create a server socket", e);
        }
    }

    // the blocking server socket all accepts are performed on
    final ServerSocket socket;
    // NOTE(review): package-visible lock, presumably taken by related OIO channel code
    // during shutdown — the users are outside this file; confirm before changing.
    final Lock shutdownLock = new ReentrantLock();
    private final OioServerSocketChannelConfig config;

    /**
     * Create a new instance with an new {@link Socket}
     */
    public OioServerSocketChannel() {
        this(newServerSocket());
    }

    /**
     * Create a new instance from the given {@link ServerSocket}
     *
     * @param socket    the {@link ServerSocket} which is used by this instance
     */
    public OioServerSocketChannel(ServerSocket socket) {
        super(null);
        if (socket == null) {
            throw new NullPointerException("socket");
        }

        // Set the accept timeout so doReadMessages() wakes up periodically instead of
        // blocking forever; if that fails, close the socket before propagating so a
        // partially initialized channel does not leak the file descriptor.
        boolean success = false;
        try {
            socket.setSoTimeout(SO_TIMEOUT);
            success = true;
        } catch (IOException e) {
            throw new ChannelException(
                    "Failed to set the server socket timeout.", e);
        } finally {
            if (!success) {
                try {
                    socket.close();
                } catch (IOException e) {
                    if (logger.isWarnEnabled()) {
                        logger.warn(
                                "Failed to close a partially initialized socket.", e);
                    }
                }
            }
        }
        this.socket = socket;
        config = new DefaultOioServerSocketChannelConfig(this, socket);
    }

    @Override
    public InetSocketAddress localAddress() {
        return (InetSocketAddress) super.localAddress();
    }

    @Override
    public ChannelMetadata metadata() {
        return METADATA;
    }

    @Override
    public OioServerSocketChannelConfig config() {
        return config;
    }

    // A server channel has no peer, so there is no remote address.
    @Override
    public InetSocketAddress remoteAddress() {
        return null;
    }

    @Override
    public boolean isOpen() {
        return !socket.isClosed();
    }

    // Active once the server socket is both open and bound to a local address.
    @Override
    public boolean isActive() {
        return isOpen() && socket.isBound();
    }

    @Override
    protected SocketAddress localAddress0() {
        return socket.getLocalSocketAddress();
    }

    @Override
    protected void doBind(SocketAddress localAddress) throws Exception {
        socket.bind(localAddress, config.getBacklog());
    }

    @Override
    protected void doClose() throws Exception {
        socket.close();
    }

    // Accepts at most one connection per call.
    // Returns -1 when the socket is closed (no more messages will ever be read),
    // 1 when a connection was accepted and wrapped, and 0 on accept timeout or
    // when wrapping the accepted socket failed (the socket is then closed).
    @Override
    protected int doReadMessages(List<Object> buf) throws Exception {
        if (socket.isClosed()) {
            return -1;
        }

        try {
            Socket s = socket.accept();
            try {
                buf.add(new OioSocketChannel(this, s));
                return 1;
            } catch (Throwable t) {
                logger.warn("Failed to create a new channel from an accepted socket.", t);
                try {
                    s.close();
                } catch (Throwable t2) {
                    logger.warn("Failed to close a socket.", t2);
                }
            }
        } catch (SocketTimeoutException e) {
            // Expected: SO_TIMEOUT elapsed with no incoming connection.
        }
        return 0;
    }

    // Writing is meaningless on a server channel; only child channels carry data.
    @Override
    protected void doWrite(ChannelOutboundBuffer in) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    protected Object filterOutboundMessage(Object msg) throws Exception {
        throw new UnsupportedOperationException();
    }

    // A server channel cannot connect out.
    @Override
    protected void doConnect(
            SocketAddress remoteAddress, SocketAddress localAddress) throws Exception {
        throw new UnsupportedOperationException();
    }

    @Override
    protected SocketAddress remoteAddress0() {
        return null;
    }

    @Override
    protected void doDisconnect() throws Exception {
        throw new UnsupportedOperationException();
    }

    // Overridden solely to widen access so package-local callers can reset the flag.
    @Override
    protected void setReadPending(boolean readPending) {
        super.setReadPending(readPending);
    }
}
package de.tomgrill.gdxfirebase.bindings.googlemobileads; import apple.NSObject; import apple.coregraphics.struct.CGPoint; import apple.coregraphics.struct.CGRect; import apple.foundation.NSArray; import apple.foundation.NSCoder; import apple.foundation.NSDate; import apple.foundation.NSMethodSignature; import apple.foundation.NSSet; import apple.uikit.UITraitCollection; import apple.uikit.UIView; import de.tomgrill.gdxfirebase.bindings.googlemobileads.protocol.GADAdSizeDelegate; import de.tomgrill.gdxfirebase.bindings.googlemobileads.struct.GADAdSize; import org.moe.natj.c.ann.FunctionPtr; import org.moe.natj.c.ann.Variadic; import org.moe.natj.general.NatJ; import org.moe.natj.general.Pointer; import org.moe.natj.general.ann.ByValue; import org.moe.natj.general.ann.Generated; import org.moe.natj.general.ann.Library; import org.moe.natj.general.ann.Mapped; import org.moe.natj.general.ann.MappedReturn; import org.moe.natj.general.ann.NFloat; import org.moe.natj.general.ann.NInt; import org.moe.natj.general.ann.NUInt; import org.moe.natj.general.ann.Owned; import org.moe.natj.general.ann.Runtime; import org.moe.natj.general.ptr.VoidPtr; import org.moe.natj.objc.Class; import org.moe.natj.objc.ObjCRuntime; import org.moe.natj.objc.SEL; import org.moe.natj.objc.ann.ObjCBlock; import org.moe.natj.objc.ann.ObjCClassBinding; import org.moe.natj.objc.ann.ProtocolClassMethod; import org.moe.natj.objc.ann.Selector; import org.moe.natj.objc.map.ObjCObjectMapper; @Generated @Library("GoogleMobileAds") @Runtime(ObjCRuntime.class) @ObjCClassBinding public class GADSearchBannerView extends GADBannerView { static { NatJ.register(); } @Generated protected GADSearchBannerView(Pointer peer) { super(peer); } @Generated @Selector("accessInstanceVariablesDirectly") public static native boolean accessInstanceVariablesDirectly(); @Generated @Selector("adSizeDelegate") @MappedReturn(ObjCObjectMapper.class) public native GADAdSizeDelegate adSizeDelegate(); @Generated 
// NOTE(review): machine-generated MOE/NatJ Objective-C bindings (every member is @Generated).
// Do not hand-edit — regenerate from the native headers instead. Formatting restored for readability.
@Selector("addKeyframeWithRelativeStartTime:relativeDuration:animations:")
    public static native void addKeyframeWithRelativeStartTimeRelativeDurationAnimations(
            double frameStartTime, double frameDuration,
            @ObjCBlock(name = "call_addKeyframeWithRelativeStartTimeRelativeDurationAnimations") UIView.Block_addKeyframeWithRelativeStartTimeRelativeDurationAnimations animations);

    // Standard Objective-C allocation entry points for this banner-view subclass.
    @Generated
    @Owned
    @Selector("alloc")
    public static native GADSearchBannerView alloc();

    @Generated
    @Selector("allocWithZone:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object allocWithZone(VoidPtr zone);

    // UIView block-based animation class methods re-surfaced on this subclass by the generator.
    @Generated
    @Selector("animateKeyframesWithDuration:delay:options:animations:completion:")
    public static native void animateKeyframesWithDurationDelayOptionsAnimationsCompletion(
            double duration, double delay, @NUInt long options,
            @ObjCBlock(name = "call_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_3") UIView.Block_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_3 animations,
            @ObjCBlock(name = "call_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_4") UIView.Block_animateKeyframesWithDurationDelayOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("animateWithDuration:animations:")
    public static native void animateWithDurationAnimations(
            double duration,
            @ObjCBlock(name = "call_animateWithDurationAnimations") UIView.Block_animateWithDurationAnimations animations);

    @Generated
    @Selector("animateWithDuration:animations:completion:")
    public static native void animateWithDurationAnimationsCompletion(
            double duration,
            @ObjCBlock(name = "call_animateWithDurationAnimationsCompletion_1") UIView.Block_animateWithDurationAnimationsCompletion_1 animations,
            @ObjCBlock(name = "call_animateWithDurationAnimationsCompletion_2") UIView.Block_animateWithDurationAnimationsCompletion_2 completion);

    @Generated
    @Selector("animateWithDuration:delay:options:animations:completion:")
    public static native void animateWithDurationDelayOptionsAnimationsCompletion(
            double duration, double delay, @NUInt long options,
            @ObjCBlock(name = "call_animateWithDurationDelayOptionsAnimationsCompletion_3") UIView.Block_animateWithDurationDelayOptionsAnimationsCompletion_3 animations,
            @ObjCBlock(name = "call_animateWithDurationDelayOptionsAnimationsCompletion_4") UIView.Block_animateWithDurationDelayOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("animateWithDuration:delay:usingSpringWithDamping:initialSpringVelocity:options:animations:completion:")
    public static native void animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion(
            double duration, double delay, @NFloat double dampingRatio, @NFloat double velocity, @NUInt long options,
            @ObjCBlock(name = "call_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_5") UIView.Block_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_5 animations,
            @ObjCBlock(name = "call_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_6") UIView.Block_animateWithDurationDelayUsingSpringWithDampingInitialSpringVelocityOptionsAnimationsCompletion_6 completion);

    // UIAppearance proxy accessors; the _-prefixed instance wrappers simply delegate to the static forms.
    @Generated
    @Selector("appearance")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearance();

    @Generated
    @ProtocolClassMethod("appearance")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearance() {
        return appearance();
    }

    @Generated
    @Selector("appearanceForTraitCollection:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceForTraitCollection(UITraitCollection trait);

    @Generated
    @ProtocolClassMethod("appearanceForTraitCollection")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceForTraitCollection(UITraitCollection trait) {
        return appearanceForTraitCollection(trait);
    }

    @Generated
    @Variadic()
    @Deprecated
    @Selector("appearanceForTraitCollection:whenContainedIn:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceForTraitCollectionWhenContainedIn(UITraitCollection trait,
            @Mapped(ObjCObjectMapper.class) Object ContainerClass, Object... varargs);

    @Generated
    @Deprecated
    @ProtocolClassMethod("appearanceForTraitCollectionWhenContainedIn")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceForTraitCollectionWhenContainedIn(UITraitCollection trait,
            @Mapped(ObjCObjectMapper.class) Object ContainerClass, Object... varargs) {
        return appearanceForTraitCollectionWhenContainedIn(trait, ContainerClass, varargs);
    }

    @Generated
    @Selector("appearanceForTraitCollection:whenContainedInInstancesOfClasses:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceForTraitCollectionWhenContainedInInstancesOfClasses(
            UITraitCollection trait, NSArray<?> containerTypes);

    @Generated
    @ProtocolClassMethod("appearanceForTraitCollectionWhenContainedInInstancesOfClasses")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceForTraitCollectionWhenContainedInInstancesOfClasses(
            UITraitCollection trait, NSArray<?> containerTypes) {
        return appearanceForTraitCollectionWhenContainedInInstancesOfClasses(trait, containerTypes);
    }

    @Generated
    @Variadic()
    @Deprecated
    @Selector("appearanceWhenContainedIn:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceWhenContainedIn(
            @Mapped(ObjCObjectMapper.class) Object ContainerClass, Object... varargs);

    @Generated
    @Deprecated
    @ProtocolClassMethod("appearanceWhenContainedIn")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceWhenContainedIn(
            @Mapped(ObjCObjectMapper.class) Object ContainerClass, Object... varargs) {
        return appearanceWhenContainedIn(ContainerClass, varargs);
    }

    @Generated
    @Selector("appearanceWhenContainedInInstancesOfClasses:")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object appearanceWhenContainedInInstancesOfClasses(NSArray<?> containerTypes);

    @Generated
    @ProtocolClassMethod("appearanceWhenContainedInInstancesOfClasses")
    @MappedReturn(ObjCObjectMapper.class)
    public Object _appearanceWhenContainedInInstancesOfClasses(NSArray<?> containerTypes) {
        return appearanceWhenContainedInInstancesOfClasses(containerTypes);
    }

    @Generated
    @Selector("areAnimationsEnabled")
    public static native boolean areAnimationsEnabled();

    @Generated
    @Selector("automaticallyNotifiesObserversForKey:")
    public static native boolean automaticallyNotifiesObserversForKey(String key);

    // Legacy begin/commit animation API plus NSObject housekeeping selectors.
    @Generated
    @Selector("beginAnimations:context:")
    public static native void beginAnimationsContext(String animationID, VoidPtr context);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:")
    public static native void cancelPreviousPerformRequestsWithTarget(
            @Mapped(ObjCObjectMapper.class) Object aTarget);

    @Generated
    @Selector("cancelPreviousPerformRequestsWithTarget:selector:object:")
    public static native void cancelPreviousPerformRequestsWithTargetSelectorObject(
            @Mapped(ObjCObjectMapper.class) Object aTarget, SEL aSelector,
            @Mapped(ObjCObjectMapper.class) Object anArgument);

    @Generated
    @Selector("classFallbacksForKeyedArchiver")
    public static native NSArray<String> classFallbacksForKeyedArchiver();

    @Generated
    @Selector("classForKeyedUnarchiver")
    public static native Class classForKeyedUnarchiver();

    @Generated
    @Selector("clearTextInputContextIdentifier:")
    public static native void clearTextInputContextIdentifier(String identifier);

    @Generated
    @Selector("commitAnimations")
    public static native void commitAnimations();

    @Generated
    @Selector("debugDescription")
    public static native String debugDescription_static();

    @Generated
    @Selector("description")
    public static native String description_static();

    @Generated
    @Selector("hash")
    @NUInt
    public static native long hash_static();

    @Generated
    @Selector("inheritedAnimationDuration")
    public static native double inheritedAnimationDuration();

    // Initializers for the banner view (ad-size, coder, and frame variants).
    @Generated
    @Selector("init")
    public native GADSearchBannerView init();

    @Generated
    @Selector("initWithAdSize:")
    public native GADSearchBannerView initWithAdSize(@ByValue GADAdSize adSize);

    @Generated
    @Selector("initWithAdSize:origin:")
    public native GADSearchBannerView initWithAdSizeOrigin(@ByValue GADAdSize adSize, @ByValue CGPoint origin);

    @Generated
    @Selector("initWithCoder:")
    public native GADSearchBannerView initWithCoder(NSCoder aDecoder);

    @Generated
    @Selector("initWithFrame:")
    public native GADSearchBannerView initWithFrame(@ByValue CGRect frame);

    @Generated
    @Selector("instanceMethodForSelector:")
    @FunctionPtr(name = "call_instanceMethodForSelector_ret")
    public static native NSObject.Function_instanceMethodForSelector_ret instanceMethodForSelector(SEL aSelector);

    @Generated
    @Selector("instanceMethodSignatureForSelector:")
    public static native NSMethodSignature instanceMethodSignatureForSelector(SEL aSelector);

    @Generated
    @Selector("instancesRespondToSelector:")
    public static native boolean instancesRespondToSelector(SEL aSelector);

    @Generated
    @Selector("isSubclassOfClass:")
    public static native boolean isSubclassOfClass(Class aClass);

    @Generated
    @Selector("keyPathsForValuesAffectingValueForKey:")
    public static native NSSet<String> keyPathsForValuesAffectingValueForKey(String key);

    @Generated
    @Selector("layerClass")
    public static native Class layerClass();

    @Generated
    @Owned
    @Selector("new")
    @MappedReturn(ObjCObjectMapper.class)
    public static native Object new_objc();

    @Generated
    @Selector("performSystemAnimation:onViews:options:animations:completion:")
    public static native void performSystemAnimationOnViewsOptionsAnimationsCompletion(
            @NUInt long animation, NSArray<? extends UIView> views, @NUInt long options,
            @ObjCBlock(name = "call_performSystemAnimationOnViewsOptionsAnimationsCompletion_3") UIView.Block_performSystemAnimationOnViewsOptionsAnimationsCompletion_3 parallelAnimations,
            @ObjCBlock(name = "call_performSystemAnimationOnViewsOptionsAnimationsCompletion_4") UIView.Block_performSystemAnimationOnViewsOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("performWithoutAnimation:")
    public static native void performWithoutAnimation(
            @ObjCBlock(name = "call_performWithoutAnimation") UIView.Block_performWithoutAnimation actionsWithoutAnimation);

    @Generated
    @Selector("requiresConstraintBasedLayout")
    public static native boolean requiresConstraintBasedLayout();

    @Generated
    @Selector("resolveClassMethod:")
    public static native boolean resolveClassMethod(SEL sel);

    @Generated
    @Selector("resolveInstanceMethod:")
    public static native boolean resolveInstanceMethod(SEL sel);

    // Raw native setter; does not manage the Java<->ObjC association (see setAdSizeDelegate below).
    @Generated
    @Selector("setAdSizeDelegate:")
    public native void setAdSizeDelegate_unsafe(
            @Mapped(ObjCObjectMapper.class) GADAdSizeDelegate value);

    // Safe setter: associates the new delegate with this view (keeping it reachable from the
    // native side) and dissociates the previous one afterwards so it can be collected.
    @Generated
    public void setAdSizeDelegate(
            @Mapped(ObjCObjectMapper.class) GADAdSizeDelegate value) {
        Object __old = adSizeDelegate();
        if (value != null) {
            org.moe.natj.objc.ObjCRuntime.associateObjCObject(this, value);
        }
        setAdSizeDelegate_unsafe(value);
        if (__old != null) {
            org.moe.natj.objc.ObjCRuntime.dissociateObjCObject(this, __old);
        }
    }

    @Generated
    @Selector("setAnimationBeginsFromCurrentState:")
    public static native void setAnimationBeginsFromCurrentState(boolean fromCurrentState);

    @Generated
    @Selector("setAnimationCurve:")
    public static native void setAnimationCurve(@NInt long curve);

    @Generated
    @Selector("setAnimationDelay:")
    public static native void setAnimationDelay(double delay);

    @Generated
    @Selector("setAnimationDelegate:")
    public static native void setAnimationDelegate(
            @Mapped(ObjCObjectMapper.class) Object delegate);

    @Generated
    @Selector("setAnimationDidStopSelector:")
    public static native void setAnimationDidStopSelector(SEL selector);

    @Generated
    @Selector("setAnimationDuration:")
    public static native void setAnimationDuration_static(double duration);

    @Generated
    @Selector("setAnimationRepeatAutoreverses:")
    public static native void setAnimationRepeatAutoreverses(boolean repeatAutoreverses);

    @Generated
    @Selector("setAnimationRepeatCount:")
    public static native void setAnimationRepeatCount_static(float repeatCount);

    @Generated
    @Selector("setAnimationStartDate:")
    public static native void setAnimationStartDate(NSDate startDate);

    @Generated
    @Selector("setAnimationTransition:forView:cache:")
    public static native void setAnimationTransitionForViewCache(@NInt long transition, UIView view, boolean cache);

    @Generated
    @Selector("setAnimationWillStartSelector:")
    public static native void setAnimationWillStartSelector(SEL selector);

    @Generated
    @Selector("setAnimationsEnabled:")
    public static native void setAnimationsEnabled(boolean enabled);

    @Generated
    @Selector("setVersion:")
    public static native void setVersion(@NInt long aVersion);

    @Generated
    @Selector("superclass")
    public static native Class superclass_static();

    @Generated
    @Selector("transitionFromView:toView:duration:options:completion:")
    public static native void transitionFromViewToViewDurationOptionsCompletion(
            UIView fromView, UIView toView, double duration, @NUInt long options,
            @ObjCBlock(name = "call_transitionFromViewToViewDurationOptionsCompletion") UIView.Block_transitionFromViewToViewDurationOptionsCompletion completion);

    @Generated
    @Selector("transitionWithView:duration:options:animations:completion:")
    public static native void transitionWithViewDurationOptionsAnimationsCompletion(
            UIView view, double duration, @NUInt long options,
            @ObjCBlock(name = "call_transitionWithViewDurationOptionsAnimationsCompletion_3") UIView.Block_transitionWithViewDurationOptionsAnimationsCompletion_3 animations,
            @ObjCBlock(name = "call_transitionWithViewDurationOptionsAnimationsCompletion_4") UIView.Block_transitionWithViewDurationOptionsAnimationsCompletion_4 completion);

    @Generated
    @Selector("userInterfaceLayoutDirectionForSemanticContentAttribute:")
    @NInt
    public static native long userInterfaceLayoutDirectionForSemanticContentAttribute(@NInt long attribute);

    @Generated
    @Selector("userInterfaceLayoutDirectionForSemanticContentAttribute:relativeToLayoutDirection:")
    @NInt
    public static native long userInterfaceLayoutDirectionForSemanticContentAttributeRelativeToLayoutDirection(
            @NInt long semanticContentAttribute, @NInt long layoutDirection);

    @Generated
    @Selector("version")
    @NInt
    public static native long version_static();
}
package org.sagebionetworks.repo.model.dbo.dao;

import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.time.Instant;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;

import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.sagebionetworks.repo.model.ACCESS_TYPE;
import org.sagebionetworks.repo.model.AccessApproval;
import org.sagebionetworks.repo.model.AccessApprovalDAO;
import org.sagebionetworks.repo.model.AccessRequirement;
import org.sagebionetworks.repo.model.AccessRequirementDAO;
import org.sagebionetworks.repo.model.ApprovalState;
import org.sagebionetworks.repo.model.DatastoreException;
import org.sagebionetworks.repo.model.Node;
import org.sagebionetworks.repo.model.NodeDAO;
import org.sagebionetworks.repo.model.UserGroup;
import org.sagebionetworks.repo.model.UserGroupDAO;
import org.sagebionetworks.repo.model.dataaccess.AccessorGroup;
import org.sagebionetworks.repo.model.jdo.NodeTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringExtension;

import com.google.common.collect.Sets;

/**
 * Integration tests for the access-approval DAO implementation. Runs against the
 * real Spring context ("classpath:jdomodels-test-context.xml") and exercises
 * create/get/delete, batch create-or-update, revocation, expiration listing, and
 * the accessor-group query builder.
 */
@ExtendWith(SpringExtension.class)
@ContextConfiguration(locations = { "classpath:jdomodels-test-context.xml" })
public class DBOAccessApprovalDAOImplTest {

	@Autowired
	UserGroupDAO userGroupDAO;

	@Autowired
	AccessRequirementDAO accessRequirementDAO;

	@Autowired
	AccessApprovalDAO accessApprovalDAO;

	@Autowired
	NodeDAO nodeDao;

	// Fixtures created in setUp() and removed in tearDown().
	private UserGroup individualGroup = null;
	private UserGroup individualGroup2 = null;
	private Node node = null;
	private Node node2 = null;
	private AccessRequirement accessRequirement = null;
	private AccessRequirement accessRequirement2 = null;
	private AccessApproval accessApproval = null;
	private AccessApproval accessApproval2 = null;
	private AccessApproval accessApproval3 = null;
	private List<ACCESS_TYPE> participateAndDownload = null;
	private List<ACCESS_TYPE> downloadAccessType = null;
	private List<ACCESS_TYPE> updateAccessType = null;

	/**
	 * Clears both DAO tables, then creates two individual user groups, two nodes,
	 * and one entity access requirement per node.
	 */
	@BeforeEach
	public void setUp() throws Exception {
		accessApprovalDAO.clear();
		accessRequirementDAO.clear();
		individualGroup = new UserGroup();
		individualGroup.setIsIndividual(true);
		individualGroup.setCreationDate(new Date());
		individualGroup.setId(userGroupDAO.create(individualGroup).toString());
		individualGroup2 = new UserGroup();
		individualGroup2.setIsIndividual(true);
		individualGroup2.setCreationDate(new Date());
		individualGroup2.setId(userGroupDAO.create(individualGroup2).toString());
		if (node == null) {
			node = NodeTestUtils.createNew("foo", Long.parseLong(individualGroup.getId()));
			node.setId(nodeDao.createNew(node));
		};
		if (node2 == null) {
			node2 = NodeTestUtils.createNew("bar", Long.parseLong(individualGroup.getId()));
			node2.setId(nodeDao.createNew(node2));
		};
		accessRequirement = DBOAccessRequirementDAOImplTest.newEntityAccessRequirement(individualGroup, node, "foo");
		accessRequirement = accessRequirementDAO.create(accessRequirement);
		Long id = accessRequirement.getId();
		assertNotNull(id);
		accessRequirement2 = DBOAccessRequirementDAOImplTest.newEntityAccessRequirement(individualGroup, node2, "bar");
		accessRequirement2 = accessRequirementDAO.create(accessRequirement2);
		id = accessRequirement2.getId();
		assertNotNull(id);
		if (participateAndDownload == null) {
			participateAndDownload = new ArrayList<ACCESS_TYPE>();
			participateAndDownload.add(ACCESS_TYPE.DOWNLOAD);
			participateAndDownload.add(ACCESS_TYPE.PARTICIPATE);
		}
		if (downloadAccessType == null) {
			downloadAccessType = new ArrayList<ACCESS_TYPE>();
			downloadAccessType.add(ACCESS_TYPE.DOWNLOAD);
		}
		if (updateAccessType == null) {
			updateAccessType = new ArrayList<ACCESS_TYPE>();
			updateAccessType.add(ACCESS_TYPE.UPDATE);
		}
	}

	/** Removes everything created by setUp(); null-guards allow partial setups. */
	@AfterEach
	public void tearDown() throws Exception {
		accessApprovalDAO.clear();
		accessRequirementDAO.clear();
		if (node != null && nodeDao != null) {
			nodeDao.delete(node.getId());
			node = null;
		}
		if (node2 != null && nodeDao != null) {
			nodeDao.delete(node2.getId());
			node2 = null;
		}
		if (individualGroup != null) {
			userGroupDAO.delete(individualGroup.getId());
		}
		if (individualGroup2 != null) {
			userGroupDAO.delete(individualGroup2.getId());
		}
	}

	/**
	 * Builds an APPROVED AccessApproval where the principal is creator, modifier,
	 * accessor, and submitter, bound to the given requirement and its version.
	 */
	public static AccessApproval newAccessApproval(UserGroup principal, AccessRequirement ar) throws DatastoreException {
		AccessApproval accessApproval = new AccessApproval();
		accessApproval.setCreatedBy(principal.getId());
		accessApproval.setCreatedOn(new Date());
		accessApproval.setModifiedBy(principal.getId());
		accessApproval.setModifiedOn(new Date());
		accessApproval.setAccessorId(principal.getId());
		accessApproval.setRequirementId(ar.getId());
		accessApproval.setRequirementVersion(ar.getVersionNumber());
		accessApproval.setSubmitterId(principal.getId());
		accessApproval.setState(ApprovalState.APPROVED);
		return accessApproval;
	}

	@Test
	public void testCRUD() throws Exception {
		// Create a new object
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		// Create it
		accessApproval = accessApprovalDAO.create(accessApproval);
		String id = accessApproval.getId().toString();
		assertNotNull(id);
		assertNotNull(accessApproval.getEtag());
		// test create again
		AccessApproval updated = accessApprovalDAO.create(accessApproval);
		accessApproval.setEtag(updated.getEtag());
		assertEquals(accessApproval, updated);
		// Fetch it
		AccessApproval clone = accessApprovalDAO.get(id);
		assertNotNull(clone);
		assertEquals(accessApproval, clone);
		List<AccessApproval> ars = accessApprovalDAO.getActiveApprovalsForUser(
				accessRequirement.getId().toString(), individualGroup.getId().toString());
		assertEquals(1, ars.size());
		assertEquals(accessApproval, ars.iterator().next());
		Set<String> requirementIds = accessApprovalDAO.getRequirementsUserHasApprovals(
				individualGroup.getId().toString(), Arrays.asList(accessRequirement.getId().toString(), "-1"));
		assertNotNull(requirementIds);
		assertEquals(1, requirementIds.size());
		assertTrue(requirementIds.contains(accessRequirement.getId().toString()));
		assertTrue(accessApprovalDAO.hasApprovalsSubmittedBy(
				Sets.newHashSet(individualGroup.getId().toString()), individualGroup.getId(),
				accessRequirement.getId().toString()));
		// creating an approval is idempotent:
		// make a second one...
		accessApproval2 = accessApprovalDAO.create(newAccessApproval(individualGroup, accessRequirement));
		ars = accessApprovalDAO.getActiveApprovalsForUser(
				accessRequirement.getId().toString(), individualGroup.getId().toString());
		assertEquals(1, ars.size());
		assertEquals(accessApproval2, ars.get(0));
		// Delete it
		accessApprovalDAO.delete(id);
		assertFalse(accessApprovalDAO.hasApprovalsSubmittedBy(
				Sets.newHashSet(individualGroup.getId().toString()), individualGroup.getId(),
				accessRequirement.getId().toString()));
	}

	@Test
	public void testCreateRevokeAndRenewBatch() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		accessApprovalDAO.createOrUpdateBatch(Arrays.asList(accessApproval, accessApproval2));
		accessApproval = accessApprovalDAO.getByPrimaryKey(
				accessApproval.getRequirementId(), accessApproval.getRequirementVersion(),
				accessApproval.getSubmitterId(), accessApproval.getAccessorId());
		accessApproval2 = accessApprovalDAO.getByPrimaryKey(
				accessApproval2.getRequirementId(), accessApproval2.getRequirementVersion(),
				accessApproval2.getSubmitterId(), accessApproval2.getAccessorId());
		// insert again
		accessApprovalDAO.createOrUpdateBatch(Arrays.asList(accessApproval, accessApproval2));
		AccessApproval updated = accessApprovalDAO.getByPrimaryKey(
				accessApproval.getRequirementId(), accessApproval.getRequirementVersion(),
				accessApproval.getSubmitterId(), accessApproval.getAccessorId());
		accessApproval.setEtag(updated.getEtag());
		assertEquals(accessApproval, updated);
		AccessApproval updated2 = accessApprovalDAO.getByPrimaryKey(
				accessApproval2.getRequirementId(), accessApproval2.getRequirementVersion(),
				accessApproval2.getSubmitterId(), accessApproval2.getAccessorId());
		accessApproval2.setEtag(updated2.getEtag());
		assertEquals(accessApproval2, updated2);
		// revoke
		List<Long> approvals = accessApprovalDAO.listApprovalsBySubmitter(
				accessApproval.getRequirementId().toString(), accessApproval.getSubmitterId(),
				Arrays.asList(accessApproval.getAccessorId(), accessApproval2.getAccessorId())
		);
		accessApprovalDAO.revokeBatch(Long.valueOf(individualGroup2.getId()), approvals);
		updated = accessApprovalDAO.getByPrimaryKey(
				accessApproval.getRequirementId(), accessApproval.getRequirementVersion(),
				accessApproval.getSubmitterId(), accessApproval.getAccessorId());
		assertEquals(ApprovalState.REVOKED, updated.getState());
		// renew
		Date newExpirationDate = new Date();
		Long newVersion = 9L;
		accessApproval.setExpiredOn(newExpirationDate);
		accessApproval.setRequirementVersion(newVersion);
		accessApprovalDAO.createOrUpdateBatch(Arrays.asList(accessApproval));
		updated = accessApprovalDAO.getByPrimaryKey(
				accessApproval.getRequirementId(), newVersion,
				accessApproval.getSubmitterId(), accessApproval.getAccessorId());
		assertEquals(newVersion, updated.getRequirementVersion());
		assertEquals(newExpirationDate, updated.getExpiredOn());
		// clean up
		accessApprovalDAO.delete(accessApproval.getId().toString());
		accessApprovalDAO.delete(accessApproval2.getId().toString());
	}

	@Test
	public void testListAccessorListAndRevokeGroup() {
		List<AccessorGroup> result = accessApprovalDAO.listAccessorGroup(accessRequirement.getId().toString(),
				individualGroup.getId(), null, 10L, 0L);
		assertNotNull(result);
		assertTrue(result.isEmpty());
		// create some approvals
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		accessApproval2.setSubmitterId(individualGroup.getId());
		accessApprovalDAO.createOrUpdateBatch(Arrays.asList(accessApproval, accessApproval2));
		result = accessApprovalDAO.listAccessorGroup(accessRequirement.getId().toString(),
				individualGroup.getId(), null, 10L, 0L);
		assertNotNull(result);
		assertEquals(1, result.size());
		AccessorGroup group = result.get(0);
		assertNotNull(group);
		assertEquals(accessRequirement.getId().toString(), group.getAccessRequirementId());
		assertEquals(individualGroup.getId(), group.getSubmitterId());
		assertTrue(group.getAccessorIds().contains(individualGroup.getId()));
		assertTrue(group.getAccessorIds().contains(individualGroup2.getId()));
		assertEquals(new Date(DBOAccessApprovalDAOImpl.DEFAULT_NOT_EXPIRED), group.getExpiredOn());
		// revoke the group
		List<Long> accessors = accessApprovalDAO.listApprovalsBySubmitter(accessRequirement.getId().toString(),
				individualGroup.getId());
		accessApprovalDAO.revokeBatch(Long.valueOf(individualGroup2.getId()), accessors);
		result = accessApprovalDAO.listAccessorGroup(accessRequirement.getId().toString(),
				individualGroup.getId(), null, 10L, 0L);
		assertNotNull(result);
		assertTrue(result.isEmpty());
		// check each approval
		AccessApproval approval = accessApprovalDAO.getByPrimaryKey(accessRequirement.getId(),
				accessRequirement.getVersionNumber(), individualGroup.getId(), individualGroup.getId());
		assertNotNull(approval);
		assertEquals(ApprovalState.REVOKED, approval.getState());
		assertEquals(individualGroup2.getId(), approval.getModifiedBy());
		AccessApproval approval2 = accessApprovalDAO.getByPrimaryKey(accessRequirement.getId(),
				accessRequirement.getVersionNumber(), individualGroup.getId(), individualGroup2.getId());
		assertNotNull(approval2);
		assertEquals(ApprovalState.REVOKED, approval2.getState());
		assertEquals(individualGroup2.getId(), approval2.getModifiedBy());
	}

	@Test
	public void testConvertToList() {
		assertEquals(new LinkedList<String>(), DBOAccessApprovalDAOImpl.convertToList(null));
		assertEquals(Arrays.asList("1"), DBOAccessApprovalDAOImpl.convertToList("1"));
		assertEquals(Arrays.asList("1","2"), DBOAccessApprovalDAOImpl.convertToList("1,2"));
	}

	// Pins the exact SQL produced by buildAccessorGroupQuery for each optional filter.
	@Test
	public void testBuildQuery() {
		assertEquals("SELECT REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON, GROUP_CONCAT(DISTINCT ACCESSOR_ID SEPARATOR ',') AS ACCESSOR_LIST"
				+ " FROM ACCESS_APPROVAL"
				+ " WHERE STATE = 'APPROVED'"
				+ " GROUP BY REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON"
				+ " ORDER BY EXPIRED_ON"
				+ " LIMIT :LIMIT"
				+ " OFFSET :OFFSET",
				DBOAccessApprovalDAOImpl.buildAccessorGroupQuery(null, null, null));
		assertEquals("SELECT REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON, GROUP_CONCAT(DISTINCT ACCESSOR_ID SEPARATOR ',') AS ACCESSOR_LIST"
				+ " FROM ACCESS_APPROVAL"
				+ " WHERE STATE = 'APPROVED'"
				+ " AND REQUIREMENT_ID = :REQUIREMENT_ID"
				+ " GROUP BY REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON"
				+ " ORDER BY EXPIRED_ON"
				+ " LIMIT :LIMIT"
				+ " OFFSET :OFFSET",
				DBOAccessApprovalDAOImpl.buildAccessorGroupQuery("1", null, null));
		assertEquals("SELECT REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON, GROUP_CONCAT(DISTINCT ACCESSOR_ID SEPARATOR ',') AS ACCESSOR_LIST"
				+ " FROM ACCESS_APPROVAL"
				+ " WHERE STATE = 'APPROVED'"
				+ " AND SUBMITTER_ID = :SUBMITTER_ID"
				+ " GROUP BY REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON"
				+ " ORDER BY EXPIRED_ON"
				+ " LIMIT :LIMIT"
				+ " OFFSET :OFFSET",
				DBOAccessApprovalDAOImpl.buildAccessorGroupQuery(null, "2", null));
		assertEquals("SELECT REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON, GROUP_CONCAT(DISTINCT ACCESSOR_ID SEPARATOR ',') AS ACCESSOR_LIST"
				+ " FROM ACCESS_APPROVAL"
				+ " WHERE STATE = 'APPROVED'"
				+ " AND EXPIRED_ON <> 0"
				+ " AND EXPIRED_ON <= :EXPIRED_ON"
				+ " GROUP BY REQUIREMENT_ID, SUBMITTER_ID, EXPIRED_ON"
				+ " ORDER BY EXPIRED_ON"
				+ " LIMIT :LIMIT"
				+ " OFFSET :OFFSET",
				DBOAccessApprovalDAOImpl.buildAccessorGroupQuery(null, null, new Date()));
	}

	@Test
	public void testListExpiredApprovalsWithNoExpiredAfter() {
		Instant expiredAfter = null;
		int limit = 10;
		String message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.listExpiredApprovals(expiredAfter, limit);
		}).getMessage();
		assertEquals("expiredAfter is required.", message);
	}

	@Test
	public void testListExpiredApprovalsWithWrongLimit() {
		Instant expiredAfter = Instant.now();
		String message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.listExpiredApprovals(expiredAfter, 0);
		}).getMessage();
		assertEquals("The limit must be greater than 0.", message);
		message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.listExpiredApprovals(expiredAfter, -1);
		}).getMessage();
		assertEquals("The limit must be greater than 0.", message);
	}

	@Test
	public void testListExpiredApprovals() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		// Expire one approval
		Instant yesterday = Instant.now().minus(1, ChronoUnit.DAYS);
		accessApproval.setExpiredOn(Date.from(yesterday));
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		List<Long> expected = Arrays.asList(accessApproval.getId());
		int limit = 10;
		// Call under test
		List<Long> result = accessApprovalDAO.listExpiredApprovals(yesterday, limit);
		assertEquals(expected, result);
	}

	@Test
	public void testListExpiredApprovalsWithLimit() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		// Expire one approval
		Instant yesterday = Instant.now().minus(1, ChronoUnit.DAYS);
		accessApproval.setExpiredOn(Date.from(yesterday));
		accessApproval2.setExpiredOn(Date.from(yesterday));
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		List<Long> expected = Arrays.asList(accessApproval.getId());
		int limit = 1;
		// Call under test
		List<Long> result = accessApprovalDAO.listExpiredApprovals(yesterday, limit);
		assertEquals(expected, result);
	}

	@Test
	public void testListExpiredApprovalsWithNoExpiration() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		List<Long> expected = Collections.emptyList();
		Instant yesterday = Instant.now().minus(1, ChronoUnit.DAYS);
		int limit = 10;
		// Call under test
		List<Long> result = accessApprovalDAO.listExpiredApprovals(yesterday, limit);
		assertEquals(expected, result);
	}

	@Test
	public void testListExpiredApprovalsWithPastExpiration() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		Instant dayBeforeYesterday = Instant.now().minus(2, ChronoUnit.DAYS);
		accessApproval.setExpiredOn(Date.from(dayBeforeYesterday));
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		List<Long> expected = Collections.emptyList();
		int limit = 10;
		Instant yesterday = Instant.now().minus(1, ChronoUnit.DAYS);
		// Call under test
		List<Long> result = accessApprovalDAO.listExpiredApprovals(yesterday, limit);
		assertEquals(expected, result);
	}

	@Test
	public void testListExpiredApprovalsWithAlreadyRevoked() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		Instant yesterday = Instant.now().minus(1, ChronoUnit.DAYS);
		accessApproval.setExpiredOn(Date.from(yesterday));
		accessApproval2.setExpiredOn(Date.from(yesterday));
		accessApproval2.setState(ApprovalState.REVOKED);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		List<Long> expected = Arrays.asList(accessApproval.getId());
		int limit = 10;
		// Call under test
		List<Long> result = accessApprovalDAO.listExpiredApprovals(yesterday, limit);
		assertEquals(expected, result);
	}

	@Test
	public void testRevokeBatch() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		Long userId = Long.valueOf(individualGroup.getId());
		List<Long> ids = Arrays.asList(accessApproval.getId(), accessApproval2.getId());
		List<Long> expected = ids;
		// Call under test
		List<Long> result = accessApprovalDAO.revokeBatch(userId, ids);
		assertEquals(expected, result);
		// Verify the etag change
		assertNotEquals(accessApproval.getEtag(), accessApprovalDAO.get(accessApproval.getId().toString()).getEtag());
		assertNotEquals(accessApproval2.getEtag(), accessApprovalDAO.get(accessApproval2.getId().toString()).getEtag());
	}

	@Test
	public void testRevokeBatchWithNoUserId() {
		Long userId = null;
		List<Long> ids = Collections.emptyList();
		String message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.revokeBatch(userId, ids);
		}).getMessage();
		assertEquals("userId is required.", message);
	}

	@Test
	public void testRevokeBatchWithNullBatch() {
		Long userId = Long.valueOf(individualGroup.getId());
		List<Long> ids = null;
		String message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.revokeBatch(userId, ids);
		}).getMessage();
		assertEquals("ids is required.", message);
	}

	@Test
	public void testRevokeEmptyBatch() {
		Long userId = Long.valueOf(individualGroup.getId());
		List<Long> ids = Collections.emptyList();
		// Call under test
		List<Long> result = accessApprovalDAO.revokeBatch(userId, ids);
		assertEquals(Collections.emptyList(), result);
	}

	@Test
	public void testRevokeBatchWithAlreadyRevoked() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		accessApproval.setState(ApprovalState.REVOKED);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		Long userId = Long.valueOf(individualGroup.getId());
		List<Long> ids = Arrays.asList(accessApproval.getId(), accessApproval2.getId());
		List<Long> expected = Arrays.asList(accessApproval2.getId());
		// Call under test
		List<Long> result = accessApprovalDAO.revokeBatch(userId, ids);
		assertEquals(expected, result);
	}

	@Test
	public void testListBySubmitterWithNoAccessRequirement() {
		String accessRequirementId = null;
		String submitter = individualGroup.getId();
		String message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitter);
		}).getMessage();
		assertEquals("accessRequirementId is required.", message);
	}

	@Test
	public void testListBySubmitterWithNoSubmitter() {
		String accessRequirementId = accessRequirement.getId().toString();
		String submitter = null;
		String message = assertThrows(IllegalArgumentException.class, () -> {
			// Call under test
			accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitter);
		}).getMessage();
		assertEquals("submitterId is required.", message);
	}

	@Test
	public void testListBySubmitter() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		// Different submitter
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		// Different AR
		accessApproval3 = newAccessApproval(individualGroup, accessRequirement2);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		accessApproval3 = accessApprovalDAO.create(accessApproval3);
		String accessRequirementId = accessRequirement.getId().toString();
		String submitterId = individualGroup.getId();
		List<Long> expected = Arrays.asList(accessApproval.getId());
		List<Long> result = accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitterId);
		assertEquals(expected, result);
	}

	@Test
	public void testListBySubmitterWithRevoked() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		// Same submitter but REVOKED state
		accessApproval2 = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2.setAccessorId(individualGroup2.getId());
		accessApproval2.setState(ApprovalState.REVOKED);
		// Different AR
		accessApproval3 = newAccessApproval(individualGroup, accessRequirement2);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		accessApproval3 = accessApprovalDAO.create(accessApproval3);
		String accessRequirementId = accessRequirement.getId().toString();
		String submitterId = individualGroup.getId();
		List<Long> expected = Arrays.asList(accessApproval.getId());
		List<Long> result = accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitterId);
		assertEquals(expected, result);
	}

	@Test
	public void testListBySubmitterAndAccessors() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		// Different submitter
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		// Different AR
		accessApproval3 = newAccessApproval(individualGroup, accessRequirement2);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		accessApproval3 = accessApprovalDAO.create(accessApproval3);
		String accessRequirementId = accessRequirement.getId().toString();
		String submitterId = individualGroup.getId();
		List<String> accessorIds = Arrays.asList(individualGroup.getId());
		List<Long> expected = Arrays.asList(accessApproval.getId());
		List<Long> result = accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitterId, accessorIds);
		assertEquals(expected, result);
	}

	@Test
	public void testListBySubmitterAndAccessorsWithEmptyAccessors() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		// Different submitter
		accessApproval2 = newAccessApproval(individualGroup2, accessRequirement);
		// Different AR
		accessApproval3 = newAccessApproval(individualGroup, accessRequirement2);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		accessApproval3 = accessApprovalDAO.create(accessApproval3);
		String accessRequirementId = accessRequirement.getId().toString();
		String submitterId = individualGroup.getId();
		List<String> accessorIds = Collections.emptyList();
		List<Long> expected = Collections.emptyList();
		List<Long> result = accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitterId, accessorIds);
		assertEquals(expected, result);
	}

	@Test
	public void testListBySubmitterAndAccessorsWithRevoked() {
		accessApproval = newAccessApproval(individualGroup, accessRequirement);
		// Same submitter, but REVOKED
		accessApproval2 = newAccessApproval(individualGroup, accessRequirement);
		accessApproval2.setAccessorId(individualGroup2.getId());
		accessApproval2.setState(ApprovalState.REVOKED);
		// Different AR
		accessApproval3 = newAccessApproval(individualGroup, accessRequirement2);
		accessApproval = accessApprovalDAO.create(accessApproval);
		accessApproval2 = accessApprovalDAO.create(accessApproval2);
		accessApproval3 = accessApprovalDAO.create(accessApproval3);
		String accessRequirementId = accessRequirement.getId().toString();
		String submitterId = individualGroup.getId();
		List<String>
accessorIds = Arrays.asList(individualGroup.getId(), individualGroup2.getId()); List<Long> expected = Arrays.asList(accessApproval.getId()); List<Long> result = accessApprovalDAO.listApprovalsBySubmitter(accessRequirementId, submitterId, accessorIds); assertEquals(expected, result); } @Test public void testListByAccessorWithNoAccessRequirement() { String accessRequirementId = null; String accessor = individualGroup.getId(); String message = assertThrows(IllegalArgumentException.class, () -> { // Call under test accessApprovalDAO.listApprovalsByAccessor(accessRequirementId, accessor); }).getMessage(); assertEquals("accessRequirementId is required.", message); } @Test public void testListByAccessorWithNoSubmitter() { String accessRequirementId = accessRequirement.getId().toString(); String accessor = null; String message = assertThrows(IllegalArgumentException.class, () -> { // Call under test accessApprovalDAO.listApprovalsByAccessor(accessRequirementId, accessor); }).getMessage(); assertEquals("accessorId is required.", message); } @Test public void testListByAccessor() { accessApproval = newAccessApproval(individualGroup, accessRequirement); // Different accessor accessApproval2 = newAccessApproval(individualGroup2, accessRequirement); // Different AR accessApproval3 = newAccessApproval(individualGroup, accessRequirement2); accessApproval = accessApprovalDAO.create(accessApproval); accessApproval2 = accessApprovalDAO.create(accessApproval2); accessApproval3 = accessApprovalDAO.create(accessApproval3); String accessRequirementId = accessRequirement.getId().toString(); String accessorId = individualGroup.getId(); List<Long> expected = Arrays.asList(accessApproval.getId()); List<Long> result = accessApprovalDAO.listApprovalsByAccessor(accessRequirementId, accessorId); assertEquals(expected, result); } @Test public void testListByAccessorWithRevoked() { accessApproval = newAccessApproval(individualGroup, accessRequirement); // Same accessor but REVOKED accessApproval2 
= newAccessApproval(individualGroup, accessRequirement); accessApproval2.setSubmitterId(individualGroup2.getId()); accessApproval2.setState(ApprovalState.REVOKED); // Different AR accessApproval3 = newAccessApproval(individualGroup, accessRequirement2); accessApproval = accessApprovalDAO.create(accessApproval); accessApproval2 = accessApprovalDAO.create(accessApproval2); accessApproval3 = accessApprovalDAO.create(accessApproval3); String accessRequirementId = accessRequirement.getId().toString(); String accessorId = individualGroup.getId(); List<Long> expected = Arrays.asList(accessApproval.getId()); List<Long> result = accessApprovalDAO.listApprovalsByAccessor(accessRequirementId, accessorId); assertEquals(expected, result); } @Test public void testHasSubmmiterApproval() { Instant expireAfter = Instant.now(); // An approval without an expiration AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1 = accessApprovalDAO.create(ap1); boolean expected = true; boolean result = accessApprovalDAO.hasSubmitterApproval(accessRequirement.getId().toString(), individualGroup.getId(), expireAfter); assertEquals(expected, result); } @Test public void testHasSubmmiterApprovalWithAccessor() { Instant expireAfter = Instant.now(); // An approval with no expiration, but the user is not the submitter AccessApproval ap1 = newAccessApproval(individualGroup2, accessRequirement); ap1.setAccessorId(individualGroup.getId()); ap1 = accessApprovalDAO.create(ap1); boolean expected = false; boolean result = accessApprovalDAO.hasSubmitterApproval(accessRequirement.getId().toString(), individualGroup.getId(), expireAfter); assertEquals(expected, result); } @Test public void testHasSubmmiterApprovalWithExpireAfter() { Instant expireAfter = Instant.now(); Instant nextDay = expireAfter.plus(1, ChronoUnit.DAYS); // An approval that expires the day after AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1.setExpiredOn(Date.from(nextDay)); ap1 = 
accessApprovalDAO.create(ap1); boolean expected = true; boolean result = accessApprovalDAO.hasSubmitterApproval(accessRequirement.getId().toString(), individualGroup.getId(), expireAfter); assertEquals(expected, result); } @Test public void testHasSubmmiterApprovalWithExpireBefore() { Instant expireAfter = Instant.now(); Instant previousDay = expireAfter.minus(1, ChronoUnit.DAYS); // An approval that expired the previous day AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1.setExpiredOn(Date.from(previousDay)); ap1 = accessApprovalDAO.create(ap1); boolean expected = false; boolean result = accessApprovalDAO.hasSubmitterApproval(accessRequirement.getId().toString(), individualGroup.getId(), expireAfter); assertEquals(expected, result); } @Test public void testHasSubmmiterApprovalWithDifferentRequirement() { Instant expireAfter = Instant.now(); // An approval that does not expire but is for a different requirement AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement2); ap1 = accessApprovalDAO.create(ap1); boolean expected = false; boolean result = accessApprovalDAO.hasSubmitterApproval(accessRequirement.getId().toString(), individualGroup.getId(), expireAfter); assertEquals(expected, result); } @Test public void testHasAccessorApproval() { AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1 = accessApprovalDAO.create(ap1); boolean expected = true; boolean result = accessApprovalDAO.hasAccessorApproval(ap1.getRequirementId().toString(), ap1.getAccessorId()); assertEquals(expected, result); } @Test public void testHasAccessorApprovalWithRevoked() { // A REVOKED approval AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1.setState(ApprovalState.REVOKED); ap1 = accessApprovalDAO.create(ap1); boolean expected = false; boolean result = accessApprovalDAO.hasAccessorApproval(ap1.getRequirementId().toString(), ap1.getAccessorId()); assertEquals(expected, result); } @Test 
public void testHasAccessorApprovalWithDifferentRequirement() { // An approval, but a different requirment AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement2); ap1 = accessApprovalDAO.create(ap1); boolean expected = false; boolean result = accessApprovalDAO.hasAccessorApproval(accessRequirement.getId().toString(), ap1.getAccessorId()); assertEquals(expected, result); } @Test public void testHasAccessorApprovalWithDifferentAccessor() { // An approval, but a different requirement AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1.setAccessorId(individualGroup2.getId()); ap1 = accessApprovalDAO.create(ap1); boolean expected = false; boolean result = accessApprovalDAO.hasAccessorApproval(ap1.getRequirementId().toString(), individualGroup.getId()); assertEquals(expected, result); } @Test public void testHasAccessorApprovalWithApprovedAndRevoked() { // A REVOKED approval AccessApproval ap1 = newAccessApproval(individualGroup, accessRequirement); ap1.setState(ApprovalState.REVOKED); // An approval on the same requirement, different submitter AccessApproval ap2 = newAccessApproval(individualGroup2, accessRequirement); ap2.setAccessorId(individualGroup.getId()); ap1 = accessApprovalDAO.create(ap1); ap2 = accessApprovalDAO.create(ap2); boolean expected = true; boolean result = accessApprovalDAO.hasAccessorApproval(ap1.getRequirementId().toString(), ap1.getAccessorId()); assertEquals(expected, result); } }
package org.apache.archiva.webdav;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.archiva.metadata.model.facets.AuditEvent;
import org.apache.archiva.repository.content.LayoutException;
import org.apache.archiva.repository.storage.RepositoryStorage;
import org.apache.archiva.repository.storage.StorageAsset;
import org.apache.archiva.metadata.audit.AuditListener;
import org.apache.archiva.scheduler.ArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.model.RepositoryArchivaTaskScheduler;
import org.apache.archiva.scheduler.repository.model.RepositoryTask;
import org.apache.archiva.webdav.util.IndexWriter;
import org.apache.archiva.webdav.util.MimeTypes;
import org.apache.commons.io.IOUtils;
import org.apache.jackrabbit.util.Text;
import org.apache.jackrabbit.webdav.DavException;
import org.apache.jackrabbit.webdav.DavResource;
import org.apache.jackrabbit.webdav.DavResourceFactory;
import org.apache.jackrabbit.webdav.DavResourceIterator;
import org.apache.jackrabbit.webdav.DavResourceIteratorImpl;
import org.apache.jackrabbit.webdav.DavResourceLocator;
import org.apache.jackrabbit.webdav.DavServletResponse;
import org.apache.jackrabbit.webdav.DavSession;
import org.apache.jackrabbit.webdav.MultiStatusResponse;
import org.apache.jackrabbit.webdav.io.InputContext;
import org.apache.jackrabbit.webdav.io.OutputContext;
import org.apache.jackrabbit.webdav.lock.ActiveLock;
import org.apache.jackrabbit.webdav.lock.LockInfo;
import org.apache.jackrabbit.webdav.lock.LockManager;
import org.apache.jackrabbit.webdav.lock.Scope;
import org.apache.jackrabbit.webdav.lock.Type;
import org.apache.jackrabbit.webdav.property.DavProperty;
import org.apache.jackrabbit.webdav.property.DavPropertyName;
import org.apache.jackrabbit.webdav.property.DavPropertyNameSet;
import org.apache.jackrabbit.webdav.property.DavPropertySet;
import org.apache.jackrabbit.webdav.property.DefaultDavProperty;
import org.apache.jackrabbit.webdav.property.ResourceType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.servlet.http.HttpServletResponse;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.time.format.DateTimeFormatter;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;

/**
 * Jackrabbit {@link DavResource} implementation backed by a repository
 * {@link StorageAsset} (file or directory) rather than a plain filesystem path.
 * Delegates all physical operations to the {@link RepositoryStorage} layer and
 * fires {@link AuditEvent}s on mutating operations.
 */
public class ArchivaDavResource
    implements DavResource
{
    // Assets whose name starts with this prefix are hidden from directory listings.
    public static final String HIDDEN_PATH_PREFIX = ".";

    private final ArchivaDavResourceLocator locator;

    private final DavResourceFactory factory;

    // private final Path localResource;

    private final String logicalResource;

    // Lazily built and cached by initProperties()
    private DavPropertySet properties = null;

    private LockManager lockManager;

    private final DavSession session;

    private String remoteAddr;

    private final RepositoryStorage repositoryStorage;

    private final MimeTypes mimeTypes;

    private List<AuditListener> auditListeners;

    private String principal;

    // WebDAV compliance classes advertised in the DAV header
    public static final String COMPLIANCE_CLASS = "1, 2";

    private final ArchivaTaskScheduler<RepositoryTask> scheduler;

    private Logger log = LoggerFactory.getLogger( ArchivaDavResource.class );

    // The backing storage asset; reassigned on move()
    private StorageAsset asset;

    /**
     * Creates a resource without request-specific data (remote address / principal).
     *
     * @param localResource      the backing storage asset
     * @param logicalResource    the logical (repository-relative) path of the resource
     * @param repositoryStorage  storage layer used for all physical operations
     * @param session            the WebDAV session
     * @param locator            locator describing this resource's repository path
     * @param factory            factory used to create parent/child resources
     * @param mimeTypes          mime-type lookup for spooling file content
     * @param auditListeners     listeners notified of mutating operations
     * @param scheduler          task scheduler (currently unused; see commented-out queueRepositoryTask)
     */
    public ArchivaDavResource( StorageAsset localResource, String logicalResource, RepositoryStorage repositoryStorage,
                               DavSession session, ArchivaDavResourceLocator locator, DavResourceFactory factory,
                               MimeTypes mimeTypes, List<AuditListener> auditListeners,
                               RepositoryArchivaTaskScheduler scheduler )
        throws LayoutException
    {
        // this.localResource = Paths.get( localResource );
        this.asset = localResource;
        this.logicalResource = logicalResource;
        this.locator = locator;
        this.factory = factory;
        this.session = session;

        // TODO: push into locator as well as moving any references out of the resource factory
        this.repositoryStorage = repositoryStorage;

        // TODO: these should be pushed into the repository layer, along with the physical file operations in this class
        this.mimeTypes = mimeTypes;
        this.auditListeners = auditListeners;
        this.scheduler = scheduler;
    }

    /**
     * Creates a resource carrying request-specific data used for audit events.
     *
     * @param remoteAddr the client's remote IP, recorded on audit events
     * @param principal  the authenticated user, recorded on audit events
     */
    public ArchivaDavResource( StorageAsset localResource, String logicalResource, RepositoryStorage repositoryStorage,
                               String remoteAddr, String principal, DavSession session,
                               ArchivaDavResourceLocator locator, DavResourceFactory factory, MimeTypes mimeTypes,
                               List<AuditListener> auditListeners, RepositoryArchivaTaskScheduler scheduler )
        throws LayoutException
    {
        this( localResource, logicalResource, repositoryStorage, session, locator, factory, mimeTypes, auditListeners,
              scheduler );
        this.remoteAddr = remoteAddr;
        this.principal = principal;
    }

    @Override
    public String getComplianceClass()
    {
        return COMPLIANCE_CLASS;
    }

    @Override
    public String getSupportedMethods()
    {
        return METHODS;
    }

    @Override
    public boolean exists()
    {
        return asset.exists();
    }

    @Override
    public boolean isCollection()
    {
        return asset.isContainer();
    }

    @Override
    public String getDisplayName()
    {
        String resPath = getResourcePath();
        return ( resPath != null ) ? Text.getName( resPath ) : resPath;
    }

    @Override
    public DavResourceLocator getLocator()
    {
        return locator;
    }

    @Override
    public String getResourcePath()
    {
        return locator.getResourcePath();
    }

    @Override
    public String getHref()
    {
        return locator.getHref( isCollection() );
    }

    @Override
    public long getModificationTime()
    {
        return asset.getModificationTime().toEpochMilli();
    }

    /**
     * Writes the resource to the output context: file content for files,
     * a generated HTML index for collections.
     */
    @Override
    public void spool( OutputContext outputContext )
        throws IOException
    {
        if ( !isCollection() )
        {
            outputContext.setContentLength( asset.getSize());
            outputContext.setContentType( mimeTypes.getMimeType( asset.getName() ) );
        }
        if ( !isCollection() && outputContext.hasStream() )
        {
            // Stream the file content through the storage layer
            repositoryStorage.consumeData( asset, is -> {copyStream(is, outputContext.getOutputStream());}, true );
        }
        else if ( outputContext.hasStream() )
        {
            // Collection: render a directory index instead
            IndexWriter writer = new IndexWriter( asset, logicalResource );
            writer.write( outputContext );
        }
    }

    // Wraps IOException as RuntimeException so it can cross the consumeData lambda boundary.
    private void copyStream( InputStream is, OutputStream os )
        throws RuntimeException
    {
        try
        {
            IOUtils.copy(is, os);
        }
        catch ( IOException e )
        {
            throw new RuntimeException( "Copy failed "+e.getMessage(), e );
        }
    }

    @Override
    public DavPropertyName[] getPropertyNames()
    {
        return getProperties().getPropertyNames();
    }

    @Override
    public DavProperty getProperty( DavPropertyName name )
    {
        return getProperties().get( name );
    }

    @Override
    public DavPropertySet getProperties()
    {
        return initProperties();
    }

    // Property modification is intentionally unsupported (no-op).
    @Override
    public void setProperty( DavProperty property )
        throws DavException
    {
    }

    // Property removal is intentionally unsupported (no-op).
    @Override
    public void removeProperty( DavPropertyName propertyName )
        throws DavException
    {
    }

    // Not supported; returns null.
    public MultiStatusResponse alterProperties( DavPropertySet setProperties, DavPropertyNameSet removePropertyNames )
        throws DavException
    {
        return null;
    }

    // Not supported; returns null.
    @SuppressWarnings("unchecked")
    @Override
    public MultiStatusResponse alterProperties( List changeList )
        throws DavException
    {
        return null;
    }

    /**
     * Returns the parent collection, or null for the root resource
     * (or if the parent cannot be created).
     */
    @Override
    public DavResource getCollection()
    {
        DavResource parent = null;
        if ( getResourcePath() != null && !getResourcePath().equals( "/" ) )
        {
            String parentPath = Text.getRelativeParent( getResourcePath(), 1 );
            if ( parentPath.equals( "" ) )
            {
                parentPath = "/";
            }
            DavResourceLocator parentloc =
                locator.getFactory().createResourceLocator( locator.getPrefix(), parentPath );
            try
            {
                parent = factory.createResource( parentloc, session );
            }
            catch ( DavException e )
            {
                // should not occur
            }
        }
        return parent;
    }

    /**
     * Handles PUT (new file, spooled via a temp file with a content-length check)
     * and MKCOL (new directory). Throws 400 if this resource is not a collection.
     */
    @Override
    public void addMember( DavResource resource, InputContext inputContext )
        throws DavException
    {
        // Path localFile = localResource.resolve( resource.getDisplayName() );
        boolean exists = asset.exists();
        final String newPath = asset.getPath()+"/"+resource.getDisplayName();
        if ( isCollection() && inputContext.hasStream() ) // New File
        {
            Path tempFile = null;
            try
            {
                // Spool the upload to a temp file first so a truncated upload never replaces the asset
                tempFile = Files.createTempFile( "archiva_upload","dat" );
                try(OutputStream os = Files.newOutputStream( tempFile, StandardOpenOption.CREATE ))
                {
                    IOUtils.copy( inputContext.getInputStream( ), os );
                }
                long expectedContentLength = inputContext.getContentLength();
                long actualContentLength = 0;
                try
                {
                    actualContentLength = Files.size(tempFile);
                }
                catch ( IOException e )
                {
                    log.error( "Could not get length of file {}: {}", tempFile, e.getMessage(), e );
                }
                // length of -1 is given for a chunked request or unknown length, in which case we accept what was uploaded
                if ( expectedContentLength >= 0 && expectedContentLength != actualContentLength )
                {
                    String msg = "Content Header length was " + expectedContentLength + " but was "
                        + actualContentLength;
                    log.debug( "Upload failed: {}", msg );
                    throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
                }
                StorageAsset member = repositoryStorage.addAsset( newPath, false );
                member.create();
                member.replaceDataFromFile( tempFile );
            }
            catch ( IOException e )
            {
                throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e );
            }
            finally
            {
                // Best-effort cleanup of the spool file
                if (tempFile!=null)
                {
                    try
                    {
                        Files.deleteIfExists( tempFile );
                    }
                    catch ( IOException e )
                    {
                        log.error("Could not delete temporary file {}", tempFile);
                    }
                }
            }
            // queueRepositoryTask( asset );
            log.debug( "File '{}{}(current user '{}')", resource.getDisplayName(),
                       ( exists ? "' modified " : "' created " ), this.principal );
            // triggerAuditEvent( resource, exists ? AuditEvent.MODIFY_FILE : AuditEvent.CREATE_FILE );
        }
        else if ( !inputContext.hasStream() && isCollection() ) // New directory
        {
            try
            {
                StorageAsset member = repositoryStorage.addAsset( newPath, true );
                member.create();
            }
            catch ( IOException e )
            {
                // NOTE(review): directory-creation failure is only logged, not surfaced
                // to the client — confirm this is intentional
                log.error("Could not create directory {}: {}", newPath, e.getMessage(), e);
            }
            log.debug( "Directory '{}' (current user '{}')", resource.getDisplayName(), this.principal );
            triggerAuditEvent( resource, AuditEvent.CREATE_DIR );
        }
        else
        {
            String msg = "Could not write member " + resource.getResourcePath() + " at " + getResourcePath()
                + " as this is not a DAV collection";
            log.debug( msg );
            throw new DavException( HttpServletResponse.SC_BAD_REQUEST, msg );
        }
    }

    public StorageAsset getAsset()
    {
        return asset;
    }

    /**
     * Lists the children of this collection, skipping hidden assets
     * (names starting with {@link #HIDDEN_PATH_PREFIX}) and any child
     * whose resource creation fails.
     */
    @Override
    public DavResourceIterator getMembers()
    {
        List<DavResource> list;
        if ( exists() && isCollection() )
        {
            list = asset.list().stream().filter( m -> !m.getName().startsWith( HIDDEN_PATH_PREFIX ) )
                .map(m -> {
                    String path = locator.getResourcePath( ) + '/' + m.getName();
                    DavResourceLocator resourceLocator =
                        locator.getFactory( ).createResourceLocator( locator.getPrefix( ), path );
                    try
                    {
                        return factory.createResource( resourceLocator, session );
                    }
                    catch ( DavException e )
                    {
                        return null;
                    }
                }).filter( Objects::nonNull ).collect( Collectors.toList());
        }
        else
        {
            list = Collections.emptyList( );
        }
        return new DavResourceIteratorImpl( list );
    }

    /**
     * Removes a child asset (file or directory) and fires the matching audit event.
     * Throws 404 if the member does not exist.
     */
    @Override
    public void removeMember( DavResource member )
        throws DavException
    {
        StorageAsset resource = checkDavResourceIsArchivaDavResource( member ).getAsset( );
        if ( resource.exists() )
        {
            try
            {
                if ( resource.isContainer() )
                {
                    repositoryStorage.removeAsset( resource );
                    triggerAuditEvent( member, AuditEvent.REMOVE_DIR );
                }
                else
                {
                    repositoryStorage.removeAsset( resource );
                    triggerAuditEvent( member, AuditEvent.REMOVE_FILE );
                }
                log.debug( "{}{}' removed (current user '{}')", ( resource.isContainer() ? "Directory '" : "File '" ),
                           member.getDisplayName(), this.principal );
            }
            catch ( IOException e )
            {
                throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR );
            }
        }
        else
        {
            throw new DavException( HttpServletResponse.SC_NOT_FOUND );
        }
    }

    // Fires an audit event for an operation on a member resource, using the
    // member's own principal/remote address.
    private void triggerAuditEvent( DavResource member, String action )
        throws DavException
    {
        String path = logicalResource + "/" + member.getDisplayName();
        ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( member );
        AuditEvent auditEvent = new AuditEvent( locator.getRepositoryId(), resource.principal, path, action );
        auditEvent.setRemoteIP( resource.remoteAddr );
        for ( AuditListener listener : auditListeners )
        {
            listener.auditEvent( auditEvent );
        }
    }

    /**
     * Moves this resource to the destination path; rebinds {@link #asset}
     * to the moved asset and fires a MOVE audit event.
     */
    @Override
    public void move( DavResource destination )
        throws DavException
    {
        if ( !exists() )
        {
            throw new DavException( HttpServletResponse.SC_NOT_FOUND, "Resource to copy does not exist." );
        }
        try
        {
            // Validates the destination type; throws 500 if it is not an ArchivaDavResource
            ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
            if ( isCollection() )
            {
                this.asset = repositoryStorage.moveAsset( asset, destination.getResourcePath() );
                triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_DIRECTORY );
            }
            else
            {
                this.asset = repositoryStorage.moveAsset( asset, destination.getResourcePath() );
                triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.MOVE_FILE );
            }
            log.debug( "{}{}' moved to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ),
                       asset.getPath(), destination, this.principal );
        }
        catch ( IOException e )
        {
            throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e );
        }
    }

    /**
     * Copies this resource to the destination path and fires a COPY audit event.
     * Shallow copies of collections are rejected with 403.
     */
    @Override
    public void copy( DavResource destination, boolean shallow )
        throws DavException
    {
        if ( !exists() )
        {
            throw new DavException( HttpServletResponse.SC_NOT_FOUND, "Resource to copy does not exist." );
        }
        if ( shallow && isCollection() )
        {
            throw new DavException( DavServletResponse.SC_FORBIDDEN, "Unable to perform shallow copy for collection" );
        }
        try
        {
            // Validates the destination type; throws 500 if it is not an ArchivaDavResource
            ArchivaDavResource resource = checkDavResourceIsArchivaDavResource( destination );
            if ( isCollection() )
            {
                repositoryStorage.copyAsset( asset, destination.getResourcePath() );
                triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_DIRECTORY );
            }
            else
            {
                repositoryStorage.copyAsset( asset, destination.getResourcePath() );
                triggerAuditEvent( remoteAddr, locator.getRepositoryId(), logicalResource, AuditEvent.COPY_FILE );
            }
            log.debug( "{}{}' copied to '{}' (current user '{}')", ( isCollection() ? "Directory '" : "File '" ),
                       asset.getPath(), destination, this.principal );
        }
        catch ( IOException e )
        {
            throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e );
        }
    }

    // Only exclusive write locks are supported.
    @Override
    public boolean isLockable( Type type, Scope scope )
    {
        return Type.WRITE.equals( type ) && Scope.EXCLUSIVE.equals( scope );
    }

    @Override
    public boolean hasLock( Type type, Scope scope )
    {
        return getLock( type, scope ) != null;
    }

    @Override
    public ActiveLock getLock( Type type, Scope scope )
    {
        ActiveLock lock = null;
        if ( exists() && Type.WRITE.equals( type ) && Scope.EXCLUSIVE.equals( scope ) )
        {
            lock = lockManager.getLock( type, scope, this );
        }
        return lock;
    }

    @Override
    public ActiveLock[] getLocks()
    {
        ActiveLock writeLock = getLock( Type.WRITE, Scope.EXCLUSIVE );
        return ( writeLock != null ) ? new ActiveLock[]{ writeLock } : new ActiveLock[0];
    }

    @Override
    public ActiveLock lock( LockInfo lockInfo )
        throws DavException
    {
        ActiveLock lock = null;
        if ( isLockable( lockInfo.getType(), lockInfo.getScope() ) )
        {
            lock = lockManager.createLock( lockInfo, this );
        }
        else
        {
            throw new DavException( DavServletResponse.SC_PRECONDITION_FAILED, "Unsupported lock type or scope." );
        }
        return lock;
    }

    @Override
    public ActiveLock refreshLock( LockInfo lockInfo, String lockToken )
        throws DavException
    {
        if ( !exists() )
        {
            throw new DavException( DavServletResponse.SC_NOT_FOUND );
        }
        ActiveLock lock = getLock( lockInfo.getType(), lockInfo.getScope() );
        if ( lock == null )
        {
            throw new DavException( DavServletResponse.SC_PRECONDITION_FAILED,
                                    "No lock with the given type/scope present on resource " + getResourcePath() );
        }
        lock = lockManager.refreshLock( lockInfo, lockToken, this );
        return lock;
    }

    @Override
    public void unlock( String lockToken )
        throws DavException
    {
        ActiveLock lock = getLock( Type.WRITE, Scope.EXCLUSIVE );
        if ( lock == null )
        {
            throw new DavException( HttpServletResponse.SC_PRECONDITION_FAILED );
        }
        else if ( lock.isLockedByToken( lockToken ) )
        {
            lockManager.releaseLock( lockToken, this );
        }
        else
        {
            throw new DavException( DavServletResponse.SC_LOCKED );
        }
    }

    @Override
    public void addLockManager( LockManager lockManager )
    {
        this.lockManager = lockManager;
    }

    @Override
    public DavResourceFactory getFactory()
    {
        return factory;
    }

    @Override
    public DavSession getSession()
    {
        return session;
    }

    /**
     * Fill the set of properties.
     * NOTE(review): for a non-existent resource this caches an empty property set in
     * the field; subsequent calls return the cached set even if the asset has since
     * been created — confirm callers always obtain a fresh resource after creation.
     */
    protected DavPropertySet initProperties()
    {
        if ( !exists() )
        {
            properties = new DavPropertySet();
        }

        if ( properties != null )
        {
            return properties;
        }

        // Local set shadows the field on purpose; assigned to the field once fully built
        DavPropertySet properties = new DavPropertySet();

        // set (or reset) fundamental properties
        if ( getDisplayName() != null )
        {
            properties.add( new DefaultDavProperty<>( DavPropertyName.DISPLAYNAME, getDisplayName() ) );
        }
        if ( isCollection() )
        {
            properties.add( new ResourceType( ResourceType.COLLECTION ) );

            // Windows XP support
            properties.add( new DefaultDavProperty<>( DavPropertyName.ISCOLLECTION, "1" ) );
        }
        else
        {
            properties.add( new ResourceType( ResourceType.DEFAULT_RESOURCE ) );

            // Windows XP support
            properties.add( new DefaultDavProperty<>( DavPropertyName.ISCOLLECTION, "0" ) );
        }

        // Need to get the ISO8601 date for properties
        String modifiedDate = DateTimeFormatter.ISO_INSTANT.format( asset.getModificationTime() );
        properties.add( new DefaultDavProperty<>( DavPropertyName.GETLASTMODIFIED, modifiedDate ) );

        // NOTE(review): creation date reuses the modification time — the storage asset
        // apparently exposes no separate creation timestamp
        properties.add( new DefaultDavProperty<>( DavPropertyName.CREATIONDATE, modifiedDate ) );

        properties.add( new DefaultDavProperty<>( DavPropertyName.GETCONTENTLENGTH, asset.getSize() ) );

        this.properties = properties;

        return properties;
    }

    // Narrowing cast with a 500 error instead of ClassCastException.
    private ArchivaDavResource checkDavResourceIsArchivaDavResource( DavResource resource )
        throws DavException
    {
        if ( !( resource instanceof ArchivaDavResource ) )
        {
            throw new DavException( HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                                    "DavResource is not instance of ArchivaDavResource" );
        }
        return (ArchivaDavResource) resource;
    }

    // Fires an audit event for an operation performed on this resource itself.
    private void triggerAuditEvent( String remoteIP, String repositoryId, String resource, String action )
    {
        AuditEvent event = new AuditEvent( repositoryId, principal, resource, action );
        event.setRemoteIP( remoteIP );

        for ( AuditListener listener : auditListeners )
        {
            listener.auditEvent( event );
        }
    }

    /**
    private void queueRepositoryTask( Path localFile )
    {
        RepositoryTask task = new RepositoryTask();
        task.setRepositoryId( repository.getId() );
        task.setResourceFile( localFile );
        task.setUpdateRelatedArtifacts( false );
        task.setScanAll( false );

        try
        {
            scheduler.queueTask( task );
        }
        catch ( TaskQueueException e )
        {
            log.error( "Unable to queue repository task to execute consumers on resource file ['{}"
                           + "'].", localFile.getFileName() );
        }
    }
    **/
}
/*
 * Copyright (c) 2010-2015 Evolveum
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.evolveum.midpoint.provisioning.impl.opendj;

import static com.evolveum.midpoint.test.util.TestUtil.assertFailure;
import static com.evolveum.midpoint.test.util.TestUtil.assertSuccess;
import static com.evolveum.midpoint.test.IntegrationTestTools.display;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertNotNull;
import static org.testng.AssertJUnit.assertNull;
import static org.testng.AssertJUnit.assertTrue;

import java.io.IOException;
import java.util.List;

import javax.xml.namespace.QName;

import com.evolveum.midpoint.prism.PrismContext;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.testng.AssertJUnit;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.Test;
import org.w3c.dom.Element;

import com.evolveum.midpoint.common.refinery.RefinedResourceSchema;
import com.evolveum.midpoint.prism.Containerable;
import com.evolveum.midpoint.prism.PrismContainer;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.prism.query.ObjectQuery;
import com.evolveum.midpoint.prism.util.PrismAsserts;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.provisioning.impl.ProvisioningTestUtil;
import com.evolveum.midpoint.provisioning.ucf.impl.ConnectorFactoryIcfImpl;
import com.evolveum.midpoint.schema.DeltaConvertor;
import com.evolveum.midpoint.schema.ResourceShadowDiscriminator;
import com.evolveum.midpoint.schema.ResultHandler;
import com.evolveum.midpoint.schema.processor.ResourceSchema;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.schema.result.OperationResultStatus;
import com.evolveum.midpoint.schema.util.ObjectQueryUtil;
import com.evolveum.midpoint.schema.util.ResourceTypeUtil;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.test.util.TestUtil;
import com.evolveum.midpoint.util.exception.CommunicationException;
import com.evolveum.midpoint.util.exception.ConfigurationException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.exception.SecurityViolationException;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.api_types_3.ObjectModificationType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ConnectorType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.FailedOperationTypeType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultStatusType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.OperationResultType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ResourceType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.ShadowType;
import com.evolveum.midpoint.xml.ns._public.common.common_3.XmlSchemaType;

/**
 * Test for provisioning service implementation. Using OpenDJ. But NOT STARTING IT.
 * Checking if appropriate errors are provided.
 *
 * <p>Tests 0xx-1xx run against a resource with no cached schema/capabilities;
 * tests 5xx re-run the same scenarios after replacing the resource with one that
 * has schema and capabilities (the LDAP server is still down, so failures are
 * still expected, but the code may take a different path).</p>
 */
@ContextConfiguration(locations = "classpath:ctx-provisioning-test-main.xml")
@DirtiesContext
public class TestOpenDjNegative extends AbstractOpenDjTest {

	private static final Trace LOGGER = TraceManager.getTrace(TestOpenDjNegative.class);

	@Autowired
	TaskManager taskManager;

//	@Autowired
//	private ResourceObjectChangeListener syncServiceMock;

	@Override
	public void initSystem(Task initTask, OperationResult initResult) throws Exception {
		super.initSystem(initTask, initResult);
		// Pre-load shadows directly into the repository so the tests can exercise
		// resource-side failures without needing the resource to create them.
		repoAddShadowFromFile(ACCOUNT1_REPO_FILE, initResult);
		repoAddShadowFromFile(ACCOUNT_SPARROW_REPO_FILE, initResult);
		repoAddShadowFromFile(ACCOUNT_JACK_REPO_FILE, initResult);
	}

	@BeforeClass
	public static void stoptLdap() throws Exception {
		// Make sure that OpenDJ is stopped. We want to see the blood .. err ... errors
		try {
			openDJController.stop();
		} catch (Exception ex) {
			LOGGER.trace("Exception during stopping already stopped LDAP (probably harmless)", ex);
		}
	}

	/**
	 * Test connection must fail (OpenDJ is down) and must not leave any schema
	 * behind in the repository-stored resource.
	 */
	@Test
	public void test003Connection() throws Exception {
		final String TEST_NAME = "test003Connection";
		TestUtil.displayTestTile(TEST_NAME);

		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()+"."+TEST_NAME);

		ResourceType resourceTypeBefore = repositoryService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, result).asObjectable();
		display("Resource before testResource (repository)", resourceTypeBefore);
		assertNotNull("No connector ref", resourceTypeBefore.getConnectorRef());
		assertNotNull("No connector ref OID", resourceTypeBefore.getConnectorRef().getOid());
		connector = repositoryService.getObject(ConnectorType.class, resourceTypeBefore.getConnectorRef().getOid(), null, result);
		ConnectorType connectorType = connector.asObjectable();
		assertNotNull(connectorType);

		// Sanity: there must be no schema yet, otherwise the test would not prove anything.
		XmlSchemaType xmlSchemaTypeBefore = resourceTypeBefore.getSchema();
		AssertJUnit.assertNull("Found schema before test connection. Bad test setup?", xmlSchemaTypeBefore);
		Element resourceXsdSchemaElementBefore = ResourceTypeUtil.getResourceXsdSchema(resourceTypeBefore);
		AssertJUnit.assertNull("Found schema element before test connection. Bad test setup?", resourceXsdSchemaElementBefore);

		// WHEN
		OperationResult operationResult = provisioningService.testResource(RESOURCE_OPENDJ_OID);

		// THEN
		display("Test connection result (expected failure)", operationResult);
		TestUtil.assertFailure(operationResult);

		PrismObject<ResourceType> resourceRepoAfter = repositoryService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, result);
		display("Resource after testResource (repository)", resourceRepoAfter);
		ResourceType resourceTypeRepoAfter = resourceRepoAfter.asObjectable();
		display("Resource after testResource (repository, XML)",
				PrismTestUtil.serializeObjectToString(resourceTypeRepoAfter.asPrismObject(), PrismContext.LANG_XML));

		// A failed connection test must not generate/cache any schema.
		XmlSchemaType xmlSchemaTypeAfter = resourceTypeRepoAfter.getSchema();
		assertNull("The schema was generated after test connection but it should not be", xmlSchemaTypeAfter);
		Element resourceXsdSchemaElementAfter = ResourceTypeUtil.getResourceXsdSchema(resourceTypeRepoAfter);
		assertNull("Schema after test connection (and should not be)", resourceXsdSchemaElementAfter);
	}

	/**
	 * getObject(resource) should not throw even though the resource is down; it
	 * indicates the failure in the operation result and fetchResult instead.
	 * Repeated calls must return equivalent (cached) connector configuration.
	 */
	@Test
	public void test004ResourceAndConnectorCaching() throws Exception {
		TestUtil.displayTestTile("test004ResourceAndConnectorCaching");

		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()+".test004ResourceAndConnectorCaching");
		Task task = taskManager.createTaskInstance();

		// WHEN
		// This should NOT throw an exception. It should just indicate the failure in results
		resource = provisioningService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, task, result);
		ResourceType resourceType = resource.asObjectable();

		// THEN
		result.computeStatus();
		display("getObject(resource) result", result);
		TestUtil.assertFailure(result);
		TestUtil.assertFailure(resource.asObjectable().getFetchResult());

		// No schema can be fetched while the resource is down.
		ResourceSchema resourceSchema = RefinedResourceSchema.getResourceSchema(resource, prismContext);
		assertNull("Resource schema found", resourceSchema);

		// WHEN
		PrismObject<ResourceType> resourceAgain = provisioningService.getObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, task, result);

		// THEN
		result.computeStatus();
		display("getObject(resourceAgain) result", result);
		TestUtil.assertFailure(result);
		TestUtil.assertFailure(resourceAgain.asObjectable().getFetchResult());

		ResourceType resourceTypeAgain = resourceAgain.asObjectable();
		assertNotNull("No connector ref", resourceTypeAgain.getConnectorRef());
		assertNotNull("No connector ref OID", resourceTypeAgain.getConnectorRef().getOid());

		// Connector configuration must be stable (cached) across calls.
		PrismContainer<Containerable> configurationContainer = resource.findContainer(ResourceType.F_CONNECTOR_CONFIGURATION);
		PrismContainer<Containerable> configurationContainerAgain = resourceAgain.findContainer(ResourceType.F_CONNECTOR_CONFIGURATION);
		assertTrue("Configurations not equivalent", configurationContainer.equivalent(configurationContainerAgain));
		assertTrue("Configurations not equals", configurationContainer.equals(configurationContainerAgain));

		ResourceSchema resourceSchemaAgain = RefinedResourceSchema.getResourceSchema(resourceAgain, prismContext);
		assertNull("Resource schema (again)", resourceSchemaAgain);
	}

	/**
	 * This goes to local repo, therefore the expected result is ObjectNotFound.
	 * We know that the shadow does not exist.
	 */
	@Test
	public void test110GetObjectNoShadow() throws Exception {
		final String TEST_NAME = "test110GetObjectNoShadow";
		TestUtil.displayTestTile(TEST_NAME);

		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		try {
			ShadowType acct = provisioningService.getObject(ShadowType.class, NON_EXISTENT_OID, null,
					taskManager.createTaskInstance(), result).asObjectable();

			AssertJUnit.fail("getObject succeeded unexpectedly");
		} catch (ObjectNotFoundException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	/**
	 * This is using the shadow to go to the resource. But it cannot as OpenDJ is down.
	 * It even cannot fetch schema. If there is no schema it does not even know how to process
	 * identifiers in the shadow. Therefore the expected result is ConfigurationException (CommunicationException).
	 * It must not be ObjectNotFound as we do NOT know that the shadow does not exist.
	 */
	@Test
	public void test111GetObjectShadow() throws Exception {
		final String TEST_NAME = "test111GetObjectShadow";
		TestUtil.displayTestTile(TEST_NAME);

		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		try {
			ShadowType acct = provisioningService.getObject(ShadowType.class, ACCOUNT1_OID, null,
					taskManager.createTaskInstance(), result).asObjectable();

			AssertJUnit.fail("getObject succeeded unexpectedly");
//		} catch (CommunicationException e) {
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	/**
	 * Listing resource objects requires the resource schema; without it (and with
	 * the resource down) a ConfigurationException is expected.
	 */
	@Test
	public void test120ListResourceObjects() throws Exception {
		final String TEST_NAME = "test120ListResourceObjects";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		try {
			// WHEN
			List<PrismObject<? extends ShadowType>> objectList = provisioningService.listResourceObjects(
					RESOURCE_OPENDJ_OID, RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS, null, null, result);

			AssertJUnit.fail("listResourceObjects succeeded unexpectedly");
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	/**
	 * Searching accounts needs the schema as well; expect ConfigurationException.
	 */
	@Test
	public void test121SearchAccounts() throws SchemaException, ObjectNotFoundException,
			CommunicationException, ConfigurationException, SecurityViolationException, Exception {
		final String TEST_NAME = "test121SearchAccounts";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		final String resourceNamespace = ResourceTypeUtil.getResourceNamespace(resource);
		QName objectClass = new QName(resourceNamespace, OBJECT_CLASS_INETORGPERSON_NAME);

		ObjectQuery query = ObjectQueryUtil.createResourceAndObjectClassQuery(resource.getOid(), objectClass, prismContext);

		try {
			// WHEN
			provisioningService.searchObjects(ShadowType.class, query, null, null, result);

			AssertJUnit.fail("searchObjects succeeded unexpectedly");
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		display(result);
		TestUtil.assertFailure(result);
	}

	/**
	 * Iterative search variant of the previous test; the handler must never be
	 * invoked because the search fails before producing any object.
	 */
	@Test
	public void test122SearchAccountsIterative() throws SchemaException, ObjectNotFoundException,
			CommunicationException, ConfigurationException, SecurityViolationException, Exception {
		final String TEST_NAME = "test122SearchAccountsIterative";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		final String resourceNamespace = ResourceTypeUtil.getResourceNamespace(resource);
		QName objectClass = new QName(resourceNamespace, OBJECT_CLASS_INETORGPERSON_NAME);

		ObjectQuery query = ObjectQueryUtil.createResourceAndObjectClassQuery(resource.getOid(), objectClass, prismContext);

		ResultHandler handler = new ResultHandler<ObjectType>() {
			@Override
			public boolean handle(PrismObject<ObjectType> prismObject, OperationResult parentResult) {
				AssertJUnit.fail("handler called unexpectedly");
				return false;
			}
		};

		try {
			// WHEN
			provisioningService.searchObjectsIterative(ShadowType.class, query, null, handler, null, result);

			AssertJUnit.fail("searchObjectsIterative succeeded unexpectedly");
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	/**
	 * Without schema/capabilities the add cannot even be queued for retry, so a
	 * ConfigurationException is expected (compare with test530 after the resource
	 * is replaced).
	 */
	@Test
	public void test130AddAccountWill() throws Exception {
		final String TEST_NAME = "test130AddAccountWill";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		ShadowType object = parseObjectType(ACCOUNT_WILL_FILE, ShadowType.class);

		display("Account to add", object);

		try {
			// WHEN
			String addedObjectOid = provisioningService.addObject(object.asPrismObject(), null, null,
					taskManager.createTaskInstance(), result);

			AssertJUnit.fail("addObject succeeded unexpectedly");
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	@Test
	public void test140AddDeleteAccountSparrow() throws Exception {
		final String TEST_NAME = "test140AddDeleteAccountSparrow";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		try {
			provisioningService.deleteObject(ShadowType.class, ACCOUNT_SPARROW_OID, null, null,
					taskManager.createTaskInstance(), result);

			AssertJUnit.fail("deleteObject succeeded unexpectedly");
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	@Test
	public void test150ModifyObject() throws Exception {
		final String TEST_NAME = "test150ModifyObject";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		ObjectModificationType objectChange = PrismTestUtil.parseAtomicValue(ACCOUNT_JACK_CHANGE_FILE,
				ObjectModificationType.COMPLEX_TYPE);
		ObjectDelta<ShadowType> delta = DeltaConvertor.createObjectDelta(objectChange, ShadowType.class,
				PrismTestUtil.getPrismContext());
		display("Object change", delta);

		try {
			provisioningService.modifyObject(ShadowType.class, objectChange.getOid(),
					delta.getModifications(), null, null, taskManager.createTaskInstance(), result);

			AssertJUnit.fail("modifyObject succeeded unexpectedly");
		} catch (ConfigurationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	@Test
	public void test190Synchronize() throws Exception {
		final String TEST_NAME = "test190Synchronize";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		Task task = taskManager.createTaskInstance(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);
		OperationResult result = task.getResult();

		ResourceShadowDiscriminator coords = new ResourceShadowDiscriminator(RESOURCE_OPENDJ_OID,
				new QName(RESOURCE_NS, ConnectorFactoryIcfImpl.ACCOUNT_OBJECT_CLASS_LOCAL_NAME));

		try {
			provisioningService.synchronize(coords, task, result);

			AssertJUnit.fail("synchronize succeeded unexpectedly");
		} catch (CommunicationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	// =========================================================================================================
	// Now lets replace the resource with one that has schema and capabilities. And re-run some of the tests.
	// OpenDJ is still down so the results should be the same. But the code may take a different path if
	// schema is present.
	// =========================================================================================================

	@Test
	public void test500ReplaceResource() throws Exception {
		final String TEST_NAME = "test500ReplaceResource";
		TestUtil.displayTestTile(TEST_NAME);

		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		// Delete should work fine even though OpenDJ is down
		provisioningService.deleteObject(ResourceType.class, RESOURCE_OPENDJ_OID, null, null,
				taskManager.createTaskInstance(), result);

		result.computeStatus();
		TestUtil.assertSuccess(result);

		resource = addResourceFromFile(RESOURCE_OPENDJ_INITIALIZED_FILE, ProvisioningTestUtil.CONNECTOR_LDAP_TYPE, result);

		result.computeStatus();
		TestUtil.assertSuccess(result);
	}

	/**
	 * This goes to local repo, therefore the expected result is ObjectNotFound.
	 * We know that the shadow does not exist.
	 */
	@Test
	public void test510GetObjectNoShadow() throws Exception {
		final String TEST_NAME = "test510GetObjectNoShadow";
		TestUtil.displayTestTile(TEST_NAME);

		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		try {
			ShadowType acct = provisioningService.getObject(ShadowType.class, NON_EXISTENT_OID, null,
					taskManager.createTaskInstance(), result).asObjectable();

			AssertJUnit.fail("getObject succeeded unexpectedly");
		} catch (ObjectNotFoundException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	/**
	 * This is using the shadow to go to the resource. But it cannot as OpenDJ is down.
	 * Therefore the expected result is CommunicationException. It must not be ObjectNotFound as
	 * we do NOT know that the shadow does not exist.
	 * Provisioning should return a repo shadow and indicate the result both in operation result and
	 * in fetchResult in the returned shadow.
	 */
	@Test
	public void test511GetObjectShadow() throws Exception {
		final String TEST_NAME = "test511GetObjectShadow";
		TestUtil.displayTestTile(TEST_NAME);

		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		PrismObject<ShadowType> acct = provisioningService.getObject(ShadowType.class, ACCOUNT1_OID, null,
				taskManager.createTaskInstance(), result);

		display("Account", acct);

		result.computeStatus();
		display("getObject result", result);
		assertEquals("Expected result partial error but was " + result.getStatus(),
				OperationResultStatus.PARTIAL_ERROR, result.getStatus());

		OperationResultType fetchResult = acct.asObjectable().getFetchResult();
		display("getObject fetchResult", fetchResult);
		assertEquals("Expected fetchResult partial error but was " + result.getStatus(),
				OperationResultStatusType.PARTIAL_ERROR, fetchResult.getStatus());
	}

	/**
	 * This is using the shadow to go to the resource. But it cannot as OpenDJ is down.
	 * Therefore the expected result is CommunicationException. It must not be ObjectNotFound as
	 * we do NOT know that the shadow does not exist.
	 */
	@Test
	public void test520ListResourceObjects() throws Exception {
		final String TEST_NAME = "test520ListResourceObjects";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		try {
			// WHEN
			List<PrismObject<? extends ShadowType>> objectList = provisioningService.listResourceObjects(
					RESOURCE_OPENDJ_OID, RESOURCE_OPENDJ_ACCOUNT_OBJECTCLASS, null, null, result);

			AssertJUnit.fail("listResourceObjects succeeded unexpectedly");
		} catch (CommunicationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	/**
	 * With schema present the search gets as far as contacting the resource, so
	 * the expected failure is CommunicationException (not ConfigurationException
	 * as in test121).
	 */
	@Test
	public void test521SearchAccounts() throws SchemaException, ObjectNotFoundException,
			CommunicationException, ConfigurationException, SecurityViolationException, Exception {
		final String TEST_NAME = "test521SearchAccounts";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		final String resourceNamespace = ResourceTypeUtil.getResourceNamespace(resource);
		QName objectClass = new QName(resourceNamespace, OBJECT_CLASS_INETORGPERSON_NAME);

		ObjectQuery query = ObjectQueryUtil.createResourceAndObjectClassQuery(resource.getOid(), objectClass, prismContext);

		try {
			// WHEN
			provisioningService.searchObjects(ShadowType.class, query, null, null, result);

			AssertJUnit.fail("searchObjects succeeded unexpectedly");
		} catch (CommunicationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}

	@Test
	public void test522SearchAccountsIterative() throws SchemaException, ObjectNotFoundException,
			CommunicationException, ConfigurationException, SecurityViolationException, Exception {
		final String TEST_NAME = "test522SearchAccountsIterative";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		final String resourceNamespace = ResourceTypeUtil.getResourceNamespace(resource);
		QName objectClass = new QName(resourceNamespace, OBJECT_CLASS_INETORGPERSON_NAME);

		ObjectQuery query = ObjectQueryUtil.createResourceAndObjectClassQuery(resource.getOid(), objectClass, prismContext);

		ResultHandler handler = new ResultHandler<ObjectType>() {
			@Override
			public boolean handle(PrismObject<ObjectType> prismObject, OperationResult parentResult) {
				AssertJUnit.fail("handler called unexpectedly");
				return false;
			}
		};

		try {
			// WHEN
			provisioningService.searchObjectsIterative(ShadowType.class, query, null, handler, null, result);

			AssertJUnit.fail("searchObjectsIterative succeeded unexpectedly");
		} catch (CommunicationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		display(result);
		TestUtil.assertFailure(result);
	}

	/**
	 * With schema and capabilities the add is recorded in the repo shadow for
	 * later retry: HANDLED_ERROR and failedOperationType=ADD are expected.
	 */
	@Test
	public void test530AddAccountWill() throws Exception {
		final String TEST_NAME = "test530AddAccountWill";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		ShadowType object = parseObjectType(ACCOUNT_WILL_FILE, ShadowType.class);
		display("Account to add", object);
		Task task = taskManager.createTaskInstance();

		// WHEN
		String addedObjectOid = provisioningService.addObject(object.asPrismObject(), null, null, task, result);

		// THEN
		result.computeStatus();
		display("addObject result", result);
		assertEquals("Wrong result", OperationResultStatus.HANDLED_ERROR, result.getStatus());

		assertEquals(ACCOUNT_WILL_OID, addedObjectOid);

		ShadowType repoAccountType = repositoryService.getObject(ShadowType.class, ACCOUNT_WILL_OID,
				null, result).asObjectable();
		display("repo shadow", repoAccountType);
		PrismAsserts.assertEqualsPolyString("Name not equal", ACCOUNT_WILL_DN, repoAccountType.getName());
		assertEquals("Wrong failedOperationType in repo", FailedOperationTypeType.ADD, repoAccountType.getFailedOperationType());
		OperationResultType repoResult = repoAccountType.getResult();
		assertNotNull("No result in shadow (repo)", repoResult);
		TestUtil.assertFailure("Result in shadow (repo)", repoResult);

		ShadowType provisioningAccountType = provisioningService.getObject(ShadowType.class, ACCOUNT_WILL_OID,
				null, task, result).asObjectable();
		display("provisioning shadow", provisioningAccountType);
		PrismAsserts.assertEqualsPolyString("Name not equal", ACCOUNT_WILL_DN, provisioningAccountType.getName());
		assertEquals("Wrong failedOperationType in repo", FailedOperationTypeType.ADD, provisioningAccountType.getFailedOperationType());
		OperationResultType provisioningResult = provisioningAccountType.getResult();
		assertNotNull("No result in shadow (repo)", provisioningResult);
		TestUtil.assertFailure("Result in shadow (repo)", provisioningResult);
	}

	/**
	 * Delete is recorded in the repo shadow for later retry: HANDLED_ERROR and
	 * failedOperationType=DELETE are expected.
	 */
	@Test
	public void test540DeleteObject() throws Exception {
		final String TEST_NAME = "test540DeleteObject";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);
		Task task = taskManager.createTaskInstance();

		// WHEN
		provisioningService.deleteObject(ShadowType.class, ACCOUNT_SPARROW_OID, null, null, task, result);

		// THEN
		result.computeStatus();
		display("deleteObject result", result);
		assertEquals("Wrong result", OperationResultStatus.HANDLED_ERROR, result.getStatus());

		ShadowType repoAccountType = repositoryService.getObject(ShadowType.class, ACCOUNT_SPARROW_OID,
				null, result).asObjectable();
		display("repo shadow", repoAccountType);
		assertEquals("Wrong failedOperationType in repo", FailedOperationTypeType.DELETE, repoAccountType.getFailedOperationType());
		OperationResultType repoResult = repoAccountType.getResult();
		assertNotNull("No result in shadow (repo)", repoResult);
		display("repoResult in shadow", repoResult);
		TestUtil.assertFailure("Result in shadow (repo)", repoResult);

		ShadowType provisioningAccountType = provisioningService.getObject(ShadowType.class, ACCOUNT_SPARROW_OID,
				null, task, result).asObjectable();
		display("provisioning shadow", provisioningAccountType);
		assertEquals("Wrong failedOperationType in repo", FailedOperationTypeType.DELETE,
				provisioningAccountType.getFailedOperationType());
		OperationResultType provisioningResult = provisioningAccountType.getResult();
		assertNotNull("No result in shadow (repo)", provisioningResult);
		TestUtil.assertFailure("Result in shadow (repo)", provisioningResult);
	}

	/**
	 * Modify is recorded in the repo shadow for later retry: HANDLED_ERROR and
	 * failedOperationType=MODIFY are expected.
	 */
	@Test
	public void test550ModifyObject() throws Exception {
		final String TEST_NAME = "test550ModifyObject";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		OperationResult result = new OperationResult(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);

		ObjectModificationType objectChange = PrismTestUtil.parseAtomicValue(ACCOUNT_JACK_CHANGE_FILE,
				ObjectModificationType.COMPLEX_TYPE);
		ObjectDelta<ShadowType> delta = DeltaConvertor.createObjectDelta(objectChange, ShadowType.class,
				PrismTestUtil.getPrismContext());
		display("Object change", delta);

		Task task = taskManager.createTaskInstance();

		// WHEN
		provisioningService.modifyObject(ShadowType.class, objectChange.getOid(),
				delta.getModifications(), null, null, task, result);

		// THEN
		result.computeStatus();
		display("modifyObject result", result);
		assertEquals("Wrong result", OperationResultStatus.HANDLED_ERROR, result.getStatus());

		ShadowType repoAccountType = repositoryService.getObject(ShadowType.class, ACCOUNT_JACK_OID,
				null, result).asObjectable();
		display("repo shadow", repoAccountType);
		assertEquals("Wrong failedOperationType in repo", FailedOperationTypeType.MODIFY, repoAccountType.getFailedOperationType());
		OperationResultType repoResult = repoAccountType.getResult();
		assertNotNull("No result in shadow (repo)", repoResult);
		TestUtil.assertFailure("Result in shadow (repo)", repoResult);

		ShadowType provisioningAccountType = provisioningService.getObject(ShadowType.class, ACCOUNT_JACK_OID,
				null, task, result).asObjectable();
		display("provisioning shadow", provisioningAccountType);
		assertEquals("Wrong failedOperationType in repo", FailedOperationTypeType.MODIFY,
				provisioningAccountType.getFailedOperationType());
		OperationResultType provisioningResult = provisioningAccountType.getResult();
		assertNotNull("No result in shadow (repo)", provisioningResult);
		TestUtil.assertFailure("Result in shadow (repo)", provisioningResult);
	}

	@Test
	public void test590Synchronize() throws Exception {
		final String TEST_NAME = "test590Synchronize";
		TestUtil.displayTestTile(TEST_NAME);
		// GIVEN
		Task task = taskManager.createTaskInstance(TestOpenDjNegative.class.getName()
				+ "." + TEST_NAME);
		OperationResult result = task.getResult();

		ResourceShadowDiscriminator coords = new ResourceShadowDiscriminator(RESOURCE_OPENDJ_OID,
				new QName(RESOURCE_NS, ConnectorFactoryIcfImpl.ACCOUNT_OBJECT_CLASS_LOCAL_NAME));

		try {
			provisioningService.synchronize(coords, task, result);

			AssertJUnit.fail("synchronize succeeded unexpectedly");
		} catch (CommunicationException e) {
			// This is expected
			display("Expected exception", e);
		}

		result.computeStatus();
		TestUtil.assertFailure(result);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.runtime.rest.handler.legacy.files; /***************************************************************************** * This code is based on the "HttpStaticFileServerHandler" from the * Netty project's HTTP server example. 
* * See http://netty.io and * https://github.com/netty/netty/blob/4.0/example/src/main/java/io/netty/example/http/file/HttpStaticFileServerHandler.java *****************************************************************************/ import org.apache.flink.api.common.time.Time; import org.apache.flink.runtime.rest.handler.RedirectHandler; import org.apache.flink.runtime.rest.handler.util.MimeTypes; import org.apache.flink.runtime.webmonitor.RestfulGateway; import org.apache.flink.runtime.webmonitor.retriever.GatewayRetriever; import org.apache.flink.shaded.netty4.io.netty.buffer.Unpooled; import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFuture; import org.apache.flink.shaded.netty4.io.netty.channel.ChannelFutureListener; import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandler; import org.apache.flink.shaded.netty4.io.netty.channel.ChannelHandlerContext; import org.apache.flink.shaded.netty4.io.netty.channel.DefaultFileRegion; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.DefaultFullHttpResponse; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.DefaultHttpResponse; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.FullHttpResponse; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpChunkedInput; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpRequest; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponse; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.LastHttpContent; import org.apache.flink.shaded.netty4.io.netty.handler.codec.http.router.Routed; import org.apache.flink.shaded.netty4.io.netty.handler.ssl.SslHandler; import org.apache.flink.shaded.netty4.io.netty.handler.stream.ChunkedFile; import 
org.apache.flink.shaded.netty4.io.netty.util.CharsetUtil; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.RandomAccessFile; import java.net.URI; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Files; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Calendar; import java.util.Collections; import java.util.Date; import java.util.GregorianCalendar; import java.util.Locale; import java.util.TimeZone; import java.util.concurrent.CompletableFuture; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.CACHE_CONTROL; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.CONNECTION; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.DATE; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.EXPIRES; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.IF_MODIFIED_SINCE; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpHeaders.Names.LAST_MODIFIED; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus.NOT_MODIFIED; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpResponseStatus.OK; import static org.apache.flink.shaded.netty4.io.netty.handler.codec.http.HttpVersion.HTTP_1_1; import static org.apache.flink.util.Preconditions.checkNotNull; /** * Simple file server handler that serves requests to web frontend's static files, such as * 
 HTML, CSS, or JS files.
 *
 * <p>This code is based on the "HttpStaticFileServerHandler" from the Netty project's HTTP server
 * example.</p>
 */
@ChannelHandler.Sharable
public class StaticFileServerHandler<T extends RestfulGateway> extends RedirectHandler<T> {

	/** Timezone in which this server answers its "if-modified" requests. */
	private static final TimeZone GMT_TIMEZONE = TimeZone.getTimeZone("GMT");

	/** Date format for HTTP. */
	public static final String HTTP_DATE_FORMAT = "EEE, dd MMM yyyy HH:mm:ss zzz";

	/** By default, we allow files to be cached for 5 minutes. */
	private static final int HTTP_CACHE_SECONDS = 300;

	// ------------------------------------------------------------------------

	/** The path in which the static documents are. */
	private final File rootPath;

	/**
	 * Creates the handler.
	 *
	 * @param retriever retrieves the gateway this handler serves for
	 * @param localJobManagerAddressFuture completes with the local JobManager address
	 * @param timeout gateway retrieval timeout
	 * @param rootPath directory containing the static files; resolved to its
	 *                 canonical form eagerly so later containment checks are reliable
	 * @throws IOException if the canonical path of {@code rootPath} cannot be resolved
	 */
	public StaticFileServerHandler(
			GatewayRetriever<T> retriever,
			CompletableFuture<String> localJobManagerAddressFuture,
			Time timeout,
			File rootPath) throws IOException {

		super(localJobManagerAddressFuture, retriever, timeout, Collections.emptyMap());

		this.rootPath = checkNotNull(rootPath).getCanonicalFile();
	}

	// ------------------------------------------------------------------------
	//  Responses to requests
	// ------------------------------------------------------------------------

	@Override
	protected void respondAsLeader(ChannelHandlerContext channelHandlerContext, Routed routed, T gateway) throws Exception {
		final HttpRequest request = routed.request();
		final String requestPath;

		// make sure we request the "index.html" in case there is a directory request
		if (routed.path().endsWith("/")) {
			requestPath = routed.path() + "index.html";
		}
		// in case the files being accessed are logs or stdout files, find appropriate paths.
		// NOTE(review): an empty request path resolves to rootPath itself below, which then
		// fails the isFile() check and yields 404 — presumably these paths are rewritten
		// upstream; confirm against the router configuration.
		else if (routed.path().equals("/jobmanager/log") || routed.path().equals("/jobmanager/stdout")) {
			requestPath = "";
		}
		else {
			requestPath = routed.path();
		}

		respondToRequest(channelHandlerContext, request, requestPath);
	}

	/**
	 * Response when running with leading JobManager.
	 *
	 * <p>Serves the requested file from {@code rootPath}, lazily extracting it from the
	 * classloader's "web" resource directory on first access. Answers 304 when the
	 * client's If-Modified-Since matches the file's modification time (second
	 * resolution), 404 for missing/hidden/out-of-root paths, and otherwise streams the
	 * file (zero-copy via {@link DefaultFileRegion} unless SSL is on the pipeline).
	 */
	private void respondToRequest(ChannelHandlerContext ctx, HttpRequest request, String requestPath)
			throws IOException, ParseException, URISyntaxException {

		// convert to absolute path
		final File file = new File(rootPath, requestPath);

		if (!file.exists()) {
			// file does not exist. Try to load it with the classloader
			ClassLoader cl = StaticFileServerHandler.class.getClassLoader();

			try (InputStream resourceStream = cl.getResourceAsStream("web" + requestPath)) {
				boolean success = false;
				try {
					if (resourceStream != null) {
						URL root = cl.getResource("web");
						URL requested = cl.getResource("web" + requestPath);

						if (root != null && requested != null) {
							URI rootURI = new URI(root.getPath()).normalize();
							URI requestedURI = new URI(requested.getPath()).normalize();

							// Check that we don't load anything from outside of the
							// expected scope.
							if (!rootURI.relativize(requestedURI).equals(requestedURI)) {
								logger.debug("Loading missing file from classloader: {}", requestPath);
								// ensure that directory to file exists.
								file.getParentFile().mkdirs();
								Files.copy(resourceStream, file.toPath());

								success = true;
							}
						}
					}
				} catch (Throwable t) {
					logger.error("error while responding", t);
				} finally {
					// any failure to materialize the resource is answered as 404;
					// the return here aborts the request before the checks below.
					if (!success) {
						logger.debug("Unable to load requested file {} from classloader", requestPath);
						sendError(ctx, NOT_FOUND);
						return;
					}
				}
			}
		}

		if (!file.exists() || file.isHidden() || file.isDirectory() || !file.isFile()) {
			sendError(ctx, NOT_FOUND);
			return;
		}

		// path-traversal guard: the canonical target must still live under rootPath
		if (!file.getCanonicalFile().toPath().startsWith(rootPath.toPath())) {
			sendError(ctx, NOT_FOUND);
			return;
		}

		// cache validation
		final String ifModifiedSince = request.headers().get(IF_MODIFIED_SINCE);
		if (ifModifiedSince != null && !ifModifiedSince.isEmpty()) {
			// SimpleDateFormat is not thread-safe, so a fresh instance is created per request
			SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
			Date ifModifiedSinceDate = dateFormatter.parse(ifModifiedSince);

			// Only compare up to the second because the datetime format we send to the client
			// does not have milliseconds
			long ifModifiedSinceDateSeconds = ifModifiedSinceDate.getTime() / 1000;
			long fileLastModifiedSeconds = file.lastModified() / 1000;
			if (ifModifiedSinceDateSeconds == fileLastModifiedSeconds) {
				if (logger.isDebugEnabled()) {
					logger.debug("Responding 'NOT MODIFIED' for file '" + file.getAbsolutePath() + '\'');
				}

				sendNotModified(ctx);
				return;
			}
		}

		if (logger.isDebugEnabled()) {
			logger.debug("Responding with file '" + file.getAbsolutePath() + '\'');
		}

		// Don't need to close this manually. Netty's DefaultFileRegion will take care of it.
		final RandomAccessFile raf;
		try {
			raf = new RandomAccessFile(file, "r");
		}
		catch (FileNotFoundException e) {
			sendError(ctx, NOT_FOUND);
			return;
		}

		try {
			long fileLength = raf.length();

			HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
			setContentTypeHeader(response, file);

			// since the log and out files are rapidly changing, we don't want to browser to cache them
			if (!(requestPath.contains("log") || requestPath.contains("out"))) {
				setDateAndCacheHeaders(response, file);
			}
			if (HttpHeaders.isKeepAlive(request)) {
				response.headers().set(CONNECTION, HttpHeaders.Values.KEEP_ALIVE);
			}
			HttpHeaders.setContentLength(response, fileLength);

			// write the initial line and the header.
			ctx.write(response);

			// write the content.
			ChannelFuture lastContentFuture;
			if (ctx.pipeline().get(SslHandler.class) == null) {
				// no SSL: zero-copy transfer; the file region closes the channel's file on completion
				ctx.write(new DefaultFileRegion(raf.getChannel(), 0, fileLength), ctx.newProgressivePromise());
				lastContentFuture = ctx.writeAndFlush(LastHttpContent.EMPTY_LAST_CONTENT);
			} else {
				// SSL: zero-copy is unavailable, stream in 8 KiB chunks instead
				lastContentFuture = ctx.writeAndFlush(new HttpChunkedInput(new ChunkedFile(raf, 0, fileLength, 8192)),
					ctx.newProgressivePromise());
				// HttpChunkedInput will write the end marker (LastHttpContent) for us.
			}

			// close the connection, if no keep-alive is needed
			if (!HttpHeaders.isKeepAlive(request)) {
				lastContentFuture.addListener(ChannelFutureListener.CLOSE);
			}
		} catch (Exception e) {
			// close the file handle ourselves since it was never handed off to Netty successfully
			raf.close();
			logger.error("Failed to serve file.", e);
			sendError(ctx, INTERNAL_SERVER_ERROR);
		}
	}

	@Override
	public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
		// only answer with an error if the channel can still take a response
		if (ctx.channel().isActive()) {
			logger.error("Caught exception", cause);
			sendError(ctx, INTERNAL_SERVER_ERROR);
		}
	}

	// ------------------------------------------------------------------------
	//  Utilities to encode headers and responses
	// ------------------------------------------------------------------------

	/**
	 * Writes a simple error response message.
	 *
	 * @param ctx    The channel context to write the response to.
	 * @param status The response status.
	 */
	public static void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
		FullHttpResponse response = new DefaultFullHttpResponse(
				HTTP_1_1, status, Unpooled.copiedBuffer("Failure: " + status + "\r\n", CharsetUtil.UTF_8));
		response.headers().set(CONTENT_TYPE, "text/plain; charset=UTF-8");

		// close the connection as soon as the error message is sent.
		ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
	}

	/**
	 * Send the "304 Not Modified" response. This response can be used when the
	 * file timestamp is the same as what the browser is sending up.
	 *
	 * @param ctx The channel context to write the response to.
	 */
	public static void sendNotModified(ChannelHandlerContext ctx) {
		FullHttpResponse response = new DefaultFullHttpResponse(HTTP_1_1, NOT_MODIFIED);
		setDateHeader(response);

		// close the connection as soon as the error message is sent.
		ctx.writeAndFlush(response).addListener(ChannelFutureListener.CLOSE);
	}

	/**
	 * Sets the "date" header for the HTTP response.
	 *
	 * @param response HTTP response
	 */
	public static void setDateHeader(FullHttpResponse response) {
		SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
		dateFormatter.setTimeZone(GMT_TIMEZONE);

		Calendar time = new GregorianCalendar();
		response.headers().set(DATE, dateFormatter.format(time.getTime()));
	}

	/**
	 * Sets the "date" and "cache" headers for the HTTP Response.
	 *
	 * @param response    The HTTP response object.
	 * @param fileToCache File to extract the modification timestamp from.
	 */
	public static void setDateAndCacheHeaders(HttpResponse response, File fileToCache) {
		SimpleDateFormat dateFormatter = new SimpleDateFormat(HTTP_DATE_FORMAT, Locale.US);
		dateFormatter.setTimeZone(GMT_TIMEZONE);

		// date header
		Calendar time = new GregorianCalendar();
		response.headers().set(DATE, dateFormatter.format(time.getTime()));

		// cache headers: allow caching for HTTP_CACHE_SECONDS and advertise last-modified
		time.add(Calendar.SECOND, HTTP_CACHE_SECONDS);
		response.headers().set(EXPIRES, dateFormatter.format(time.getTime()));
		response.headers().set(CACHE_CONTROL, "private, max-age=" + HTTP_CACHE_SECONDS);
		response.headers().set(LAST_MODIFIED, dateFormatter.format(new Date(fileToCache.lastModified())));
	}

	/**
	 * Sets the content type header for the HTTP Response.
	 *
	 * @param response HTTP response
	 * @param file     file to extract content type
	 */
	public static void setContentTypeHeader(HttpResponse response, File file) {
		String mimeType = MimeTypes.getMimeTypeForFileName(file.getName());
		// fall back to the default mime type when the extension is unknown
		String mimeFinal = mimeType != null ? mimeType : MimeTypes.getDefaultMimeType();
		response.headers().set(CONTENT_TYPE, mimeFinal);
	}
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.sysds.test.functions.binary.matrix; import org.junit.Test; import org.apache.sysds.test.AutomatedTestBase; import org.apache.sysds.test.TestConfiguration; public class ScalarSubtractionTest extends AutomatedTestBase { private static final String TEST_DIR = "functions/binary/matrix/"; private static final String TEST_CLASS_DIR = TEST_DIR + ScalarSubtractionTest.class.getSimpleName() + "/"; @Override public void setUp() { // positive tests addTestConfiguration("IntConstTest", new TestConfiguration(TEST_CLASS_DIR, "ScalarSubtractionTest", new String[] { "vector_left", "vector_right", "matrix_left", "matrix_right" })); addTestConfiguration("IntVarTest", new TestConfiguration(TEST_CLASS_DIR, "ScalarSubtractionTest", new String[] { "vector_left", "vector_right", "matrix_left", "matrix_right" })); addTestConfiguration("DoubleConstTest", new TestConfiguration(TEST_CLASS_DIR, "ScalarSubtractionTest", new String[] { "vector_left", "vector_right", "matrix_left", "matrix_right" })); addTestConfiguration("DoubleVarTest", new TestConfiguration(TEST_CLASS_DIR, "ScalarSubtractionTest", new String[] { "vector_left", "vector_right", "matrix_left", "matrix_right" })); 
addTestConfiguration("SparseTest", new TestConfiguration(TEST_CLASS_DIR, "ScalarSubtractionTest", new String[] { "vector_left", "vector_right", "matrix_left", "matrix_right" })); addTestConfiguration("EmptyTest", new TestConfiguration(TEST_CLASS_DIR, "ScalarSubtractionTest", new String[] { "vector_left", "vector_right", "matrix_left", "matrix_right" })); // negative tests } @Test public void testIntConst() { int rows = 10; int cols = 10; int subtrahend = 2; int minuend = 2; TestConfiguration config = availableTestConfigurations.get("IntConstTest"); config.addVariable("rows", rows); config.addVariable("cols", cols); config.addVariable("vardeclaration", ""); config.addVariable("subtrahend", subtrahend); config.addVariable("minuend", minuend); loadTestConfiguration(config); double[][] vector = getRandomMatrix(rows, 1, 0, 1, 1, -1); double[][] computedVectorLeft = new double[rows][1]; double[][] computedVectorRight = new double[rows][1]; for(int i = 0; i < rows; i++) { computedVectorLeft[i][0] = vector[i][0] - subtrahend; computedVectorRight[i][0] = minuend - vector[i][0]; } writeInputMatrix("vector", vector); writeExpectedMatrix("vector_left", computedVectorLeft); writeExpectedMatrix("vector_right", computedVectorRight); double[][] matrix = getRandomMatrix(rows, cols, 0, 1, 1, -1); double[][] computedMatrixLeft = new double[rows][cols]; double[][] computedMatrixRight = new double[rows][cols]; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) { computedMatrixLeft[i][j] = matrix[i][j] - subtrahend; computedMatrixRight[i][j] = minuend - matrix[i][j]; } } writeInputMatrix("matrix", matrix); writeExpectedMatrix("matrix_left", computedMatrixLeft); writeExpectedMatrix("matrix_right", computedMatrixRight); runTest(); compareResults(); } @Test public void testIntVar() { int rows = 10; int cols = 10; int subtrahend = 2; int minuend = 2; TestConfiguration config = availableTestConfigurations.get("IntVarTest"); config.addVariable("rows", rows); 
config.addVariable("cols", cols); config.addVariable("vardeclaration", "Subtrahend = " + subtrahend); config.addVariable("subtrahend", "Subtrahend"); config.addVariable("minuend", minuend); loadTestConfiguration(config); double[][] vector = getRandomMatrix(rows, 1, 0, 1, 1, -1); double[][] computedVectorLeft = new double[rows][1]; double[][] computedVectorRight = new double[rows][1]; for(int i = 0; i < rows; i++) { computedVectorLeft[i][0] = vector[i][0] - subtrahend; computedVectorRight[i][0] = minuend - vector[i][0]; } writeInputMatrix("vector", vector); writeExpectedMatrix("vector_left", computedVectorLeft); writeExpectedMatrix("vector_right", computedVectorRight); double[][] matrix = getRandomMatrix(rows, cols, 0, 1, 1, -1); double[][] computedMatrixLeft = new double[rows][cols]; double[][] computedMatrixRight = new double[rows][cols]; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) { computedMatrixLeft[i][j] = matrix[i][j] - subtrahend; computedMatrixRight[i][j] = minuend - matrix[i][j]; } } writeInputMatrix("matrix", matrix); writeExpectedMatrix("matrix_left", computedMatrixLeft); writeExpectedMatrix("matrix_right", computedMatrixRight); runTest(); compareResults(); } @Test public void testDoubleConst() { int rows = 10; int cols = 10; double subtrahend = 2; double minuend = 2; TestConfiguration config = availableTestConfigurations.get("DoubleConstTest"); config.addVariable("rows", rows); config.addVariable("cols", cols); config.addVariable("vardeclaration", ""); config.addVariable("subtrahend", subtrahend); config.addVariable("minuend", minuend); loadTestConfiguration(config); double[][] vector = getRandomMatrix(rows, 1, 0, 1, 1, -1); double[][] computedVectorLeft = new double[rows][1]; double[][] computedVectorRight = new double[rows][1]; for(int i = 0; i < rows; i++) { computedVectorLeft[i][0] = vector[i][0] - subtrahend; computedVectorRight[i][0] = minuend - vector[i][0]; } writeInputMatrix("vector", vector); 
writeExpectedMatrix("vector_left", computedVectorLeft); writeExpectedMatrix("vector_right", computedVectorRight); double[][] matrix = getRandomMatrix(rows, cols, 0, 1, 1, -1); double[][] computedMatrixLeft = new double[rows][cols]; double[][] computedMatrixRight = new double[rows][cols]; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) { computedMatrixLeft[i][j] = matrix[i][j] - subtrahend; computedMatrixRight[i][j] = minuend - matrix[i][j]; } } writeInputMatrix("matrix", matrix); writeExpectedMatrix("matrix_left", computedMatrixLeft); writeExpectedMatrix("matrix_right", computedMatrixRight); runTest(); compareResults(); } @Test public void testDoubleVar() { int rows = 10; int cols = 10; double subtrahend = 2; double minuend = 2; TestConfiguration config = availableTestConfigurations.get("DoubleVarTest"); config.addVariable("rows", rows); config.addVariable("cols", cols); config.addVariable("vardeclaration", "Subtrahend = " + subtrahend); config.addVariable("subtrahend", "Subtrahend"); config.addVariable("minuend", minuend); loadTestConfiguration(config); double[][] vector = getRandomMatrix(rows, 1, 0, 1, 1, -1); double[][] computedVectorLeft = new double[rows][1]; double[][] computedVectorRight = new double[rows][1]; for(int i = 0; i < rows; i++) { computedVectorLeft[i][0] = vector[i][0] - subtrahend; computedVectorRight[i][0] = minuend - vector[i][0]; } writeInputMatrix("vector", vector); writeExpectedMatrix("vector_left", computedVectorLeft); writeExpectedMatrix("vector_right", computedVectorRight); double[][] matrix = getRandomMatrix(rows, cols, 0, 1, 1, -1); double[][] computedMatrixLeft = new double[rows][cols]; double[][] computedMatrixRight = new double[rows][cols]; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) { computedMatrixLeft[i][j] = matrix[i][j] - subtrahend; computedMatrixRight[i][j] = minuend - matrix[i][j]; } } writeInputMatrix("matrix", matrix); writeExpectedMatrix("matrix_left", computedMatrixLeft); 
writeExpectedMatrix("matrix_right", computedMatrixRight); runTest(); compareResults(); } @Test public void testSparse() { int rows = 100; int cols = 50; int subtrahend = 2; int minuend = 2; TestConfiguration config = availableTestConfigurations.get("SparseTest"); config.addVariable("rows", rows); config.addVariable("cols", cols); config.addVariable("vardeclaration", ""); config.addVariable("subtrahend", subtrahend); config.addVariable("minuend", minuend); loadTestConfiguration(config); double[][] vector = getRandomMatrix(rows, 1, -1, 1, 0.05, -1); double[][] computedVectorLeft = new double[rows][1]; double[][] computedVectorRight = new double[rows][1]; for(int i = 0; i < rows; i++) { computedVectorLeft[i][0] = vector[i][0] - subtrahend; computedVectorRight[i][0] = minuend - vector[i][0]; } writeInputMatrix("vector", vector); writeExpectedMatrix("vector_left", computedVectorLeft); writeExpectedMatrix("vector_right", computedVectorRight); double[][] matrix = getRandomMatrix(rows, cols, -1, 1, 0.05, -1); double[][] computedMatrixLeft = new double[rows][cols]; double[][] computedMatrixRight = new double[rows][cols]; for(int i = 0; i < rows; i++) { for(int j = 0; j < cols; j++) { computedMatrixLeft[i][j] = matrix[i][j] - subtrahend; computedMatrixRight[i][j] = minuend - matrix[i][j]; } } writeInputMatrix("matrix", matrix); writeExpectedMatrix("matrix_left", computedMatrixLeft); writeExpectedMatrix("matrix_right", computedMatrixRight); runTest(); compareResults(); } }
/**
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jboss.pnc.rest.endpoint;

import io.swagger.annotations.Api;
import io.swagger.annotations.ApiOperation;
import io.swagger.annotations.ApiParam;
import io.swagger.annotations.ApiResponse;
import io.swagger.annotations.ApiResponses;
import org.jboss.pnc.model.BuildConfigurationSet;
import org.jboss.pnc.model.User;
import org.jboss.pnc.rest.provider.BuildConfigSetRecordProvider;
import org.jboss.pnc.rest.provider.BuildConfigurationProvider;
import org.jboss.pnc.rest.provider.BuildConfigurationSetProvider;
import org.jboss.pnc.rest.provider.BuildRecordProvider;
import org.jboss.pnc.rest.restmodel.BuildConfigurationRest;
import org.jboss.pnc.rest.restmodel.BuildConfigurationSetRest;
import org.jboss.pnc.rest.restmodel.response.Singleton;
import org.jboss.pnc.rest.restmodel.response.error.ErrorResponseRest;
import org.jboss.pnc.rest.swagger.response.*;
import org.jboss.pnc.rest.trigger.BuildConfigurationSetTriggerResult;
import org.jboss.pnc.rest.trigger.BuildTriggerer;
import org.jboss.pnc.rest.utils.EndpointAuthenticationProvider;
import org.jboss.pnc.rest.validation.exceptions.EmptyEntityException;
import org.jboss.pnc.rest.validation.exceptions.InvalidEntityException;
import org.jboss.pnc.rest.validation.exceptions.RestValidationException;
import org.jboss.pnc.spi.BuildOptions;
import org.jboss.pnc.spi.builddriver.exception.BuildDriverException;
import org.jboss.pnc.spi.datastore.Datastore;
import org.jboss.pnc.spi.datastore.DatastoreException;
import org.jboss.pnc.spi.exception.CoreException;
import org.jboss.pnc.spi.repositorymanager.RepositoryManagerException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.inject.Inject;
import javax.servlet.http.HttpServletRequest;
import javax.validation.constraints.NotNull;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.core.UriBuilder;
import javax.ws.rs.core.UriInfo;
import java.lang.invoke.MethodHandles;
import java.net.MalformedURLException;
import java.net.URI;
import java.net.URL;
import java.util.List;
import java.util.stream.Collectors;

import static org.jboss.pnc.rest.configuration.SwaggerConstants.CONFLICTED_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.CONFLICTED_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.INVALID_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.INVALID_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.NOT_FOUND_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.NOT_FOUND_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.NO_CONTENT_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.NO_CONTENT_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_INDEX_DEFAULT_VALUE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_INDEX_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_INDEX_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_SIZE_DEFAULT_VALUE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_SIZE_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.PAGE_SIZE_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.QUERY_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.QUERY_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SERVER_ERROR_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SERVER_ERROR_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SORTING_DESCRIPTION;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SORTING_QUERY_PARAM;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SUCCESS_CODE;
import static org.jboss.pnc.rest.configuration.SwaggerConstants.SUCCESS_DESCRIPTION;

/**
 * REST endpoint for CRUD operations on {@link BuildConfigurationSet}s, for
 * managing the build configurations contained in a set, and for listing
 * associated build records. CRUD plumbing is inherited from
 * {@link AbstractEndpoint}; this class mostly adds Swagger documentation and
 * delegates to the injected providers.
 */
@Api(value = "/build-configuration-sets", description = "Set of related build configurations")
@Path("/build-configuration-sets")
@Produces(MediaType.APPLICATION_JSON)
@Consumes(MediaType.APPLICATION_JSON)
public class BuildConfigurationSetEndpoint extends AbstractEndpoint<BuildConfigurationSet, BuildConfigurationSetRest> {

    private static final Logger logger = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

    private BuildTriggerer buildTriggerer;

    @Context
    private HttpServletRequest httpServletRequest;

    private Datastore datastore;
    private EndpointAuthenticationProvider endpointAuthProvider;
    private BuildConfigurationSetProvider buildConfigurationSetProvider;
    private BuildConfigurationProvider buildConfigurationProvider;
    private BuildRecordProvider buildRecordProvider;
    private BuildConfigSetRecordProvider buildConfigSetRecordProvider;

    // no-arg constructor required by CDI proxying
    public BuildConfigurationSetEndpoint() {
    }

    @Inject
    public BuildConfigurationSetEndpoint(BuildConfigurationSetProvider buildConfigurationSetProvider,
            BuildTriggerer buildTriggerer,
            BuildConfigurationProvider buildConfigurationProvider,
            BuildRecordProvider buildRecordProvider,
            BuildConfigSetRecordProvider buildConfigSetRecordProvider,
            Datastore datastore,
            EndpointAuthenticationProvider endpointAuthProvider) {
        super(buildConfigurationSetProvider);
        this.buildConfigurationSetProvider = buildConfigurationSetProvider;
        this.buildTriggerer = buildTriggerer;
        this.buildConfigurationProvider = buildConfigurationProvider;
        this.buildRecordProvider = buildRecordProvider;
        this.buildConfigSetRecordProvider = buildConfigSetRecordProvider;
        this.endpointAuthProvider = endpointAuthProvider;
        this.datastore = datastore;
    }

    @ApiOperation(value = "Gets all Build Configuration Sets")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetPage.class),
            @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildConfigurationSetPage.class),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    public Response getAll(@ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex,
            @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize,
            @ApiParam(value = SORTING_DESCRIPTION) @QueryParam(SORTING_QUERY_PARAM) String sort,
            @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q) {
        return super.getAll(pageIndex, pageSize, sort, q);
    }

    @ApiOperation(value = "Creates a new Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetSingleton.class),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = CONFLICTED_CODE, message = CONFLICTED_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @POST
    public Response createNew(@NotNull BuildConfigurationSetRest buildConfigurationSetRest, @Context UriInfo uriInfo)
            throws RestValidationException {
        logger.debug("Creating new BuildConfigurationSet: {}", buildConfigurationSetRest.toString());
        return super.createNew(buildConfigurationSetRest, uriInfo);
    }

    @ApiOperation(value = "Gets a specific Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetSingleton.class),
            @ApiResponse(code = NOT_FOUND_CODE, message = NOT_FOUND_DESCRIPTION, response = BuildConfigurationSetSingleton.class),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    @Path("/{id}")
    public Response getSpecific(
            @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id) {
        return super.getSpecific(id);
    }

    @ApiOperation(value = "Updates an existing Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = CONFLICTED_CODE, message = CONFLICTED_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @PUT
    @Path("/{id}")
    public Response update(@ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id,
            @NotNull BuildConfigurationSetRest buildConfigurationSetRest) throws RestValidationException {
        return super.update(id, buildConfigurationSetRest);
    }

    @ApiOperation(value = "Removes a specific Build Configuration Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @DELETE
    @Path("/{id}")
    public Response deleteSpecific(@ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id)
            throws RestValidationException {
        return super.delete(id);
    }

    @ApiOperation(value = "Gets the Configurations for the Specified Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationPage.class),
            @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildConfigurationPage.class),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @GET
    @Path("/{id}/build-configurations")
    public Response getConfigurations(@ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex,
            @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize,
            @ApiParam(value = SORTING_DESCRIPTION) @QueryParam(SORTING_QUERY_PARAM) String sort,
            @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q,
            @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id) {
        return fromCollection(
                buildConfigurationProvider.getAllForBuildConfigurationSet(pageIndex, pageSize, sort, q, id));
    }

    // NOTE(review): this operation has no @ApiOperation/@ApiResponses, unlike every
    // sibling method — presumably an oversight; confirm before adding annotations.
    @PUT
    @Path("/{id}/build-configurations")
    public Response updateConfigurations(@ApiParam(value = "Build Configuration Set Id", required = true) @PathParam("id") Integer id,
            List<BuildConfigurationRest> buildConfigurationRests) throws RestValidationException {
        // replaces the full set of configurations associated with the set
        buildConfigurationSetProvider.updateConfigurations(id, buildConfigurationRests);
        return Response.ok().build();
    }

    @ApiOperation(value = "Adds a configuration to the Specified Set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @POST
    @Path("/{id}/build-configurations")
    public Response addConfiguration(
            @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id,
            BuildConfigurationRest buildConfig) throws RestValidationException {
        // reject bodies without an id early; the provider expects an existing config id
        if (buildConfig == null || buildConfig.getId() == null) {
            throw new EmptyEntityException("No valid build config included in request to add config to set id: " + id);
        }
        buildConfigurationSetProvider.addConfiguration(id, buildConfig.getId());
        return fromEmpty();
    }

    @ApiOperation(value = "Removes a configuration from the specified config set")
    @ApiResponses(value = {
            @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION),
            @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class),
            @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class)
    })
    @DELETE
    @Path("/{id}/build-configurations/{configId}")
    public Response removeConfiguration(
            @ApiParam(value = "Build configuration set id", required = true) @PathParam("id") Integer id,
            @ApiParam(value = "Build configuration id", required = true) @PathParam("configId") Integer configId)
            throws RestValidationException {
        buildConfigurationSetProvider.removeConfiguration(id, configId);
        return fromEmpty();
    }
@ApiOperation(value = "Gets all build records associated with the contained build configurations") @ApiResponses(value = { @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildRecordPage.class), @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildRecordPage.class), @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class), @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class) }) @GET @Path("/{id}/build-records") public Response getBuildRecords( @ApiParam(value = "Build configuration set id", required = true) @PathParam("id") Integer id, @ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex, @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize, @ApiParam(value = SORTING_DESCRIPTION) @QueryParam(SORTING_QUERY_PARAM) String sort, @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q) { return fromCollection(buildRecordProvider.getAllForBuildConfigSetRecord(pageIndex, pageSize, sort, q, id)); } @ApiOperation(value = "Builds the Configurations for the Specified Set") @ApiResponses(value = { @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigSetRecordSingleton.class), @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class), @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class) }) @POST @Path("/{id}/build") @Consumes(MediaType.APPLICATION_JSON) public Response build( @ApiParam(value = "Build Configuration Set id", required = true) @PathParam("id") Integer id, @ApiParam(value = "Optional Callback URL", required = false) @QueryParam("callbackUrl") String callbackUrl, @ApiParam(value = "Is 
it a temporary build or a standard build?") @QueryParam("temporaryBuild") @DefaultValue("false") boolean temporaryBuild, @ApiParam(value = "Should we force the rebuild of all build configurations?") @QueryParam("forceRebuild") @DefaultValue("false") boolean forceRebuild, @ApiParam(value = "Should we add a timestamp during the alignment? Valid only for temporary builds.") @QueryParam("timestampAlignment") @DefaultValue("false") boolean timestampAlignment, @Context UriInfo uriInfo) throws InterruptedException, CoreException, DatastoreException, BuildDriverException, RepositoryManagerException, MalformedURLException, InvalidEntityException { logger.info("Executing build configuration set id: " + id ); User currentUser = getCurrentUser(); BuildOptions buildOptions = new BuildOptions(temporaryBuild, forceRebuild, false, false, timestampAlignment); BuildConfigurationEndpoint.checkBuildOptionsValidity(buildOptions); BuildConfigurationSetTriggerResult result; // if callbackUrl is provided trigger build accordingly if (callbackUrl != null && !callbackUrl.isEmpty()) { result = buildTriggerer.triggerBuildConfigurationSet(id, currentUser, buildOptions, new URL(callbackUrl)); } else { result = buildTriggerer.triggerBuildConfigurationSet(id, currentUser, buildOptions); } logger.info("Started build configuration set id: {}. 
Build Tasks: {}", id, result.getBuildTasks().stream().map(bt -> Integer.toString(bt.getId())).collect( Collectors.joining())); UriBuilder uriBuilder = UriBuilder.fromUri(uriInfo.getBaseUri()).path("/build-config-set-records/{id}"); URI uri = uriBuilder.build(result.getBuildRecordSetId()); return Response.ok(uri).header("location", uri).entity(new Singleton<>(buildConfigSetRecordProvider.getSpecific(result.getBuildRecordSetId()))).build(); } private User getCurrentUser() throws InvalidEntityException { User currentUser = endpointAuthProvider.getCurrentUser(httpServletRequest); if (currentUser == null) { throw new InvalidEntityException("No such user exists to trigger builds. Before triggering builds" + " user must be initialized through /users/getLoggedUser"); } return currentUser; } @ApiOperation(value = "Get all build config set execution records associated with this build config set, returns empty list if none are found") @ApiResponses(value = { @ApiResponse(code = SUCCESS_CODE, message = SUCCESS_DESCRIPTION, response = BuildConfigurationSetRecordPage.class), @ApiResponse(code = NO_CONTENT_CODE, message = NO_CONTENT_DESCRIPTION, response = BuildConfigurationSetRecordPage.class), @ApiResponse(code = INVALID_CODE, message = INVALID_DESCRIPTION, response = ErrorResponseRest.class), @ApiResponse(code = SERVER_ERROR_CODE, message = SERVER_ERROR_DESCRIPTION, response = ErrorResponseRest.class) }) @GET @Path("/{id}/build-config-set-records") public Response getAllBuildConfigSetRecords( @ApiParam(value = PAGE_INDEX_DESCRIPTION) @QueryParam(PAGE_INDEX_QUERY_PARAM) @DefaultValue(PAGE_INDEX_DEFAULT_VALUE) int pageIndex, @ApiParam(value = PAGE_SIZE_DESCRIPTION) @QueryParam(PAGE_SIZE_QUERY_PARAM) @DefaultValue(PAGE_SIZE_DEFAULT_VALUE) int pageSize, @ApiParam(value = SORTING_DESCRIPTION) @QueryParam(SORTING_QUERY_PARAM) String sort, @ApiParam(value = QUERY_DESCRIPTION, required = false) @QueryParam(QUERY_QUERY_PARAM) String q, @ApiParam(value = "Build config set id", required 
= true) @PathParam("id") Integer id) { return fromCollection(buildConfigSetRecordProvider.getAllForBuildConfigSet(pageIndex, pageSize, sort, q, id)); } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.segment.incremental;

import com.google.common.base.Supplier;
import com.google.common.collect.Maps;
import io.druid.collections.NonBlockingPool;
import io.druid.collections.ResourceHolder;
import io.druid.data.input.InputRow;
import io.druid.java.util.common.IAE;
import io.druid.java.util.common.ISE;
import io.druid.java.util.common.StringUtils;
import io.druid.java.util.common.io.Closer;
import io.druid.java.util.common.logger.Logger;
import io.druid.java.util.common.parsers.ParseException;
import io.druid.query.aggregation.AggregatorFactory;
import io.druid.query.aggregation.BufferAggregator;
import io.druid.segment.ColumnSelectorFactory;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * An {@link IncrementalIndex} that stores per-row aggregator state off-heap in pooled
 * {@link ByteBuffer}s instead of on-heap objects. Each ingested row is assigned a
 * (buffer index, byte offset) pair recorded in {@code indexAndOffsets}; the aggregators
 * for that row live contiguously at that offset.
 */
public class OffheapIncrementalIndex extends IncrementalIndex<BufferAggregator>
{
  private static final Logger log = new Logger(OffheapIncrementalIndex.class);

  // Pool that hands out (and reclaims) the off-heap buffers used for aggregator state.
  private final NonBlockingPool<ByteBuffer> bufferPool;

  // All buffers taken from the pool so far; holders are closed (returned) in close().
  private final List<ResourceHolder<ByteBuffer>> aggBuffers = new ArrayList<>();
  // rowIndex -> {buffer index into aggBuffers, byte offset within that buffer}.
  private final List<int[]> indexAndOffsets = new ArrayList<>();

  // Row-key -> rowIndex storage (rollup or plain, chosen in the constructor).
  private final FactsHolder facts;

  // Monotonic source of row indices; also drives getLastRowIndex().
  private final AtomicInteger indexIncrement = new AtomicInteger(0);

  protected final int maxRowCount;

  // name -> caching selector factory, one per aggregator; built lazily in initAggs.
  private volatile Map<String, ColumnSelectorFactory> selectors;

  //given a ByteBuffer and an offset where all aggregates for a row are stored
  //offset + aggOffsetInBuffer[i] would give position in ByteBuffer where ith aggregate
  //is stored
  private volatile int[] aggOffsetInBuffer;
  // Total bytes needed for one row's worth of aggregators.
  private volatile int aggsTotalSize;

  private String outOfRowsReason = null;

  OffheapIncrementalIndex(
      IncrementalIndexSchema incrementalIndexSchema,
      boolean deserializeComplexMetrics,
      boolean reportParseExceptions,
      boolean concurrentEventAdd,
      boolean sortFacts,
      int maxRowCount,
      NonBlockingPool<ByteBuffer> bufferPool
  )
  {
    // NOTE: the super constructor ends up calling initAggs, which populates
    // aggOffsetInBuffer/aggsTotalSize before the capacity check below runs.
    super(incrementalIndexSchema, deserializeComplexMetrics, reportParseExceptions, concurrentEventAdd);
    this.maxRowCount = maxRowCount;
    this.bufferPool = bufferPool;

    this.facts = incrementalIndexSchema.isRollup()
                 ? new RollupFactsHolder(sortFacts, dimsComparator(), getDimensions())
                 : new PlainFactsHolder(sortFacts);

    //check that stupid pool gives buffers that can hold at least one row's aggregators
    ResourceHolder<ByteBuffer> bb = bufferPool.take();
    if (bb.get().capacity() < aggsTotalSize) {
      bb.close();
      throw new IAE("bufferPool buffers capacity must be >= [%s]", aggsTotalSize);
    }
    aggBuffers.add(bb);
  }

  @Override
  public FactsHolder getFacts()
  {
    return facts;
  }

  /**
   * Builds one caching selector factory per metric and computes the byte layout
   * (aggOffsetInBuffer, aggsTotalSize) of a row's aggregators. Returns an empty
   * aggregator array — the actual BufferAggregators are created lazily in addToFacts.
   */
  @Override
  protected BufferAggregator[] initAggs(
      final AggregatorFactory[] metrics,
      final Supplier<InputRow> rowSupplier,
      final boolean deserializeComplexMetrics,
      final boolean concurrentEventAdd
  )
  {
    selectors = Maps.newHashMap();
    aggOffsetInBuffer = new int[metrics.length];

    for (int i = 0; i < metrics.length; i++) {
      AggregatorFactory agg = metrics[i];

      ColumnSelectorFactory columnSelectorFactory = makeColumnSelectorFactory(
          agg,
          rowSupplier,
          deserializeComplexMetrics
      );

      selectors.put(
          agg.getName(),
          new OnheapIncrementalIndex.ObjectCachingColumnSelectorFactory(columnSelectorFactory, concurrentEventAdd)
      );

      // Each aggregator is laid out immediately after the previous one.
      if (i == 0) {
        aggOffsetInBuffer[i] = 0;
      } else {
        aggOffsetInBuffer[i] = aggOffsetInBuffer[i - 1] + metrics[i - 1].getMaxIntermediateSize();
      }
    }

    // NOTE(review): this indexes metrics[length - 1], so it assumes metrics is non-empty here.
    aggsTotalSize = aggOffsetInBuffer[metrics.length - 1] + metrics[metrics.length - 1].getMaxIntermediateSize();

    return new BufferAggregator[metrics.length];
  }

  /**
   * Adds a row's values into the off-heap aggregator state for its key.
   * For a new key: allocates (or reuses) buffer space, initializes the aggregators there,
   * registers the (bufferIndex, bufferOffset) pair, and inserts into facts — all under
   * the `this` lock. Aggregation itself then runs outside that lock, synchronized per
   * aggregator instead.
   *
   * @return the current number of distinct rows in the index
   * @throws IndexSizeExceededException when maxRowCount would be exceeded by a new key
   */
  @Override
  protected Integer addToFacts(
      AggregatorFactory[] metrics,
      boolean deserializeComplexMetrics,
      boolean reportParseExceptions,
      InputRow row,
      AtomicInteger numEntries,
      TimeAndDims key,
      ThreadLocal<InputRow> rowContainer,
      Supplier<InputRow> rowSupplier,
      boolean skipMaxRowsInMemoryCheck // ignored, we always want to check this for offheap
  ) throws IndexSizeExceededException
  {
    ByteBuffer aggBuffer;
    int bufferIndex;
    int bufferOffset;

    synchronized (this) {
      final int priorIndex = facts.getPriorIndex(key);
      if (TimeAndDims.EMPTY_ROW_INDEX != priorIndex) {
        // Key already present: reuse its existing buffer location.
        final int[] indexAndOffset = indexAndOffsets.get(priorIndex);
        bufferIndex = indexAndOffset[0];
        bufferOffset = indexAndOffset[1];
        aggBuffer = aggBuffers.get(bufferIndex).get();
      } else {
        if (metrics.length > 0 && getAggs()[0] == null) {
          // note: creation of Aggregators is done lazily when at least one row from input is available
          // so that FilteredAggregators could be initialized correctly.
          rowContainer.set(row);
          for (int i = 0; i < metrics.length; i++) {
            final AggregatorFactory agg = metrics[i];
            getAggs()[i] = agg.factorizeBuffered(selectors.get(agg.getName()));
          }
          rowContainer.set(null);
        }

        bufferIndex = aggBuffers.size() - 1;
        ByteBuffer lastBuffer = aggBuffers.isEmpty() ? null : aggBuffers.get(aggBuffers.size() - 1).get();
        int[] lastAggregatorsIndexAndOffset = indexAndOffsets.isEmpty()
                                              ? null
                                              : indexAndOffsets.get(indexAndOffsets.size() - 1);

        if (lastAggregatorsIndexAndOffset != null && lastAggregatorsIndexAndOffset[0] != bufferIndex) {
          throw new ISE("last row's aggregate's buffer and last buffer index must be same");
        }

        // Candidate offset: right after the last row's aggregators in the current buffer.
        bufferOffset = aggsTotalSize + (lastAggregatorsIndexAndOffset != null ? lastAggregatorsIndexAndOffset[1] : 0);
        if (lastBuffer != null &&
            lastBuffer.capacity() - bufferOffset >= aggsTotalSize) {
          aggBuffer = lastBuffer;
        } else {
          // Not enough room — take a fresh buffer from the pool and start at offset 0.
          ResourceHolder<ByteBuffer> bb = bufferPool.take();
          aggBuffers.add(bb);
          bufferIndex = aggBuffers.size() - 1;
          bufferOffset = 0;
          aggBuffer = bb.get();
        }

        for (int i = 0; i < metrics.length; i++) {
          getAggs()[i].init(aggBuffer, bufferOffset + aggOffsetInBuffer[i]);
        }

        // Last ditch sanity checks
        if (numEntries.get() >= maxRowCount && facts.getPriorIndex(key) == TimeAndDims.EMPTY_ROW_INDEX) {
          throw new IndexSizeExceededException("Maximum number of rows [%d] reached", maxRowCount);
        }

        final int rowIndex = indexIncrement.getAndIncrement();

        // note that indexAndOffsets must be updated before facts, because as soon as we update facts
        // concurrent readers get hold of it and might ask for newly added row
        indexAndOffsets.add(new int[]{bufferIndex, bufferOffset});
        final int prev = facts.putIfAbsent(key, rowIndex);
        if (TimeAndDims.EMPTY_ROW_INDEX == prev) {
          numEntries.incrementAndGet();
        } else {
          // Cannot happen: getPriorIndex above ran under the same lock.
          throw new ISE("WTF! we are in sychronized block.");
        }
      }
    }

    // Expose the row to the selectors while aggregating.
    rowContainer.set(row);

    for (int i = 0; i < metrics.length; i++) {
      final BufferAggregator agg = getAggs()[i];

      // Per-aggregator lock: concurrent adds for different aggregators may proceed in parallel.
      synchronized (agg) {
        try {
          agg.aggregate(aggBuffer, bufferOffset + aggOffsetInBuffer[i]);
        } catch (ParseException e) {
          // "aggregate" can throw ParseExceptions if a selector expects something but gets something else.
          if (reportParseExceptions) {
            throw new ParseException(e, "Encountered parse error for aggregator[%s]", getMetricAggs()[i].getName());
          } else {
            log.debug(e, "Encountered parse error, skipping aggregator[%s].", getMetricAggs()[i].getName());
          }
        }
      }
    }
    rowContainer.set(null);
    return numEntries.get();
  }

  @Override
  public int getLastRowIndex()
  {
    return indexIncrement.get() - 1;
  }

  @Override
  public boolean canAppendRow()
  {
    final boolean canAdd = size() < maxRowCount;
    if (!canAdd) {
      outOfRowsReason = StringUtils.format("Maximum number of rows [%d] reached", maxRowCount);
    }
    return canAdd;
  }

  @Override
  public String getOutOfRowsReason()
  {
    return outOfRowsReason;
  }

  // Off-heap aggregators are shared; per-row state lives in the buffers, not the aggregator objects.
  @Override
  protected BufferAggregator[] getAggsForRow(int rowOffset)
  {
    return getAggs();
  }

  @Override
  protected Object getAggVal(BufferAggregator agg, int rowOffset, int aggPosition)
  {
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.get(bb, indexAndOffset[1] + aggOffsetInBuffer[aggPosition]);
  }

  @Override
  public float getMetricFloatValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.getFloat(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public long getMetricLongValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.getLong(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public Object getMetricObjectValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.get(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  @Override
  public double getMetricDoubleValue(int rowOffset, int aggOffset)
  {
    BufferAggregator agg = getAggs()[aggOffset];
    int[] indexAndOffset = indexAndOffsets.get(rowOffset);
    ByteBuffer bb = aggBuffers.get(indexAndOffset[0]).get();
    return agg.getDouble(bb, indexAndOffset[1] + aggOffsetInBuffer[aggOffset]);
  }

  /**
   * NOTE: This is NOT thread-safe with add... so make sure all the adding is DONE before closing
   */
  @Override
  public void close()
  {
    super.close();
    facts.clear();
    indexAndOffsets.clear();

    if (selectors != null) {
      selectors.clear();
    }

    // Return every pooled buffer; a failure to close any holder surfaces as RuntimeException.
    Closer c = Closer.create();
    aggBuffers.forEach(c::register);
    try {
      c.close();
    } catch (IOException e) {
      throw new RuntimeException(e);
    }
    aggBuffers.clear();
  }
}
/*
 * Copyright 2015 Red Hat, Inc. and/or its affiliates.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.drools.compiler.kie.builder.impl;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicReference;

import org.appformer.maven.support.PomModel;
import org.drools.compiler.compiler.io.memory.MemoryFileSystem;
import org.drools.compiler.kproject.ReleaseIdImpl;
import org.drools.compiler.kproject.models.KieModuleModelImpl;
import org.drools.core.io.internal.InternalResource;
import org.kie.api.builder.KieModule;
import org.kie.api.builder.KieRepository;
import org.kie.api.builder.KieScannerFactoryService;
import org.kie.api.builder.ReleaseId;
import org.kie.api.builder.ReleaseIdComparator.ComparableVersion;
import org.kie.api.builder.model.KieModuleModel;
import org.kie.api.event.kiescanner.KieScannerEventListener;
import org.kie.api.internal.utils.ServiceRegistry;
import org.kie.api.io.Resource;
import org.kie.api.runtime.KieContainer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.drools.compiler.kie.builder.impl.KieBuilderImpl.setDefaultsforEmptyKieModule;
import static org.drools.compiler.kproject.ReleaseIdImpl.fromPropertiesStream;

/**
 * In-memory repository of KieModules, keyed by ReleaseId (GAV). Lookups fall back from the
 * internal LRU cache to the classpath and finally to the Maven repository (via an
 * InternalKieScanner, a dummy no-op when kie-ci is absent).
 */
public class KieRepositoryImpl
        implements KieRepository {

    private static final Logger log = LoggerFactory.getLogger(KieRepositoryImpl.class);

    private static final String DEFAULT_VERSION = "1.0.0";
    private static final String DEFAULT_ARTIFACT = "artifact";
    private static final String DEFAULT_GROUP = "org.default";

    // NOTE(review): raw AtomicReference — new AtomicReference<>(...) would avoid the unchecked warning.
    private final AtomicReference<ReleaseId> defaultGAV = new AtomicReference(new ReleaseIdImpl(DEFAULT_GROUP,
                                                                                               DEFAULT_ARTIFACT,
                                                                                               DEFAULT_VERSION));

    public static final KieRepository INSTANCE = new KieRepositoryImpl();

    private final KieModuleRepo kieModuleRepo;

    // Test hook: replaces the lazily-created scanner held by KieScannerHolder.
    public static void setInternalKieScanner(InternalKieScanner scanner) {
        synchronized (KieScannerHolder.class) {
            KieScannerHolder.kieScanner = scanner;
        }
    }

    private static class KieScannerHolder {
        // Use holder class idiom to lazily initialize the kieScanner
        private static volatile InternalKieScanner kieScanner = getInternalKieScanner();

        private static InternalKieScanner getInternalKieScanner() {
            synchronized (KieScannerHolder.class) {
                if ( kieScanner != null ) {
                    return kieScanner;
                }
                try {
                    KieScannerFactoryService scannerFactoryService = ServiceRegistry.getInstance().get(KieScannerFactoryService.class);
                    return (InternalKieScanner) scannerFactoryService.newKieScanner();
                } catch (Exception e) {
                    log.debug( "Cannot load a KieRepositoryScanner, using the DummyKieScanner" );
                    // kie-ci is not on the classpath
                    return new DummyKieScanner();
                }
            }
        }
    }

    public KieRepositoryImpl() {
        kieModuleRepo = new KieModuleRepo();
    }

    public void setDefaultGAV(ReleaseId releaseId) {
        this.defaultGAV.set(releaseId);
    }

    public ReleaseId getDefaultReleaseId() {
        return this.defaultGAV.get();
    }

    public void addKieModule(KieModule kieModule) {
        kieModuleRepo.store(kieModule);
        log.debug("KieModule was added: " + kieModule);
    }

    public KieModule getKieModule(ReleaseId releaseId) {
        return getKieModule(releaseId, null);
    }

    public KieModule removeKieModule(ReleaseId releaseId) {
        return kieModuleRepo.remove(releaseId);
    }

    // Retrieves (and removes) the module that a store() displaced for this ReleaseId,
    // falling back to the regular lookup when none was displaced.
    KieModule getOldKieModule(ReleaseId releaseId) {
        KieModule kieModule = kieModuleRepo.loadOldAndRemove(releaseId);
        return kieModule != null ? kieModule : getKieModule(releaseId);
    }

    // Lookup order: cache -> classpath -> maven repo.
    public KieModule getKieModule(ReleaseId releaseId, PomModel pomModel) {
        KieModule kieModule = kieModuleRepo.load( KieScannerHolder.kieScanner, releaseId );
        if (kieModule == null) {
            log.debug("KieModule Lookup. ReleaseId {} was not in cache, checking classpath",
                      releaseId.toExternalForm());
            kieModule = checkClasspathForKieModule(releaseId);
        }

        if (kieModule == null) {
            log.debug("KieModule Lookup. ReleaseId {} was not in cache, checking maven repository",
                      releaseId.toExternalForm());
            kieModule = loadKieModuleFromMavenRepo(releaseId, pomModel);
        }

        return kieModule;
    }

    /**
     * Looks for a kmodule.xml plus a matching pom.properties on the context classpath;
     * returns the corresponding KieModule or null when not found / not readable.
     */
    private KieModule checkClasspathForKieModule(ReleaseId releaseId) {
        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();

        URL kmoduleUrl = contextClassLoader.getResource( KieModuleModelImpl.KMODULE_JAR_PATH );
        if (kmoduleUrl == null) {
            return null;
        }

        String pomPropertiesPath = ReleaseIdImpl.getPomPropertiesPath(releaseId);
        URL pomPropertiesUrl = contextClassLoader.getResource( pomPropertiesPath );
        if (pomPropertiesUrl == null) {
            return null;
        }

        ReleaseId pomReleaseId = fromPropertiesStream( contextClassLoader.getResourceAsStream(pomPropertiesPath),
                                                       pomPropertiesUrl.getPath());
        if (pomReleaseId.equals(releaseId)) {
            // Rebuild a jar-URL pointing at the kmodule.xml inside the same jar as the pom.properties.
            String path = pomPropertiesUrl.getPath();
            String pathToJar = path.substring( 0, path.indexOf( ".jar!" ) + 4 );
            URL pathToKmodule;
            try {
                pathToKmodule = new URL( pomPropertiesUrl.getProtocol(),
                                         pomPropertiesUrl.getHost(),
                                         pomPropertiesUrl.getPort(),
                                         pathToJar + "!/" + KieModuleModelImpl.KMODULE_JAR_PATH );
                // URLConnection.getContentLength() returns -1 if the content length is not known, unable to locate and read from the kmodule
                // if URL backed by 'file:' then FileURLConnection.getContentLength() returns 0, as per java.io.File.length() returns 0L if the file does not exist. (the same also for WildFly's VFS FileURLConnection)
                if ( pathToKmodule.openConnection().getContentLength() <= 0 ) {
                    return null;
                }
            } catch (MalformedURLException e) {
                log.error( "Unable to reconstruct path to kmodule for " + releaseId );
                return null;
            } catch (IOException e) {
                log.error( "Unable to read from path to kmodule for " + releaseId );
                return null;
            }
            log.info( "Adding KieModule from classpath: " + pathToJar );
            return ClasspathKieProject.fetchKModule( pathToKmodule );
        }
        return null;
    }

    private KieModule loadKieModuleFromMavenRepo(ReleaseId releaseId, PomModel pomModel) {
        return KieScannerHolder.kieScanner.loadArtifact( releaseId, pomModel );
    }

    // No-op scanner used when kie-ci (maven integration) is not on the classpath.
    private static class DummyKieScanner
            implements
            InternalKieScanner {

        public void start(long pollingInterval) { }

        public void stop() { }

        public void shutdown() { }

        public void scanNow() { }

        public void setKieContainer(KieContainer kieContainer) { }

        public KieModule loadArtifact(ReleaseId releaseId) {
            logArtifactNotFetched(releaseId);
            return null;
        }

        public KieModule loadArtifact(ReleaseId releaseId, InputStream pomXML) {
            logArtifactNotFetched(releaseId);
            return null;
        }

        public KieModule loadArtifact(ReleaseId releaseId, PomModel pomModel) {
            logArtifactNotFetched(releaseId);
            return null;
        }

        public String getArtifactVersion(ReleaseId releaseId) {
            logArtifactNotFetched(releaseId);
            return null;
        }

        private void logArtifactNotFetched(ReleaseId releaseId) {
            log.info("Artifact not fetched from maven: " + releaseId + ". To enable the KieScanner you need kie-ci on the classpath");
        }

        public ReleaseId getScannerReleaseId() {
            return null;
        }

        public ReleaseId getCurrentReleaseId() {
            return null;
        }

        public Status getStatus() {
            return Status.STOPPED;
        }

        public long getPollingInterval() {
            return 0;
        }

        public void addListener(KieScannerEventListener listener) { }

        public void removeListener(KieScannerEventListener listener) { }

        public Collection<KieScannerEventListener> getListeners() {
            return Collections.emptyList();
        }
    }

    // Builds a module (plus its dependencies) from resources and registers it in the cache.
    public KieModule addKieModule(Resource resource, Resource... dependencies) {
        log.info("Adding KieModule from resource: " + resource);
        KieModule kModule = getKieModule(resource);
        if (dependencies != null && dependencies.length > 0) {
            for (Resource depRes : dependencies) {
                InternalKieModule depKModule = (InternalKieModule) getKieModule(depRes);
                ((InternalKieModule) kModule).addKieDependency(depKModule);
                log.debug("Adding KieModule dependency from resource: " + resource);
            }
        }
        addKieModule(kModule);
        return kModule;
    }

    /**
     * Materializes a KieModule from a Resource: via its URL (directory or jar) when one
     * exists, otherwise by reading the byte content as an in-memory jar.
     */
    public KieModule getKieModule(Resource resource) {
        InternalResource res = (InternalResource) resource;
        try {
            KieModule kModule;
            // find kmodule.xml
            if (res.hasURL()) {
                String urlPath = res.getURL().toExternalForm();
                if (res.isDirectory()) {
                    if (!urlPath.endsWith("/")) {
                        urlPath = urlPath + "/";
                    }
                    urlPath = urlPath + KieModuleModelImpl.KMODULE_JAR_PATH;
                } else {
                    urlPath = "jar:" + urlPath + "!/" + KieModuleModelImpl.KMODULE_JAR_PATH;
                }
                kModule = ClasspathKieProject.fetchKModule(new URL(urlPath));
                log.debug("Fetched KieModule from resource: " + resource);
            } else {
                // might be a byte[] resource
                MemoryFileSystem mfs = MemoryFileSystem.readFromJar(res.getInputStream());
                byte[] bytes = mfs.getBytes(KieModuleModelImpl.KMODULE_JAR_PATH);
                KieModuleModel kieProject = KieModuleModelImpl.fromXML(new ByteArrayInputStream(bytes));
                setDefaultsforEmptyKieModule(kieProject);
                String pomProperties = mfs.findPomProperties();
                ReleaseId releaseId = ReleaseIdImpl.fromPropertiesString(pomProperties);
                kModule = InternalKieModuleProvider.get( releaseId, kieProject, mfs );
            }
            return kModule;
        } catch (Exception e) {
            throw new RuntimeException("Unable to fetch module from resource: " + res, e);
        }
    }

    // Marker value for the LRU-tracking map inside createNewArtifactMap (values are irrelevant).
    private static final Object PRESENT = new Object();

    /**
     * The methods in this class are all synchronized because
     * 1. performance is not particularly important here
     * 2. I wrote performant concurrent code and then realized it was not easily maintainable
     *    (and maintainability is more important here, AFAICT),
     * so we're using synchronized methods instead
     */
    // package scope so that we can test it
    static class KieModuleRepo {

        // PROPERTIES -------------------------------------------------------------------------------------------------------------

        public static final String CACHE_GA_MAX_PROPERTY = "kie.repository.project.cache.size";
        static int MAX_SIZE_GA_CACHE // made changeable for test purposes
            = Integer.parseInt(System.getProperty(CACHE_GA_MAX_PROPERTY, "100"));

        public static final String CACHE_VERSIONS_MAX_PROPERTY = "kie.repository.project.versions.cache.size";
        static int MAX_SIZE_GA_VERSIONS_CACHE // made changeable for test purposes
            = Integer.parseInt(System.getProperty(CACHE_VERSIONS_MAX_PROPERTY, "10"));

        // FIELDS -----------------------------------------------------------------------------------------------------------------

        // kieModules evicts based on access-time, not on insertion-time
        // Outer key is "groupId:artifactId"; inner map is version -> module.
        final Map<String, NavigableMap<ComparableVersion, KieModule>> kieModules =
            new LinkedHashMap<String, NavigableMap<ComparableVersion, KieModule>>(16, 0.75f, true) {
                @Override
                protected boolean removeEldestEntry( Map.Entry<String, NavigableMap<ComparableVersion, KieModule>> eldest) {
                    return (size() > MAX_SIZE_GA_CACHE);
                }
            };

        // Modules displaced by store(); consumed by loadOldAndRemove. Bounded by GA cache x versions cache.
        final LinkedHashMap<ReleaseId, KieModule> oldKieModules = new LinkedHashMap<ReleaseId, KieModule>() {
            @Override
            protected boolean removeEldestEntry( Map.Entry<ReleaseId, KieModule> eldest ) {
                return size() > (MAX_SIZE_GA_CACHE*MAX_SIZE_GA_VERSIONS_CACHE);
            };
        };

        // METHODS ----------------------------------------------------------------------------------------------------------------

        // Removes the exact version of the GA; drops the GA entry entirely when it becomes empty.
        synchronized KieModule remove(ReleaseId releaseId) {
            KieModule removedKieModule = null;
            String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId();
            ComparableVersion comparableVersion = new ComparableVersion(releaseId.getVersion());

            NavigableMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
            if (artifactMap != null) {
                removedKieModule = artifactMap.remove(comparableVersion);
                if (artifactMap.isEmpty()) {
                    kieModules.remove(ga);
                }
                oldKieModules.remove(releaseId);
            }

            return removedKieModule;
        }

        // Stores the module under its GA/version; a previously cached module for the same
        // version is preserved once in oldKieModules for later retrieval.
        synchronized void store(KieModule kieModule) {
            ReleaseId releaseId = kieModule.getReleaseId();
            String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId();
            ComparableVersion comparableVersion = new ComparableVersion(releaseId.getVersion());

            NavigableMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
            if( artifactMap == null ) {
                artifactMap = createNewArtifactMap();
                kieModules.put(ga, artifactMap);
            }

            KieModule oldReleaseIdKieModule = oldKieModules.get(releaseId);
            // variable used in order to test race condition
            if (oldReleaseIdKieModule == null) {
                KieModule oldKieModule = artifactMap.get(comparableVersion);
                if (oldKieModule != null) {
                    oldKieModules.put( releaseId, oldKieModule );
                }
            }
            artifactMap.put( comparableVersion, kieModule );
        }

        /**
         * Returns a map that fulfills 2 purposes: <ol>
         * <li>It is a {@link NavigableMap} and thus can be used in the {@link KieModuleRepo#load(InternalKieScanner, ReleaseId, VersionRange)} method</li>
         * <li>It is a LRU cache, and thus will not grow without limit.
         * </ol>
         * @return a {@link NavigableMap} that is "backed" by a {@link LinkedHashMap} to enforce a LRU cache
         */
        private NavigableMap<ComparableVersion, KieModule> createNewArtifactMap() {
            NavigableMap<ComparableVersion, KieModule> newArtifactMap = new TreeMap<ComparableVersion, KieModule>() {

                private final Map<ComparableVersion, KieModule> artifactMap = this;

                // Shadow map with access-order eviction: when it evicts a key, the same key
                // is removed from the TreeMap, capping the per-GA version count.
                LinkedHashMap<ComparableVersion, Object> backingLRUMap = new LinkedHashMap<ComparableVersion, Object>(16, 0.75f, true) {
                    @Override
                    protected boolean removeEldestEntry( Map.Entry<ComparableVersion, Object> eldest ) {
                        boolean remove = (size() > MAX_SIZE_GA_VERSIONS_CACHE);
                        if( remove ) {
                            artifactMap.remove(eldest.getKey());
                        }
                        return remove;
                    }
                };

                @Override
                public KieModule put( ComparableVersion key, KieModule value ) {
                    backingLRUMap.put(key, PRESENT);
                    return super.put(key, value);
                }
            };
            return newArtifactMap;
        }

        synchronized KieModule loadOldAndRemove(ReleaseId releaseId) {
            return oldKieModules.remove(releaseId);
        }

        synchronized KieModule load(InternalKieScanner kieScanner, ReleaseId releaseId) {
            return load(kieScanner, releaseId, new VersionRange(releaseId.getVersion()));
        }

        /**
         * Resolves a cached module for the GA within the given version range. For a fixed
         * SNAPSHOT version, the scanner is consulted so a newer remote snapshot forces a
         * rebuild (by returning null). For ranges, picks the highest cached version that
         * satisfies the bounds.
         */
        synchronized KieModule load(InternalKieScanner kieScanner, ReleaseId releaseId, VersionRange versionRange) {
            String ga = releaseId.getGroupId() + ":" + releaseId.getArtifactId();

            NavigableMap<ComparableVersion, KieModule> artifactMap = kieModules.get(ga);
            if ( artifactMap == null || artifactMap.isEmpty() ) {
                return null;
            }
            KieModule kieModule = artifactMap.get(new ComparableVersion(releaseId.getVersion()));

            if (versionRange.fixed) {
                if ( kieModule != null && releaseId.isSnapshot() ) {
                    String oldSnapshotVersion = ((ReleaseIdImpl)kieModule.getReleaseId()).getSnapshotVersion();
                    if ( oldSnapshotVersion != null ) {
                        String currentSnapshotVersion = kieScanner.getArtifactVersion(releaseId);
                        if (currentSnapshotVersion != null &&
                            new ComparableVersion(currentSnapshotVersion).compareTo(new ComparableVersion(oldSnapshotVersion)) > 0) {
                            // if the snapshot currently available on the maven repo is newer than the cached one
                            // return null to enforce the building of this newer version
                            return null;
                        }
                    }
                }
                return kieModule;
            }

            Map.Entry<ComparableVersion, KieModule> entry =
                versionRange.upperBound == null ?
                artifactMap.lastEntry() :
                versionRange.upperInclusive ?
                    artifactMap.floorEntry(new ComparableVersion(versionRange.upperBound)) :
                    artifactMap.lowerEntry(new ComparableVersion(versionRange.upperBound));

            if ( entry == null ) {
                return null;
            }

            if ( versionRange.lowerBound == null ) {
                return entry.getValue();
            }

            int comparison = entry.getKey().compareTo(new ComparableVersion(versionRange.lowerBound));
            return comparison > 0 || (comparison == 0 && versionRange.lowerInclusive) ? entry.getValue() : null;
        }
    }

    // Parses a Maven-style version or version-range string, e.g. "1.2", "[1.0,2.0)", "LATEST".
    private static class VersionRange {

        private String lowerBound;   // null when unbounded below
        private String upperBound;   // null when unbounded above
        private boolean lowerInclusive;
        private boolean upperInclusive;
        private boolean fixed;       // true for a single pinned version

        private VersionRange(String version) {
            parse(version);
        }

        private void parse(String version) {
            if ("LATEST".equals(version) || "RELEASE".equals(version)) {
                fixed = false;
                lowerBound = "1.0";
                upperBound = null;
                lowerInclusive = true;
                upperInclusive = false;
                return;
            }

            // No bracket/parenthesis prefix -> plain fixed version.
            if (version.charAt(0) != '(' && version.charAt(0) != '[') {
                fixed = true;
                lowerBound = version;
                upperBound = version;
                lowerInclusive = true;
                upperInclusive = true;
                return;
            }

            lowerInclusive = version.charAt(0) == '[';
            upperInclusive = version.charAt(version.length() - 1) == ']';

            int commaPos = version.indexOf(',');
            if (commaPos < 0) {
                // "[1.2]" — bracketed single version is still fixed.
                fixed = true;
                lowerBound = version.substring(1, version.length() - 1);
                upperBound = lowerBound;
            } else {
                if (commaPos > 1) {
                    lowerBound = version.substring(1, commaPos);
                }
                if (commaPos < version.length() - 2) {
                    upperBound = version.substring(commaPos + 1, version.length() - 1);
                }
            }
        }
    }
}
/*
 * $Id$
 *
 * SARL is a general-purpose agent programming language.
 * More details on http://www.sarl.io
 *
 * Copyright (C) 2014-2017 the original authors or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.sarl.lang.bugfixes.unpublished;

import java.util.List;

import javax.inject.Singleton;

import org.eclipse.emf.ecore.EObject;
import org.eclipse.xtext.Keyword;
import org.eclipse.xtext.formatting2.AbstractFormatter2;
import org.eclipse.xtext.formatting2.IFormattableDocument;
import org.eclipse.xtext.formatting2.ITextReplacer;
import org.eclipse.xtext.formatting2.ITextReplacerContext;
import org.eclipse.xtext.formatting2.regionaccess.IComment;
import org.eclipse.xtext.formatting2.regionaccess.IHiddenRegion;
import org.eclipse.xtext.formatting2.regionaccess.ISemanticRegion;
import org.eclipse.xtext.formatting2.regionaccess.ITextRegionAccess;
import org.eclipse.xtext.formatting2.regionaccess.ITextReplacement;
import org.eclipse.xtext.formatting2.regionaccess.ITextSegment;
import org.eclipse.xtext.util.Strings;

/** FIXME: Fixing a bug in Xtext Formatter2 API that avoid to have a good
 * indentation for the first comment in a block.
 *
 * <p>The workaround has two parts: (1) a zero-length replacement is inserted at the
 * comment's offset carrying one level of indentation, and (2) the replacer context is
 * wrapped so that all subsequent indentation queries report one extra level (see
 * {@link FixedReplacementContext}).
 *
 * @author $Author: sgalland$
 * @version $FullVersion$
 * @mavengroupid $GroupId$
 * @mavenartifactid $ArtifactId$
 */
@Singleton
public class BugMultilineCommentIndentation {

	/** Fixing the bug.
	 *
	 * <p>Applies only when the comment directly follows an opening "{" keyword and the
	 * block is not empty (i.e. not immediately closed by "}").
	 *
	 * @param context the replacement context.
	 * @param comment the comment for which the fix must be applied.
	 * @return the new context: a wrapped context with one extra indentation level when
	 *     the fix applies, otherwise the original {@code context} unchanged.
	 */
	@SuppressWarnings("static-method")
	public ITextReplacerContext fix(final ITextReplacerContext context, IComment comment) {
		final IHiddenRegion hiddenRegion = comment.getHiddenRegion();
		if (detectBugSituation(hiddenRegion) && fixBug(hiddenRegion)) {
			// Indentation of the first comment line: insert the indentation string into a
			// zero-length region at the comment's start offset.
			final ITextRegionAccess access = comment.getTextRegionAccess();
			final ITextSegment target = access.regionForOffset(comment.getOffset(), 0);
			context.addReplacement(target.replaceWith(context.getIndentationString(1)));
			// Indentation of the comment's lines
			return new FixedReplacementContext(context);
		}
		return context;
	}

	/** Returns true when the hidden region's previous semantic region is the "{" keyword,
	 * i.e. the comment is the first thing inside a block — the situation the Xtext bug affects.
	 */
	private static boolean detectBugSituation(IHiddenRegion hiddenRegion) {
		if (hiddenRegion != null) {
			final ISemanticRegion semanticRegion = hiddenRegion.getPreviousSemanticRegion();
			if (semanticRegion != null) {
				final EObject element = semanticRegion.getGrammarElement();
				if (element instanceof Keyword
						&& Strings.equal(((Keyword) element).getValue(), "{")) { //$NON-NLS-1$
					return true;
				}
			}
		}
		return false;
	}

	/** Returns true when the fix should actually be applied: the next semantic region is
	 * NOT the closing "}" keyword (an empty block needs no first-comment re-indentation).
	 */
	private static boolean fixBug(IHiddenRegion hiddenRegion) {
		boolean needBugFix = true;
		final ISemanticRegion semanticRegion = hiddenRegion.getNextSemanticRegion();
		if (semanticRegion != null) {
			final EObject element = semanticRegion.getGrammarElement();
			if (element instanceof Keyword
					&& Strings.equal(((Keyword) element).getValue(), "}")) { //$NON-NLS-1$
				needBugFix = false;
			}
		}
		return needBugFix;
	}

	/** Comment fixer.
	 *
	 * <p>Pure delegating wrapper around an {@link ITextReplacerContext} whose only
	 * behavioral change is indentation: {@link #getIndentation()} reports one level more
	 * than the wrapped context, and the no-argument {@link #getIndentationString()} is
	 * rerouted through that bumped level. Every other method forwards unchanged.
	 *
	 * @author $Author: sgalland$
	 * @version $FullVersion$
	 * @mavengroupid $GroupId$
	 * @mavenartifactid $ArtifactId$
	 */
	private static class FixedReplacementContext implements ITextReplacerContext {

		// The wrapped context all calls are forwarded to.
		private final ITextReplacerContext context;

		/** Constructor.
		 *
		 * @param context the replacement context to fix.
		 */
		FixedReplacementContext(ITextReplacerContext context) {
			this.context = context;
		}

		@Override
		public ITextReplacerContext withReplacer(ITextReplacer replacer) {
			return this.context.withReplacer(replacer);
		}

		@Override
		public ITextReplacerContext withIndentation(int indentation) {
			return this.context.withIndentation(indentation);
		}

		@Override
		public ITextReplacerContext withDocument(IFormattableDocument document) {
			return this.context.withDocument(document);
		}

		@Override
		public void setNextReplacerIsChild() {
			this.context.setNextReplacerIsChild();
		}

		@Override
		public void setCanAutowrap(Integer value) {
			this.context.setCanAutowrap(value);
		}

		@Override
		public void setAutowrap(boolean value) {
			this.context.setAutowrap(value);
		}

		@Override
		public boolean isInsideFormattedRegion() {
			return this.context.isInsideFormattedRegion();
		}

		@Override
		public boolean isAutowrap() {
			return this.context.isAutowrap();
		}

		@Override
		public ITextReplacer getReplacer() {
			return this.context.getReplacer();
		}

		@Override
		public List<ITextReplacement> getReplacementsUntil(ITextReplacerContext first) {
			return this.context.getReplacementsUntil(first);
		}

		@Override
		public ITextReplacerContext getPreviousContext() {
			return this.context.getPreviousContext();
		}

		@Override
		public String getNewLinesString(int count) {
			return this.context.getNewLinesString(count);
		}

		@Override
		public Iterable<ITextReplacement> getLocalReplacements() {
			return this.context.getLocalReplacements();
		}

		@Override
		public int getLeadingCharsInLineCount() {
			return this.context.getLeadingCharsInLineCount();
		}

		@Override
		public String getIndentationString(int indentationLevel) {
			// Explicit-level variant delegates untouched: the caller chose the level.
			return this.context.getIndentationString(indentationLevel);
		}

		@Override
		public String getIndentationString() {
			// Routes through the overridden getIndentation() below, so the rendered
			// indentation string includes the extra level — this is the core of the fix.
			return getIndentationString(getIndentation());
		}

		@Override
		public int getIndentation() {
			// Report one indentation level more than the wrapped context.
			return this.context.getIndentation() + 1;
		}

		@Override
		public AbstractFormatter2 getFormatter() {
			return this.context.getFormatter();
		}

		@Override
		public IFormattableDocument getDocument() {
			return this.context.getDocument();
		}

		@Override
		public Integer canAutowrap() {
			return this.context.canAutowrap();
		}

		@Override
		public void addReplacement(ITextReplacement replacement) {
			this.context.addReplacement(replacement);
		}

		@Override
		public boolean isWrapInRegion() {
			return this.context.isWrapInRegion();
		}

		@Override
		public boolean isWrapSincePrevious() {
			return this.context.isWrapSincePrevious();
		}

	}

}
/*
 * Licensed to The Apereo Foundation under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional
 * information regarding copyright ownership.
 *
 * The Apereo Foundation licenses this file to you under the Apache License,
 * Version 2.0, (the "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at:
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.tle.admin.itemdefinition;

import com.dytech.gui.ComponentHelper;
import com.dytech.gui.TableLayout;
import com.dytech.gui.VerticalFlowLayout;
import com.tle.admin.PluginServiceImpl;
import com.tle.admin.gui.common.JChangeDetectorPanel;
import com.tle.beans.entity.itemdef.ItemDefinition;
import com.tle.common.i18n.CurrentLocale;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.ArrayList;
import java.util.List;
import javax.swing.BorderFactory;
import javax.swing.JButton;
import javax.swing.JCheckBox;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.WindowConstants;
import org.java.plugin.registry.Extension;
import org.java.plugin.registry.Extension.Parameter;

/**
 * Collection-editor tab listing the "extra" collection-tool plugin extensions, with an
 * enable checkbox per extension and, when the extension declares one, a button opening a
 * modal configuration dialog.
 *
 * <p>NOTE(review): relies on inherited members from AbstractItemdefTab (state,
 * pluginService, clientService, LOGGER) — declared outside this file.
 */
public class ExtensionsTab extends AbstractItemdefTab {

  /**
   * Immutable view of one "extra" plugin extension: its display name, the item-definition
   * attribute that marks it enabled, its optional config-panel class, and the checkbox
   * widget shared between init() and load().
   */
  private static class ExtensionHandle {
    private final String name;
    private final String enabledAttribute;
    private final Class<? extends AbstractExtensionConfigPanel> configPanelClass;
    private final JCheckBox enabledCheckbox;

    // The cast is unavoidable: getBeanClass returns a raw/wildcard Class.
    @SuppressWarnings("unchecked")
    public ExtensionHandle(PluginServiceImpl service, Extension ext) throws ClassNotFoundException {
      this.name = CurrentLocale.get(ext.getParameter("name").valueAsString()); //$NON-NLS-1$
      this.enabledAttribute = ext.getParameter("enabledAttribute").valueAsString(); //$NON-NLS-1$

      // "configPanel" is optional — extensions without one get only a checkbox.
      Parameter param = ext.getParameter("configPanel"); //$NON-NLS-1$
      this.configPanelClass =
          param == null
              ? null
              : (Class<? extends AbstractExtensionConfigPanel>)
                  service.getBeanClass(ext.getDeclaringPluginDescriptor(), param.valueAsString());

      this.enabledCheckbox = new JCheckBox(name);
    }

    public String getName() {
      return name;
    }

    public String getEnabledAttribute() {
      return enabledAttribute;
    }

    public Class<? extends AbstractExtensionConfigPanel> getConfigPanelClass() {
      return configPanelClass;
    }

    public JCheckBox getEnabledCheckbox() {
      return enabledCheckbox;
    }
  }

  // All successfully constructed extension handles, in discovery order.
  private final List<ExtensionHandle> extensions = new ArrayList<ExtensionHandle>();
  // Staging area ID captured in load() and passed to config panels.
  private String stagingId;

  public ExtensionsTab() {
    super();
  }

  /**
   * Builds the tab UI: discovers the connected extensions, then lays out one checkbox
   * (plus an optional "configure" button) per extension. A failing extension is logged
   * and skipped rather than aborting the whole tab.
   */
  @Override
  public void init(final Component parent) {
    for (Extension ext :
        pluginService.getConnectedExtensions(
            "com.tle.admin.collection.tool", //$NON-NLS-1$
            "extra")) //$NON-NLS-1$
    {
      try {
        extensions.add(new ExtensionHandle(pluginService, ext));
      } catch (Exception ex) {
        LOGGER.error("Error processing extension " + ext.getId(), ex);
      }
    }

    setLayout(new VerticalFlowLayout());

    if (extensions.isEmpty()) {
      add(new JLabel("No extensions available"));
    } else {
      for (final ExtensionHandle extension : extensions) {
        final JCheckBox checkbox = extension.getEnabledCheckbox();
        // Toggling the checkbox writes/removes the "enabled" attribute on the entity.
        checkbox.addActionListener(
            new ActionListener() {
              @Override
              public void actionPerformed(ActionEvent e) {
                ItemDefinition entity = state.getEntity();
                if (checkbox.isSelected()) {
                  entity.setAttribute(extension.getEnabledAttribute(), Boolean.TRUE.toString());
                } else {
                  entity.removeAttribute(extension.getEnabledAttribute());
                }
              }
            });

        if (extension.getConfigPanelClass() == null) {
          add(checkbox);
        } else {
          final JButton button =
              new JButton(
                  CurrentLocale.get(
                      "com.tle.admin.itemdefinition.extensionstab.config")); //$NON-NLS-1$
          button.addActionListener(
              new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent e) {
                  // Instantiate and populate the extension's config panel lazily,
                  // only when the user actually opens the dialog.
                  final AbstractExtensionConfigPanel configPanel;
                  try {
                    configPanel = extension.getConfigPanelClass().newInstance();
                    configPanel.setClientService(clientService);
                    configPanel.load(stagingId, state.getEntity());
                  } catch (Exception ex) {
                    throw new RuntimeException("Could not create config panel instance", ex);
                  }

                  JButton save = new JButton("OK");
                  JButton cancel = new JButton("Cancel");

                  // Size the button columns from the wider ("Cancel") button.
                  final Dimension size = cancel.getPreferredSize();
                  final int[] rows = {
                    TableLayout.FILL, size.height,
                  };
                  final int[] cols = {
                    TableLayout.FILL, size.width, size.width,
                  };

                  JPanel panel = new JPanel(new TableLayout(rows, cols, 5, 5));
                  panel.setBorder(BorderFactory.createEmptyBorder(5, 5, 5, 5));
                  panel.add(configPanel, new Rectangle(0, 0, 3, 1));
                  panel.add(save, new Rectangle(1, 1, 1, 1));
                  panel.add(cancel, new Rectangle(2, 1, 1, 1));

                  final JDialog dialog = ComponentHelper.createJDialog(button);
                  dialog.setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
                  dialog.setTitle(extension.getName());
                  dialog.getContentPane().add(panel);
                  dialog.setModal(true);
                  dialog.setSize(600, 500);
                  ComponentHelper.centreOnScreen(dialog);

                  save.addActionListener(
                      new ActionListener() {
                        @Override
                        public void actionPerformed(ActionEvent e) {
                          configPanel.save(state.getEntity());
                          dialog.dispose();

                          // Dirty rotten hacks: force the change detector to notice the
                          // edit made inside the modal dialog.
                          ((JChangeDetectorPanel) ExtensionsTab.this.getComponent()).forceChange();
                        }
                      });
                  cancel.addActionListener(
                      new ActionListener() {
                        @Override
                        public void actionPerformed(ActionEvent e) {
                          dialog.dispose();
                        }
                      });

                  // Modal: blocks here until the dialog is disposed.
                  dialog.setVisible(true);
                }
              });

          JPanel panel = new JPanel(new FlowLayout(FlowLayout.LEFT, 10, 10));
          panel.add(checkbox);
          panel.add(button);
          add(panel);
        }
      }
    }
  }

  @Override
  public void validation() {
    // Nothing to validate here.
  }

  @Override
  public String getTitle() {
    return CurrentLocale.get("com.tle.admin.itemdefinition.extensionstab.title"); //$NON-NLS-1$
  }

  /** Syncs each checkbox from the entity's current "enabled" attribute values. */
  @Override
  public void load() {
    stagingId = state.getEntityPack().getStagingID();

    ItemDefinition itemdef = state.getEntity();
    for (ExtensionHandle extension : extensions) {
      boolean enabled = itemdef.getAttribute(extension.getEnabledAttribute(), false);
      extension.getEnabledCheckbox().setSelected(enabled);
    }
  }

  @Override
  public void save() {
    // Nothing to do here: checkbox and dialog listeners write to the entity directly.
  }
}
/**
 *
 */
package org.sagebionetworks.repo.web.controller;

import org.sagebionetworks.reflection.model.PaginatedResults;
import org.sagebionetworks.repo.model.AuthorizationConstants;
import org.sagebionetworks.repo.model.Count;
import org.sagebionetworks.repo.model.InviteeVerificationSignedToken;
import org.sagebionetworks.repo.model.MembershipInvitation;
import org.sagebionetworks.repo.model.MembershipInvtnSignedToken;
import org.sagebionetworks.repo.model.ServiceConstants;
import org.sagebionetworks.repo.web.NotFoundException;
import org.sagebionetworks.repo.web.UrlHelpers;
import org.sagebionetworks.repo.web.rest.doc.ControllerInfo;
import org.sagebionetworks.repo.web.service.ServiceProvider;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import org.springframework.web.bind.annotation.ResponseStatus;

/**
 * The Membership Invitation Services create, retrieve and delete
 * membership invitations. A membership invitation is created by a Team administrator
 * to invite a Synapse user to join the Team. Without the invitation it is not possible
 * for an outside user to join. For more on Teams, see
 * <a href="#org.sagebionetworks.repo.web.controller.TeamController">Team Services</a>.
 *
 * <p>Every endpoint here is a thin delegation to MembershipInvitationService obtained
 * via the injected {@link ServiceProvider}.
 */
@ControllerInfo(displayName="Membership Invitation Services", path="repo/v1")
@Controller
@RequestMapping(UrlHelpers.REPO_PATH)
public class MembershipInvitationController extends BaseController {
	@Autowired
	ServiceProvider serviceProvider;

	/**
	 * Create a membership invitation. The team must be specified. Also, either an inviteeId or an inviteeEmail must be
	 * specified. If an inviteeId is specified, the invitee is notified of the invitation through a notification.
	 * If an inviteeEmail is specified instead, an email containing an invitation link is sent to the invitee. The link
	 * will contain a serialized MembershipInvtnSignedToken.
	 * Optionally, the creator may include an invitation message and/or expiration date for the invitation.
	 * If no expiration date is specified then the invitation never expires.
	 * Note: The client must be an administrator of the specified Team to make this request.
	 * @param userId
	 * @param invitation
	 * @param acceptInvitationEndpoint the portal end-point for one-click acceptance of the membership
	 * invitation. A signed, serialized token is appended to create the complete URL:
	 * <a href="${org.sagebionetworks.repo.model.JoinTeamSignedToken}">JoinTeamSignedToken</a>
	 * @param notificationUnsubscribeEndpoint the portal prefix for one-click email unsubscription.
	 * A signed, serialized token is appended to create the complete URL:
	 * <a href="${org.sagebionetworks.repo.model.message.NotificationSettingsSignedToken}">NotificationSettingsSignedToken</a>
	 * @return
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.CREATED)
	@RequestMapping(value = UrlHelpers.MEMBERSHIP_INVITATION, method = RequestMethod.POST)
	public @ResponseBody
	MembershipInvitation createInvitation(
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId,
			@RequestParam(value = AuthorizationConstants.ACCEPT_INVITATION_ENDPOINT_PARAM, required = false) String acceptInvitationEndpoint,
			@RequestParam(value = AuthorizationConstants.NOTIFICATION_UNSUBSCRIBE_ENDPOINT_PARAM, required = false) String notificationUnsubscribeEndpoint,
			@RequestBody MembershipInvitation invitation
			) throws NotFoundException {
		return serviceProvider.
				getMembershipInvitationService().
				create(userId, invitation, acceptInvitationEndpoint, notificationUnsubscribeEndpoint);
	}

	/**
	 * Retrieve the open invitations to a user, optionally filtering by the Team of origin.
	 * An invitation is only open if it has not expired and if the user has not joined the Team.
	 * @param id the ID of the Synapse user to which invitations have been extended.
	 * @param teamId the ID of the Team extending the invitations (optional)
	 * @param limit the maximum number of invitations to return (default 10)
	 * @param offset the starting index of the returned results (default 0)
	 * @return
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.OPEN_MEMBERSHIP_INVITATION_BY_USER, method = RequestMethod.GET)
	public @ResponseBody
	PaginatedResults<MembershipInvitation> getOpenInvitationsByUser(
			@PathVariable String id,
			@RequestParam(value = UrlHelpers.TEAM_ID_REQUEST_PARAMETER, required = false) String teamId,
			@RequestParam(value = ServiceConstants.PAGINATION_LIMIT_PARAM, required = false, defaultValue = ServiceConstants.DEFAULT_PAGINATION_LIMIT_PARAM) Integer limit,
			@RequestParam(value = ServiceConstants.PAGINATION_OFFSET_PARAM, required = false, defaultValue = ServiceConstants.DEFAULT_PAGINATION_OFFSET_PARAM) Integer offset
			) throws NotFoundException {
		// First argument (requesting user) is intentionally null: this endpoint is scoped
		// by the path's user id rather than the caller.
		return serviceProvider.getMembershipInvitationService().getOpenInvitations(null, id, teamId, limit, offset);
	}

	/**
	 * Retrieve the open invitations from a Team, optionally filtering by the invitee.
	 * An invitation is only open if it has not expired and if the user has not joined the Team.
	 * @param userId the ID of the user making the request
	 * @param id the ID of the Team extending the invitations
	 * @param inviteeId the ID of the Synapse user to which invitations have been extended (optional)
	 * @param limit the maximum number of invitations to return (default 10)
	 * @param offset the starting index of the returned results (default 0)
	 * @return
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.OPEN_MEMBERSHIP_INVITATION_BY_TEAM, method = RequestMethod.GET)
	public @ResponseBody
	PaginatedResults<MembershipInvitation> getOpenInvitationsByTeam(
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId,
			@PathVariable String id,
			@RequestParam(value = UrlHelpers.INVITEE_ID_REQUEST_PARAMETER, required = false) String inviteeId,
			@RequestParam(value = ServiceConstants.PAGINATION_LIMIT_PARAM, required = false, defaultValue = ServiceConstants.DEFAULT_PAGINATION_LIMIT_PARAM) Integer limit,
			@RequestParam(value = ServiceConstants.PAGINATION_OFFSET_PARAM, required = false, defaultValue = ServiceConstants.DEFAULT_PAGINATION_OFFSET_PARAM) Integer offset
			) throws NotFoundException {
		return serviceProvider.getMembershipInvitationService().getOpenInvitationSubmissions(userId, inviteeId, id, limit, offset);
	}

	/**
	 * Retrieve an invitation by ID
	 * Note: The client must be an administrator of the Team referenced by the invitation or the invitee to make this request.
	 * @param id the ID of the invitation
	 * @param userId
	 * @return
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.MEMBERSHIP_INVITATION_ID, method = RequestMethod.GET)
	public @ResponseBody
	MembershipInvitation getInvitation(
			@PathVariable String id,
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId
			) throws NotFoundException {
		return serviceProvider.getMembershipInvitationService().get(userId, id);
	}

	/**
	 * Retrieve an invitation by ID using a MembershipInvtnSignedToken for authorization
	 * @param id
	 * @param token
	 * @return
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.MEMBERSHIP_INVITATION_ID, method = RequestMethod.POST)
	public @ResponseBody
	MembershipInvitation getInvitation(
			@PathVariable String id,
			@RequestBody MembershipInvtnSignedToken token
			) throws NotFoundException {
		return serviceProvider.getMembershipInvitationService().get(id, token);
	}

	/**
	 * Delete an invitation
	 * Note: The client must be an administrator of the Team referenced by the invitation or the invitee to make this request.
	 * @param id the ID of the invitation to be deleted
	 * @param userId
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.NO_CONTENT)
	@RequestMapping(value = UrlHelpers.MEMBERSHIP_INVITATION_ID, method = RequestMethod.DELETE)
	public void deleteInvitation(
			@PathVariable String id,
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId
			) throws NotFoundException {
		serviceProvider.getMembershipInvitationService().delete(userId, id);
	}

	/**
	 * Retrieve the number of pending Membership Invitations
	 * @param userId
	 * @return
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.OPEN_MEMBERSHIP_INVITATION_COUNT, method = RequestMethod.GET)
	public @ResponseBody
	Count getOpenInvitationCount(
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId
			) {
		return serviceProvider.getMembershipInvitationService().getOpenInvitationCount(userId);
	}

	/**
	 * Verify whether the inviteeEmail of the indicated MembershipInvitation is associated with the authenticated user.
	 * If it is, the response body will contain an InviteeVerificationSignedToken.
	 * If it is not, a response status 403 Forbidden will be returned.
	 * InviteeVerificationSignedTokens generated by this service expire 24 hours from creation.
	 * See https://sagebionetworks.jira.com/wiki/spaces/PLFM/pages/143628166/Invite+a+new+user+to+join+a+team for more information.
	 * @param membershipInvitationId
	 * @param userId
	 * @return
	 * @throws NotFoundException
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.MEMBERSHIP_INVITATION_VERIFY_INVITEE, method = RequestMethod.GET)
	public @ResponseBody
	InviteeVerificationSignedToken getInviteeVerificationSignedToken(
			@PathVariable("id") String membershipInvitationId,
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId
			) throws NotFoundException {
		return serviceProvider.getMembershipInvitationService().getInviteeVerificationSignedToken(userId, membershipInvitationId);
	}

	/**
	 * Set the inviteeId of a MembershipInvitation.
	 * A valid InviteeVerificationSignedToken must have an inviteeId equal to the id of
	 * the authenticated user and a membershipInvitationId equal to the id in the URI.
	 * This call will only succeed if the indicated MembershipInvitation has a
	 * null inviteeId and a non null inviteeEmail.
	 * See https://sagebionetworks.jira.com/wiki/spaces/PLFM/pages/143628166/Invite+a+new+user+to+join+a+team for more information.
	 * @param membershipInvitationId
	 * @param userId
	 * @param token
	 */
	@ResponseStatus(HttpStatus.OK)
	@RequestMapping(value = UrlHelpers.MEMBERSHIP_INVITATION_UPDATE_INVITEE_ID, method = RequestMethod.PUT)
	public void updateInviteeId(
			@PathVariable("id") String membershipInvitationId,
			@RequestParam(value = AuthorizationConstants.USER_ID_PARAM) Long userId,
			@RequestBody InviteeVerificationSignedToken token) {
		serviceProvider.getMembershipInvitationService().updateInviteeId(userId, membershipInvitationId, token);
	}
}
/* * Copyright (C) 2006 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.server.am; import com.android.internal.app.procstats.ServiceState; import com.android.internal.os.BatteryStatsImpl; import com.android.server.LocalServices; import com.android.server.notification.NotificationManagerInternal; import android.app.INotificationManager; import android.app.Notification; import android.app.NotificationManager; import android.app.PendingIntent; import android.content.ComponentName; import android.content.Context; import android.content.Intent; import android.content.pm.ApplicationInfo; import android.content.pm.PackageManager; import android.content.pm.ServiceInfo; import android.net.Uri; import android.os.Binder; import android.os.IBinder; import android.os.RemoteException; import android.os.SystemClock; import android.os.UserHandle; import android.provider.Settings; import android.util.ArrayMap; import android.util.Slog; import android.util.TimeUtils; import java.io.PrintWriter; import java.util.ArrayList; import java.util.List; import java.util.Objects; import static com.android.server.am.ActivityManagerDebugConfig.TAG_AM; import static com.android.server.am.ActivityManagerDebugConfig.TAG_WITH_CLASS_NAME; /** * A running application service. */ final class ServiceRecord extends Binder { private static final String TAG = TAG_WITH_CLASS_NAME ? "ServiceRecord" : TAG_AM; // Maximum number of delivery attempts before giving up. 
    static final int MAX_DELIVERY_COUNT = 3;

    // Maximum number of times it can fail during execution before giving up.
    static final int MAX_DONE_EXECUTING_COUNT = 6;

    final ActivityManagerService ams;
    final BatteryStatsImpl.Uid.Pkg.Serv stats;
    final ComponentName name; // service component.
    final String shortName; // name.flattenToShortString().
    final Intent.FilterComparison intent;
                            // original intent used to find service.
    final ServiceInfo serviceInfo;
                            // all information about the service.
    final ApplicationInfo appInfo;
                            // information about service's app.
    final int userId;       // user that this service is running as
    final String packageName; // the package implementing intent's component
    final String processName; // process where this component wants to run
    final String permission;// permission needed to access service
    final boolean exported; // from ServiceInfo.exported
    final Runnable restarter; // used to schedule retries of starting the service
    final long createTime;  // when this service was created
    final ArrayMap<Intent.FilterComparison, IntentBindRecord> bindings
            = new ArrayMap<Intent.FilterComparison, IntentBindRecord>();
                            // All active bindings to the service.
    final ArrayMap<IBinder, ArrayList<ConnectionRecord>> connections
            = new ArrayMap<IBinder, ArrayList<ConnectionRecord>>();
                            // IBinder -> ConnectionRecord of all bound clients

    ProcessRecord app;      // where this service is running or null.
    ProcessRecord isolatedProc; // keep track of isolated process, if requested
    ServiceState tracker; // tracking service execution, may be null
    ServiceState restartTracker; // tracking service restart
    boolean whitelistManager; // any bindings to this service have BIND_ALLOW_WHITELIST_MANAGEMENT?
    boolean delayed;        // are we waiting to start this service in the background?
    boolean isForeground;   // is service currently in foreground mode?
    int foregroundId;       // Notification ID of last foreground req.
    Notification foregroundNoti; // Notification record of foreground state.
    long lastActivity;      // last time there was some activity on the service.
    long startingBgTimeout;  // time at which we scheduled this for a delayed start.
    boolean startRequested; // someone explicitly called start?
    boolean delayedStop;    // service has been stopped but is in a delayed start?
    boolean stopIfKilled;   // last onStart() said to stop if service killed?
    boolean callStart;      // last onStart() has asked to always be called on restart.
    int executeNesting;     // number of outstanding operations keeping foreground.
    boolean executeFg;      // should we be executing in the foreground?
    long executingStart;    // start time of last execute request.
    boolean createdFromFg;  // was this service last created due to a foreground process call?
    int crashCount;         // number of times proc has crashed with service running
    int totalRestartCount;  // number of times we have had to restart.
    int restartCount;       // number of restarts performed in a row.
    long restartDelay;      // delay until next restart attempt.
    long restartTime;       // time of last restart.
    long nextRestartTime;   // time when restartDelay will expire.
    boolean destroying;     // set when we have started destroying the service
    long destroyTime;       // time at which destroy was initiated.

    String stringName;      // caching of toString

    private int lastStartId;    // identifier of most recent start request.
static class StartItem { final ServiceRecord sr; final boolean taskRemoved; final int id; final Intent intent; final ActivityManagerService.NeededUriGrants neededGrants; long deliveredTime; int deliveryCount; int doneExecutingCount; UriPermissionOwner uriPermissions; String stringName; // caching of toString StartItem(ServiceRecord _sr, boolean _taskRemoved, int _id, Intent _intent, ActivityManagerService.NeededUriGrants _neededGrants) { sr = _sr; taskRemoved = _taskRemoved; id = _id; intent = _intent; neededGrants = _neededGrants; } UriPermissionOwner getUriPermissionsLocked() { if (uriPermissions == null) { uriPermissions = new UriPermissionOwner(sr.ams, this); } return uriPermissions; } void removeUriPermissionsLocked() { if (uriPermissions != null) { uriPermissions.removeUriPermissionsLocked(); uriPermissions = null; } } public String toString() { if (stringName != null) { return stringName; } StringBuilder sb = new StringBuilder(128); sb.append("ServiceRecord{") .append(Integer.toHexString(System.identityHashCode(sr))) .append(' ').append(sr.shortName) .append(" StartItem ") .append(Integer.toHexString(System.identityHashCode(this))) .append(" id=").append(id).append('}'); return stringName = sb.toString(); } } final ArrayList<StartItem> deliveredStarts = new ArrayList<StartItem>(); // start() arguments which been delivered. final ArrayList<StartItem> pendingStarts = new ArrayList<StartItem>(); // start() arguments that haven't yet been delivered. 
/**
 * Dumps each StartItem in {@code list}, one entry per item, including delivery
 * timing and URI-permission state.  Passing {@code now == 0} suppresses the
 * duration column (used for the pending list, whose items have no delivery
 * time yet).
 */
void dumpStartList(PrintWriter pw, String prefix, List<StartItem> list, long now) {
    final int N = list.size();
    for (int i=0; i<N; i++) {
        StartItem si = list.get(i);
        pw.print(prefix); pw.print("#"); pw.print(i);
        pw.print(" id="); pw.print(si.id);
        if (now != 0) {
            pw.print(" dur=");
            TimeUtils.formatDuration(si.deliveredTime, now, pw);
        }
        if (si.deliveryCount != 0) {
            pw.print(" dc="); pw.print(si.deliveryCount);
        }
        if (si.doneExecutingCount != 0) {
            pw.print(" dxc="); pw.print(si.doneExecutingCount);
        }
        pw.println("");
        pw.print(prefix); pw.print(" intent=");
        if (si.intent != null) pw.println(si.intent.toString());
        else pw.println("null");
        if (si.neededGrants != null) {
            pw.print(prefix); pw.print(" neededGrants=");
            pw.println(si.neededGrants);
        }
        if (si.uriPermissions != null) {
            si.uriPermissions.dump(pw, prefix);
        }
    }
}

/**
 * Writes a full human-readable description of this service record to
 * {@code pw}, each line prefixed with {@code prefix} (dumpsys output).
 * Optional sections — foreground state, execution state, restart state,
 * starts, bindings, connections — are printed only when non-default.
 */
void dump(PrintWriter pw, String prefix) {
    pw.print(prefix); pw.print("intent={");
    pw.print(intent.getIntent().toShortString(false, true, false, true));
    pw.println('}');
    pw.print(prefix); pw.print("packageName="); pw.println(packageName);
    pw.print(prefix); pw.print("processName="); pw.println(processName);
    if (permission != null) {
        pw.print(prefix); pw.print("permission="); pw.println(permission);
    }
    // Two clocks are used on purpose: createTime is elapsedRealtime-based
    // while the other timestamps are uptimeMillis-based (see constructor).
    long now = SystemClock.uptimeMillis();
    long nowReal = SystemClock.elapsedRealtime();
    if (appInfo != null) {
        pw.print(prefix); pw.print("baseDir="); pw.println(appInfo.sourceDir);
        if (!Objects.equals(appInfo.sourceDir, appInfo.publicSourceDir)) {
            pw.print(prefix); pw.print("resDir="); pw.println(appInfo.publicSourceDir);
        }
        pw.print(prefix); pw.print("dataDir="); pw.println(appInfo.dataDir);
    }
    pw.print(prefix); pw.print("app="); pw.println(app);
    if (isolatedProc != null) {
        pw.print(prefix); pw.print("isolatedProc="); pw.println(isolatedProc);
    }
    if (whitelistManager) {
        pw.print(prefix); pw.print("whitelistManager="); pw.println(whitelistManager);
    }
    if (delayed) {
        pw.print(prefix); pw.print("delayed="); pw.println(delayed);
    }
    if (isForeground || foregroundId != 0) {
        pw.print(prefix); pw.print("isForeground="); pw.print(isForeground);
        pw.print(" foregroundId="); pw.print(foregroundId);
        pw.print(" foregroundNoti="); pw.println(foregroundNoti);
    }
    pw.print(prefix); pw.print("createTime=");
    TimeUtils.formatDuration(createTime, nowReal, pw);
    pw.print(" startingBgTimeout=");
    TimeUtils.formatDuration(startingBgTimeout, now, pw);
    pw.println();
    pw.print(prefix); pw.print("lastActivity=");
    TimeUtils.formatDuration(lastActivity, now, pw);
    pw.print(" restartTime=");
    TimeUtils.formatDuration(restartTime, now, pw);
    pw.print(" createdFromFg="); pw.println(createdFromFg);
    if (startRequested || delayedStop || lastStartId != 0) {
        pw.print(prefix); pw.print("startRequested="); pw.print(startRequested);
        pw.print(" delayedStop="); pw.print(delayedStop);
        pw.print(" stopIfKilled="); pw.print(stopIfKilled);
        pw.print(" callStart="); pw.print(callStart);
        pw.print(" lastStartId="); pw.println(lastStartId);
    }
    if (executeNesting != 0) {
        pw.print(prefix); pw.print("executeNesting="); pw.print(executeNesting);
        pw.print(" executeFg="); pw.print(executeFg);
        pw.print(" executingStart=");
        TimeUtils.formatDuration(executingStart, now, pw);
        pw.println();
    }
    if (destroying || destroyTime != 0) {
        pw.print(prefix); pw.print("destroying="); pw.print(destroying);
        pw.print(" destroyTime=");
        TimeUtils.formatDuration(destroyTime, now, pw);
        pw.println();
    }
    if (crashCount != 0 || restartCount != 0 || restartDelay != 0 || nextRestartTime != 0) {
        pw.print(prefix); pw.print("restartCount="); pw.print(restartCount);
        pw.print(" restartDelay=");
        TimeUtils.formatDuration(restartDelay, now, pw);
        pw.print(" nextRestartTime=");
        TimeUtils.formatDuration(nextRestartTime, now, pw);
        pw.print(" crashCount="); pw.println(crashCount);
    }
    if (deliveredStarts.size() > 0) {
        pw.print(prefix); pw.println("Delivered Starts:");
        dumpStartList(pw, prefix, deliveredStarts, now);
    }
    if (pendingStarts.size() > 0) {
        pw.print(prefix); pw.println("Pending Starts:");
        // Pending items have not been delivered; 0 suppresses the dur= column.
        dumpStartList(pw, prefix, pendingStarts, 0);
    }
    if (bindings.size() > 0) {
        pw.print(prefix); pw.println("Bindings:");
        for (int i=0; i<bindings.size(); i++) {
            IntentBindRecord b = bindings.valueAt(i);
            pw.print(prefix); pw.print("* IntentBindRecord{");
            pw.print(Integer.toHexString(System.identityHashCode(b)));
            if ((b.collectFlags()&Context.BIND_AUTO_CREATE) != 0) {
                pw.append(" CREATE");
            }
            pw.println("}:");
            b.dumpInService(pw, prefix + " ");
        }
    }
    if (connections.size() > 0) {
        pw.print(prefix); pw.println("All Connections:");
        for (int conni=0; conni<connections.size(); conni++) {
            ArrayList<ConnectionRecord> c = connections.valueAt(conni);
            for (int i=0; i<c.size(); i++) {
                pw.print(prefix); pw.print(" "); pw.println(c.get(i));
            }
        }
    }
}

/**
 * Creates a record for a resolved service.  Identity fields (package, process,
 * permission, ...) are snapshotted from {@code sInfo}; createTime uses the
 * elapsedRealtime clock while lastActivity uses uptimeMillis.
 */
ServiceRecord(ActivityManagerService ams,
        BatteryStatsImpl.Uid.Pkg.Serv servStats, ComponentName name,
        Intent.FilterComparison intent, ServiceInfo sInfo, boolean callerIsFg,
        Runnable restarter) {
    this.ams = ams;
    this.stats = servStats;
    this.name = name;
    shortName = name.flattenToShortString();
    this.intent = intent;
    serviceInfo = sInfo;
    appInfo = sInfo.applicationInfo;
    packageName = sInfo.applicationInfo.packageName;
    processName = sInfo.processName;
    permission = sInfo.permission;
    exported = sInfo.exported;
    this.restarter = restarter;
    createTime = SystemClock.elapsedRealtime();
    lastActivity = SystemClock.uptimeMillis();
    userId = UserHandle.getUserId(appInfo.uid);
    createdFromFg = callerIsFg;
}

/**
 * Returns the process-stats tracker for this service, creating it lazily.
 * Persistent apps (FLAG_PERSISTENT) are deliberately not tracked, so this
 * can return null for them.
 */
public ServiceState getTracker() {
    if (tracker != null) {
        return tracker;
    }
    if ((serviceInfo.applicationInfo.flags&ApplicationInfo.FLAG_PERSISTENT) == 0) {
        tracker = ams.mProcessStats.getServiceStateLocked(serviceInfo.packageName,
                serviceInfo.applicationInfo.uid,
                serviceInfo.applicationInfo.versionCode,
                serviceInfo.processName, serviceInfo.name);
        tracker.applyNewOwner(this);
    }
    return tracker;
}

/** Detaches this record from its stats tracker, if any, and forgets it. */
public void forceClearTracker() {
    if (tracker != null) {
        tracker.clearCurrentOwner(this, true);
        tracker = null;
    }
}

/**
 * Marks this service as restarting in process stats.  Uses a separate restart
 * tracker (lazily created; again skipped for persistent apps) so the restart
 * interval is accounted independently of normal run state.
 */
public void makeRestarting(int memFactor, long now) {
    if (restartTracker == null) {
        if ((serviceInfo.applicationInfo.flags&ApplicationInfo.FLAG_PERSISTENT) == 0) {
            restartTracker = ams.mProcessStats.getServiceStateLocked(
                    serviceInfo.packageName, serviceInfo.applicationInfo.uid,
                    serviceInfo.applicationInfo.versionCode,
                    serviceInfo.processName, serviceInfo.name);
        }
        if (restartTracker == null) {
            return;
        }
    }
    restartTracker.setRestarting(true, memFactor, now);
}

/**
 * Returns the AppBindRecord tying {@code app} to this service for the given
 * bind intent, creating both the per-intent and per-app records on demand.
 */
public AppBindRecord retrieveAppBindingLocked(Intent intent, ProcessRecord app) {
    Intent.FilterComparison filter = new Intent.FilterComparison(intent);
    IntentBindRecord i = bindings.get(filter);
    if (i == null) {
        i = new IntentBindRecord(this, filter);
        bindings.put(filter, i);
    }
    AppBindRecord a = i.apps.get(app);
    if (a != null) {
        return a;
    }
    a = new AppBindRecord(this, i, app);
    i.apps.put(app, a);
    return a;
}

/** Returns true if any current connection was made with BIND_AUTO_CREATE. */
public boolean hasAutoCreateConnections() {
    // XXX should probably keep a count of the number of auto-create
    // connections directly in the service.
    for (int conni=connections.size()-1; conni>=0; conni--) {
        ArrayList<ConnectionRecord> cr = connections.valueAt(conni);
        for (int i=0; i<cr.size(); i++) {
            if ((cr.get(i).flags&Context.BIND_AUTO_CREATE) != 0) {
                return true;
            }
        }
    }
    return false;
}

/**
 * Recomputes whitelistManager: true iff any current connection used
 * BIND_ALLOW_WHITELIST_MANAGEMENT.  Called when connections change.
 */
public void updateWhitelistManager() {
    whitelistManager = false;
    for (int conni=connections.size()-1; conni>=0; conni--) {
        ArrayList<ConnectionRecord> cr = connections.valueAt(conni);
        for (int i=0; i<cr.size(); i++) {
            if ((cr.get(i).flags&Context.BIND_ALLOW_WHITELIST_MANAGEMENT) != 0) {
                whitelistManager = true;
                return;
            }
        }
    }
}

/** Resets the in-a-row restart back-off state (not totalRestartCount). */
public void resetRestartCounter() {
    restartCount = 0;
    restartDelay = 0;
    restartTime = 0;
}

/**
 * Finds the delivered StartItem with the given start id, optionally removing
 * it from the delivered list.  Returns null if no such id is outstanding.
 */
public StartItem findDeliveredStart(int id, boolean remove) {
    final int N = deliveredStarts.size();
    for (int i=0; i<N; i++) {
        StartItem si = deliveredStarts.get(i);
        if (si.id == id) {
            if (remove) deliveredStarts.remove(i);
            return si;
        }
    }
    return null;
}

/** Returns the id of the most recent start request. */
public int getLastStartId() {
    return lastStartId;
}

/**
 * Allocates the next start id.  Ids are always >= 1; on integer overflow the
 * counter wraps back to 1.
 */
public int makeNextStartId() {
    lastStartId++;
    if (lastStartId < 1) {
        lastStartId = 1;
    }
    return lastStartId;
}

/**
 * (Re)posts this service's foreground notification via the notification
 * manager.  Work is posted to the AMS handler to do asynchronous
 * communication with notification manager and avoid deadlocks.  A broken
 * notification (no icon) is replaced with a system-built one blaming the
 * app; if that also fails, the service is demoted from foreground and the
 * app is crashed.
 */
public void postNotification() {
    final int appUid = appInfo.uid;
    final int appPid = app.pid;
    if (foregroundId != 0 && foregroundNoti != null) {
        // Do asynchronous communication with notification manager to
        // avoid deadlocks.
        final String localPackageName = packageName;
        final int localForegroundId = foregroundId;
        final Notification _foregroundNoti = foregroundNoti;
        ams.mHandler.post(new Runnable() {
            public void run() {
                NotificationManagerInternal nm = LocalServices.getService(
                        NotificationManagerInternal.class);
                if (nm == null) {
                    return;
                }
                Notification localForegroundNoti = _foregroundNoti;
                try {
                    if (localForegroundNoti.getSmallIcon() == null) {
                        // It is not correct for the caller to not supply a notification
                        // icon, but this used to be able to slip through, so for
                        // those dirty apps we will create a notification clearly
                        // blaming the app.
                        Slog.v(TAG, "Attempted to start a foreground service ("
                                + name
                                + ") with a broken notification (no icon: "
                                + localForegroundNoti
                                + ")");
                        CharSequence appName = appInfo.loadLabel(
                                ams.mContext.getPackageManager());
                        if (appName == null) {
                            appName = appInfo.packageName;
                        }
                        Context ctx = null;
                        try {
                            ctx = ams.mContext.createPackageContextAsUser(
                                    appInfo.packageName, 0, new UserHandle(userId));
                            Notification.Builder notiBuilder = new Notification.Builder(ctx);
                            // it's ugly, but it clearly identifies the app
                            notiBuilder.setSmallIcon(appInfo.icon);
                            // mark as foreground
                            notiBuilder.setFlag(Notification.FLAG_FOREGROUND_SERVICE, true);
                            // we are doing the app a kindness here
                            notiBuilder.setPriority(Notification.PRIORITY_MIN);
                            Intent runningIntent = new Intent(
                                    Settings.ACTION_APPLICATION_DETAILS_SETTINGS);
                            runningIntent.setData(Uri.fromParts("package",
                                    appInfo.packageName, null));
                            PendingIntent pi = PendingIntent.getActivity(ams.mContext, 0,
                                    runningIntent, PendingIntent.FLAG_UPDATE_CURRENT);
                            notiBuilder.setColor(ams.mContext.getColor(
                                    com.android.internal
                                            .R.color.system_notification_accent_color));
                            notiBuilder.setContentTitle(
                                    ams.mContext.getString(
                                            com.android.internal.R.string
                                                    .app_running_notification_title,
                                            appName));
                            notiBuilder.setContentText(
                                    ams.mContext.getString(
                                            com.android.internal.R.string
                                                    .app_running_notification_text,
                                            appName));
                            notiBuilder.setContentIntent(pi);
                            localForegroundNoti = notiBuilder.build();
                        } catch (PackageManager.NameNotFoundException e) {
                        }
                    }
                    if (localForegroundNoti.getSmallIcon() == null) {
                        // Notifications whose icon is 0 are defined to not show
                        // a notification, silently ignoring it.  We don't want to
                        // just ignore it, we want to prevent the service from
                        // being foreground.
                        throw new RuntimeException("invalid service notification: "
                                + foregroundNoti);
                    }
                    int[] outId = new int[1];
                    nm.enqueueNotification(localPackageName, localPackageName,
                            appUid, appPid, null, localForegroundId,
                            localForegroundNoti, outId, userId);
                    foregroundNoti = localForegroundNoti; // save it for amending next time
                } catch (RuntimeException e) {
                    Slog.w(TAG, "Error showing notification for service", e);
                    // If it gave us a garbage notification, it doesn't
                    // get to be foreground.
                    ams.setServiceForeground(name, ServiceRecord.this, 0, null, 0);
                    ams.crashApplication(appUid, appPid, localPackageName,
                            "Bad notification for startForeground: " + e);
                }
            }
        });
    }
}

/**
 * Cancels this service's foreground notification, if it has one.  As with
 * postNotification(), the call is posted to the AMS handler to avoid
 * deadlocking against the notification manager.
 */
public void cancelNotification() {
    if (foregroundId != 0) {
        // Do asynchronous communication with notification manager to
        // avoid deadlocks.
        final String localPackageName = packageName;
        final int localForegroundId = foregroundId;
        ams.mHandler.post(new Runnable() {
            public void run() {
                INotificationManager inm = NotificationManager.getService();
                if (inm == null) {
                    return;
                }
                try {
                    inm.cancelNotificationWithTag(localPackageName, null,
                            localForegroundId, userId);
                } catch (RuntimeException e) {
                    Slog.w(TAG, "Error canceling notification for service", e);
                } catch (RemoteException e) {
                }
            }
        });
    }
}

/**
 * Clears FLAG_FOREGROUND_SERVICE from the posted notification without
 * cancelling it, leaving the notification itself visible.  No-op if the
 * service has no foreground notification.
 */
public void stripForegroundServiceFlagFromNotification() {
    if (foregroundId == 0) {
        return;
    }
    final int localForegroundId = foregroundId;
    final int localUserId = userId;
    final String localPackageName = packageName;
    // Do asynchronous communication with notification manager to
    // avoid deadlocks.
    ams.mHandler.post(new Runnable() {
        @Override
        public void run() {
            NotificationManagerInternal nmi = LocalServices.getService(
                    NotificationManagerInternal.class);
            if (nmi == null) {
                return;
            }
            nmi.removeForegroundServiceFlagFromNotification(localPackageName,
                    localForegroundId, localUserId);
        }
    });
}

/**
 * Drops all delivered start items, first revoking any URI permissions each
 * one granted.
 */
public void clearDeliveredStartsLocked() {
    for (int i=deliveredStarts.size()-1; i>=0; i--) {
        deliveredStarts.get(i).removeUriPermissionsLocked();
    }
    deliveredStarts.clear();
}

public String toString() {
    if (stringName != null) {
        return stringName;
    }
    StringBuilder sb = new StringBuilder(128);
    sb.append("ServiceRecord{")
            .append(Integer.toHexString(System.identityHashCode(this)))
            .append(" u").append(userId)
            .append(' ').append(shortName).append('}');
    return stringName = sb.toString();
}
}
/*
 * Copyright 2011 Inaiat H. Moraes.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package br.com.digilabs.jqplot.elements;

import br.com.digilabs.jqplot.JqPlotResources;

/**
 * Configuration for a single jqPlot series: label, renderer, marker, shadow
 * and line options.  Setters return {@code this} so calls can be chained.
 *
 * @author inaiat
 */
public class Serie implements Element {

    private static final long serialVersionUID = 7138260563176853708L;

    private String label;                       // series label shown in the legend
    private JqPlotResources renderer;           // renderer plugin used for this series
    private RendererOptions rendererOptions;    // renderer-specific options
    private Boolean fill;                       // fill under the line?
    private Integer lineWidth;                  // line width in pixels
    private MarkerOptions markerOptions;        // data-point marker options
    private Boolean showMarker;                 // draw markers at data points?
    private Boolean showAlpha;                  // use alpha transparency?
    private Boolean shadow;                     // draw a shadow?
    private String shadowAlpha;                 // shadow transparency
    private Integer shadowDepth;                // shadow depth
    private Boolean showLine;                   // draw the line itself?
    private String color;                       // series color

    /** Creates an empty series. */
    public Serie() {
    }

    /**
     * Creates a series with the given legend label.
     *
     * @param label the label
     */
    public Serie(String label) {
        this.label = label;
    }

    /** Fluent alias for {@link #setLineWidth(Integer)}. */
    public Serie lineWidth(Integer lineWidth) {
        this.lineWidth = lineWidth;
        return this;
    }

    /** Fluent alias for {@link #setMarkerOptions(MarkerOptions)}. */
    public Serie markerOptions(MarkerOptions markerOptions) {
        this.markerOptions = markerOptions;
        return this;
    }

    /** Fluent alias for {@link #setShowLine(Boolean)}. */
    public Serie showLine(Boolean showLine) {
        // Delegates so subclasses overriding setShowLine keep their behavior.
        this.setShowLine(showLine);
        return this;
    }

    /** Fluent alias for {@link #setRendererOptions(RendererOptions)}. */
    public Serie rendererOptions(RendererOptions rendererOptions) {
        this.rendererOptions = rendererOptions;
        return this;
    }

    /** Fluent alias for {@link #setRenderer(JqPlotResources)}. */
    public Serie renderer(JqPlotResources renderer) {
        this.renderer = renderer;
        return this;
    }

    /** @return the shadow alpha */
    public String getShadowAlpha() {
        return shadowAlpha;
    }

    /** Sets the shadow alpha; returns this for chaining. */
    public Serie setShadowAlpha(String shadowAlpha) {
        this.shadowAlpha = shadowAlpha;
        return this;
    }

    /** @return the shadow depth */
    public Integer getShadowDepth() {
        return shadowDepth;
    }

    /** Sets the shadow depth; returns this for chaining. */
    public Serie setShadowDepth(Integer shadowDepth) {
        this.shadowDepth = shadowDepth;
        return this;
    }

    /** @return whether alpha transparency is enabled */
    public Boolean getShowAlpha() {
        return showAlpha;
    }

    /** Sets whether alpha transparency is enabled; returns this for chaining. */
    public Serie setShowAlpha(Boolean showAlpha) {
        this.showAlpha = showAlpha;
        return this;
    }

    /** @return whether a shadow is drawn */
    public Boolean getShadow() {
        return shadow;
    }

    /** Sets whether a shadow is drawn; returns this for chaining. */
    public Serie setShadow(Boolean shadow) {
        this.shadow = shadow;
        return this;
    }

    /** @return whether markers are shown */
    public Boolean getShowMarker() {
        return showMarker;
    }

    /** Sets whether markers are shown; returns this for chaining. */
    public Serie setShowMarker(Boolean showMarker) {
        this.showMarker = showMarker;
        return this;
    }

    /** @return the marker options */
    public MarkerOptions getMarkerOptions() {
        return markerOptions;
    }

    /** Sets the marker options; returns this for chaining. */
    public Serie setMarkerOptions(MarkerOptions markerOptions) {
        this.markerOptions = markerOptions;
        return this;
    }

    /** @return the line width */
    public Integer getLineWidth() {
        return lineWidth;
    }

    /** Sets the line width; returns this for chaining. */
    public Serie setLineWidth(Integer lineWidth) {
        this.lineWidth = lineWidth;
        return this;
    }

    /** @return the renderer options */
    public RendererOptions getRendererOptions() {
        return rendererOptions;
    }

    /** Sets the renderer options; returns this for chaining. */
    public Serie setRendererOptions(RendererOptions rendererOptions) {
        this.rendererOptions = rendererOptions;
        return this;
    }

    /** @return the legend label */
    public String getLabel() {
        return label;
    }

    /** Sets the legend label; returns this for chaining. */
    public Serie setLabel(String label) {
        this.label = label;
        return this;
    }

    /** @return the renderer */
    public JqPlotResources getRenderer() {
        return renderer;
    }

    /** Sets the renderer; returns this for chaining. */
    public Serie setRenderer(JqPlotResources renderer) {
        this.renderer = renderer;
        return this;
    }

    /** @return the fill flag */
    public Boolean getFill() {
        return fill;
    }

    /** Sets the fill flag; returns this for chaining. */
    public Serie setFill(Boolean fill) {
        this.fill = fill;
        return this;
    }

    /** @return the series color */
    public String getColor() {
        return color;
    }

    /** Sets the series color; returns this for chaining. */
    public Serie setColor(String color) {
        this.color = color;
        return this;
    }

    /** @return whether the line is shown */
    public Boolean getShowLine() {
        return showLine;
    }

    /** Sets whether the line is shown; returns this for chaining. */
    public Serie setShowLine(Boolean showLine) {
        this.showLine = showLine;
        return this;
    }

    /**
     * Returns the renderer options, creating an empty instance on first use.
     *
     * @return RendererOptions
     */
    public RendererOptions rendererOptionsInstance() {
        if (rendererOptions == null) {
            this.rendererOptions = new RendererOptions();
        }
        return rendererOptions;
    }

    /** Returns the marker options, creating an empty instance on first use. */
    public MarkerOptions markerOptionsInstance() {
        if (markerOptions == null) {
            markerOptions = new MarkerOptions();
        }
        return markerOptions;
    }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.skyframe; import com.google.common.base.MoreObjects; import com.google.common.base.Preconditions; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe; import com.google.devtools.build.lib.packages.AdvertisedProviderSet; import com.google.devtools.build.lib.packages.Rule; import com.google.devtools.build.lib.packages.Target; import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec; import com.google.devtools.build.lib.util.StringCanonicalizer; import com.google.devtools.build.skyframe.SkyKey; import com.google.devtools.build.skyframe.SkyValue; import java.util.Objects; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import javax.annotation.Nullable; /** * A <i>transitive</i> target reference that, when built in skyframe, loads the entire transitive * closure of a target. Retains the first error message found during the transitive traversal, the * kind of target, and a set of names of providers if the target is a {@link Rule}. * * <p>Interns values for error-free traversal nodes that correspond to built-in rules. 
 */
@Immutable
@ThreadSafe
@AutoCodec
public abstract class TransitiveTraversalValue implements SkyValue {
  // A quick-lookup cache that allows us to get the value for a given target kind, assuming no
  // error messages for the target. The number of built-in target kinds is limited, so memory
  // bloat is not a concern.
  private static final ConcurrentMap<String, TransitiveTraversalValue> VALUES_BY_TARGET_KIND =
      new ConcurrentHashMap<>();

  /**
   * A strong interner of TransitiveTargetValue objects. Because we only wish to intern values for
   * built-in non-Starlark targets, we need an interner with an additional method to return the
   * canonical representative if it is present without interning our sample. This is only mutated
   * in {@link #forTarget}, and read in {@link #forTarget} and {@link #create}.
   */
  private static final InternerWithPresenceCheck<TransitiveTraversalValue> VALUE_INTERNER =
      new InternerWithPresenceCheck<>();

  // The target's kind string, as supplied by Target#getTargetKind; never null.
  private final String kind;

  protected TransitiveTraversalValue(String kind) {
    this.kind = Preconditions.checkNotNull(kind);
  }

  /** Returns an error-carrying value for a traversal that failed with {@code errorMessage}. */
  static TransitiveTraversalValue unsuccessfulTransitiveTraversal(
      String errorMessage, Target target) {
    return new TransitiveTraversalValueWithError(
        Preconditions.checkNotNull(errorMessage), target.getTargetKind());
  }

  /**
   * Returns the value for {@code target}.  Error-free values for built-in (non-Starlark) targets
   * are interned and cached by target kind; Starlark rules and error values are never interned.
   */
  static TransitiveTraversalValue forTarget(Target target, @Nullable String errorMessage) {
    if (errorMessage == null) {
      if (target instanceof Rule && ((Rule) target).getRuleClassObject().isStarlark()) {
        Rule rule = (Rule) target;
        // Do not intern values for Starlark rules.
        return TransitiveTraversalValue.create(
            rule.getRuleClassObject().getAdvertisedProviders(),
            rule.getTargetKind(),
            errorMessage);
      } else {
        TransitiveTraversalValue value = VALUES_BY_TARGET_KIND.get(target.getTargetKind());
        if (value != null) {
          return value;
        }
        AdvertisedProviderSet providers =
            target instanceof Rule
                ? ((Rule) target).getRuleClassObject().getAdvertisedProviders()
                : AdvertisedProviderSet.EMPTY;
        value = new TransitiveTraversalValueWithoutError(providers, target.getTargetKind());
        // May already be there from another target or a concurrent put.
        value = VALUE_INTERNER.intern(value);
        // May already be there from a concurrent put.
        VALUES_BY_TARGET_KIND.putIfAbsent(target.getTargetKind(), value);
        return value;
      }
    } else {
      return new TransitiveTraversalValueWithError(errorMessage, target.getTargetKind());
    }
  }

  /**
   * Deserialization entry point.  Reuses the interner's canonical instance for error-free values
   * when one exists, but never inserts new entries (that is done only in {@link #forTarget}).
   */
  @AutoCodec.Instantiator
  public static TransitiveTraversalValue create(
      AdvertisedProviderSet providers, String kind, @Nullable String errorMessage) {
    TransitiveTraversalValue value =
        errorMessage == null
            ? new TransitiveTraversalValueWithoutError(providers, kind)
            : new TransitiveTraversalValueWithError(errorMessage, kind);
    if (errorMessage == null) {
      TransitiveTraversalValue oldValue = VALUE_INTERNER.getCanonical(value);
      return oldValue == null ? value : oldValue;
    }
    return value;
  }

  /** Returns if the associated target can have any provider. True for "alias" rules. */
  public abstract boolean canHaveAnyProvider();

  /**
   * Returns the set of provider names from the target, if the target is a {@link Rule}. Otherwise
   * returns the empty set.
   */
  public abstract AdvertisedProviderSet getProviders();

  /** Returns the target kind. */
  public String getKind() {
    return kind;
  }

  /**
   * Returns a deterministic error message, if any, from loading the target and its transitive
   * dependencies.
   */
  @Nullable
  public abstract String getErrorMessage();

  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (!(o instanceof TransitiveTraversalValue)) {
      return false;
    }
    TransitiveTraversalValue that = (TransitiveTraversalValue) o;
    return Objects.equals(this.getErrorMessage(), that.getErrorMessage())
        && Objects.equals(this.getKind(), that.getKind())
        && this.getProviders().equals(that.getProviders());
  }

  @Override
  public int hashCode() {
    return Objects.hash(getErrorMessage(), getKind(), getProviders());
  }

  @ThreadSafe
  public static SkyKey key(Label label) {
    // The label itself serves as the SkyKey; labels in the default repository are rejected.
    Preconditions.checkArgument(!label.getPackageIdentifier().getRepository().isDefault());
    return label;
  }

  /** A transitive target reference without error. */
  public static final class TransitiveTraversalValueWithoutError extends TransitiveTraversalValue {
    private final AdvertisedProviderSet advertisedProviders;

    private TransitiveTraversalValueWithoutError(
        AdvertisedProviderSet providers, @Nullable String kind) {
      super(kind);
      this.advertisedProviders = Preconditions.checkNotNull(providers);
    }

    @Override
    public boolean canHaveAnyProvider() {
      return advertisedProviders.canHaveAnyProvider();
    }

    @Override
    public AdvertisedProviderSet getProviders() {
      return advertisedProviders;
    }

    @Override
    @Nullable
    public String getErrorMessage() {
      return null;
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("kind", getKind())
          .add("providers", advertisedProviders)
          .toString();
    }
  }

  /** A transitive target reference with error. */
  public static final class TransitiveTraversalValueWithError extends TransitiveTraversalValue {
    private final String errorMessage;

    private TransitiveTraversalValueWithError(String errorMessage, String kind) {
      super(kind);
      // Error strings are canonicalized to save memory across many failing targets.
      this.errorMessage = StringCanonicalizer.intern(Preconditions.checkNotNull(errorMessage));
    }

    @Override
    public boolean canHaveAnyProvider() {
      return AdvertisedProviderSet.EMPTY.canHaveAnyProvider();
    }

    @Override
    public AdvertisedProviderSet getProviders() {
      return AdvertisedProviderSet.EMPTY;
    }

    @Override
    @Nullable
    public String getErrorMessage() {
      return errorMessage;
    }

    @Override
    public String toString() {
      return MoreObjects.toStringHelper(this)
          .add("error", errorMessage)
          .add("kind", getKind())
          .toString();
    }
  }
}
/** * */ package com.trendrr.nsq; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.Date; import java.util.List; import java.util.Timer; import java.util.TimerTask; import java.util.concurrent.Executor; import java.util.concurrent.Executors; import java.util.concurrent.ExecutorService; import org.jboss.netty.bootstrap.ClientBootstrap; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.buffer.ChannelBuffers; import org.jboss.netty.channel.Channel; import org.jboss.netty.channel.ChannelFuture; import org.jboss.netty.channel.socket.nio.NioClientSocketChannelFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.trendrr.nsq.exceptions.NoConnectionsException; import com.trendrr.nsq.netty.NSQPipeline; /** * Base class for producer and consumer * * @author Dustin Norlander * @created Jan 22, 2013 * */ public abstract class AbstractNSQClient { protected static Logger log = LoggerFactory.getLogger(AbstractNSQClient.class); /** * Protocol version sent to nsqd on initial connect */ public static byte[] MAGIC_PROTOCOL_VERSION = " V2".getBytes(); private int messagesPerBatch = 200; private long lookupPeriod = 60 * 1000; // how often to recheck for new nodes (and clean up non responsive nodes) Connections connections = new Connections(); // Configure the client. protected ClientBootstrap bootstrap = null; protected Timer timer = null; //this executor is where the callback code is handled protected ExecutorService executor = Executors.newSingleThreadExecutor(); /** * connects, ready to produce. 
*/ public synchronized void start() { this.connect(); if (timer != null) { timer.cancel(); } timer = new Timer(); timer.schedule(new TimerTask() { @Override public void run() { try { connect(); } catch (Throwable t) { log.error("Error in periodic `connect` call", t); } } }, lookupPeriod, lookupPeriod); } /** * Should return a list of all the addresses that we should be currently connected to. * @return */ public abstract List<ConnectionAddress> lookupAddresses(); /** * this is the executor where the callbacks happen. default is a new cached threadpool. * @param executor */ public synchronized void setExecutor(ExecutorService executor) { this.executor = executor; } public ExecutorService getExecutor() { return this.executor; } /** * use this if you want to specify your own netty executors. by default will use * * Executors.newCachedThreadPool() * * @param boss * @param worker */ public synchronized void setNettyExecutors(Executor boss, Executor worker) { if (this.bootstrap != null) { this.bootstrap.releaseExternalResources(); } this.bootstrap = new ClientBootstrap(new NioClientSocketChannelFactory(boss, worker)); bootstrap.setPipelineFactory(new NSQPipeline()); } /** * Creates a new connection object. * * Handles connection and sending magic protocol * @param address * @param port * @return */ protected Connection createConnection(String address, int port) { // Start the connection attempt. ChannelFuture future = bootstrap.connect(new InetSocketAddress(address, port)); // Wait until the connection attempt succeeds or fails. 
Channel channel = future.awaitUninterruptibly().getChannel(); if (!future.isSuccess()) { log.error("Caught", future.getCause()); return null; } log.info("Creating connection: " + address + " : " + port); Connection conn = new Connection(address, port, channel, this); conn.setMessagesPerBatch(this.messagesPerBatch); ChannelBuffer buf = ChannelBuffers.dynamicBuffer(); buf.writeBytes(MAGIC_PROTOCOL_VERSION); channel.write(buf); //indentify try { String identJson = "{" + "\"short_id\":\"" + InetAddress.getLocalHost().getHostName() + "\"" + "," + "\"long_id\":\"" + InetAddress.getLocalHost().getCanonicalHostName() + "\"" + "}"; NSQCommand ident = NSQCommand.instance("IDENTIFY", identJson.getBytes()); conn.command(ident); } catch (UnknownHostException e) { log.error("Caught", e); } return conn; } /** * * Connects and subscribes to the requested topic and channel. * * safe to call repeatedly for node discovery. */ protected synchronized void connect() { if (this.bootstrap == null) { //create default bootstrap this.setNettyExecutors(Executors.newCachedThreadPool(), Executors.newCachedThreadPool()); } List<ConnectionAddress> addresses = this.lookupAddresses(); for (ConnectionAddress addr : addresses ) { int num = addr.getPoolsize() - this.connections.connectionSize(addr.getHost(), addr.getPort()); for (int i=0; i < num; i++) { Connection conn = this.createConnection(addr.getHost(), addr.getPort()); this.connections.addConnection(conn); } //TODO: handle negative num? (i.e. if user lowered the poolsize we should kill some connections) } this.cleanupOldConnections(); this.adjustPerConnectionMessagePerBatch(); } /** * will run through and remove any connections that have not recieved a ping in the last 2 minutes. 
*/
public synchronized void cleanupOldConnections() {
    // Anything whose last heartbeat is older than two minutes is considered dead.
    Date cutoff = new Date(new Date().getTime() - (1000*60*2));
    try {
        for (Connection c : this.connections.getConnections()) {
            if (cutoff.after(c.getLastHeartbeat())) {
                log.warn("Removing dead connection: " + c.getHost() + ":" + c.getPort());
                c.close();
                // NOTE(review): removing while iterating — assumes getConnections()
                // returns a defensive copy; verify to rule out ConcurrentModificationException.
                connections.remove(c);
            }
        }
    } catch (NoConnectionsException e) {
        // ignore: nothing to clean up when there are no connections at all
    }
}

/**
 * Adjust max in flight depending on the number of connections.
 */
public synchronized void adjustPerConnectionMessagePerBatch() {
    try {
        int numConnections = this.connections.size();
        if (numConnections == 0) {
            log.warn("connect: No connections; skipping max-in-flight adjustment");
        } else {
            // Split the configured batch size evenly across connections,
            // giving every connection at least one in-flight message.
            int perConnectionInFlight;
            if (this.messagesPerBatch < numConnections) {
                perConnectionInFlight = 1;
            } else {
                perConnectionInFlight = this.messagesPerBatch / numConnections;
            }
            for (Connection conn : this.connections.getConnections()) {
                conn.setMessagesPerBatch(perConnectionInFlight);
            }
        }
    } catch (NoConnectionsException nce) {
        // This should never happen since it looks like the code doesn't actually throw an exception
        log.warn("Attempting to adjust max-in-flight but found no connections.", nce);
    }
}

public void setMessagesPerBatch(int messagesPerBatch) {
    this.messagesPerBatch = messagesPerBatch;
}

public void setLookupPeriod(long periodMillis) {
    this.lookupPeriod = periodMillis;
}

/**
 * For internal use. Called when a connection is disconnected.
 *
 * @param connection the connection that dropped
 */
public synchronized void _disconnected(Connection connection) {
    log.warn("Disconnected!" + connection);
    this.connections.remove(connection);
}

public void close() {
    // BUGFIX: timer and bootstrap are created lazily (in start()/connect()),
    // so guard against NPEs when close() is called before they ever ran.
    if (this.timer != null) {
        this.timer.cancel();
    }
    this.connections.close();
    if (this.bootstrap != null) {
        this.bootstrap.releaseExternalResources();
    }
    this.executor.shutdown();
}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.commons.rdf.simple.experimental;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.apache.commons.rdf.api.RDFSyntax.*;

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.concurrent.TimeUnit;

import org.apache.commons.rdf.api.Graph;
import org.apache.commons.rdf.api.IRI;
import org.apache.commons.rdf.api.Literal;
import org.apache.commons.rdf.api.RDFSyntax;
import org.apache.commons.rdf.api.RDFTerm;
import org.apache.commons.rdf.api.RDF;
import org.apache.commons.rdf.api.Triple;
import org.apache.commons.rdf.experimental.RDFParser;
import org.apache.commons.rdf.simple.DummyRDFParserBuilder;
import org.apache.commons.rdf.simple.SimpleRDF;
import org.apache.commons.rdf.simple.Types;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

/**
 * Tests for {@code AbstractRDFParser} using a {@link DummyRDFParserBuilder},
 * which does not read its source but instead emits debug triples
 * ({@code http://example.com/source}, {@code .../base}, {@code .../contentType},
 * {@code .../contentTypeSyntax}) describing the parser configuration it saw.
 */
public class AbstractRDFParserTest {

    private final RDF factory = new SimpleRDF();

    private final DummyRDFParserBuilder dummyParser = new DummyRDFParserBuilder();
    // Temp files with different extensions, used to exercise syntax guessing.
    private Path testNt;
    private Path testTtl;
    private Path testXml;

    @Before
    public void createTempFile() throws IOException {
        testNt = Files.createTempFile("test", ".nt");
        testTtl = Files.createTempFile("test", ".ttl");
        testXml = Files.createTempFile("test", ".xml");
        // No need to populate the files as the dummy parser
        // doesn't actually read anything
    }

    @After
    public void deleteTempFiles() throws IOException {
        Files.deleteIfExists(testNt);
        Files.deleteIfExists(testTtl);
        Files.deleteIfExists(testXml);
    }

    /**
     * Syntax should be guessed from the file extension: .nt and .ttl are
     * recognised, .xml is deliberately ambiguous and yields no guess.
     */
    @Test
    public void guessRDFSyntax() throws Exception {
        assertEquals(RDFSyntax.NTRIPLES, AbstractRDFParser.guessRDFSyntax(testNt).get());
        assertEquals(RDFSyntax.TURTLE, AbstractRDFParser.guessRDFSyntax(testTtl).get());
        assertFalse(AbstractRDFParser.guessRDFSyntax(testXml).isPresent());
    }

    /**
     * Common assertions on the debug graph produced by the dummy parser:
     * exactly one "greeting" triple with the expected subject/object shape,
     * and at most one of each of the configuration-describing properties.
     */
    private void checkGraph(final Graph g) throws Exception {
        assertTrue(g.size() > 0);
        final IRI greeting = factory.createIRI("http://example.com/greeting");
        // Should only have parsed once!
        assertEquals(1, g.stream(null, greeting, null).count());
        final Triple triple = g.stream(null, greeting, null).findAny().get();
        assertTrue(triple.getSubject() instanceof IRI);
        final IRI parsing = (IRI) triple.getSubject();
        // The dummy parser mints a fresh urn:uuid: subject per parse.
        assertTrue(parsing.getIRIString().startsWith("urn:uuid:"));
        assertEquals("http://example.com/greeting", triple.getPredicate().getIRIString());
        assertTrue(triple.getObject() instanceof Literal);
        final Literal literal = (Literal) triple.getObject();
        assertEquals("Hello world", literal.getLexicalForm());
        assertFalse(literal.getLanguageTag().isPresent());
        assertEquals(Types.XSD_STRING, literal.getDatatype());
        // Check uniqueness of properties that are always present
        assertEquals(1, g.stream(null, factory.createIRI("http://example.com/source"), null).count());
        // Check optional properties that are unique
        assertTrue(2 > g.stream(null, factory.createIRI("http://example.com/base"), null).count());
        assertTrue(2 > g.stream(null, factory.createIRI("http://example.com/contentType"), null).count());
        assertTrue(2 > g.stream(null, factory.createIRI("http://example.com/contentTypeSyntax"), null).count());
    }

    /**
     * Parsing a file source: both source and base become the file URI,
     * and the content type is NOT guessed unless explicitly set.
     */
    @Test
    public void parseFile() throws Exception {
        final Graph g = factory.createGraph();
        final RDFParser parser = dummyParser.source(testNt).target(g);
        parser.parse().get(5, TimeUnit.SECONDS);
        checkGraph(g);
        // FIXME: this could potentially break if the equivalent of /tmp
        // includes
        // international characters
        assertEquals("<" + testNt.toUri().toString() + ">", firstPredicate(g, "source"));
        // Should be set to the file path
        assertEquals("<" + testNt.toUri().toString() + ">", firstPredicate(g, "base"));
        // Should NOT have guessed the content type
        assertNull(firstPredicate(g, "contentType"));
        assertNull(firstPredicate(g, "contentTypeSyntax"));
    }

    // parse() without any source configured must fail fast.
    @Test
    public void parseNoSource() throws Exception {
        thrown.expect(IllegalStateException.class);
        dummyParser.parse();
    }

    // base()/contentType() can be set without a source; only parse() fails.
    @Test
    public void parseBaseAndContentTypeNoSource() throws Exception {
        // Can set the other options, even without source()
        final IRI base = dummyParser.createRDFTermFactory().createIRI("http://www.example.org/test.rdf");
        final RDFParser parser = dummyParser.base(base).contentType(RDFSyntax.RDFXML);
        thrown.expect(IllegalStateException.class);
        thrown.expectMessage("No source has been set");
        // but .parse() should fail
        parser.parse();
    }

    // A missing file is only detected at parse() time, not when source() is set.
    @Test
    public void parseFileMissing() throws Exception {
        Files.delete(testNt);
        // This should not fail yet
        final RDFParser parser = dummyParser.source(testNt);
        // but here:
        thrown.expect(IOException.class);
        parser.parse();
    }

    /**
     * Parsing a file with an explicit content type: the type and its
     * syntax name are reported alongside source and base.
     */
    @Test
    public void parseFileContentType() throws Exception {
        final Graph g = factory.createGraph();
        final RDFParser parser = dummyParser.source(testNt).contentType(RDFSyntax.NTRIPLES).target(g);
        parser.parse().get(5, TimeUnit.SECONDS);
        checkGraph(g);
        // FIXME: this could potentially break if the equivalent of /tmp
        // includes
        // international characters
        assertEquals("<" + testNt.toUri().toString() + ">", firstPredicate(g, "source"));
        assertEquals("<" + testNt.toUri().toString() + ">", firstPredicate(g, "base"));
        assertEquals("\"" + RDFSyntax.NTRIPLES.name() + "\"", firstPredicate(g, "contentTypeSyntax"));
        assertEquals("\"application/n-triples\"", firstPredicate(g, "contentType"));
    }

    /**
     * Returns the N-Triples string of the object of the first triple whose
     * predicate is {@code http://example.com/<pred>}, or null if absent.
     */
    private String firstPredicate(final Graph g, final String pred) {
        return g.stream(null, factory.createIRI("http://example.com/" + pred), null).map(Triple::getObject)
                .map(RDFTerm::ntriplesString).findAny().orElse(null);
    }

    @Rule
    public ExpectedException thrown = ExpectedException.none();

    // An InputStream source without base() must be rejected at parse() time.
    @Test
    public void parseInputStreamFailsIfBaseMissing() throws Exception {
        final InputStream inputStream = new ByteArrayInputStream(new byte[0]);
        // Should not fail at this point
        final RDFParser parser = dummyParser.source(inputStream);
        // but here:
        thrown.expect(IllegalStateException.class);
        thrown.expectMessage("base iri required for inputstream source");
        parser.parse();
    }

    @Test
    public void parseInputStreamWithBase() throws Exception {
        final InputStream inputStream = new ByteArrayInputStream(new byte[0]);
        final IRI base = dummyParser.createRDFTermFactory().createIRI("http://www.example.org/test.rdf");
        final Graph g = factory.createGraph();
        final RDFParser parser = dummyParser.source(inputStream).base(base).target(g);
        parser.parse().get(5, TimeUnit.SECONDS);
        checkGraph(g);
        assertEquals("<http://www.example.org/test.rdf>", firstPredicate(g, "base"));
        // in our particular debug output,
        // bnode source indicates InputStream
        assertTrue(firstPredicate(g, "source").startsWith("_:"));
        assertNull(firstPredicate(g, "contentType"));
        assertNull(firstPredicate(g, "contentTypeSyntax"));
    }

    // An InputStream with a content type is allowed without a base for NQUADS.
    @Test
    public void parseInputStreamWithNQuads() throws Exception {
        final InputStream inputStream = new ByteArrayInputStream(new byte[0]);
        final Graph g = factory.createGraph();
        final RDFParser parser = dummyParser.source(inputStream).contentType(RDFSyntax.NQUADS).target(g);
        parser.parse().get(5, TimeUnit.SECONDS);
        checkGraph(g);
        assertNull(firstPredicate(g, "base"));
        // in our particular debug output,
        // bnode source indicates InputStream
        assertTrue(firstPredicate(g, "source").startsWith("_:"));
        assertEquals("\"application/n-quads\"", firstPredicate(g, "contentType"));
        assertEquals("\"" + RDFSyntax.NQUADS.name() + "\"", firstPredicate(g, "contentTypeSyntax"));
    }

    /**
     * Parsing an IRI source: neither base nor content type may be assumed
     * from the IRI itself.
     */
    @Test
    public void parseIRI() throws Exception {
        final IRI iri = dummyParser.createRDFTermFactory().createIRI("http://www.example.net/test.ttl");
        final Graph g = factory.createGraph();
        final RDFParser parser = dummyParser.source(iri).target(g);
        parser.parse().get(5, TimeUnit.SECONDS);
        checkGraph(g);
        assertEquals("<http://www.example.net/test.ttl>", firstPredicate(g, "source"));
        // No base - assuming the above IRI is always
        // the base would break server-supplied base from
        // any HTTP Location redirects and Content-Location header
        assertNull(firstPredicate(g, "base"));
        // ".ttl" in IRI string does not imply any content type
        assertNull(firstPredicate(g, "contentType"));
        assertNull(firstPredicate(g, "contentTypeSyntax"));
    }

    // IRI source with explicit base and content type: all four are reported.
    @Test
    public void parseIRIBaseContentType() throws Exception {
        final IRI iri = dummyParser.createRDFTermFactory().createIRI("http://www.example.net/test.ttl");
        final Graph g = factory.createGraph();
        final RDFParser parser = dummyParser.source(iri).base(iri).contentType(RDFSyntax.TURTLE).target(g);
        parser.parse().get(5, TimeUnit.SECONDS);
        checkGraph(g);
        assertEquals("<http://www.example.net/test.ttl>", firstPredicate(g, "source"));
        assertEquals("<http://www.example.net/test.ttl>", firstPredicate(g, "base"));
        assertEquals("\"" + RDFSyntax.TURTLE.name() + "\"", firstPredicate(g, "contentTypeSyntax"));
        assertEquals("\"text/turtle\"", firstPredicate(g, "contentType"));
    }
}
/*
 * MIT License
 *
 * Copyright (c) 2016 EPAM Systems
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */
package com.epam.catgenome.controller;

import java.io.IOException;
import java.util.Collections;
import java.util.Map;
import java.util.Optional;

import javax.servlet.http.HttpServletResponse;

import com.epam.catgenome.constant.MessagesConstants;
import com.epam.catgenome.controller.vo.UrlRequestVO;
import com.epam.catgenome.entity.security.SessionExpirationBehavior;
import com.epam.catgenome.manager.UrlShorterManager;
import com.epam.catgenome.util.IndexUtils;
import com.epam.catgenome.entity.UrlWithAliasItem;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;

import com.epam.catgenome.controller.vo.FilesVO;
import com.epam.catgenome.manager.BiologicalDataItemManager;
import com.epam.catgenome.manager.FileManager;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.wordnik.swagger.annotations.ApiOperation;
import com.wordnik.swagger.annotations.ApiResponse;
import com.wordnik.swagger.annotations.ApiResponses;

import static com.epam.catgenome.component.MessageHelper.getMessage;

/**
 * Source: UtilsController
 * Created: 25.04.16, 16:11
 * Project: CATGenome Browser
 * Make: IntelliJ IDEA 14.1.4, JDK 1.8
 *
 * <p>
 * A REST controller to supply some common util data
 * </p>
 */
@Controller
public class UtilsController extends AbstractRESTController {

    @Autowired
    private FileManager fileManager;

    @Autowired
    private UrlShorterManager urlShorterManager;

    @Autowired
    private BiologicalDataItemManager biologicalDataItemManager;

    // Application version string, injected from catgenome.properties.
    @Value("#{catgenome['version']}")
    private String version;

    // Whether ACL-based security (role model) is enabled; defaults to false.
    @Value("${security.acl.enable: false}")
    private boolean aclSecurityEnabled;

    // What the UI should do when the session expires; defaults to CONFIRM.
    @Value("${session.expiration.behavior: CONFIRM}")
    private SessionExpirationBehavior expirationBehavior;

    @ResponseBody
    @RequestMapping(value = "/version", method = RequestMethod.GET)
    @ApiOperation(
            value = "Returns application's version",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<String> loadVersion() {
        return Result.success(version);
    }

    @ResponseBody
    @RequestMapping(value = "/isRoleModelEnabled", method = RequestMethod.GET)
    @ApiOperation(
            // BUGFIX: description was copy-pasted from /version
            value = "Returns whether the role model (ACL security) is enabled",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<Boolean> isRoleModelEnabled() {
        return Result.success(aclSecurityEnabled);
    }

    @ResponseBody
    @RequestMapping(value = "/sessionExpirationBehavior", method = RequestMethod.GET)
    @ApiOperation(
            // BUGFIX: description was copy-pasted from /version
            value = "Returns the configured session expiration behavior",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<SessionExpirationBehavior> sessionExpirationBehaviour() {
        return Result.success(expirationBehavior);
    }

    @ResponseBody
    @RequestMapping(value = "/files", method = RequestMethod.GET)
    @ApiOperation(
            value = "Returns directory contents",
            notes = "Returns directory contents, specified by path",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<FilesVO> loadDirectoryContents(@RequestParam(required = false) String path)
            throws IOException {
        return Result.success(new FilesVO(fileManager.loadDirectoryContents(path),
                fileManager.getNgsDataRootPath()));
    }

    @ResponseBody
    @RequestMapping(value = "/files/allowed", method = RequestMethod.GET)
    @ApiOperation(
            value = "Checks is directory browsing is allowed",
            notes = "Returns true if directory browsing is allowed and false if not",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<Boolean> isFilesBrowsingAllowed() throws IOException {
        return Result.success(fileManager.isFilesBrowsingAllowed());
    }

    @ResponseBody
    @RequestMapping(value = "/url", method = RequestMethod.POST)
    @ApiOperation(
            value = "Generates URL postfix",
            notes = "Generates URL that displays specified files, optionally on specified interval",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<String> generateUrl(
            @RequestBody UrlRequestVO request,
            @RequestParam(required = false) String chromosomeName,
            @RequestParam(required = false) Integer startIndex,
            @RequestParam(required = false) Integer endIndex) throws JsonProcessingException {
        // Treat a missing id list as an empty one to avoid NPEs downstream.
        return Result.success(biologicalDataItemManager.generateUrl(request.getDataset(),
                request.getIds() == null ? Collections.emptyList() : request.getIds(),
                chromosomeName, startIndex, endIndex));
    }

    @ResponseBody
    @RequestMapping(value = "/generateShortUrl", method = RequestMethod.POST)
    @ApiOperation(
            value = "Generates short URL postfix",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<String> generateShortUrl(@RequestBody UrlWithAliasItem urlWithAlias) {
        String alias = urlWithAlias.getAlias();
        String url = urlWithAlias.getUrl();
        String payload = urlShorterManager.generateAndSaveShortUrlPostfix(url, alias);
        Result<String> result;
        // If the requested alias was already taken, the manager returns a
        // different postfix — surface that to the caller as an info message.
        if (alias != null && !alias.equals(payload)) {
            result = Result.success(payload, getMessage(MessagesConstants.INFO_ALIAS_ALREADY_EXIST_MASSAGE));
        } else {
            result = Result.success(payload);
        }
        return result;
    }

    @ResponseBody
    @RequestMapping(value = "/navigate", method = RequestMethod.GET)
    @ApiOperation(
            value = "redirect on a original URL by short URL postfix, or on the 404 if short url doesn't exist",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public void redirectToOriginalUrlByAlias(@RequestParam String alias,
                                             HttpServletResponse resp) throws IOException {
        Optional<String> maybeOriginalUrl = urlShorterManager.getOriginalUrl(alias);
        if (maybeOriginalUrl.isPresent()) {
            // BUGFIX: previously this set a Location header and a 301 status and
            // then called sendRedirect(), which overwrites both with a 302 plus
            // its own Location header — the manual calls were dead code. The
            // observable response (a 302 redirect) is unchanged.
            resp.sendRedirect(maybeOriginalUrl.get());
        } else {
            resp.sendError(HttpServletResponse.SC_NOT_FOUND, MessagesConstants.ERROR_URL_WAS_EXPIRED);
        }
    }

    @ResponseBody
    @RequestMapping(value = "/defaultTrackSettings", method = RequestMethod.GET)
    @ApiOperation(
            value = "Return default track settings",
            notes = "Return default track settings, which specified in catgenome.properties file",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(
            value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION) })
    public Result<Map<String, Map<String, Object>>> getDefaultTracksSettings() throws IOException {
        return Result.success(fileManager.getDefaultTrackSettings());
    }

    @ResponseBody
    @RequestMapping(value = "/getPathToExistingIndex", method = RequestMethod.GET)
    @ApiOperation(value = "Return path of existing index for file, or null if it don't exist",
            produces = MediaType.APPLICATION_JSON_VALUE)
    @ApiResponses(value = {@ApiResponse(code = HTTP_STATUS_OK, message = API_STATUS_DESCRIPTION)})
    public Result<String> getPathToExistingIndex(@RequestParam String filePath) throws IOException {
        return Result.success(IndexUtils.checkExistingIndex(filePath));
    }
}
package picard.sam; import htsjdk.samtools.SAMFileHeader; import htsjdk.samtools.SAMFileWriter; import htsjdk.samtools.SAMFileWriterFactory; import htsjdk.samtools.SAMRecord; import htsjdk.samtools.SAMRecordSetBuilder; import htsjdk.samtools.SAMUtils; import htsjdk.samtools.SamReader; import htsjdk.samtools.SamReaderFactory; import htsjdk.samtools.util.SequenceUtil; import htsjdk.samtools.util.StringUtil; import org.testng.Assert; import org.testng.annotations.DataProvider; import org.testng.annotations.Test; import picard.cmdline.CommandLineProgramTest; import picard.cmdline.argumentcollections.RequiredReferenceArgumentCollection; import java.io.File; import java.io.IOException; import java.util.Collections; import java.util.Iterator; import java.util.LinkedList; import java.util.List; /** * Tests related to code in AbstractAlignmentMerger */ public class AbstractAlignmentMergerTest extends CommandLineProgramTest { // the actual data provider is overlapReadDataWithSwaps public Object[][] overlapReadData() { // The spaces here are deliberate to illustrate the region the two default reads match final String default120LongR1Bases = "ATCACACCAGTGTCTGCGTTCACAGCAGGCATCATCAGTAGCCTCCAGAGGCCTCAGGTCCAGTCTCTAAAAATATCTCAGGAGGCTGCAGTGGCTGACCAGATTCTCCTGTCAGTTTGC"; final String default120LongR2Bases = "CGTTGGCAATGCCGGGCACAATCACACCAGTGTCTGCGTTCACAGCAGGCATCATCAGTAGCCTCCAGAGGCCTCAGGTCCAGTCTCTAAAAATATCTCAGGAGGCTGCAGTGGCTGACC"; final String default110LongR1Bases = "ATCACACCAGTGTCTGCGTTCACAGCAGGCATCATCAGTAGCCTCCAGAGGCCTCAGGTCCAGTCTCTAAAAATATCTCAGGAGGCTGCAGTGGCTGACCAGATTCTCCT"; final String default110LongR2Bases = "GCCGGGCACAATCACACCAGTGTCTGCGTTCACAGCAGGCATCATCAGTAGCCTCCAGAGGCCTCAGGTCCAGTCTCTAAAAATATCTCAGGAGGCTGCAGTGGCTGACC"; final String sharedBases = "ATCACACCAGTGTCTGCGTTCACAGCAGGCATCATCAGTAGCCTCCAGAGGCCTCAGGTCCAGTCTCTAAAAATATCTCAGGAGGCTGCAGTGGCTGACC"; final String default120LongR1ClippedBases = "AGATTCTCCTGTCAGTTTGC"; final String default120LongR2ClippedBases = 
"TGTGCCCGGCATTGCCAACG"; final String default110LongR1ClippedBases = "AGATTCTCCT"; final String default110LongR2ClippedBases = "TGTGCCCGGC"; final String default27LongR1Bases = "AGATTCTCCTTGTGCCCGGCAGATTCT"; final String default27LongR2Bases = "TGTGCCCGGCAGATTCTCCTCTTGTGC"; final String default27LongR1Qualities = "ABCDEFGHIJKLMNOPQRSTUVWXYZ."; final String default27LongR2Qualities = "abcdefghijklmnopqrstuvwxyz,"; final String default120LongR1BaseQualities = "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF.FFF.FFF.FFF"; final String default120LongR2BaseQualities = "FFFFFF.FFFFF.FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"; final String default110LongR1BaseQualities = "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF.FFF.FFF"; final String default110LongR2BaseQualities = "FFFFFF.FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"; final String sharedQualities = "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF"; final String r1ClippedQualities10 = default120LongR1BaseQualities.substring(default120LongR1BaseQualities.length() - 10); final String r2ClippedQualities10 = StringUtil.reverseString(default120LongR2BaseQualities.substring(0, 10)); final String r1ClippedQualities20 = default120LongR1BaseQualities.substring(default120LongR1BaseQualities.length() - 20); final String r2ClippedQualities20 = StringUtil.reverseString(default120LongR2BaseQualities.substring(0, 20)); return new Object[][]{ {110, false, 100, 200, "110M", "110M", false, true, 100, 200, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, 
null, null}, // Non overhanging reads {110, false, 100, 200, "110M", "110M", false, true, 100, 200, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 300, "110M", "110M", false, true, 100, 300, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // Non overlapping reads {110, false, 100, 300, "110M", "110M", false, true, 100, 300, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 210, "110M", "110M", true, false, 100, 210, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // Non overlapping reads, outies, abutting {110, false, 100, 210, "110M", "110M", true, false, 100, 210, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 210, "110M", "110M", false, true, 100, 210, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, 
default110LongR2BaseQualities, null, null}, // Non overlapping reads, innies, abutting {110, false, 100, 210, "110M", "110M", false, true, 100, 210, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 209, "110M", "110M", true, false, 209, 209, "109S1M", "1M109S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // overlap by one base only {110, true, 100, 209, "110M", "110M", true, false, 209, 209, "109H1M", "1M109H", default110LongR1Bases, default110LongR2Bases, "T", "G", SequenceUtil.reverseComplement(default110LongR1Bases.substring(0, default110LongR1Bases.length() - 1)), default110LongR2Bases.substring(1), default110LongR1BaseQualities, default110LongR2BaseQualities, "F", "F", StringUtil.reverseString(default110LongR1BaseQualities.substring(0, default110LongR1BaseQualities.length() - 1)), default110LongR2BaseQualities.substring(1)}, {110, true, 100, 200, "110M", "110M", false, true, 100, 200, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 200, "110M", "110M", false, false, 100, 200, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // F1F2 {110, true, 100, 200, "110M", "110M", false, false, 100, 200, "110M", "110M", 
default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 200, "110M", "110M", true, true, 100, 200, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // R1R2 {110, true, 100, 200, "110M", "110M", true, true, 100, 200, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 300, "110M", "110M", true, false, 100, 300, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // Non overlapping "outies" {110, true, 100, 300, "110M", "110M", true, false, 100, 300, "110M", "110M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 100, 90, "110M", "110M", false, true, 100, 100, "100M10S", "10S100M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // Basic overlapped read {110, true, 100, 90, "110M", "110M", false, true, 100, 100, "100M10H", "10H100M", 
default110LongR1Bases, default110LongR2Bases, sharedBases, sharedBases, default110LongR1ClippedBases, default110LongR2ClippedBases, default110LongR1BaseQualities, default110LongR2BaseQualities, sharedQualities, sharedQualities, r1ClippedQualities10, r2ClippedQualities10}, {120, false, 100, 95, "110M10S5H", "5H15S105M", false, true, 100, 100, "100M20S5H", "5H20S100M", default120LongR1Bases, default120LongR2Bases, default120LongR1Bases, default120LongR2Bases, null, null, default120LongR1BaseQualities, default120LongR2BaseQualities, default120LongR1BaseQualities, default120LongR2BaseQualities, null, null}, // Already hard and soft clipped {120, true, 100, 95, "110M10S5H", "5H15S105M", false, true, 100, 100, "100M25H", "25H100M", default120LongR1Bases, default120LongR2Bases, sharedBases, sharedBases, default120LongR1ClippedBases, default120LongR2ClippedBases, default120LongR1BaseQualities, default120LongR2BaseQualities, sharedQualities, sharedQualities, r1ClippedQualities20, r2ClippedQualities20}, {120, false, 100, 95, "110M10S", "15S105M", false, true, 100, 100, "100M20S", "20S100M", default120LongR1Bases, default120LongR2Bases, default120LongR1Bases, default120LongR2Bases, null, null, default120LongR1BaseQualities, default120LongR2BaseQualities, default120LongR1BaseQualities, default120LongR2BaseQualities, null, null}, // Already soft clipped {120, true, 100, 95, "110M10S", "15S105M", false, true, 100, 100, "100M20H", "20H100M", default120LongR1Bases, default120LongR2Bases, sharedBases, sharedBases, default120LongR1ClippedBases, default120LongR2ClippedBases, default120LongR1BaseQualities, default120LongR2BaseQualities, sharedQualities, sharedQualities, r1ClippedQualities20, r2ClippedQualities20}, {120, true, 100, 95, "95M25S", "15S105M", false, true, 100, 100, "95M5S20H", "20H100M", default120LongR1Bases, default120LongR2Bases, sharedBases, sharedBases, default120LongR1ClippedBases, default120LongR2ClippedBases, default120LongR1BaseQualities, 
default120LongR2BaseQualities, sharedQualities, sharedQualities, r1ClippedQualities20, r2ClippedQualities20}, // 5' end soft clip tests /* SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM-> <-MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSS soft clipping becomes: SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSSSSSS-> <-SSSSSSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSS hard clipping becomes: SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSSHHHH-> <-HHHHSSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSS */ {110, false, 105, 90, "5S105M", "103M7S", false, true, 105, 105, "5S88M17S", "15S88M7S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // Already soft clipped at 5' end {110, true, 105, 90, "5S105M", "103M7S", false, true, 105, 105, "5S88M7S10H", "10H5S88M7S", default110LongR1Bases, default110LongR2Bases, sharedBases, sharedBases, default110LongR1ClippedBases, default110LongR2ClippedBases, default110LongR1BaseQualities, default110LongR2BaseQualities, sharedQualities, sharedQualities, r1ClippedQualities10, r2ClippedQualities10}, // 3' end soft clip tests /* SSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM> <SSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSS soft clipping becomes: SSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSSS> <SSSSSSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSS hard clipping becomes: SSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSHHHHH> <HHHHHSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSS */ {110, false, 105, 100, "10S100M", "10S95M5S", false, true, 105, 105, "10S90M10S", "15S90M5S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, // Already soft clipped at 5' end {110, true, 
105, 100, "10S100M", "10S95M5S", false, true, 105, 105, "10S90M5S5H", "5H10S90M5S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases.substring(0, 105), default110LongR2Bases.substring(5), default110LongR1Bases.substring(105), SequenceUtil.reverseComplement(default110LongR2Bases.substring(0, 5)), default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities.substring(0, 105), default110LongR2BaseQualities.substring(5), default110LongR1BaseQualities.substring(105), StringUtil.reverseString(default110LongR2BaseQualities.substring(0, 5))}, // Already soft clipped at 5' end /* SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM-> <-MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSS soft clipping becomes: SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSS-> <-SSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSS hard-clipping results in the same as with soft-clipping in this case. */ {110, false, 105, 100, "10S100M", "103M7S", false, true, 105, 105, "10S98M2S", "5S98M7S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 105, 100, "10S100M", "103M7S", false, true, 105, 105, "10S98M2S", "5S98M7S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, /* SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSS-> <-MMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSSSSSSSSSSSSSSS soft clipping becomes: SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSSSSSSS-> <-SSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSSSSSSSSSSSSSSSSSS hard-clipping results in the same as with soft-clipping in this case. 
*/ {110, false, 105, 100, "10S97M3S", "99M11S", false, true, 105, 105, "10S94M6S", "5S94M11S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, false, 105, 100, "10S97M3S", "99M11S", false, true, 105, 105, "10S94M6S", "5S94M11S", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, /* SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSS-> <-SSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM soft clipping becomes: SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSSSS-> <-SSSSSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM hard clipping becomes: SSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMSSSHHH-> <-HHHSSSSSSSSSSSMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMMM */ {110, false, 105, 96, "12S80M18S", "13S97M", false, true, 105, 105, "12S80M18S", "22S88M", default110LongR1Bases, default110LongR2Bases, default110LongR1Bases, default110LongR2Bases, null, null, default110LongR1BaseQualities, default110LongR2BaseQualities, default110LongR1BaseQualities, default110LongR2BaseQualities, null, null}, {110, true, 105, 96, "12S80M18S", "13S97M", false, true, 105, 105, "12S80M8S10H", "10H12S88M", default110LongR1Bases, default110LongR2Bases, sharedBases, sharedBases, default110LongR1ClippedBases, default110LongR2ClippedBases, default110LongR1BaseQualities, default110LongR2BaseQualities, sharedQualities, sharedQualities, r1ClippedQualities10, r2ClippedQualities10}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMMsssssssss> // <ssssssssMMM-MMMMMMMMMMMMMMMM // should become // 
123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMSSsssssssss> // <ssssssssSSSMMMMMMMMMMMMMMMM {27, false, 18, 14, "18M9S", "8S3M1D16M", false, true, 18, 18, "16M11S", "11S16M", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMMsssssssss> // <ssssssssMMM---MMMMMMMMMMMMMMMM // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMSSsssssssss> // <ssssssssSSSMMMMMMMMMMMMMMMM {27, false, 18, 12, "18M9S", "8S3M3D16M", false, true, 18, 18, "16M11S", "11S16M", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMM-MMMsssssssss> // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssSSSSMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMSSSsssssssss> {27, false, 14, 18, "8S19M", "15M1D3M9S", true, false, 18, 18, "12S15M", "15M12S", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMM---MMMsssssssss> // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssSSSSMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMSSSsssssssss> {27, false, 14, 
18, "8S19M", "15M3D3M9S", true, false, 18, 18, "12S15M", "15M12S", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMM---MMMMsssssssss> // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssSSSSMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMSSSSsssssssss> {27, false, 14, 18, "8S19M", "14M3D4M9S", true, false, 18, 18, "12S15M", "14M13S", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.12-3456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMMMMMMMMMMM-> ## deletion right at the end // should remain // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMMMMMMMMMMM-> ## deletion right at the end {27, false, 14, 6, "8S19M", "27M1D", true, false, 14, 6, "8S19M", "27M1D", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456--789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMMsssssssss> // <sssssMMiiMMMMMMMMMMMMMMMMMM // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMSsssssssss> // <sssssSSSSSMMMMMMMMMMMMMMMMM {27, false, 18, 15, "18M9S", "5S2M2I18M", false, true, 18, 18, "17M10S", "10S17M", 
default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.1234--56789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMiiMsssssss> // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <sssssssSSSSMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMSSSSsssssss> {27, false, 14, 18, "7S20M", "17M2I1M7S", true, false, 18, 18, "11S16M", "16M11S", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123--456789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMiiMMsssssss> // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <sssssssSSSSMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMSSSSsssssss> {27, false, 14, 18, "7S20M", "16M2I2M7S", true, false, 18, 18, "11S16M", "16M11S", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123-456789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMiMMMsssssss> // ^ // with an insertion of one base here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssSSSSSMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMSSSSsssssss> {27, false, 14, 18, "7S20M", "16M1I3M7S", true, false, 18, 18, "11S16M", "16M11S", default27LongR1Bases, 
default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.1234--56789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMiiMMssssss> // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <sssssssSSSSMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMSSSSSssssss> {27, false, 14, 18, "7S20M", "17M2I2M6S", true, false, 18, 18, "11S16M", "16M11S", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMMsssssssss> // <ssssssssMMM-MMMMMMMMMMMMMMMM // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMHHHHHHHHHHH> // <HHHHHHHHHHHMMMMMMMMMMMMMMMM {27, true, 18, 14, "18M9S", "8S3M1D16M", false, true, 18, 18, "16M11H", "11H16M", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(0, 16), default27LongR2Bases.substring(11, 27), default27LongR1Bases.substring(16, 27), SequenceUtil.reverseComplement(default27LongR2Bases.substring(0, 11)), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(0, 16), default27LongR2Qualities.substring(11, 27), default27LongR1Qualities.substring(16, 27), StringUtil.reverseString(default27LongR2Qualities.substring(0, 11)), }, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMMsssssssss> // <ssssssssMMM---MMMMMMMMMMMMMMMM // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // 
MMMMMMMMMMMMMMMMHHHHHHHHHHH> // <HHHHHHHHHHHMMMMMMMMMMMMMMMM {27, true, 18, 12, "18M9S", "8S3M3D16M", false, true, 18, 18, "16M11H", "11H16M", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(0, 16), default27LongR2Bases.substring(11, 27), default27LongR1Bases.substring(16, 27), SequenceUtil.reverseComplement(default27LongR2Bases.substring(0, 11)), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(0, 16), default27LongR2Qualities.substring(11, 27), default27LongR1Qualities.substring(16, 27), StringUtil.reverseString(default27LongR2Qualities.substring(0, 11)) }, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssSSSSMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMSSSsssssssss> // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <HHHHHHHHHHHHMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMHHHHHHHHHHHH> {27, true, 14, 18, "8S19M", "15M1D3M9S", true, false, 18, 18, "12H15M", "15M12H", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(12, 27), default27LongR2Bases.substring(0, 15), SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 12)), default27LongR2Bases.substring(15, 27), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(12, 27), default27LongR2Qualities.substring(0, 15), StringUtil.reverseString(default27LongR1Qualities.substring(0, 12)), default27LongR2Qualities.substring(15, 27)}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMM---MMMsssssssss> // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <HHHHHHHHHHHHMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMHHHHHHHHHHHH> {27, true, 14, 18, "8S19M", "15M3D3M9S", true, false, 18, 18, "12H15M", "15M12H", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(12, 27), 
default27LongR2Bases.substring(0, 15), SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 12)), default27LongR2Bases.substring(15, 27), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(12, 27), default27LongR2Qualities.substring(0, 15), StringUtil.reverseString(default27LongR1Qualities.substring(0, 12)), default27LongR2Qualities.substring(15, 27)}, // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMM---MMMMsssssssss> // should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <HHHHHHHHHHHHMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMSHHHHHHHHHHHH> {27, true, 14, 18, "8S19M", "14M3D4M9S", true, false, 18, 18, "12H15M", "14M1S12H", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(12, 27), default27LongR2Bases.substring(0, 15), SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 12)), default27LongR2Bases.substring(15, 27), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(12, 27), default27LongR2Qualities.substring(0, 15), StringUtil.reverseString(default27LongR1Qualities.substring(0, 12)), default27LongR2Qualities.substring(15, 27)}, // 123456789.123456789.123456789.12-3456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMMMMMMMMMMM-> ## deletion right at the end // should remain // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <ssssssssMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMMMMMMMMMMM-> ## deletion right at the end {27, true, 14, 6, "8S19M", "27M1D", true, false, 14, 6, "8S19M", "27M1D", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases, default27LongR2Bases, null, null, default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities, default27LongR2Qualities, null, null}, // 
123456789.123456--789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMMsssssssss> // <sssssMMiiMMMMMMMMMMMMMMMMMM // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // MMMMMMMMMMMMMMMMMHHHHHHHHHH> // <HHHHHHHHHHMMMMMMMMMMMMMMMMM {27, true, 18, 15, "18M9S", "5S2M2I18M", false, true, 18, 18, "17M10H", "10H17M", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(0, 17), default27LongR2Bases.substring(10, 27), default27LongR1Bases.substring(17, 27), SequenceUtil.reverseComplement(default27LongR2Bases.substring(0, 10)), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(0, 17), default27LongR2Qualities.substring(10, 27), default27LongR1Qualities.substring(17, 27), StringUtil.reverseString(default27LongR2Qualities.substring(0, 10))}, // 123456789.123456789.123456789.1234--56789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMiiMsssssss> // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <HHHHHHHHHHHMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMHHHHHHHHHHH> {27, true, 14, 18, "7S20M", "17M2I1M7S", true, false, 18, 18, "11H16M", "16M11H", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(11, 27), default27LongR2Bases.substring(0, 16), SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 11)), default27LongR2Bases.substring(16, 27), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(11, 27), default27LongR2Qualities.substring(0, 16), StringUtil.reverseString(default27LongR1Qualities.substring(0, 11)), default27LongR2Qualities.substring(16, 27)}, // 123456789.123456789.123456789.123--456789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMiiMMsssssss> // ^^ 
// with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <sssssssSSSSMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMSSSSsssssss> {27, true, 14, 18, "7S20M", "16M2I2M7S", true, false, 18, 18, "11H16M", "16M11H", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(11, 27), default27LongR2Bases.substring(0, 16), SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 11)), default27LongR2Bases.substring(16, 27), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(11, 27), default27LongR2Qualities.substring(0, 16), StringUtil.reverseString(default27LongR1Qualities.substring(0, 11)), default27LongR2Qualities.substring(16, 27)}, // 123456789.123456789.123456789.123-456789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMiMMMsssssss> // ^ // with an insertion of one base here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // <HHHHHHHHHHHMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMHHHHHHHHHHH> {27, true, 14, 18, "7S20M", "16M1I3M7S", true, false, 18, 18, "11H16M", "16M11H", default27LongR1Bases, default27LongR2Bases, default27LongR1Bases.substring(11, 27), default27LongR2Bases.substring(0, 16), SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 11)), default27LongR2Bases.substring(16, 27), default27LongR1Qualities, default27LongR2Qualities, default27LongR1Qualities.substring(11, 27), default27LongR2Qualities.substring(0, 16), StringUtil.reverseString(default27LongR1Qualities.substring(0, 11)), default27LongR2Qualities.substring(16, 27)}, // 123456789.123456789.123456789.1234--56789.123456789.123456789.123456789.123456789 // <sssssssMMMMMMMMMMMMMMMMMMMM // MMMMMMMMMMMMMMMMMiiMMssssss> // ^^ // with an insertion of two bases here should become // 123456789.123456789.123456789.123456789.123456789.123456789.123456789.123456789 // 
// <HHHHHHHHHHHMMMMMMMMMMMMMMMM
//  MMMMMMMMMMMMMMMMHHHHHHHHHHH>
{27, true, 14, 18, "7S20M", "17M2I2M6S", true, false, 18, 18, "11H16M", "16M11H",
        default27LongR1Bases, default27LongR2Bases,
        default27LongR1Bases.substring(11, 27), default27LongR2Bases.substring(0, 16),
        SequenceUtil.reverseComplement(default27LongR1Bases.substring(0, 11)), default27LongR2Bases.substring(16, 27),
        default27LongR1Qualities, default27LongR2Qualities,
        default27LongR1Qualities.substring(11, 27), default27LongR2Qualities.substring(0, 16),
        StringUtil.reverseString(default27LongR1Qualities.substring(0, 11)), default27LongR2Qualities.substring(16, 27)},
        };
    }

    /**
     * Expands {@link #overlapReadData()} by also emitting, for every case, a copy with the
     * R1/R2 roles swapped. Elements 0 (read length) and 1 (hard-clip flag) are shared; the
     * remaining elements come in R1/R2 pairs, so swapping adjacent pairs swaps the reads.
     */
    @DataProvider
    public Iterator<Object[]> overlapReadDataWithSwaps() {
        List<Object[]> tests = new LinkedList<>();
        for (Object[] inputs : overlapReadData()) {
            tests.add(inputs);
            final Object[] swappedInputs = new Object[inputs.length];
            swappedInputs[0] = inputs[0]; // read length
            swappedInputs[1] = inputs[1]; // hard_clip
            // Every subsequent pair of elements is (R1 value, R2 value): swap them.
            for (int i = 2; i < inputs.length; i += 2) {
                swappedInputs[i] = inputs[i + 1];
                swappedInputs[i + 1] = inputs[i];
            }
            tests.add(swappedInputs);
        }
        return tests.iterator();
    }

    /**
     * Builds a read pair with the given starts/CIGARs/strands, runs
     * {@link AbstractAlignmentMerger#clipForOverlappingReads}, and checks the resulting
     * alignment starts, CIGARs, bases, qualities, and (for hard clipping) the clipped-bases
     * and clipped-qualities tags against the expectations from the data provider.
     */
    @Test(dataProvider = "overlapReadDataWithSwaps")
    public void testOverlappedReadClipping(
            final int readLength, final boolean hardClipOverlappingReads,
            final int start1, final int start2,
            final String cigar1, final String cigar2,
            final boolean strand1, final boolean strand2,
            final int r1ExpectedAlignmentStart, final int r2ExpectedAlignmentStart,
            final String expectedR1Cigar, final String expectedR2Cigar,
            final String read1Bases, final String read2Bases,
            final String expectedR1Bases, final String expectedR2Bases,
            final String expectedR1ClippedBases, final String expectedR2ClippedBases,
            final String read1Qualities, final String read2Qualities,
            final String expectedR1Qualities, final String expectedR2Qualities,
            final String expectedR1ClippedQualities, final String expectedR2ClippedQualities) {
        final SAMRecordSetBuilder set = new SAMRecordSetBuilder();
        set.setReadLength(readLength);
        final List<SAMRecord> recs =
                set.addPair("q1", 0, start1, start2, false, false, cigar1, cigar2, strand1, strand2, 30);
        final SAMRecord r1 = recs.get(0);
        final SAMRecord r2 = recs.get(1);

        r1.setReadBases(StringUtil.stringToBytes(read1Bases));
        r2.setReadBases(StringUtil.stringToBytes(read2Bases));
        r1.setBaseQualities(SAMUtils.fastqToPhred(read1Qualities));
        r2.setBaseQualities(SAMUtils.fastqToPhred(read2Qualities));

        AbstractAlignmentMerger.clipForOverlappingReads(r1, r2, hardClipOverlappingReads);

        Assert.assertEquals(r1.getAlignmentStart(), r1ExpectedAlignmentStart, "r1 POS");
        Assert.assertEquals(r1.getCigarString(), expectedR1Cigar, "r1 CIGAR");
        Assert.assertEquals(r2.getAlignmentStart(), r2ExpectedAlignmentStart, "r2 POS");
        Assert.assertEquals(r2.getCigarString(), expectedR2Cigar, "r2 CIGAR");
        Assert.assertEquals(r1.getReadString(), expectedR1Bases, "r1 BASES");
        // Fixed assertion message: this checks r2, but the message previously said "r1 BASES".
        Assert.assertEquals(r2.getReadString(), expectedR2Bases, "r2 BASES");
        Assert.assertEquals(SAMUtils.phredToFastq(r1.getBaseQualities()), expectedR1Qualities, "r1 QUAL");
        Assert.assertEquals(SAMUtils.phredToFastq(r2.getBaseQualities()), expectedR2Qualities, "r2 QUAL");
        Assert.assertEquals(r1.getAttribute(AbstractAlignmentMerger.HARD_CLIPPED_BASES_TAG),
                expectedR1ClippedBases, "r1 CLIPPED BASES");
        Assert.assertEquals(r2.getAttribute(AbstractAlignmentMerger.HARD_CLIPPED_BASES_TAG),
                expectedR2ClippedBases, "r2 CLIPPED BASES");
        Assert.assertEquals(r1.getAttribute(AbstractAlignmentMerger.HARD_CLIPPED_BASE_QUALITIES_TAG),
                expectedR1ClippedQualities, "r1 CLIPPED QUALS");
        Assert.assertEquals(r2.getAttribute(AbstractAlignmentMerger.HARD_CLIPPED_BASE_QUALITIES_TAG),
                expectedR2ClippedQualities, "r2 CLIPPED QUALS");
    }

    /**
     * Round-trips mostly-soft-clipped pairs through RevertSam and MergeBamAlignment with
     * UNMAP_CONTAMINANT_READS=true against a random reference, then asserts that every
     * merged record (and its mate, when paired) ended up unmapped.
     */
    @Test
    public void testUnmapBacterialContamination() throws IOException {
        final SAMRecordSetBuilder builder = new SAMRecordSetBuilder(true, SAMFileHeader.SortOrder.queryname);
        final SAMFileHeader header = builder.getHeader();
        final SAMFileHeader.SortOrder sortOrder = header.getSortOrder();
        // Rebuild the header with a larger sequence dictionary so a random reference can be written.
        final SAMFileHeader newHeader = SAMRecordSetBuilder.makeDefaultHeader(sortOrder, 100000, true);
        builder.setHeader(newHeader);

        final File reference = File.createTempFile("reference", ".fasta");
        reference.deleteOnExit();
        builder.writeRandomReference(reference.toPath());

        // NOTE(review): "20S20M60M" — the trailing 60M looks like a possible typo for 60S; confirm intended CIGAR.
        builder.addPair("overlappingpair", 0, 500, 500, false, false, "20S20M60S", "20S20M60M", true, false, 45);
        builder.addPair("overlappingpairFirstAligned", 0, 500, 500, false, true, "20S20M60S", null, true, false, 45);
        builder.addPair("overlappingpairSecondAligned", 0, 500, 500, true, false, null, "20S20M60S", true, false, 45);
        builder.addPair("overlappingpairFirstAlignedB", 0, 500, 500, false, true, "20S20M60S", null, false, true, 45);
        builder.addPair("overlappingpairSecondAlignedB", 0, 500, 500, true, false, null, "20S20M60S", false, true, 45);

        // builder.addFrag("frag",1,500,false,false,"20S20M60S",null, 45);
        // builder.addFrag("frag2",1,500,true,false,"20S20M60S",null, 45);
        // builder.addFrag("frag3",1,500,false,false,"20S20M60S",null, 45);
        // builder.addFrag("frag4",1,500,true,false,"20S20M60S",null, 45);

        final File file = newTempSamFile("aligned");
        try (SAMFileWriter writer = new SAMFileWriterFactory().makeWriter(builder.getHeader(), true, file, null)) {
            builder.getRecords().forEach(writer::addAlignment);
        }

        // Revert the aligned file to produce the "unmapped" input for the merge.
        final RevertSam revertSam = new RevertSam();
        revertSam.INPUT = file;
        final File fileUnaligned = newTempSamFile("unaligned");
        revertSam.OUTPUT = fileUnaligned;
        revertSam.SANITIZE = false;
        revertSam.REMOVE_ALIGNMENT_INFORMATION = true;
        revertSam.REMOVE_DUPLICATE_INFORMATION = true;
        revertSam.SORT_ORDER = SAMFileHeader.SortOrder.queryname;
        Assert.assertEquals(revertSam.doWork(), 0);

        MergeBamAlignment mergeBamAlignment = new MergeBamAlignment();
        mergeBamAlignment.ALIGNED_BAM = Collections.singletonList(file);
        mergeBamAlignment.UNMAPPED_BAM = fileUnaligned;
        mergeBamAlignment.UNMAP_CONTAMINANT_READS = true;
        // Plumb the reference through the required-reference argument collection directly.
        final RequiredReferenceArgumentCollection requiredReferenceArgumentCollection = new RequiredReferenceArgumentCollection();
        requiredReferenceArgumentCollection.REFERENCE_SEQUENCE = reference;
        mergeBamAlignment.referenceSequence = requiredReferenceArgumentCollection;

        final File fileMerged = newTempSamFile("merged");
        mergeBamAlignment.OUTPUT = fileMerged;
        // merge file with itself.
        Assert.assertEquals(mergeBamAlignment.doWork(), 0);

        // check that all reads have been unmapped due to bacterial contamination as needed.
        try (SamReader mergedReader = SamReaderFactory.makeDefault().open(fileMerged)) {
            for (SAMRecord mergedRecord : mergedReader) {
                Assert.assertTrue(mergedRecord.getReadUnmappedFlag(), mergedRecord.toString());
                Assert.assertTrue(!mergedRecord.getReadPairedFlag() || mergedRecord.getMateUnmappedFlag(),
                        mergedRecord.toString());
            }
        }
    }

    /** Program name used by the command-line test framework; defaults to this class's simple name. */
    @Override
    public String getCommandLineProgramName() {
        return this.getClass().getSimpleName();
    }

    /** Creates a self-deleting temp .sam file with the given name prefix. */
    private static File newTempSamFile(final String filename) throws IOException {
        final File file = File.createTempFile(filename, ".sam");
        file.deleteOnExit();
        return file;
    }

    /** Cases: {cigar, alignment start, reference position queried, expected 1-based read position (0 = none)}. */
    @DataProvider(name = "readPositionIgnoringSoftClips")
    public Object[][] readPositionIgnoringSoftClips() {
        return new Object[][]{
                {"26S58M62S", 3688, 3827, 0}, // This is from the read that made us aware of a bug
                {"26S58M62S", 3688, 3665, 4},
                {"26S58M62S", 3688, 3660, 0}, // Before soft clip
                {"10S100M2S", 5, 10, 16},
                {"10S100M2S", 5, 3, 9},
                {"10S100M2S", 10, 12, 13},
                {"10S100M2S", 5, 107, 0}
        };
    }

    /**
     * Verifies getReadPositionAtReferencePositionIgnoreSoftClips for a synthetic record built
     * from the given CIGAR and alignment start. (Fixed parameter-name typo: expectedReadPosititon.)
     */
    @Test(dataProvider = "readPositionIgnoringSoftClips")
    public void testGetReadPositionIgnoringSoftClips(final String cigarString, final int startPosition,
                                                     final int queryPosition, final int expectedReadPosition) {
        final SAMFileHeader newHeader =
                SAMRecordSetBuilder.makeDefaultHeader(SAMFileHeader.SortOrder.queryname, 100000, false);
        final SAMRecord rec = new SAMRecord(newHeader);
        rec.setCigarString(cigarString);
        rec.setAlignmentStart(startPosition);
        final int readPosition =
                AbstractAlignmentMerger.getReadPositionAtReferencePositionIgnoreSoftClips(rec, queryPosition);
        Assert.assertEquals(readPosition, expectedReadPosition);
    }

    /** Cases: {reference position, expected read position (0 = none), negative strand?} for the fixed CIGAR below. */
    @DataProvider
    public Object[][] referencePositionAndReadPositions() {
        return new Object[][]{
                {1, 0, false}, {1, 0, true},
                {2, 0, false}, {2, 0, true},
                {3, 0, false}, {3, 0, true},
                {4, 1, false}, {4, 1, true},
                {5, 2, false}, {5, 2, true},
                {6, 3, false}, {6, 3, true},
                {7, 4, false}, {7, 4, true},
                {8, 5, false}, {8, 5, true},
                {9, 6, false}, {9, 6, true},
                {10, 7, false}, {10, 7, true},
                {11, 8, false}, {11, 8, true},
                {12, 8, false}, {12, 8, true},
                {13, 8, false}, {13, 8, true},
                {14, 8, false}, {14, 8, true},
                {15, 8, false}, {15, 8, true},
                {16, 9, false}, {16, 9, true},
                {17, 10, false}, {17, 10, true},
                {18, 11, false}, {18, 11, true},
                {19, 12, false}, {19, 12, true},
                {20, 17, false}, {20, 17, true},
                {21, 18, false}, {21, 18, true},
                {22, 19, false}, {22, 19, true},
                {23, 20, false}, {23, 20, true},
                {24, 20, false}, {24, 20, true},
                {25, 20, false}, {25, 20, true},
                {26, 20, false}, {26, 20, true},
                {27, 20, false}, {27, 20, true},
                {28, 21, false}, {28, 21, true},
                {29, 22, false}, {29, 22, true},
                {30, 23, false}, {30, 23, true},
                {31, 24, false}, {31, 24, true},
                {32, 0, false}, {32, 0, true},
        };
    }

    /** Walks every reference position over a CIGAR containing soft clips, deletions, and an insertion. */
    @Test(dataProvider = "referencePositionAndReadPositions")
    public void testGetReadPositionAtReferencePositionIgnoreSoftClips(final int refPos, final int readPos,
                                                                      final boolean negativeStrand) {
        final SAMRecordSetBuilder set = new SAMRecordSetBuilder();
        //REF  123456789.123456789----.123456789.123456789.
        //         ....||||----||||....||||----....
        //         SSSSMMMMDDDDMMMMIIIIMMMMDDDDSSSS>
        //READ     12345678----9.123456789.----12345678
        final SAMRecord samRecord = set.addFrag("test", 0, 8, negativeStrand, false, "4S4M4D4M4I4M4D4S", "null", 30);
        Assert.assertEquals(
                AbstractAlignmentMerger.getReadPositionAtReferencePositionIgnoreSoftClips(samRecord, refPos),
                readPos);
    }
}
package water.util;

import edu.emory.mathcs.jtransforms.dct.DoubleDCT_1D;
import edu.emory.mathcs.jtransforms.dct.DoubleDCT_2D;
import edu.emory.mathcs.jtransforms.dct.DoubleDCT_3D;
import edu.emory.mathcs.utils.ConcurrencyUtils;
import hex.quantile.Quantile;
import hex.quantile.QuantileModel;
import water.*;
import water.exceptions.H2OIllegalArgumentException;
import water.fvec.Chunk;
import water.fvec.Frame;
import water.fvec.NewChunk;
import water.fvec.Vec;

import java.util.Arrays;

public class MathUtils {

  /**
   * Weighted standard deviation from accumulated moments.
   *
   * @param nobs  number of observations
   * @param wsum  sum of observation weights
   * @param xSum  weighted sum of x
   * @param xxSum weighted sum of x^2
   * @return weighted standard deviation, or 0 when nobs <= 1
   */
  public static double weightedSigma(long nobs, double wsum, double xSum, double xxSum) {
    double reg = 1.0 / wsum;
    // Fix: E[x^2] - E[x]^2 can come out slightly negative due to floating-point
    // cancellation when the true variance is ~0, which made Math.sqrt return NaN.
    // Clamp the variance at 0.
    return nobs <= 1 ? 0 : Math.sqrt(Math.max(0, xxSum * reg - (xSum * xSum) * reg * reg));
  }

  /**
   * log(y!): exact summation for y <= 100, Stirling's approximation above.
   */
  public static double logFactorial(long y) {
    if (y <= 100) {
      double l = 0;
      for (long i = 2; i <= y; ++i)
        l += Math.log(i);
      return l;
    }
    return y * Math.log(y) - y + .5 * Math.log(2 * Math.PI * y);
  }

  /**
   * Compute a single (optionally weighted) quantile of a Vec by training a Quantile model
   * on a temporary Frame.
   *
   * @param weight optional weights Vec (may be null for unweighted)
   * @param values values Vec
   * @param alpha  the quantile probability in (0, 1)
   * @return the alpha-quantile of values
   */
  static public double computeWeightedQuantile(Vec weight, Vec values, double alpha) {
    QuantileModel.QuantileParameters parms = new QuantileModel.QuantileParameters();
    // Wrap the value (and optional weight) columns into a temporary Frame for training.
    Frame tempFrame = weight == null
        ? new Frame(Key.<Frame>make(), new String[]{"y"}, new Vec[]{values})
        : new Frame(Key.<Frame>make(), new String[]{"y", "w"}, new Vec[]{values, weight});
    DKV.put(tempFrame);
    parms._train = tempFrame._key;
    parms._probs = new double[]{alpha};
    parms._weights_column = weight == null ?
null : "w";
    Job<QuantileModel> job = new Quantile(parms).trainModel();
    QuantileModel kmm = job.get();
    // One probe column, one requested quantile (see parms._probs above).
    double value = kmm._output._quantiles[0/*col*/][0/*quantile*/];
    assert(!Double.isNaN(value));
    Log.debug("weighted " + alpha + "-quantile: " + value);
    job.remove();
    kmm.remove();
    // Drop the temporary training frame from the distributed store.
    DKV.remove(tempFrame._key);
    return value;
  }

  /** Element-wise |chks[0] - chks[1]| written to the single output chunk. */
  static public class ComputeAbsDiff extends MRTask<ComputeAbsDiff> {
    @Override public void map(Chunk chks[], NewChunk nc[]) {
      for (int i=0; i<chks[0].len(); ++i)
        nc[0].addNum(Math.abs(chks[0].atd(i) - chks[1].atd(i)));
    }
  }

  /**
   * Wrapper around weighted parallel basic stats computation (mean, variance).
   * Accumulates per-column weighted moments incrementally; variance/sigma are
   * finalized lazily (see the other half of this class below).
   */
  public static final class BasicStats extends Iced {
    private final double[] _mean;   // running weighted mean, per column
    private final double[] _m2;     // running weighted sum of squared deviations, per column
    double[] _wsums;                // accumulated weight of the rows folded into _mean/_m2
    transient double[] _nawsums;    // accumulated weight of NA rows
    long [] _naCnt;
    double[] _var;                  // lazily computed cache
    double[] _sd;                   // lazily computed cache
    public double _wsum = Double.NaN;
    public long[] _nzCnt;
    long _nobs = -1;

    public BasicStats(int n) {
      _mean = MemoryManager.malloc8d(n);
      _m2 = MemoryManager.malloc8d(n);
      _wsums = MemoryManager.malloc8d(n);
      _nzCnt = MemoryManager.malloc8(n);
      _nawsums = MemoryManager.malloc8d(n);
      _naCnt = MemoryManager.malloc8(n);
    }

    // Weighted incremental (West-style) mean/variance update for one observation
    // of column i. NAs only accumulate weight/count; zero-weight rows are skipped.
    public void add(double x, double w, int i) {
      if(Double.isNaN(x)) {
        _nawsums[i] += w;
        _naCnt[i]++;
      } else if (w != 0) {
        double wsum = _wsums[i] + w;
        double delta = x - _mean[i];
        double R = delta * w / wsum;
        _mean[i] += R;
        _m2[i] += _wsums[i] * delta * R;
        _wsums[i] = wsum;
        ++_nzCnt[i];
      }
    }

    public void add(double[] x, double w) {
      for (int i = 0; i < x.length; ++i)
        add(x[i], w, i);
    }

    public void setNobs(long nobs, double wsum) {
      _nobs = nobs;
      _wsum = wsum;
    }

    // Fold the implicit zeros of a sparse column into _mean/_m2 so the final
    // stats match a dense computation.
    public void fillSparseZeros(int i) {
      int zeros = (int)(_nobs - _nzCnt[i]);
      if(zeros > 0) {
        double muReg = 1.0 / (_wsum - _nawsums[i]);
        double zeromean = 0;
        double delta = _mean[i] - zeromean;
        double zerowsum = _wsum - _wsums[i] - _nawsums[i];
        _mean[i] *= _wsums[i] * muReg;
        _m2[i] += delta * delta * _wsums[i] * zerowsum * muReg;
        // this is the variance*(N-1), will do sqrt(_sigma/(N-1)) later in
// postGlobal (continuation of the comment above)
        _wsums[i] += zerowsum;
      }
    }

    public void fillSparseNAs(int i) {_naCnt[i] = (int)(_nobs - _nzCnt[i]);}

    // Parallel (Chan-style) merge of two partial stats accumulators.
    public void reduce(BasicStats bs) {
      ArrayUtils.add(_nzCnt, bs._nzCnt);
      ArrayUtils.add(_naCnt, bs._naCnt);
      for (int i = 0; i < _mean.length; ++i) {
        double wsum = _wsums[i] + bs._wsums[i];
        if(wsum != 0) {
          double delta = bs._mean[i] - _mean[i];
          _mean[i] = (_wsums[i] * _mean[i] + bs._wsums[i] * bs._mean[i]) / wsum;
          _m2[i] += bs._m2[i] + delta * delta * _wsums[i] * bs._wsums[i] / wsum;
        }
        _wsums[i] = wsum;
      }
      _nobs += bs._nobs;
      _wsum += bs._wsum;
    }

    // Bessel-corrected (N-1) weighted variance per column, written into res.
    private double[] variance(double[] res) {
      for (int i = 0; i < res.length; ++i) {
        long nobs = _nobs - _naCnt[i];
        res[i] = (nobs / (nobs - 1.0)) * _m2[i] / _wsums[i];
      }
      return res;
    }
    public double variance(int i){return variance()[i];}
    public double[] variance() {
      // if(sparse()) throw new UnsupportedOperationException("Can not do single pass sparse variance computation");
      if (_var != null) return _var;  // cached
      return _var = variance(MemoryManager.malloc8d(_mean.length));
    }
    public double sigma(int i){return sigma()[i];}
    public double[] sigma() {
      if(_sd != null) return _sd;  // cached
      double[] res = variance().clone();
      for (int i = 0; i < res.length; ++i)
        res[i] = Math.sqrt(res[i]);
      return _sd = res;
    }
    public double[] mean() {return _mean;}
    public double mean(int i) {return _mean[i];}
    public long nobs() {return _nobs;}
    public boolean isSparse(int col) {return _nzCnt[col] < _nobs;}
  }

  /** Fast approximate sqrt
   * @return sqrt(x) with up to 5% relative error */
  public static double approxSqrt(double x) {
    // Halve the IEEE-754 exponent via integer arithmetic on the bit pattern.
    return Double.longBitsToDouble(((Double.doubleToLongBits(x) >> 32) + 1072632448) << 31);
  }

  /** Fast approximate sqrt
   * @return sqrt(x) with up to 5% relative error */
  public static float approxSqrt(float x) {
    return Float.intBitsToFloat(532483686 + (Float.floatToRawIntBits(x) >> 1));
  }

  /** Fast approximate 1./sqrt
   * @return 1./sqrt(x) with up to 2% relative error */
  public static double approxInvSqrt(double x) {
    double xhalf = 0.5d*x;
    x =
Double.longBitsToDouble(0x5fe6ec85e7de30daL - (Double.doubleToLongBits(x)>>1)); return x*(1.5d - xhalf*x*x); } /** Fast approximate 1./sqrt * @return 1./sqrt(x) with up to 2% relative error */ public static float approxInvSqrt(float x) { float xhalf = 0.5f*x; x = Float.intBitsToFloat(0x5f3759df - (Float.floatToIntBits(x)>>1)); return x*(1.5f - xhalf*x*x); } /** Fast approximate exp * @return exp(x) with up to 5% relative error */ public static double approxExp(double x) { return Double.longBitsToDouble(((long)(1512775 * x + 1072632447)) << 32); } /** Fast approximate log for values greater than 1, otherwise exact * @return log(x) with up to 0.1% relative error */ public static double approxLog(double x){ if (x > 1) return ((Double.doubleToLongBits(x) >> 32) - 1072632447d) / 1512775d; else return Math.log(x); } /** Fast calculation of log base 2 for integers. * @return log base 2 of n */ public static int log2(int n) { if (n <= 0) throw new IllegalArgumentException(); return 31 - Integer.numberOfLeadingZeros(n); } public static int log2(long n) { return 63 - Long.numberOfLeadingZeros(n); } public static float[] div(float[] nums, float n) { assert !Float.isInfinite(n) : "Trying to divide " + Arrays.toString(nums) + " by " + n; // Almost surely not what you want for (int i=0; i<nums.length; i++) nums[i] /= n; return nums; } public static double[] div(double[] nums, double n) { assert !Double.isInfinite(n) : "Trying to divide " + Arrays.toString(nums) + " by " + n; // Almost surely not what you want for (int i=0; i<nums.length; i++) nums[i] /= n; return nums; } public static float sum(final float[] from) { float result = 0; for (float d: from) result += d; return result; } public static double sum(final double[] from) { double result = 0; for (double d: from) result += d; return result; } public static float sumSquares(final float[] a) { return sumSquares(a, 0, a.length); } /** * Approximate sumSquares * @param a Array with numbers * @param from starting index 
(inclusive) * @param to ending index (exclusive) * @return approximate sum of squares based on a sample somewhere in the middle of the array (pos determined by bits of a[0]) */ public static float approxSumSquares(final float[] a, int from, int to) { final int len = to-from; final int samples = Math.max(len / 16, 1); final int offset = from + Math.abs(Float.floatToIntBits(a[0])) % (len-samples); assert(offset+samples <= to); return sumSquares(a, offset, offset + samples) * (float)len / (float)samples; } public static float sumSquares(final float[] a, int from, int to) { assert(from >= 0 && to <= a.length); float result = 0; final int cols = to-from; final int extra=cols-cols%8; final int multiple = (cols/8)*8-1; float psum1 = 0, psum2 = 0, psum3 = 0, psum4 = 0; float psum5 = 0, psum6 = 0, psum7 = 0, psum8 = 0; for (int c = from; c < from + multiple; c += 8) { psum1 += a[c ]*a[c ]; psum2 += a[c+1]*a[c+1]; psum3 += a[c+2]*a[c+2]; psum4 += a[c+3]*a[c+3]; psum5 += a[c+4]*a[c+4]; psum6 += a[c+5]*a[c+5]; psum7 += a[c+6]*a[c+6]; psum8 += a[c+7]*a[c+7]; } result += psum1 + psum2 + psum3 + psum4; result += psum5 + psum6 + psum7 + psum8; for (int c = from + extra; c < to; ++c) { result += a[c]*a[c]; } return result; } /** * Compare two numbers to see if they are within one ulp of the smaller decade. * Order of the arguments does not matter. * * @param a First number * @param b Second number * @return true if a and b are essentially equal, false otherwise. 
*/
  public static boolean equalsWithinOneSmallUlp(float a, float b) {
    // Two NaNs are considered equal here (deliberate deviation from IEEE ==).
    if (Float.isNaN(a) && Float.isNaN(b)) return true;
    float ulp_a = Math.ulp(a);
    float ulp_b = Math.ulp(b);
    float small_ulp = Math.min(ulp_a, ulp_b);
    float absdiff_a_b = Math.abs(a - b); // subtraction order does not matter, due to IEEE 754 spec
    return absdiff_a_b <= small_ulp;
  }

  /** Double version of the one-small-ulp comparison above; argument order does not matter. */
  public static boolean equalsWithinOneSmallUlp(double a, double b) {
    if (Double.isNaN(a) && Double.isNaN(b)) return true;
    double ulp_a = Math.ulp(a);
    double ulp_b = Math.ulp(b);
    double small_ulp = Math.min(ulp_a, ulp_b);
    double absdiff_a_b = Math.abs(a - b); // subtraction order does not matter, due to IEEE 754 spec
    return absdiff_a_b <= small_ulp;
  }

  // Section 4.2: Error bound on recursive sum from Higham, Accuracy and Stability of Numerical Algorithms, 2nd Ed
  // |E_n| <= (n-1) * u * \sum_i^n |x_i| + P(u^2)
  public static boolean equalsWithinRecSumErr(double actual, double expected, int n, double absum) {
    return Math.abs(actual - expected) <= (n - 1) * Math.ulp(actual) * absum;
  }

  /** Compare 2 doubles within a tolerance
   * @param a double
   * @param b double
   * @param abseps - Absolute allowed tolerance
   * @param releps - Relative allowed tolerance
   * @return true if equal within tolerances */
  public static boolean compare(double a, double b, double abseps, double releps) {
    // Bug fix: the relative-error term used Math.max(a, b) as the denominator.
    // For negative a, b the quotient is negative and trivially passes `< releps`
    // (declaring any two negatives "equal"), and for a ~= -b it divides by ~0.
    // Scale by the larger magnitude instead.
    return Double.compare(a, b) == 0                                          // exact equality (incl. -0.0 vs 0.0 ordering)
        || Math.abs(a - b) / Math.max(Math.abs(a), Math.abs(b)) < releps      // small relative error
        || Math.abs(a - b) <= abseps;                                         // small absolute error
  }

  // some common Vec ops

  /** Dot product; assumes x and y have the same length. */
  public static double innerProduct(double[] x, double[] y) {
    double result = 0;
    for (int i = 0; i < x.length; i++)
      result += x[i] * y[i];
    return result;
  }

  /** Squared L2 norm. */
  public static double l2norm2(double[] x) {
    double sum = 0;
    for (double d : x) sum += d * d;
    return sum;
  }

  /** L1 norm (sum of absolute values). */
  public static double l1norm(double[] x) {
    double sum = 0;
    for (double d : x) sum += d >= 0 ? d : -d;
    return sum;
  }

  /** L2 norm. */
  public static double l2norm(double[] x) {
    return Math.sqrt(l2norm2(x));
  }

  public static
double [] wadd(double [] x, double [] y, double w){ for(int i = 0; i < x.length; ++i) x[i] += w*y[i]; return x; } // Random 1000 larger primes public static final long[] PRIMES = { 709887397L, 98016697L, 85080053L, 56490571L, 385003067, 57525611L, 191172517L, 707389223L, 38269029L, 971065009L, 969012193L, 932573549L, 88277861L, 557977913L, 186530489L, 971846399L, 93684557L, 568491823L, 374500471L, 260955337L, 98748991L, 571124921L, 268388903L, 931975097L, 80137923L, 378339371L, 191476231L, 982164353L, 96991951L, 193488247L, 186331151L, 186059399L, 99717967L, 714703333L, 195765091L, 934873301L, 33844087L, 392819423L, 709242049L, 975098351L, 15814261L, 846357791L, 973645069L, 968987629L, 27247177L, 939785537L, 714611087L, 846883019L, 98514157L, 851126069L, 180055321L, 378662957L, 97312573L, 553353439L, 268057183L, 554327167L, 24890223L, 180650339L, 964569689L, 565633303L, 52962097L, 931225723L, 556700413L, 570525509L, 99233241L, 270892441L, 185716603L, 928527371L, 21286513L, 561435671L, 561547303L, 696202733L, 53624617L, 930346357L, 567779323L, 973736227L, 91898247L, 560750693L, 187256227L, 373704811L, 35668549L, 191257589L, 934128313L, 698681153L, 81768851L, 378742241L, 971211347L, 848250443L, 57148391L, 844575103L, 976095787L, 193706609L, 12680637L, 929060857L, 973363793L, 979803301L, 59840627L, 923478557L, 262430459L, 970229543L, 77980417L, 924763579L, 703130651L, 263613989L, 88115473L, 695202203L, 378625519L, 850417619L, 37875123L, 696088793L, 553766351L, 381382453L, 90515451L, 570302171L, 962465983L, 923407679L, 19931057L, 856231703L, 941060833L, 971397239L, 10339277L, 379853059L, 845156227L, 187980707L, 87821407L, 938344853L, 380122333L, 270054377L, 83320839L, 261180221L, 192697819L, 839701211L, 12564821L, 556717591L, 848036339L, 374151047L, 97257047L, 936281293L, 188681027L, 195149543L, 87704907L, 927976717L, 844819139L, 273676181L, 39585799L, 706129079L, 384034087L, 933489013L, 59297633L, 268994839L, 981927539L, 195840863L, 67345573L, 967452049L, 560096107L, 
381740743L, 30924129L, 924804943L, 856120231L, 378647363L, 80385621L, 697508593L, 274289269L, 193688753L, 73891551L, 271848133L, 932057111L, 257551951L, 91279349L, 938126183L, 555432523L, 981016831L, 30805159L, 196382603L, 706893793L, 933713923L, 24244231L, 378590591L, 710972333L, 269517089L, 16916897L, 562526791L, 183312523L, 189463201L, 38989417L, 391893721L, 972826333L, 386610647L, 64896971L, 926400467L, 932555329L, 850558381L, 89064649L, 714662899L, 384851339L, 265636697L, 91508059L, 275418673L, 559709609L, 922161403L, 10531101L, 857303261L, 853919329L, 558603317L, 55745273L, 856595459L, 923077957L, 841009783L, 16850687L, 708322837L, 184264963L, 696558959L, 93682079L, 375977179L, 974002649L, 849803629L, 97926061L, 968610047L, 844793123L, 384591617L, 55237313L, 935336407L, 559316999L, 554674333L, 14130253L, 846839069L, 931726963L, 696160733L, 75174581L, 557994317L, 838168543L, 966852493L, 77072929L, 970159979L, 964704397L, 189568151L, 86268653L, 855284593L, 850048289L, 191313583L, 93713647L, 191142043L, 388880231L, 553249517L, 30195511L, 387150937L, 849836231L, 970592537L, 28652147L, 268424399L, 558866377L, 186814247L, 39044643L, 976912063L, 845625881L, 711967423L, 50662731L, 386395531L, 188849761L, 711490979L, 15549633L, 979839541L, 559484329L, 563433161L, 59397379L, 920856857L, 192399139L, 187354667L, 55056687L, 196880249L, 558354787L, 967650823L, 94294149L, 389784139L, 180486277L, 565918721L, 20466667L, 268413349L, 267469649L, 936151193L, 72346123L, 979276561L, 695068741L, 699857383L, 54711473L, 182608813L, 183270007L, 702031919L, 97944489L, 387586607L, 381249059L, 376605809L, 77319227L, 556347787L, 701093269L, 192346391L, 90335227L, 256723087L, 962532569L, 266508769L, 17739193L, 937662653L, 847160927L, 555998467L, 88295583L, 857415067L, 261917263L, 385579793L, 51141643L, 373631119L, 705996133L, 973170461L, 55331307L, 967455763L, 938587709L, 706688057L, 21297597L, 922065379L, 185517257L, 187628431L, 96410283L, 563376631L, 570763741L, 936993961L, 52224149L, 
979458331L, 392576593L, 700887227L, 68821447L, 979730771L, 980082293L, 273639451L, 50288347L, 378934783L, 571910639L, 557914661L, 96941061L, 260494543L, 711310849L, 192637969L, 22890911L, 963887479L, 554730437L, 922265609L, 78772921L, 696207877L, 570249107L, 393007129L, 86456451L, 385480783L, 926825371L, 267285527L, 22092111L, 713561533L, 393315437L, 856347343L, 93146269L, 855525691L, 939838357L, 708335053L, 93532607L, 714598517L, 853725269L, 844167949L, 21977701L, 270958973L, 192136349L, 375609701L, 19897797L, 966888187L, 932260729L, 383532827L, 25237737L, 272543773L, 392590733L, 853665451L, 21725587L, 700887881L, 194074883L, 981838607L, 80417439L, 704312201L, 553750697L, 980933669L, 74528743L, 179675627L, 383340833L, 709235897L, 90741063L, 192309673L, 571935391L, 194902511L, 94110553L, 924261131L, 191984729L, 269236567L, 58470623L, 182656571L, 849099131L, 569471723L, 11961733L, 851046631L, 262712029L, 193922059L, 51451747L, 854728031L, 264981697L, 842532959L, 11163561L, 967373513L, 857689213L, 971242631L, 91159577L, 376996001L, 561336649L, 709380197L, 53406409L, 963273559L, 273184829L, 559905089L, 80983593L, 570001207L, 181289533L, 846881023L, 28890767L, 845688421L, 555569233L, 189620681L, 78793177L, 854935111L, 572712211L, 965532551L, 37847349L, 262570873L, 963609191L, 926753309L, 58346681L, 189095527L, 842218019L, 265500401L, 58861247L, 389674489L, 390095639L, 841892383L, 85054659L, 191505641L, 712111369L, 841407407L, 91256717L, 930216869L, 196419757L, 714269687L, 27174241L, 572612297L, 191433857L, 180735229L, 55107853L, 183312203L, 981881179L, 185146877L, 82402047L, 187382323L, 274363207L, 191076499L, 57751437L, 187785713L, 924689923L, 393190717L, 71161873L, 197227729L, 180143683L, 381192601L, 15005641L, 376847017L, 567605161L, 838240673L, 80153253L, 965992537L, 857310253L, 261754247L, 36064557L, 267898751L, 967090921L, 937570097L, 12337347L, 712318247L, 978577751L, 568905091L, 94257099L, 842182967L, 374004977L, 381257309L, 96791961L, 921781121L, 557889977L, 
192185387L, 93247459L, 193216277L, 700322947L, 970295303L, 13157043L, 377418233L, 938901113L, 380496409L, 27278997L, 980067787L, 921546019L, 182505511L, 80115941L, 934837181L, 926914847L, 259623571L, 28102691L, 562673513L, 967105907L, 926710639L, 94210853L, 920748757L, 391684499L, 387247697L, 57752203L, 839753723L, 566374183L, 569364071L, 91244107L, 701970299L, 183147761L, 192938983L, 57579247L, 387206317L, 938222833L, 270174413L, 80376961L, 923378317L, 383078257L, 191690461L, 96389807L, 267712741L, 850101353L, 970424239L, 34699577L, 707392033L, 846517769L, 572099873L, 80426597L, 980129011L, 846324977L, 571031159L, 93248107L, 567629729L, 192701459L, 375630173L, 97379631L, 558891877L, 385348591L, 708982787L, 99143939L, 181841897L, 192597829L, 854675441L, 71312189L, 383257489L, 382600903L, 714164239L, 14287911L, 555130057L, 970321717L, 570861703L, 25868783L, 559474921L, 269746163L, 934658899L, 11042893L, 188907143L, 933254173L, 275577487L, 22606051L, 570314989L, 706436851L, 382812809L, 20093987L, 383146817L, 258516589L, 180236977L, 70049377L, 929492677L, 704664187L, 185934289L, 58575211L, 392996663L, 856628287L, 197998483L, 95194827L, 980551813L, 927882983L, 391326917L, 24153433L, 378212663L, 849772571L, 382378159L, 69371443L, 259661527L, 380291797L, 970105957L, 39696727L, 931108069L, 557712577L, 706204777L, 90975487L, 377724973L, 976364429L, 258731423L, 32280277L, 966276109L, 392993767L, 922543927L, 35895501L, 843852797L, 842395019L, 938078633L, 80021733L, 180972413L, 972384389L, 257708257L, 11399039L, 699607547L, 179571479L, 381531497L, 95577441L, 967694027L, 703939237L, 560134033L, 10374449L, 969953659L, 570804607L, 188228603L, 98870849L, 695911061L, 179866429L, 566537623L, 18741029L, 572525543L, 705109633L, 374728357L, 66409487L, 857997661L, 969932363L, 271021117L, 87386813L, 924659837L, 930064451L, 699659099L, 92722127L, 940860467L, 381665183L, 979952719L, 27144841L, 274646369L, 936578021L, 559210007L, 16684763L, 196169173L, 926404139L, 192762901L, 17681727L, 
189521161L, 181515617L, 858437443L, 23552873L, 258885643L, 572831971L, 973561471L, 59372601L, 181459769L, 566285441L, 965442013L, 93491029L, 180786043L, 929988151L, 845756941L, 35529257L, 699442283L, 853078201L, 390950671L, 15958801L, 712435631L, 387157913L, 976160347L, 68684279L, 179988047L, 389090791L, 699322219L, 10307823L, 259064219L, 377097319L, 850345549L, 66881839L, 933108151L, 266299519L, 260426339L, 72105031L, 931087667L, 973797767L, 392582221L, 66105353L, 843357917L, 965549551L, 555596219L, 98867657L, 973871617L, 928572781L, 965246651L, 73876453L, 934831181L, 940948433L, 570264209L, 71210171L, 847592843L, 262149649L, 555835717L, 17468753L, 388931927L, 260194087L, 970748903L, 39762147L, 181554757L, 711884729L, 261162977L, 35297709L, 856201667L, 380186867L, 180397589L, 11201441L, 922615327L, 376981837L, 554670449L, 34089477L, 964124867L, 569139349L, 853955087L, 95490287L, 709207027L, 572850679L, 566624309L, 39946727L, 968467037L, 840315521L, 923008613L, 96636383L, 570123877L, 695094643L, 695377961L, 85046823L, 698062327L, 840797417L, 197750629L, 88399737L, 389835253L, 939584969L, 923130347L, 71023647L, 981863369L, 696543251L, 375409421L, 13752431L, 855538433L, 269223991L, 980951861L, 17976011L, 383342473L, 696386767L, 383000213L, 38001763L, 260224427L, 969142787L, 924409687L, 92289037L, 705677339L, 854639273L, 709648501L, 51602861L, 927498401L, 963151939L, 257969059L, 99942561L, 702552397L, 378807467L, 843849547L, 20636249L, 838174921L, 921188483L, 697743737L, 55171601L, 963313399L, 969542537L, 268784609L, 10638293L, 554031749L, 257309069L, 856356289L, 272064581L, 193518863L, 272811667L, 382857571L, 705293539L, 94434307L, 841390831L, 378434863L, 22644091L, 933591301L, 263483903L, 937305671L, 92030791L, 855482651L, 706132187L, 703258151L, 34513681L, 262886671L, 193130321L, 977976803L, 51169839L, 934495231L, 266741317L, 974393971L, 22079491L, 700151497L, 705291473L, 568384493L, 93712889L, 851253661L, 265654027L, 393268147L, 56217787L, 850416367L, 857303827L, 
391728109L, 98810113L, 191962153L, 268291579L, 181466911L, 94017901L, 921053269L, 186716597L, 963617209L, 59349733L, 192916351L, 853395997L, 181896479L, 54769193L, 186653633L, 841422889L, 560707079L, 92365467L, 703592261L, 982412807L, 982243111L, 78892241L, 927464383L, 930534359L, 268636259L, 94549379L, 712074763L, 559450939L, 857428151L, 71670509L, 256671463L, 936352111L, 980141417L, 36271839L, 186475811L, 925100521L, 972243169L, 91920501L, 696389069L, 928678631L, 381418831L, 12023729L, 844714907L, 857426887L, 846161201L, 99505771L, 386542469L, 856860959L, 572063227L, 56038117L, 385629949L, 979920607L, 258498697L, 81234773L, 389956109L, 556370957L, 379944343L, 50730109L, 565321789L, 981670519L, 974403491L, 96057349L, 711469903L, 979604279L, 265069711L, 35443673L, 197595613L, 925185959L, 940443347L, 17173331L, 854818409L, 707162809L, 557260003L, 12290843L, 973388453L, 713357609L, 379834097L, 16945751L, 272464273L, 853795783L, 975641603L, 20326481L, 271093661L, 560031733L, 563000783L, 89785227L, 381224603L, 389678899L, 382372531L, 93398507L, 713755909L, 379280107L, 849555587L, 12726569L, 713067799L, 386762897L, 699452197L, 68249743L, 921329677L, 969662999L, 708401153L, 92343817L, 695690659L, 376186373L, 971774849L, 68191267L, 559122461L, 846282403L, 928908247L, 36511479L, 921516097L, 270107843L, 568075631L, 87827469L, 844675283L, 562808263L, 191356681L, 14927579L, 840652927L, 553679459L, 558298787L, 89230059L, 980861633L, 266720513L, 566820913L, 69320183L, 554150749L, 970182487L, 196312381L, 13836923L, 927087017L, 269236103L, 197279059L, 27011321L, 190280689L, 844923689L, 708889619L, 35296049L, 383543333L, 971450659L, 932468473L, 94659689L, 569153671L, 378633757L, 972685003L, 94676831L, 383130073L, 184098373L, 848604173L, 57587529L, 383922947L, 257719843L, 377849887L, 94816741L, 974841787L, 851800231L, 386896033L, 28408719L, 852139663L, 975564299L, 268145221L, 11937199L, 386365229L, 190900637L, 187768367L, }; public static double roundToNDigits(double d, int n) { 
// (body of roundToNDigits, whose header is on the previous line)
    // Round d to n significant digits.
    if(d == 0)return d;
    // NOTE(review): (int)Math.log10(d) is NaN->0 for negative d and truncates
    // toward zero for 0 < d < 1, so "n significant digits" only holds for d >= 1
    // -- confirm the intended domain.
    int log = (int)Math.log10(d);
    int exp = n;
    exp -= log;
    int ival = (int)(Math.round(d * Math.pow(10,exp)));
    return ival/Math.pow(10,exp);
  }

  public enum Norm {L1,L2,L2_2,L_Infinite}

  /** @return {min, max, mean, stddev} of counts (population stddev, divide by N). */
  public static double[] min_max_mean_stddev(long[] counts) {
    // NOTE(review): Float.MIN_VALUE is the smallest POSITIVE float, so `max`
    // starts near +0 and would be wrong if all counts were negative; also the
    // Float constants seed double accumulators. Harmless for non-negative counts.
    double min = Float.MAX_VALUE;
    double max = Float.MIN_VALUE;
    double mean = 0;
    for (long tmp : counts) {
      min = Math.min(tmp, min);
      max = Math.max(tmp, max);
      mean += tmp;
    }
    mean /= counts.length;
    double stddev = 0;
    for (long tmp : counts) {
      stddev += Math.pow(tmp - mean, 2);
    }
    stddev /= counts.length;
    stddev = Math.sqrt(stddev);
    return new double[] {min,max,mean,stddev};
  }

  /** Signum: -1, 0 or 1. */
  public static double sign(double d) {
    if(d == 0)return 0;
    return d < 0?-1:1;
  }

  /** Row-wise discrete cosine transforms of a Frame, via JTransforms. */
  public static class DCT {

    // Validate dimensions and column types before any transform.
    // NOTE(review): some callers below pass (height, width) into the (width, height)
    // slots; only the product is actually checked, so this is currently harmless.
    public static void initCheck(Frame input, int width, int height, int depth) {
      ConcurrencyUtils.setNumberOfThreads(1); // keep JTransforms single-threaded inside MRTask workers
      if (width < 1 || height < 1 || depth < 1)
        throw new H2OIllegalArgumentException("dimensions must be >= 1");
      if (width*height*depth != input.numCols())
        throw new H2OIllegalArgumentException("dimensions HxWxD must match the # columns of the frame");
      for (Vec v : input.vecs()) {
        if (v.naCnt() > 0)
          throw new H2OIllegalArgumentException("DCT can not be computed on rows with missing values");
        if (!v.isNumeric())
          throw new H2OIllegalArgumentException("DCT can only be computed on numeric columns");
      }
    }

    /**
     * Compute the 1D discrete cosine transform for each row in the given Frame, and return a new Frame
     *
     * @param input Frame containing numeric columns with data samples
     * @param N Number of samples (must be less or equal than number of columns)
     * @param inverse Whether to compute the inverse
     * @return Frame containing 1D (inverse) DCT of each row (same dimensionality)
     */
    public static Frame transform1D(Frame input, final int N, final boolean inverse) {
      initCheck(input, N, 1, 1);
      return new MRTask() {
        @Override public void map(Chunk[] cs, NewChunk[] ncs) {
          double[] a = new double[N];
          for (int row = 0; row < cs[0]._len; ++row) {
            // fill 1D array
            for (int i = 0; i < N; ++i)
              a[i] = cs[i].atd(row);
            // compute DCT for each row
            if (!inverse)
              new DoubleDCT_1D(N).forward(a, true);
            else
              new DoubleDCT_1D(N).inverse(a, true);
            // write result to NewChunk
            for (int i = 0; i < N; ++i)
              ncs[i].addNum(a[i]);
          }
        }
      }.doAll(input.numCols(), Vec.T_NUM, input).outputFrame();
    }

    /**
     * Compute the 2D discrete cosine transform for each row in the given Frame, and return a new Frame
     *
     * @param input Frame containing numeric columns with data samples
     * @param height height
     * @param width width
     * @param inverse Whether to compute the inverse
     * @return Frame containing 2D DCT of each row (same dimensionality)
     */
    public static Frame transform2D(Frame input, final int height, final int width, final boolean inverse) {
      initCheck(input, height, width, 1);
      return new MRTask() {
        @Override public void map(Chunk[] cs, NewChunk[] ncs) {
          double[][] a = new double[height][width];
          // each row is a 2D sample, stored row-major across the columns
          for (int row = 0; row < cs[0]._len; ++row) {
            for (int i = 0; i < height; ++i)
              for (int j = 0; j < width; ++j)
                a[i][j] = cs[i * width + j].atd(row);
            // compute 2D DCT
            if (!inverse)
              new DoubleDCT_2D(height, width).forward(a, true);
            else
              new DoubleDCT_2D(height, width).inverse(a, true);
            // write result to NewChunk
            for (int i = 0; i < height; ++i)
              for (int j = 0; j < width; ++j)
                ncs[i * width + j].addNum(a[i][j]);
          }
        }
      }.doAll(height * width, Vec.T_NUM, input).outputFrame();
    }

    /**
     * Compute the 3D discrete cosine transform for each row in the given Frame, and return a new Frame
     *
     * @param input Frame containing numeric columns with data samples
     * @param height height
     * @param width width
     * @param depth depth
     * @param inverse Whether to compute the inverse
     * @return Frame containing 3D DCT of each row (same dimensionality)
     */
    public static Frame transform3D(Frame input, final int height, final int width, final int depth, final boolean inverse) {
      initCheck(input, height, width, depth);
      return new MRTask() {
        @Override public void map(Chunk[] cs, NewChunk[] ncs) {
          double[][][] a = new double[height][width][depth];
          // each row is a 3D sample, stored (i, j, k)-major across the columns
          for (int row = 0; row < cs[0]._len; ++row) {
            for (int i = 0; i < height; ++i)
              for (int j = 0; j < width; ++j)
                for (int k = 0; k < depth; ++k)
                  a[i][j][k] = cs[i*(width*depth) + j*depth + k].atd(row);
            // compute 3D DCT
            if (!inverse)
              new DoubleDCT_3D(height, width, depth).forward(a, true);
            else
              new DoubleDCT_3D(height, width, depth).inverse(a, true);
            // write result to NewChunk
            for (int i = 0; i < height; ++i)
              for (int j = 0; j < width; ++j)
                for (int k = 0; k < depth; ++k)
                  ncs[i*(width*depth) + j*depth + k].addNum(a[i][j][k]);
          }
        }
      }.doAll(height*width*depth, Vec.T_NUM, input).outputFrame();
    }
  }

  /** Sum of squared differences between a response chunk and a prediction chunk. */
  public static class SquareError extends MRTask<SquareError> {
    public double _sum;
    @Override public void map( Chunk resp, Chunk pred ) {
      double sum = 0;
      for( int i=0; i<resp._len; i++ ) {
        double err = resp.atd(i)-pred.atd(i);
        sum += err*err;
      }
      _sum = sum;
    }
    @Override public void reduce( SquareError ce ) { _sum += ce._sum; }
  }

  /** y*log(y/mu), with 0*log(0) treated as 0 and mu floored to avoid log(0)/division by 0. */
  public static double y_log_y(double y, double mu) {
    if(y == 0)return 0;
    if(mu < Double.MIN_NORMAL) mu = Double.MIN_NORMAL;
    return y * Math.log(y / mu);
  }

  /** Compare signed longs */
  public static int compare(long x, long y) {
    return (x < y) ? -1 : ((x == y) ? 0 : 1);
  }

  /** Comparison of unsigned longs. */
  public static int compareUnsigned(long a, long b) {
    // Just map [0, 2^64-1] to [-2^63, 2^63-1]
    return compare(a^0x8000000000000000L, b^0x8000000000000000L);
  }

  /** Comparison of 128bit unsigned values represented by 2 longs */
  public static int compareUnsigned(long hiA, long loA, long hiB, long loB) {
    int resHi = compareUnsigned(hiA, hiB);
    int resLo = compareUnsigned(loA, loB);
    // High words dominate; low words break ties.
    return resHi != 0 ? resHi : resLo;
  }

  /**
   * Logloss
   * @param err prediction error (between 0 and 1)
   * @return logloss, capped at MAXLL to avoid infinity at err == 1
   */
  public static double logloss(double err) {
    return Math.min(MAXLL, -Math.log(1.0-err));
  }
  final static double MAXLL = -Math.log(1e-15); //34.53878
}
/*
 * ModeShape (http://www.modeshape.org)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *       http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.modeshape.jcr;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
import java.io.File;
import java.io.InputStream;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.Value;
import javax.jcr.nodetype.NodeDefinition;
import javax.jcr.nodetype.NodeTypeDefinition;
import javax.jcr.nodetype.PropertyDefinition;
import javax.jcr.version.OnParentVersionAction;
import org.junit.Before;
import org.junit.Test;
import org.modeshape.common.FixFor;
import org.modeshape.common.collection.Problem;
import org.modeshape.common.collection.SimpleProblems;
import org.modeshape.common.text.ParsingException;
import org.modeshape.jcr.cache.PropertyTypeUtil;
import org.modeshape.jcr.value.Name;
import org.modeshape.jcr.value.NamespaceException;
import org.modeshape.jcr.value.NamespaceRegistry;

/**
 * Unit test for {@link org.modeshape.jcr.CndImporter}
 */
public class CndImporterTest {

    // Filesystem location of the CND test fixtures.
    public static final String CND_FILE_PATH = "src/test/resources/cnd/";

    private ExecutionContext context;
    private CndImporter importer;
    private SimpleProblems problems;

    @Before
    public void beforeEach() {
        problems = new SimpleProblems();
        context = new ExecutionContext();
        // Register the namespace prefixes the CND fixtures rely on ...
        context.getNamespaceRegistry().register(ModeShapeLexicon.Namespace.PREFIX, ModeShapeLexicon.Namespace.URI);
        context.getNamespaceRegistry().register(JcrLexicon.Namespace.PREFIX, JcrLexicon.Namespace.URI);
        context.getNamespaceRegistry().register(JcrNtLexicon.Namespace.PREFIX, JcrNtLexicon.Namespace.URI);
        // Set up the importer ...
        importer = new CndImporter(context);
    }

    // Convenience: build a Name via the context's name factory.
    protected Name name( String name ) {
        return context.getValueFactories().getNameFactory().create(name);
    }

    protected void printProblems() {
        for (Problem problem : problems) {
            System.out.println(problem);
        }
    }

    // Open a CND fixture from the test classpath.
    protected InputStream openCndStream( String cndFileName ) {
        return this.getClass().getClassLoader().getResourceAsStream("cnd/" + cndFileName);
    }

    // Open a CND fixture from the filesystem, asserting it exists.
    protected File openCndFile( String cndFileName ) {
        File result = new File(CND_FILE_PATH + cndFileName);
        assertThat(result.exists(), is(true));
        return result;
    }

    @Test( expected = ParsingException.class )
    public void shouldReportErrorIfTheNodeTypeNameIsEmpty() {
        String cnd = "<ns = 'http://namespace.com/ns'> [] abstract";
        importer.parse(cnd);
    }

    @Test( expected = ParsingException.class )
    public void shouldReportErrorIfTheNodeTypeNameIsBlank() {
        String cnd = "<ns = 'http://namespace.com/ns'> [ ] abstract";
        importer.parse(cnd);
    }

    @Test( expected = ParsingException.class )
    public void shouldReportErrorIfTheNodeTypeNameIsNotFollowedByClosingBracket() {
        String cnd = "<ns = 'http://namespace.com/ns'> [ abstract";
        importer.parse(cnd);
    }

    @Test( expected = NamespaceException.class )
    @FixFor( "MODE-1962" )
    public void shouldReportErrorIfTheNodeTypeNameUsesInvalidNamespace() {
        String cnd = "<ns = 'http://namespace.com/ns'> [xyz:acme] abstract";
        importer.parse(cnd);
    }

    @Test
    public void shouldParseNamespaceDeclarationWithQuotedUriAndQuotedPrefix() {
        String cnd = "<'ns' = 'http://namespace.com/ns'>";
        importer.parse(cnd);
    }

    @Test
    public void shouldParseNamespaceDeclarationWithUnquotedUriAndQuotedPrefix() {
        String cnd = "<'ns' = http_namespace.com_ns>";
        importer.parse(cnd);
    }
/** An unquoted prefix with a quoted URI parses cleanly. */
@Test
public void shouldParseNamespaceDeclarationWithQuotedUriAndUnquotedPrefix() {
    importer.parse("<ns = 'http://namespace.com/ns'>");
}

/** An unquoted prefix with an unquoted URI parses cleanly. */
@Test
public void shouldParseNamespaceDeclarationWithUnquotedUriAndUnquotedPrefix() {
    importer.parse("<ns = http_namespace.com_ns>");
}

/** The smallest legal node type definition is just a bracketed name. */
@Test
public void shouldParseMinimalNodeDefinition() {
    importer.parse("[nodeTypeName]");
}

/** A single supertype may follow the '>' separator. */
@Test
public void shouldParseMinimalNodeDefinitionWithSupertype() {
    importer.parse("[nodeTypeName] > supertype");
}

/** Multiple supertypes are comma-separated. */
@Test
public void shouldParseMinimalNodeDefinitionWithSupertypes() {
    importer.parse("[nodeTypeName] > supertype1, supertype2");
}

/** A CND keyword (here 'abstract') is still usable as a node type name. */
@Test
public void shouldParseNodeDefinitionWithNameThatIsKeyword() {
    importer.parse("[abstract] > supertype1, supertype2");
}

/**
 * Import a CND document that exercises every supported feature (namespace
 * declaration, supertypes, node attributes, property attributes with
 * constraints and query operators, child node definition) and then verify
 * the resulting definitions in detail.
 */
@Test
public void shouldImportCndThatUsesAllFeatures() throws RepositoryException {
    // importer.setDebug(true);
    String cnd = "<ex = 'http://namespace.com/ns'>\n"
                 + "[ex:NodeType] > ex:ParentType1, ex:ParentType2 abstract orderable mixin noquery primaryitem ex:property\n"
                 + "- ex:property (STRING) = 'default1', 'default2' mandatory autocreated protected multiple VERSION\n"
                 + " queryops '=, <>, <, <=, >, >=, LIKE' nofulltext noqueryorder < 'constraint1', 'constraint2'"
                 + "+ ex:node (ex:reqType1, ex:reqType2) = ex:defaultType mandatory autocreated protected sns version";
    importer.importFrom(cnd, problems, "string");
    if (!problems.isEmpty()) {
        printProblems();
    }
    // Check the namespace ...
// Register 'ex' locally so the prefixed names below resolve in this context.
context.getNamespaceRegistry().register("ex", "http://namespace.com/ns");
// Exactly one namespace was imported from the CND ...
assertThat(importer.getNamespaces().size(), is(1));
NamespaceRegistry.Namespace ns = importer.getNamespaces().iterator().next();
assertThat(ns.getNamespaceUri(), is("http://namespace.com/ns"));
// Exactly one node type definition, carrying every declared node attribute ...
List<NodeTypeDefinition> defns = importer.getNodeTypeDefinitions();
assertThat(defns.size(), is(1));
NodeTypeDefinition defn = defns.get(0);
assertThat(defn.getName(), is("ex:NodeType"));
assertThat(defn.isAbstract(), is(true));
assertThat(defn.hasOrderableChildNodes(), is(true));
assertThat(defn.isMixin(), is(true));
assertThat(defn.isQueryable(), is(false));
assertThat(defn.getPrimaryItemName(), is("ex:property"));
String[] supertypeNames = defn.getDeclaredSupertypeNames();
assertThat(supertypeNames[0], is("ex:ParentType1"));
assertThat(supertypeNames[1], is("ex:ParentType2"));
// The single property definition carries all declared property attributes ...
PropertyDefinition[] propDefns = defn.getDeclaredPropertyDefinitions();
assertThat(propDefns.length, is(1));
PropertyDefinition propDefn = propDefns[0];
assertThat(propDefn.getName(), is("ex:property"));
assertThat(propDefn.getRequiredType(), is(PropertyType.STRING));
assertThat(propDefn.isMandatory(), is(true));
assertThat(propDefn.isAutoCreated(), is(true));
assertThat(propDefn.isProtected(), is(true));
assertThat(propDefn.isMultiple(), is(true));
assertThat(propDefn.getOnParentVersion(), is(OnParentVersionAction.VERSION));
assertThat(propDefn.isFullTextSearchable(), is(false));
assertThat(propDefn.isQueryOrderable(), is(false));
Value[] defaultValues = propDefn.getDefaultValues();
assertThat(defaultValues[0].getString(), is("default1"));
assertThat(defaultValues[1].getString(), is("default2"));
// Query operators come back in declaration order ...
String[] queryOps = propDefn.getAvailableQueryOperators();
assertThat(queryOps[0], is("="));
assertThat(queryOps[1], is("<>"));
assertThat(queryOps[2], is("<"));
assertThat(queryOps[3], is("<="));
assertThat(queryOps[4], is(">"));
assertThat(queryOps[5], is(">="));
assertThat(queryOps[6], is("LIKE"));
String[] constraints = propDefn.getValueConstraints();
assertThat(constraints[0], is("constraint1"));
assertThat(constraints[1], is("constraint2"));
// The single child node definition carries all declared child attributes ...
NodeDefinition[] childDefns = defn.getDeclaredChildNodeDefinitions();
assertThat(childDefns.length, is(1));
NodeDefinition childDefn = childDefns[0];
assertThat(childDefn.getName(), is("ex:node"));
assertThat(childDefn.getDefaultPrimaryTypeName(), is("ex:defaultType"));
assertThat(childDefn.isMandatory(), is(true));
assertThat(childDefn.isAutoCreated(), is(true));
assertThat(childDefn.isProtected(), is(true));
assertThat(childDefn.allowsSameNameSiblings(), is(true));
assertThat(childDefn.getOnParentVersion(), is(OnParentVersionAction.VERSION));
String[] requiredTypeNames = childDefn.getRequiredPrimaryTypeNames();
assertThat(requiredTypeNames[0], is("ex:reqType1"));
assertThat(requiredTypeNames[1], is("ex:reqType2"));
}

/**
 * Same document as {@link #shouldImportCndThatUsesAllFeatures()}, but with
 * vendor-extension blocks ("{...}") sprinkled throughout; the import must
 * still yield identical definitions (asserted in the continuation below).
 */
@Test
public void shouldImportCndThatUsesExtensions() throws RepositoryException {
    // importer.setDebug(true);
    String cnd = "<ex = 'http://namespace.com/ns'>\n"
                 + "[ex:NodeType] > ex:ParentType1, ex:ParentType2 abstract {mode:desc 'ex:NodeType description'} orderable mixin noquery primaryitem ex:property\n"
                 + "- ex:property (STRING) = 'default1', 'default2' mandatory autocreated protected multiple VERSION\n"
                 + " queryops '=, <>, <, <=, >, >=, LIKE' {mode:desc 'ex:property description'} {mode:altName Cool Property} nofulltext noqueryorder < 'constraint1', 'constraint2'"
                 + "+ ex:node (ex:reqType1, ex:reqType2) = ex:defaultType {} mandatory autocreated protected sns version";
    importer.importFrom(cnd, problems, "string");
    // Check the namespace ...
// Register 'ex' locally so the prefixed names below resolve in this context.
context.getNamespaceRegistry().register("ex", "http://namespace.com/ns");
assertThat(importer.getNamespaces().size(), is(1));
NamespaceRegistry.Namespace ns = importer.getNamespaces().iterator().next();
assertThat(ns.getNamespaceUri(), is("http://namespace.com/ns"));
// The extension blocks must not change the imported definitions: all the
// assertions below mirror shouldImportCndThatUsesAllFeatures().
List<NodeTypeDefinition> defns = importer.getNodeTypeDefinitions();
assertThat(defns.size(), is(1));
NodeTypeDefinition defn = defns.get(0);
assertThat(defn.getName(), is("ex:NodeType"));
assertThat(defn.isAbstract(), is(true));
assertThat(defn.hasOrderableChildNodes(), is(true));
assertThat(defn.isMixin(), is(true));
assertThat(defn.isQueryable(), is(false));
assertThat(defn.getPrimaryItemName(), is("ex:property"));
String[] supertypeNames = defn.getDeclaredSupertypeNames();
assertThat(supertypeNames[0], is("ex:ParentType1"));
assertThat(supertypeNames[1], is("ex:ParentType2"));
PropertyDefinition[] propDefns = defn.getDeclaredPropertyDefinitions();
assertThat(propDefns.length, is(1));
PropertyDefinition propDefn = propDefns[0];
assertThat(propDefn.getName(), is("ex:property"));
assertThat(propDefn.getRequiredType(), is(PropertyType.STRING));
assertThat(propDefn.isMandatory(), is(true));
assertThat(propDefn.isAutoCreated(), is(true));
assertThat(propDefn.isProtected(), is(true));
assertThat(propDefn.isMultiple(), is(true));
assertThat(propDefn.getOnParentVersion(), is(OnParentVersionAction.VERSION));
assertThat(propDefn.isFullTextSearchable(), is(false));
assertThat(propDefn.isQueryOrderable(), is(false));
Value[] defaultValues = propDefn.getDefaultValues();
assertThat(defaultValues[0].getString(), is("default1"));
assertThat(defaultValues[1].getString(), is("default2"));
String[] queryOps = propDefn.getAvailableQueryOperators();
assertThat(queryOps[0], is("="));
assertThat(queryOps[1], is("<>"));
assertThat(queryOps[2], is("<"));
assertThat(queryOps[3], is("<="));
assertThat(queryOps[4], is(">"));
assertThat(queryOps[5], is(">="));
assertThat(queryOps[6], is("LIKE"));
String[] constraints = propDefn.getValueConstraints();
assertThat(constraints[0], is("constraint1"));
assertThat(constraints[1], is("constraint2"));
NodeDefinition[] childDefns = defn.getDeclaredChildNodeDefinitions();
assertThat(childDefns.length, is(1));
NodeDefinition childDefn = childDefns[0];
assertThat(childDefn.getName(), is("ex:node"));
assertThat(childDefn.getDefaultPrimaryTypeName(), is("ex:defaultType"));
assertThat(childDefn.isMandatory(), is(true));
assertThat(childDefn.isAutoCreated(), is(true));
assertThat(childDefn.isProtected(), is(true));
assertThat(childDefn.allowsSameNameSiblings(), is(true));
assertThat(childDefn.getOnParentVersion(), is(OnParentVersionAction.VERSION));
String[] requiredTypeNames = childDefn.getRequiredPrimaryTypeNames();
assertThat(requiredTypeNames[0], is("ex:reqType1"));
assertThat(requiredTypeNames[1], is("ex:reqType2"));
}

/** The whole document may be collapsed onto (nearly) one line; whitespace is not significant. */
@Test
public void shouldImportCndThatIsOnOneLine() {
    String cnd = "<ns = 'http://namespace.com/ns'> "
                 + "<ex = 'http://namespace.com/ex'>\n"
                 + "[ns:NodeType] > ns:ParentType1, ns:ParentType2 abstract orderable mixin noquery primaryitem ex:property "
                 + "- ex:property (STRING) = 'default1', 'default2' mandatory autocreated protected multiple VERSION < 'constraint1', 'constraint2' "
                 + " queryops '=, <>, <, <=, >, >=, LIKE' nofulltext noqueryorder "
                 + "+ ns:node (ns:reqType1, ns:reqType2) = ns:defaultType mandatory autocreated protected sns version";
    importer.importFrom(cnd, problems, "string");
}

/** A node type with properties but no child node definitions is valid. */
@Test
public void shouldImportCndThatHasNoChildren() {
    String cnd = "<ns = 'http://namespace.com/ns'>\n"
                 + "<ex = 'http://namespace.com/ex'>\n"
                 + "[ns:NodeType] > ns:ParentType1, ns:ParentType2 abstract orderable mixin noquery primaryitem ex:property\n"
                 + "- ex:property (STRING) = 'default1', 'default2' mandatory autocreated protected multiple VERSION < 'constraint1', 'constraint2'\n"
                 + " queryops '=, <>, <, <=, >, >=, LIKE' nofulltext noqueryorder";
    importer.importFrom(cnd, problems, "string");
}

// (Method name continues in the next chunk.)
@Test
@FixFor( "MODE-1696" )
public void
shouldRaiseProblemsImportingJcrBuiltinNodeTypesForJSR170() throws Exception {
    importer.importFrom(openCndFile("jcr-builtins-170.cnd"), problems);
    if (problems.size() != 0) printProblems();
    registerImportedNamespaces();
    //jsr 170 isn't supported, because of the MULTIPLE and PRIMARY attribute on properties
    assertThat(problems.size(), is(1));
}

/**
 * Import the JSR-283 (early draft) built-in node types and spot-check several
 * of the resulting definitions; the expected CND source for each type is
 * quoted in the comments immediately above its assertions.
 */
@Test
public void shouldImportJcrBuiltinNodeTypesForJSR283() throws Exception {
    importer.importFrom(openCndFile("jcr-builtins-283-early-draft.cnd"), problems);
    if (problems.size() != 0) printProblems();
    registerImportedNamespaces();
    assertThat(problems.size(), is(0));

    // [nt:base]
    // - jcr:primaryType (name) mandatory autocreated protected compute
    // - jcr:mixinTypes (name) protected multiple compute
    assertNodeType("nt:base", new String[] {"mode:defined"}, NO_PRIMARY_NAME, NodeOptions.Abstract, NodeOptions.Queryable);
    assertProperty("nt:base", "jcr:primaryType", "Name", NO_DEFAULTS,
                   new PropertyOptions[] {PropertyOptions.Mandatory, PropertyOptions.Autocreated, PropertyOptions.Protected,
                       PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable}, OnParentVersion.Compute);
    assertProperty("nt:base", "jcr:mixinTypes", "Name", NO_DEFAULTS,
                   new PropertyOptions[] {PropertyOptions.Multiple, PropertyOptions.Protected,
                       PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable}, OnParentVersion.Compute);

    // [nt:unstructured]
    // orderable
    // - * (undefined) multiple
    // - * (undefined)
    // + * (nt:base) = nt:unstructured multiple version
    assertNodeType("nt:unstructured", NO_SUPERTYPES, NO_PRIMARY_NAME, NodeOptions.Ordered, NodeOptions.Queryable);
    assertProperty("nt:unstructured", "*", "Undefined", NO_DEFAULTS, PropertyOptions.Multiple,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    // We should test for this, but we'd have to rewrite node() to look more like
    // RepositoryNodeTypeManager.findChildNodeDefinition
    // assertProperty("nt:unstructured", "*", "Undefined", NO_DEFAULTS);
    assertChild("nt:unstructured", "*", "nt:base", "nt:unstructured", OnParentVersion.Version, ChildOptions.Multiple,
                ChildOptions.Sns);

    // [mix:referenceable]
    // mixin
    // - jcr:uuid (string) mandatory autocreated protected initialize
    assertNodeType("mix:referenceable", NO_SUPERTYPES, NO_PRIMARY_NAME, NodeOptions.Mixin, NodeOptions.Queryable);
    assertProperty("mix:referenceable", "jcr:uuid", "String", NO_DEFAULTS, OnParentVersion.Initialize,
                   PropertyOptions.Mandatory, PropertyOptions.Autocreated, PropertyOptions.Protected,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);

    // [mix:lockable]
    // mixin
    // - jcr:lockOwner (string) protected ignore
    // - jcr:lockIsDeep (boolean) protected ignore
    assertNodeType("mix:lockable", NO_SUPERTYPES, NO_PRIMARY_NAME, NodeOptions.Mixin, NodeOptions.Queryable);
    assertProperty("mix:lockable", "jcr:lockOwner", "String", NO_DEFAULTS, OnParentVersion.Ignore,
                   PropertyOptions.Protected, PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    assertProperty("mix:lockable", "jcr:lockIsDeep", "Boolean", NO_DEFAULTS, OnParentVersion.Ignore,
                   PropertyOptions.Protected, PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);

    // [nt:propertyDefinition]
    // - jcr:name (name)
    // - jcr:autoCreated (boolean) mandatory
    // - jcr:mandatory (boolean) mandatory
    // - jcr:onParentVersion (string) mandatory
    // < 'COPY', 'VERSION', 'INITIALIZE', 'COMPUTE', 'IGNORE', 'ABORT'
    // - jcr:protected (boolean) mandatory
    // - jcr:requiredType (string) mandatory
    // < 'STRING', 'BINARY', 'LONG', 'DOUBLE', 'BOOLEAN', 'DATE', 'NAME', 'PATH', 'REFERENCE', 'UNDEFINED'
    // - jcr:valueConstraints (string) multiple
    // - jcr:defaultValues (undefined) multiple
    // - jcr:multiple (boolean) mandatory
    assertNodeType("nt:propertyDefinition", NO_SUPERTYPES, NO_PRIMARY_NAME, NodeOptions.Queryable);
    assertProperty("nt:propertyDefinition", "jcr:name", "Name", NO_DEFAULTS, PropertyOptions.FullTextSearchable,
                   PropertyOptions.QueryOrderable);
    // Remaining nt:propertyDefinition properties (see the quoted CND above).
    assertProperty("nt:propertyDefinition", "jcr:autoCreated", "Boolean", NO_DEFAULTS, PropertyOptions.Mandatory,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    assertProperty("nt:propertyDefinition", "jcr:mandatory", "Boolean", NO_DEFAULTS, PropertyOptions.Mandatory,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    assertProperty("nt:propertyDefinition", "jcr:onParentVersion", "String", NO_DEFAULTS,
                   new PropertyOptions[] { PropertyOptions.Mandatory, PropertyOptions.FullTextSearchable,
                       PropertyOptions.QueryOrderable}, null,
                   new String[] { "COPY", "VERSION", "INITIALIZE", "COMPUTE", "IGNORE", "ABORT"});
    assertProperty("nt:propertyDefinition", "jcr:protected", "Boolean", NO_DEFAULTS, PropertyOptions.Mandatory,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    assertProperty("nt:propertyDefinition", "jcr:requiredType", "String", NO_DEFAULTS,
                   new PropertyOptions[] { PropertyOptions.Mandatory, PropertyOptions.FullTextSearchable,
                       PropertyOptions.QueryOrderable}, null,
                   new String[] { "STRING", "BINARY", "LONG", "DOUBLE", "BOOLEAN", "DATE", "NAME", "PATH", "REFERENCE", "UNDEFINED"});
    assertProperty("nt:propertyDefinition", "jcr:valueConstraints", "String", NO_DEFAULTS, PropertyOptions.Multiple,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    assertProperty("nt:propertyDefinition", "jcr:defaultValues", "Undefined", NO_DEFAULTS, PropertyOptions.Multiple,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
    assertProperty("nt:propertyDefinition", "jcr:multiple", "Boolean", NO_DEFAULTS, PropertyOptions.Mandatory,
                   PropertyOptions.FullTextSearchable, PropertyOptions.QueryOrderable);
}

/** Importing the importer's own built-in node types must produce no problems. */
@Test
public void shouldImportBuiltInNodeTypes() throws Exception {
    importer.importBuiltIns(problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));

    // Verify a select few from the JCR and ModeShape builtin types ...
    registerImportedNamespaces();
    assertNodeType("nt:base", new String[] {}, NO_PRIMARY_NAME, NodeOptions.Abstract, NodeOptions.Queryable);
    assertNodeType("mode:root", new String[] {"nt:base", "mix:referenceable"}, NO_PRIMARY_NAME, NodeOptions.Queryable,
                   NodeOptions.Ordered);
}

/** An empty CND file imports cleanly with no problems. */
@Test
public void shouldImportCndThatIsEmpty() throws Exception {
    importer.importFrom(openCndFile("empty.cnd"), problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));
}

// The following tests import the CND files shipped with various sequencers
// and check that each parses without problems.
@Test
public void shouldImportCndForImageSequencer() throws Exception {
    importer.importFrom(openCndFile("images.cnd"), problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));
}

@Test
public void shouldImportCndForMp3Sequencer() throws Exception {
    importer.importFrom(openCndFile("mp3.cnd"), problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));
}

@Test
public void shouldImportCndForTeiidSequencer() throws Exception {
    importer.importFrom(openCndFile("teiid.cnd"), problems);
    if (problems.size() != 0) printProblems();
    registerImportedNamespaces();
    assertThat(problems.size(), is(0));
    // Spot-check one of the Teiid node types ...
    assertNodeType("relational:catalog", new String[] {"nt:unstructured", "relational:relationalEntity"}, NO_PRIMARY_NAME,
                   NodeOptions.Queryable, NodeOptions.Ordered);
}

/** A file that is not valid CND must yield exactly one problem. */
@Test
public void shouldNotImportFileThatIsNotAValidCnd() throws Exception {
    importer.importFrom(openCndFile("invalid.cnd"), problems);
    assertThat(problems.size(), is(1));
}

@Test
public void shouldImportCndForAircraft() throws Exception {
    importer.importFrom(openCndFile("aircraft.cnd"), problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));
}

@Test
public void shouldImportCndForCars() throws Exception {
    importer.importFrom(openCndFile("cars.cnd"), problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));
}

@Test
public void shouldImportCndForJavaSequencer() throws Exception {
    importer.importFrom(openCndFile("javaSource.cnd"), problems);
    if (problems.size() != 0) printProblems();
    assertThat(problems.size(), is(0));
}

/** Register every namespace the importer collected into the test context's registry. */
protected void registerImportedNamespaces() {
    for (NamespaceRegistry.Namespace ns : importer.getNamespaces()) {
        context.getNamespaceRegistry().register(ns.getPrefix(), ns.getNamespaceUri());
    }
}

// Readability constants for the assert* helpers below.
public static final String[] NO_DEFAULTS = {};
public static final String[] NO_SUPERTYPES = {};
public static final String[] NO_VALUE_CONSTRAINTS = {};
public static final String NO_PRIMARY_NAME = null;

// Flag enums used as varargs in the assert* helpers; absence of a flag
// asserts the corresponding attribute is false.
public static enum PropertyOptions {
    Mandatory, Autocreated, Protected, Multiple, FullTextSearchable, QueryOrderable
}

public static enum ChildOptions {
    Mandatory, Autocreated, Protected, Multiple, Sns
}

public static enum NodeOptions {
    Abstract, Mixin, Ordered, Queryable
}

public static enum OnParentVersion {
    Copy, Version, Initialize, Compute, Ignore, Abort
}

/**
 * Map the test's OnParentVersion enum to the JCR OnParentVersionAction constant.
 * A null argument maps to COPY (the CND default used by these tests).
 *
 * @param onParentVersioning the enum value, possibly null
 * @return the corresponding OnParentVersionAction int constant
 */
protected int opv( OnParentVersion onParentVersioning ) {
    int opv = OnParentVersionAction.COPY;
    if (onParentVersioning != null) {
        switch (onParentVersioning) {
            case Abort:
                opv = OnParentVersionAction.ABORT;
                break;
            case Compute:
                opv = OnParentVersionAction.COMPUTE;
                break;
            case Copy:
                opv = OnParentVersionAction.COPY;
                break;
            case Ignore:
                opv = OnParentVersionAction.IGNORE;
                break;
            case Initialize:
                opv = OnParentVersionAction.INITIALIZE;
                break;
            case Version:
                opv = OnParentVersionAction.VERSION;
                break;
        }
    }
    return opv;
}

/**
 * Find the imported node type definition with the given name, failing the test
 * if it was not imported.
 */
protected NodeTypeDefinition defn( String name ) {
    NodeTypeDefinition result = null;
    for (NodeTypeDefinition defn : importer.getNodeTypeDefinitions()) {
        if (defn.getName().equals(name)) {
            result = defn;
            break;
        }
    }
    assertThat("Failed to find node type definition \"" + name + "\"", result, is(notNullValue()));
    return result;
}

/** Find the named property definition on the given node type, failing the test if absent. */
protected PropertyDefinition propDefn( NodeTypeDefinition nodeType,
                                       String name ) {
    for (PropertyDefinition defn : nodeType.getDeclaredPropertyDefinitions()) {
        if (defn.getName().equals(name)) return defn;
    }
    assertThat("Failed to find property type definition \"" + name + "\"", false, is(true));
    return null;
}

/** Find the named child node definition on the given node type, failing the test if absent. */
protected NodeDefinition childDefn( NodeTypeDefinition nodeType,
                                    String name ) {
    for (NodeDefinition defn : nodeType.getDeclaredChildNodeDefinitions()) {
        if (defn.getName().equals(name)) return defn;
    }
    assertThat("Failed to find child node definition \"" + name + "\"", false, is(true));
    return null;
}

/**
 * Assert every node-level attribute of the named node type: the flags present in
 * {@code nodeOptions} must be true and all others false; supertypes must match exactly.
 */
protected void assertNodeType( String name,
                               String[] superTypes,
                               String primaryItemName,
                               NodeOptions... nodeOptions ) {
    Set<NodeOptions> options = new HashSet<NodeOptions>();
    for (NodeOptions option : nodeOptions)
        options.add(option);
    NodeTypeDefinition defn = defn(name);
    assertThat(defn.getName(), is(name));
    assertThat(defn.isAbstract(), is(options.contains(NodeOptions.Abstract)));
    assertThat(defn.hasOrderableChildNodes(), is(options.contains(NodeOptions.Ordered)));
    assertThat(defn.isMixin(), is(options.contains(NodeOptions.Mixin)));
    assertThat(defn.isQueryable(), is(options.contains(NodeOptions.Queryable)));
    assertThat(defn.getPrimaryItemName(), is(primaryItemName));
    String[] supertypeNames = defn.getDeclaredSupertypeNames();
    assertThat(supertypeNames, is(superTypes));
}

/** Convenience overload: no on-parent-version expectation (defaults to COPY). */
protected void assertProperty( String nodeTypeName,
                               String propertyName,
                               String requiredType,
                               String[] defaultValues,
                               PropertyOptions... propertyOptions ) throws RepositoryException {
    assertProperty(nodeTypeName, propertyName, requiredType, defaultValues, propertyOptions, null);
}

/** Convenience overload taking the on-parent-version expectation before the option flags. */
protected void assertProperty( String nodeTypeName,
                               String propertyName,
                               String requiredType,
                               String[] defaultValues,
                               OnParentVersion onParentVersion,
                               PropertyOptions...
                               propertyOptions ) throws RepositoryException {
    assertProperty(nodeTypeName, propertyName, requiredType, defaultValues, propertyOptions, onParentVersion);
}

/**
 * Translate a type name (e.g. "String", "Boolean") to the JCR PropertyType int
 * constant via ModeShape's own PropertyType enum.
 */
protected int jcrPropertyType( String typeName ) {
    org.modeshape.jcr.value.PropertyType type = org.modeshape.jcr.value.PropertyType.valueFor(typeName.toLowerCase());
    return PropertyTypeUtil.jcrPropertyTypeFor(type);
}

/**
 * Assert every attribute of the named property definition. Flags present in
 * {@code propertyOptions} must be true, all others false; empty/null expected
 * defaults or constraints assert that the definition reports null.
 */
protected void assertProperty( String nodeTypeName,
                               String propertyName,
                               String requiredType,
                               String[] defaultValues,
                               PropertyOptions[] propertyOptions,
                               OnParentVersion onParentVersioning,
                               String... valueConstraints ) throws RepositoryException {
    Set<PropertyOptions> options = new HashSet<PropertyOptions>();
    for (PropertyOptions option : propertyOptions)
        options.add(option);
    NodeTypeDefinition defn = defn(nodeTypeName);
    PropertyDefinition propDefn = propDefn(defn, propertyName);
    assertThat(propDefn.getName(), is(propertyName));
    assertThat(propDefn.getRequiredType(), is(jcrPropertyType(requiredType)));
    assertThat(propDefn.isMandatory(), is(options.contains(PropertyOptions.Mandatory)));
    assertThat(propDefn.isAutoCreated(), is(options.contains(PropertyOptions.Autocreated)));
    assertThat(propDefn.isProtected(), is(options.contains(PropertyOptions.Protected)));
    assertThat(propDefn.isMultiple(), is(options.contains(PropertyOptions.Multiple)));
    assertThat(propDefn.isFullTextSearchable(), is(options.contains(PropertyOptions.FullTextSearchable)));
    assertThat(propDefn.isQueryOrderable(), is(options.contains(PropertyOptions.QueryOrderable)));
    int opv = opv(onParentVersioning);
    assertThat(propDefn.getOnParentVersion(), is(opv));
    if (defaultValues == null || defaultValues.length == 0) {
        assertThat(propDefn.getDefaultValues(), is(nullValue()));
    } else {
        int i = 0;
        for (Value defaultValue : propDefn.getDefaultValues()) {
            assertThat(defaultValues[i++], is(defaultValue.getString()));
        }
    }
    if (valueConstraints == null || valueConstraints.length == 0) {
        assertThat(propDefn.getValueConstraints(), is(nullValue()));
    } else {
        assertThat(propDefn.getValueConstraints(), is(valueConstraints));
    }
}

/** Convenience overload: single required type, options-array form. */
protected void assertChild( String nodeTypeName,
                            String childName,
                            String requiredType,
                            String defaultPrimaryType,
                            ChildOptions[] childOptions,
                            OnParentVersion onParentVersioning ) {
    assertChild(nodeTypeName, childName, new String[] {requiredType}, defaultPrimaryType, childOptions, onParentVersioning);
}

/** Convenience overload: single required type, varargs form. */
protected void assertChild( String nodeTypeName,
                            String childName,
                            String requiredType,
                            String defaultPrimaryType,
                            OnParentVersion onParentVersioning,
                            ChildOptions... childOptions ) {
    assertChild(nodeTypeName, childName, new String[] {requiredType}, defaultPrimaryType, childOptions, onParentVersioning);
}

/**
 * Assert every attribute of the named child node definition; flags present in
 * {@code childOptions} must be true, all others false.
 */
protected void assertChild( String nodeTypeName,
                            String childName,
                            String[] requiredTypes,
                            String defaultPrimaryType,
                            ChildOptions[] childOptions,
                            OnParentVersion onParentVersioning ) {
    Set<ChildOptions> options = new HashSet<ChildOptions>();
    for (ChildOptions option : childOptions)
        options.add(option);
    NodeTypeDefinition defn = defn(nodeTypeName);
    NodeDefinition childDefn = childDefn(defn, childName);
    assertThat(childDefn.getName(), is(childName));
    assertThat(childDefn.getDefaultPrimaryTypeName(), is(defaultPrimaryType));
    assertThat(childDefn.isMandatory(), is(options.contains(ChildOptions.Mandatory)));
    assertThat(childDefn.isAutoCreated(), is(options.contains(ChildOptions.Autocreated)));
    assertThat(childDefn.isProtected(), is(options.contains(ChildOptions.Protected)));
    assertThat(childDefn.allowsSameNameSiblings(), is(options.contains(ChildOptions.Sns)));
    assertThat(childDefn.getOnParentVersion(), is(opv(onParentVersioning)));
    assertThat(childDefn.getRequiredPrimaryTypeNames(), is(requiredTypes));
}
}
package org.wikipedia.login;

import android.accounts.AccountAuthenticatorResponse;
import android.accounts.AccountManager;
import android.app.ProgressDialog;
import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.support.design.widget.PasswordTextInput;
import android.view.KeyEvent;
import android.view.MenuItem;
import android.view.View;
import android.view.inputmethod.EditorInfo;
import android.widget.EditText;
import android.widget.TextView;

import org.wikipedia.NonEmptyValidator;
import org.wikipedia.R;
import org.wikipedia.WikipediaApp;
import org.wikipedia.activity.ActivityUtil;
import org.wikipedia.activity.ThemedActionBarActivity;
import org.wikipedia.analytics.LoginFunnel;
import org.wikipedia.auth.AccountUtil;
import org.wikipedia.createaccount.CreateAccountActivity;
import org.wikipedia.util.FeedbackUtil;
import org.wikipedia.util.log.L;

import static org.wikipedia.util.DeviceUtil.hideSoftKeyboard;

/**
 * Login screen: collects username/password, delegates authentication to
 * {@link LoginClient}, logs analytics events through {@link LoginFunnel},
 * and can forward the user to account creation first.
 */
public class LoginActivity extends ThemedActionBarActivity {
    /** Activity result: authentication succeeded and an account was created locally. */
    public static final int RESULT_LOGIN_SUCCESS = 1;
    /** Activity result: login did not complete (also the default result). */
    public static final int RESULT_LOGIN_FAIL = 2;
    // Intent extra keys.
    public static final String LOGIN_REQUEST_SOURCE = "login_request_source";
    public static final String EDIT_SESSION_TOKEN = "edit_session_token";
    public static final String ACTION_CREATE_ACCOUNT = "action_create_account";

    private EditText usernameText;
    private EditText passwordText;
    private View loginButton;
    private ProgressDialog progressDialog;
    private LoginFunnel funnel;
    // Analytics "source" string passed in via LOGIN_REQUEST_SOURCE.
    private String loginSource;
    // Created lazily on first login attempt.
    private LoginClient loginClient;
    // True when this activity immediately forwarded to account creation,
    // so the login-start event is logged only after returning from it.
    private boolean wentStraightToCreateAccount;

    /** Build a launch intent with no edit-session token. */
    public static Intent newIntent(@NonNull Context context, @NonNull String source) {
        return newIntent(context, source, null);
    }

    /**
     * Build a launch intent.
     *
     * @param context caller context
     * @param source analytics source identifier (see LoginFunnel)
     * @param token optional edit-session token, logged when source is "edit"
     */
    public static Intent newIntent(@NonNull Context context,
                                   @NonNull String source,
                                   @Nullable String token) {
        return new Intent(context, LoginActivity.class)
                .putExtra(LOGIN_REQUEST_SOURCE, source)
                .putExtra(EDIT_SESSION_TOKEN, token);
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_wiki_login);
        usernameText = (EditText) findViewById(R.id.login_username_text);
        passwordText = ((PasswordTextInput) findViewById(R.id.login_password_input)).getEditText();
        View createAccountLink = findViewById(R.id.login_create_account_link);

        // Don't allow user to attempt login until they've put in a username and password
        new NonEmptyValidator(new NonEmptyValidator.ValidationChangedCallback() {
            @Override
            public void onValidationChanged(boolean isValid) {
                loginButton.setEnabled(isValid);
            }
        }, usernameText, passwordText);

        // IME "done" on the password field submits the form.
        passwordText.setOnEditorActionListener(new TextView.OnEditorActionListener() {
            @Override
            public boolean onEditorAction(TextView textView, int actionId, KeyEvent keyEvent) {
                if (actionId == EditorInfo.IME_ACTION_DONE) {
                    doLogin();
                    return true;
                }
                return false;
            }
        });

        loginButton = findViewById(R.id.login_button);
        loginButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                doLogin();
            }
        });

        createAccountLink.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                startCreateAccountActivity();
            }
        });

        progressDialog = new ProgressDialog(this);
        progressDialog.setMessage(getString(R.string.login_in_progress_dialog_message));
        progressDialog.setCancelable(false);

        funnel = new LoginFunnel(WikipediaApp.getInstance());
        loginSource = getIntent().getStringExtra(LOGIN_REQUEST_SOURCE);
        if (getIntent().getBooleanExtra(ACTION_CREATE_ACCOUNT, false)) {
            wentStraightToCreateAccount = true;
            startCreateAccountActivity();
        } else if (savedInstanceState == null) {
            // Only send the login start log event if the activity is created for the first time
            logLoginStart();
        }

        // Assume no login by default
        setResult(RESULT_LOGIN_FAIL);
    }

    // Referenced from the layout XML (android:onClick) — presumably; verify before removing.
    public void showPrivacyPolicy(View v) {
        FeedbackUtil.showPrivacyPolicy(this);
    }

    @Override
    protected void setTheme() {
        setActionBarTheme();
    }

    /** Log the analytics "login start" event, attaching the edit token when launched from an edit. */
    private void logLoginStart() {
        if (loginSource.equals(LoginFunnel.SOURCE_EDIT)) {
            funnel.logStart(
                    LoginFunnel.SOURCE_EDIT,
                    getIntent().getStringExtra(EDIT_SESSION_TOKEN)
            );
        } else {
            funnel.logStart(loginSource);
        }
    }

    /** Launch the account-creation flow, forwarding funnel session and source for analytics. */
    private void startCreateAccountActivity() {
        funnel.logCreateAccountAttempt();
        Intent intent = new Intent(this, CreateAccountActivity.class);
        intent.putExtra(CreateAccountActivity.LOGIN_SESSION_TOKEN, funnel.getSessionToken());
        intent.putExtra(CreateAccountActivity.LOGIN_REQUEST_SOURCE, loginSource);
        startActivityForResult(intent, CreateAccountActivity.ACTION_CREATE_ACCOUNT);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (requestCode == CreateAccountActivity.ACTION_CREATE_ACCOUNT) {
            if (wentStraightToCreateAccount) {
                // The start event was deferred while account creation was showing.
                logLoginStart();
            }
            if (resultCode == CreateAccountActivity.RESULT_ACCOUNT_CREATED) {
                // Auto-fill the freshly created credentials and log in immediately.
                usernameText.setText(data.getStringExtra("username"));
                passwordText.setText(data.getStringExtra("password"));
                funnel.logCreateAccountSuccess();
                FeedbackUtil.showMessage(this, R.string.create_account_account_created_toast);
                doLogin();
            } else {
                funnel.logCreateAccountFailure();
            }
        }
    }

    /** Kick off an asynchronous login with the current field contents. */
    private void doLogin() {
        final String username = usernameText.getText().toString();
        final String password = passwordText.getText().toString();
        if (loginClient == null) {
            loginClient = new LoginClient();
        }
        progressDialog.show();
        loginClient.request(WikipediaApp.getInstance().getWikiSite(), username, password,
                new LoginClient.LoginCallback() {
                    @Override
                    public void success(@NonNull LoginResult result) {
                        // Dialog not showing => onStop dismissed it; treat as detached.
                        if (!progressDialog.isShowing()) {
                            // no longer attached to activity!
                            return;
                        }
                        progressDialog.dismiss();
                        if (result.pass()) {
                            funnel.logSuccess();
                            // Forward the authenticator response, if the system launched us.
                            Bundle extras = getIntent().getExtras();
                            AccountAuthenticatorResponse response = extras == null
                                    ? null
                                    : extras.<AccountAuthenticatorResponse>getParcelable(AccountManager.KEY_ACCOUNT_AUTHENTICATOR_RESPONSE);
                            AccountUtil.createAccount(response, username, password);
                            hideSoftKeyboard(LoginActivity.this);
                            setResult(RESULT_LOGIN_SUCCESS);
                            finish();
                        } else if (result.fail()) {
                            funnel.logError(result.getMessage());
                            handleError(result.getMessage());
                        }
                    }

                    @Override
                    public void error(@NonNull Throwable caught) {
                        if (!progressDialog.isShowing()) {
                            // no longer attached to activity!
                            return;
                        }
                        progressDialog.dismiss();
                        FeedbackUtil.showError(LoginActivity.this, caught);
                    }
                });
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return ActivityUtil.defaultOnOptionsItemSelected(this, item)
                || super.onOptionsItemSelected(item);
    }

    @Override
    public void onBackPressed() {
        hideSoftKeyboard(this);
        super.onBackPressed();
    }

    /** Surface a login failure to the user and the log. */
    private void handleError(String message) {
        FeedbackUtil.showMessage(this, message);
        L.e("Login failed with result " + message);
    }

    @Override
    public void onStop() {
        // Dismiss the dialog so the async callbacks can detect detachment
        // (they check isShowing() before touching the UI).
        if (progressDialog.isShowing()) {
            progressDialog.dismiss();
        }
        super.onStop();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        // NOTE(review): "loginShowing" is written but never read back here —
        // presumably consumed elsewhere or vestigial; confirm before removing.
        outState.putBoolean("loginShowing", true);
    }
}
package com.planet_ink.coffee_mud.Commands; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.core.collections.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Libraries.interfaces.DatabaseEngine; import com.planet_ink.coffee_mud.Libraries.interfaces.JournalsLibrary; import com.planet_ink.coffee_mud.Libraries.interfaces.DatabaseEngine.PlayerData; import com.planet_ink.coffee_mud.Libraries.interfaces.JournalsLibrary.CommandJournalFlags; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import com.planet_ink.coffee_mud.core.exceptions.HTTPRedirectException; import java.util.*; /* Copyright 2004-2015 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/

/**
 * The MOTD/NEWS command: displays the message of the day, unread news journal
 * entries, waiting postal items (personal and clan), admin-echoed command
 * journal entries, unread mail, expired-account notices and open quests, and
 * lets the player toggle the daily message display with ON/OFF.
 */
@SuppressWarnings({"unchecked","rawtypes"})
public class MOTD extends StdCommand
{
	public MOTD(){}

	// Command words this command answers to.
	private final String[] access=I(new String[]{"MOTD","NEWS"});
	@Override public String[] getAccessWords(){return access;}

	// Default argument vector used when invoked with no arguments.
	private static Vector<String> DEFAULT_CMD=new ReadOnlyVector<String>(new String[]{"MOTD","AGAIN"});

	/**
	 * Entry point. Parses the trailing parameter (AGAIN/NEW/ON/OFF, with an
	 * optional final PAUSE) and either re-displays the MOTD+news digest or
	 * toggles the daily message attribute. Always returns false.
	 */
	@Override
	public boolean execute(MOB mob, Vector commands, int metaFlags)
		throws java.io.IOException
	{
		boolean pause=false;
		String what="MOTD";
		if((commands!=null)&&(commands.size()>0))
		{
			final String firstWord=((String)commands.get(0)).toUpperCase();
			// index 0 is "MOTD", which is already the default for 'what'
			if(CMParms.indexOf(this.getAccessWords(), firstWord)>0)
				what=firstWord;
			if((((String)commands.lastElement()).equalsIgnoreCase("PAUSE")))
			{
				pause = true;
				commands.remove(commands.size()-1);
			}
			if(commands.size()==1)
			{
				commands.add("AGAIN");
			}
		}
		else
			commands=DEFAULT_CMD;
		final String parm=CMParms.combine(commands,1);
		if((mob.playerStats()!=null)
		&&(parm.equalsIgnoreCase("AGAIN")||parm.equalsIgnoreCase("NEW")))
		{
			final StringBuffer buf=new StringBuffer("");
			try
			{
				// 1) static motd.txt resource (optionally web-macro filtered via <cmvp>)
				String msg = new CMFile(Resources.buildResourcePath("text")+"motd.txt",null).text().toString();
				if(msg.length()>0)
				{
					if(msg.startsWith("<cmvp>"))
						msg=new String(CMLib.webMacroFilter().virtualPageFilter(msg.substring(6).getBytes()));
					buf.append(msg+"\n\r--------------------------------------\n\r");
				}
				// 2) news journal entries newer than the player's last login
				final List<JournalsLibrary.JournalEntry> journal=new LinkedList<JournalsLibrary.JournalEntry>();
				journal.addAll(CMLib.database().DBReadJournalMsgs("CoffeeMud News")); // deprecated
				journal.addAll(CMLib.database().DBReadJournalMsgs("SYSTEM_NEWS"));
				for(int which=0;which<journal.size();which++)
				{
					final JournalsLibrary.JournalEntry entry=journal.get(which);
					final String from=entry.from;
					final long last=entry.date;
					String to=entry.to;
					final String subject=entry.subj;
					String message=entry.msg;
					final long compdate=entry.update;
					if(compdate>mob.playerStats().getLastDateTime())
					{
						boolean allMine=to.equalsIgnoreCase(mob.Name())
									||from.equalsIgnoreCase(mob.Name());
						// "MASK=" targeting: entry applies if the player matches the zapper mask
						if(to.toUpperCase().trim().startsWith("MASK=")&&CMLib.masking().maskCheck(to.trim().substring(5),mob,true))
						{
							allMine=true;
							to=CMLib.masking().maskDesc(to.trim().substring(5),true);
						}
						if(to.equalsIgnoreCase("ALL")||allMine)
						{
							if(message.startsWith("<cmvp>"))
								message=new String(CMLib.webMacroFilter().virtualPageFilter(message.substring(6).getBytes()));
							buf.append("\n\rNews: "+CMLib.time().date2String(last)+"\n\rFROM: "+CMStrings.padRight(from,15)+"\n\rTO : "+CMStrings.padRight(to,15)+"\n\rSUBJ: "+subject+"\n\r"+message);
							buf.append("\n\r--------------------------------------\n\r");
						}
					}
				}
				// 3) postal notifications: collect all post office chains/branches
				final Vector postalChains=new Vector();
				final Vector postalBranches=new Vector();
				PostOffice P=null;
				for(final Enumeration e=CMLib.map().postOffices();e.hasMoreElements();)
				{
					P=(PostOffice)e.nextElement();
					if(!postalChains.contains(P.postalChain()))
						postalChains.add(P.postalChain());
					if(!postalBranches.contains(P.postalBranch()))
						postalBranches.add(P.postalBranch());
				}
				if((postalChains.size()>0)&&(P!=null))
				{
					// personal mail waiting at any branch
					List<PlayerData> V=CMLib.database().DBReadData(mob.Name(),postalChains);
					final Map<PostOffice,int[]> res=getPostalResults(V,mob.playerStats().getLastDateTime());
					for(final Iterator<PostOffice> e=res.keySet().iterator();e.hasNext();)
					{
						P=e.next();
						final int[] ct=res.get(P);
						buf.append("\n\r"+report("You have",P,ct));
					}
					// clan mail, for clans where this player may WITHDRAW
					final Map<PostOffice,int[]> res2=new Hashtable();
					for(final Pair<Clan,Integer> clanPair : CMLib.clans().findPrivilegedClans(mob, Clan.Function.WITHDRAW))
					{
						if(clanPair!=null)
						{
							final Clan C=clanPair.first;
							if(C.getAuthority(clanPair.second.intValue(),Clan.Function.WITHDRAW)!=Clan.Authority.CAN_NOT_DO)
							{
								V=CMLib.database().DBReadData(C.name(),postalChains);
								if(V.size()>0)
								{
									res2.putAll(getPostalResults(V,mob.playerStats().getLastDateTime()));
								}
							}
							for(final Iterator<PostOffice> e=res2.keySet().iterator();e.hasNext();)
							{
								P=e.next();
								final int[] ct=res2.get(P);
								buf.append("\n\r"+report("Your "+C.getGovernmentName()+" "+C.getName()+" has",P,ct));
							}
						}
					}
					if((res.size()>0)||(res2.size()>0))
						buf.append("\n\r--------------------------------------\n\r");
				}
				// 4) command-journal entries echoed to privileged admins
				final Vector<JournalsLibrary.CommandJournal> myEchoableCommandJournals=new Vector<JournalsLibrary.CommandJournal>();
				for(final Enumeration<JournalsLibrary.CommandJournal> e=CMLib.journals().commandJournals();e.hasMoreElements();)
				{
					final JournalsLibrary.CommandJournal CMJ=e.nextElement();
					if((CMJ.getFlag(JournalsLibrary.CommandJournalFlags.ADMINECHO)!=null)
					&&((CMSecurity.isJournalAccessAllowed(mob,CMJ.NAME()))
						||CMSecurity.isAllowed(mob,mob.location(),CMSecurity.SecFlag.LISTADMIN)))
						myEchoableCommandJournals.add(CMJ);
				}
				boolean CJseparator=false;
				for(int cj=0;cj<myEchoableCommandJournals.size();cj++)
				{
					final JournalsLibrary.CommandJournal CMJ=myEchoableCommandJournals.get(cj);
					final List<JournalsLibrary.JournalEntry> items=CMLib.database().DBReadJournalMsgs("SYSTEM_"+CMJ.NAME()+"S");
					if(items!=null)
					for(int i=0;i<items.size();i++)
					{
						final JournalsLibrary.JournalEntry entry=items.get(i);
						final String from=entry.from;
						final String message=entry.msg;
						final long compdate=entry.update;
						if(compdate>mob.playerStats().getLastDateTime())
						{
							buf.append("\n\rNEW "+CMJ.NAME()+" from "+from+": "+message+"\n\r");
							CJseparator=true;
						}
					}
				}
				if(CJseparator)
					buf.append("\n\r--------------------------------------\n\r");
				// 5) unread mailbox notice (only when auto-forwarding is off)
				if((!mob.isAttribute(MOB.Attrib.AUTOFORWARD))
				&&(CMProps.getVar(CMProps.Str.MAILBOX).length()>0))
				{
					final List<JournalsLibrary.JournalEntry> msgs=CMLib.database().DBReadJournalMsgs(CMProps.getVar(CMProps.Str.MAILBOX));
					int mymsgs=0;
					for(int num=0;num<msgs.size();num++)
					{
						final JournalsLibrary.JournalEntry thismsg=msgs.get(num);
						final String to=thismsg.to;
						if(to.equalsIgnoreCase("all")
						||to.equalsIgnoreCase(mob.Name())
						||(to.toUpperCase().trim().startsWith("MASK=")&&CMLib.masking().maskCheck(to.trim().substring(5),mob,true)))
							mymsgs++;
					}
					if(mymsgs>0)
						buf.append(L("\n\r^ZYou have mail waiting. Enter 'EMAIL BOX' to read.^?^.\n\r"));
				}
				// 6) expired account/character report, for staff only
				if((CMSecurity.isAllowed(mob, mob.location(), CMSecurity.SecFlag.CMDPLAYERS))
				&&(CMProps.getBoolVar(CMProps.Bool.ACCOUNTEXPIRATION)))
				{
					final List<String> l=CMLib.login().getExpiredList();
					if(l.size()>0)
					{
						buf.append(L("\n\r^XThere are currently @x1 expired "+((CMProps.getIntVar(CMProps.Int.COMMONACCOUNTSYSTEM)>1)?"accounts":"characters"),""+l.size()));
						buf.append(L(". Enter LIST EXPIRED to view them.^?^.\n\r"));
					}
				}
				// 7) open quests, then emit everything, with an optional pause prompt
				final List<Quest> qQVec=CMLib.quests().getPlayerPersistantQuests(mob);
				if(mob.session()!=null)
				if(buf.length()>0)
				{
					if(qQVec.size()>0)
						buf.append(L("\n\r^HYou are on @x1 quest(s). Enter QUESTS to see them!.^?^.\n\r",""+qQVec.size()));
					mob.session().wraplessPrintln("\n\r--------------------------------------\n\r"+buf.toString());
					if(pause){ mob.session().prompt(L("\n\rPress ENTER: "),10000); mob.session().println("\n\r");}
				}
				else
				if(qQVec.size()>0)
					buf.append(L("\n\r^HYou are on @x1 quest(s). Enter QUESTS to see them!.^?^.\n\r",""+qQVec.size()));
				else
				if(CMParms.combine(commands,1).equalsIgnoreCase("AGAIN"))
					mob.session().println(L("No @x1 to re-read.",what));
			}
			catch(final HTTPRedirectException e){}
			return false;
		}
		// ON/OFF toggles the DAILYMESSAGE attribute; note the attribute being
		// SET corresponds to the daily messages being OFF (see both branches).
		if(parm.equalsIgnoreCase("ON"))
		{
			if(mob.isAttribute(MOB.Attrib.DAILYMESSAGE))
			{
				mob.setAttribute(MOB.Attrib.DAILYMESSAGE,false);
				mob.tell(L("The daily messages have been turned on."));
			}
			else
			{
				mob.tell(L("The daily messages are already on."));
			}
		}
		else
		if(parm.equalsIgnoreCase("OFF"))
		{
			if(!mob.isAttribute(MOB.Attrib.DAILYMESSAGE))
			{
				mob.setAttribute(MOB.Attrib.DAILYMESSAGE,true);
				mob.tell(L("The daily messages have been turned off."));
			}
			else
			{
				mob.tell(L("The daily messages are already off."));
			}
		}
		else
		{
			mob.tell(L("'@x1' is not a valid parameter. Try ON, OFF, or AGAIN.",parm));
		}
		return false;
	}

	/**
	 * Formats a one-line summary of waiting postal items at one post office.
	 * ct[0] = items newer than the player's last login, ct[1] = total items.
	 */
	private String report(String whom, PostOffice P, int[] ct)
	{
		String branchName=P.postalBranch();
		if((P instanceof MOB)&&(((MOB)P).getStartRoom()!=null))
			branchName=((MOB)P).getStartRoom().getArea().Name();
		else
		{
			// strip any trailing "#nnn" instance suffix from the branch name
			final int x=branchName.indexOf('#');
			if(x>=0)
				branchName=branchName.substring(0,x);
		}
		if(ct[0]>0)
			return whom+" "+ct[0]+" new of "+ct[1]+" items at the "+branchName+" branch of the "+P.postalChain()+" post office.";
		return whom+" "+ct[1]+" items still waiting at the "+branchName+" branch of the "+P.postalChain()+" post office.";
	}

	/**
	 * Tallies mail items per post office from raw stored mail rows.
	 * Each row's key is "branch;..."; rows with no branch or an unknown post
	 * office are skipped. Returns post office -> {new-since-newTimeDate, total}.
	 */
	private Map<PostOffice,int[]> getPostalResults(List<PlayerData> mailData, long newTimeDate)
	{
		final Hashtable<PostOffice,int[]> results=new Hashtable<PostOffice,int[]>();
		PostOffice P=null;
		for(int i=0;i<mailData.size();i++)
		{
			final DatabaseEngine.PlayerData letter=mailData.get(i);
			final String chain=letter.section;
			String branch=letter.key;
			final int x=branch.indexOf(';');
			if(x<0)
				continue;
			branch=branch.substring(0,x);
			P=CMLib.map().getPostOffice(chain,branch);
			if(P==null)
				continue;
			final PostOffice.MailPiece pieces=P.parsePostalItemData(letter.xml);
			int[] ct=results.get(P);
			if(ct==null)
			{
				ct=new int[2];
				results.put(P,ct);
			}
			ct[1]++;
			if(CMath.s_long(pieces.time)>newTimeDate)
				ct[0]++;
		}
		return results;
	}

	@Override
	public boolean canBeOrdered(){return true;}
}
/*
 * Copyright (C) 2009 JavaRosa
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy of
 * the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package org.javarosa.xform.schema;

import java.util.Hashtable;
import java.util.List;

import org.javarosa.core.model.Constants;
import org.javarosa.core.model.FormDef;
import org.javarosa.core.model.IFormElement;
import org.javarosa.core.model.ItemsetBinding;
import org.javarosa.core.model.QuestionDef;
import org.javarosa.core.model.SelectChoice;
import org.javarosa.core.model.instance.TreeElement;
import org.javarosa.core.model.instance.TreeReference;
import org.kxml2.kdom.Document;
import org.kxml2.kdom.Element;
import org.kxml2.kdom.Node;

/**
 * Generates an XML Schema (XSD) document describing the instance of a
 * {@link FormDef}: one element declaration per instance node, plus simpleType
 * enumerations for select-one/select-multi question choices.
 *
 * NOTE(review): not thread-safe — choiceTypeMapping is static mutable state
 * reset on every call to generateInstanceSchema.
 */
public class InstanceSchema {
	// Maps instance template nodes to the name of the generated choice simpleType.
	private static Hashtable choiceTypeMapping;

	/** Builds and returns the complete schema document for form {@code f}. */
	public static Document generateInstanceSchema (FormDef f) {
		init();

		Element schema = new Element();
		schema.setName("schema");
		schema.setNamespace("http://www.w3.org/2001/XMLSchema");
		schema.setPrefix("", "http://www.w3.org/2001/XMLSchema");
		schema.setPrefix("jr", "http://openrosa.org/javarosa");
		if (f.getInstance().schema != null) {
			schema.setAttribute(null, "targetNamespace", f.getInstance().schema);
		} else {
			System.err.println("Warning: instance has no schema");
		}
		schema.setAttribute(null, "elementFormDefault", "qualified");

		String formVersion = f.getInstance().formVersion;
		String uiVersion = f.getInstance().uiVersion;
		if (formVersion != null)
			schema.setAttribute(null, "version", formVersion);
		if (uiVersion != null)
			schema.setAttribute(null, "uiVersion", uiVersion);

		// Emit the choice simpleTypes first (populating choiceTypeMapping),
		// then the schemized element tree which references them.
		processSelectChoices(schema, f, f);
		schema.addChild(Node.ELEMENT, schemizeInstance(f.getInstance().getRoot()));

		Document schemaXML = new Document();
		schemaXML.addChild(Node.ELEMENT, schema);
		return schemaXML;
	}

	/** Resets the per-run choice type map. */
	private static void init () {
		choiceTypeMapping = new Hashtable();
	}

	/**
	 * Recursively converts an instance tree node into an xs:element
	 * declaration. Returns null for non-template copies of repeatable nodes —
	 * only the template instance is schemized (as maxOccurs="unbounded").
	 */
	private static Element schemizeInstance (TreeElement node) {
		String name = node.getName();
		boolean terminal = node.isLeaf();
		boolean repeatable = node.isRepeatable();

		if (repeatable && node.getMult() != TreeReference.INDEX_TEMPLATE) {
			return null;
		}

		Element e = new Element();
		e.setName("element");
		e.setAttribute(null, "name", name);
		e.setAttribute(null, "minOccurs", "0"); //technically only needed if node has a 'relevant' attribute bound to it, but no easy way to tell
		if (repeatable) {
			e.setAttribute(null, "maxOccurs", "unbounded");
		}

		if (!terminal) {
			// Interior node: complexType containing a sequence of schemized children.
			Element ct = new Element();
			ct.setName("complexType");
			e.addChild(Node.ELEMENT, ct);

			Element seq = new Element();
			seq.setName("sequence");
			ct.addChild(Node.ELEMENT, seq);

			for (int i = 0; i < node.getNumChildren(); i++) {
				Element child = schemizeInstance((TreeElement)node.getChildAt(i));
				if (child != null) {
					seq.addChild(Node.ELEMENT, child);
				}
			}
		} else {
			// Leaf node: map the JavaRosa datatype to an XSD (or jr:) type name.
			String type;
			switch (node.getDataType()) {
			case Constants.DATATYPE_NULL:
			case Constants.DATATYPE_TEXT: type = "string"; break;
			case Constants.DATATYPE_INTEGER: type = "integer"; break;
			case Constants.DATATYPE_LONG: type = "long"; break;
			case Constants.DATATYPE_DECIMAL: type = "decimal"; break;
			case Constants.DATATYPE_BOOLEAN: type = "boolean"; break;
			case Constants.DATATYPE_DATE: type = "date"; break;
			case Constants.DATATYPE_DATE_TIME: type = "dateTime"; break;
			case Constants.DATATYPE_TIME: type = "time"; break;
			case Constants.DATATYPE_CHOICE:
			case Constants.DATATYPE_CHOICE_LIST:
				// Looked up from the map filled by processSelectChoices; null if
				// the select question's choices were never registered.
				type = (String)choiceTypeMapping.get(node);
				if (type == null) {
					System.err.println("can't find choices for select-type question [" + node.getName() + "]");
				}
				break;
			case Constants.DATATYPE_GEOPOINT: type = "jr:geopoint"; break;
			case Constants.DATATYPE_GEOSHAPE: type = "jr:geoshape"; break;
			case Constants.DATATYPE_GEOTRACE: type = "jr:geotrace"; break;
			default:
				type = null;
				System.err.println("unrecognized type [" + node.getDataType() + ";" + node.getName() + "]");
				break;
			}
			if (type != null) {
				e.setAttribute(null, "type", type);
			}
		}

		return e;
	}

	/**
	 * Walks the form definition and, for every select question, writes the
	 * enumerated choice simpleType (plus a list type for select-multi) into
	 * {@code e} and records the type name in choiceTypeMapping, keyed by the
	 * instance template node for the question's bind reference.
	 */
	private static void processSelectChoices (Element e, IFormElement fe, FormDef form) {
		if (fe instanceof QuestionDef) {
			QuestionDef q = (QuestionDef)fe;
			int controlType = q.getControlType();
			TreeReference ref = (TreeReference)q.getBind().getReference();
			if (controlType == Constants.CONTROL_SELECT_ONE || controlType == Constants.CONTROL_SELECT_MULTI) {
				String choiceTypeName = getChoiceTypeName(ref);
				List<SelectChoice> choices;
				//Figure out the choices involved if they are complex
				ItemsetBinding itemset = q.getDynamicChoices();
				if (itemset != null) {
					form.populateDynamicChoices(itemset, ref);
					choices = itemset.getChoices();
				} else { //static choices
					choices = q.getChoices();
				}
				writeChoices(e, choiceTypeName, choices);
				if (controlType == Constants.CONTROL_SELECT_MULTI) {
					writeListType(e, choiceTypeName);
				}
				choiceTypeMapping.put(form.getInstance().getTemplatePath(ref),
						(controlType == Constants.CONTROL_SELECT_MULTI ? "list." : "") + choiceTypeName);
			}
		} else {
			for (int i = 0; i < fe.getChildren().size(); i++) {
				processSelectChoices(e, fe.getChild(i), form);
			}
		}
	}

	/** Derives a schema type name from a bind reference (slashes become underscores). */
	private static String getChoiceTypeName (TreeReference ref) {
		return ref.toString(false).replace('/', '_');
	}

	/** Writes an xs:simpleType string restriction enumerating the choice values. */
	private static void writeChoices (Element e, String typeName, List<SelectChoice> choices) {
		Element st = new Element();
		st.setName("simpleType");
		st.setAttribute(null, "name", typeName);
		e.addChild(Node.ELEMENT, st);

		Element restr = new Element();
		restr.setName("restriction");
		restr.setAttribute(null, "base", "string");
		st.addChild(Node.ELEMENT, restr);

		for (int i = 0; i < choices.size(); i++) {
			String value = choices.get(i).getValue();

			Element choice = new Element();
			choice.setName("enumeration");
			choice.setAttribute(null, "value", value);
			restr.addChild(Node.ELEMENT, choice);
		}
	}

	/** Writes an xs:simpleType list over the base choice type (for select-multi). */
	private static void writeListType (Element e, String typeName) {
		Element st = new Element();
		st.setName("simpleType");
		st.setAttribute(null, "name", "list." + typeName);
		e.addChild(Node.ELEMENT, st);

		Element list = new Element();
		list.setName("list");
		list.setAttribute(null, "itemType", typeName);
		st.addChild(Node.ELEMENT, list);
	}
}
/* This file was generated by SableCC's ObjectMacro. */

package org.sablecc.objectmacro.codegeneration.java.macro;

import java.util.*;

/**
 * Generated macro class for a "new directive" code fragment. Holds three
 * parameters (DirectiveName, IndexBuilder, TextParts) plus a per-context
 * ParamName value, and expands them into Java source text in build().
 *
 * NOTE(review): machine-generated code — do not hand-edit beyond comments.
 */
public class MNewDirective extends Macro {

	// Directive objects controlling how DirectiveName strings are joined.
	private DSeparator DirectiveNameSeparator;
	private DBeforeFirst DirectiveNameBeforeFirst;
	private DAfterLast DirectiveNameAfterLast;
	private DNone DirectiveNameNone;

	final List<String> list_DirectiveName;
	final Context DirectiveNameContext = new Context();
	final StringValue DirectiveNameValue;

	// Directive objects controlling how IndexBuilder strings are joined.
	private DSeparator IndexBuilderSeparator;
	private DBeforeFirst IndexBuilderBeforeFirst;
	private DAfterLast IndexBuilderAfterLast;
	private DNone IndexBuilderNone;

	final List<String> list_IndexBuilder;
	final Context IndexBuilderContext = new Context();
	final StringValue IndexBuilderValue;

	// Directive objects controlling how TextParts macro expansions are joined.
	private DSeparator TextPartsSeparator;
	private DBeforeFirst TextPartsBeforeFirst;
	private DAfterLast TextPartsAfterLast;
	private DNone TextPartsNone;

	final List<Macro> list_TextParts;
	final Context TextPartsContext = new Context();
	final MacroValue TextPartsValue;

	// ParamName is supplied per expansion context rather than stored as a list.
	private Map<Context, StringValue> list_ParamName = new LinkedHashMap<>();

	MNewDirective(
			Macros macros) {
		setMacros(macros);
		this.list_DirectiveName = new LinkedList<>();
		this.list_IndexBuilder = new LinkedList<>();
		this.list_TextParts = new LinkedList<>();
		this.list_ParamName = new LinkedHashMap<>();
		this.DirectiveNameValue = new StringValue(this.list_DirectiveName, this.DirectiveNameContext);
		this.IndexBuilderValue = new StringValue(this.list_IndexBuilder, this.IndexBuilderContext);
		this.TextPartsValue = new MacroValue(this.list_TextParts, this.TextPartsContext);
	}

	MNewDirective(
			String pDirectiveName,
			String pIndexBuilder,
			List<Macro> pTextParts,
			Macros macros) {
		setMacros(macros);
		this.list_DirectiveName = new LinkedList<>();
		this.list_IndexBuilder = new LinkedList<>();
		this.list_TextParts = new LinkedList<>();
		this.list_ParamName = new LinkedHashMap<>();
		this.DirectiveNameValue = new StringValue(this.list_DirectiveName, this.DirectiveNameContext);
		this.IndexBuilderValue = new StringValue(this.list_IndexBuilder, this.IndexBuilderContext);
		this.TextPartsValue = new MacroValue(this.list_TextParts, this.TextPartsContext);
		if (pTextParts != null) {
			addAllTextParts(pTextParts);
		}
		if (pDirectiveName != null) {
			addDirectiveName(pDirectiveName);
		}
		if (pIndexBuilder != null) {
			addIndexBuilder(pIndexBuilder);
		}
	}

	/** Appends all strings to the DirectiveName parameter; rejects nulls and mutation after build. */
	public void addAllDirectiveName(
			List<String> strings) {
		if (this.macros == null) {
			throw ObjectMacroException.parameterNull("DirectiveName");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		for (String string : strings) {
			if (string == null) {
				throw ObjectMacroException.parameterNull("DirectiveName");
			}
			this.list_DirectiveName.add(string);
		}
	}

	public void addDirectiveName(
			String string) {
		if (string == null) {
			throw ObjectMacroException.parameterNull("DirectiveName");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		this.list_DirectiveName.add(string);
	}

	/** Appends all strings to the IndexBuilder parameter; rejects nulls and mutation after build. */
	public void addAllIndexBuilder(
			List<String> strings) {
		if (this.macros == null) {
			throw ObjectMacroException.parameterNull("IndexBuilder");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		for (String string : strings) {
			if (string == null) {
				throw ObjectMacroException.parameterNull("IndexBuilder");
			}
			this.list_IndexBuilder.add(string);
		}
	}

	public void addIndexBuilder(
			String string) {
		if (string == null) {
			throw ObjectMacroException.parameterNull("IndexBuilder");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		this.list_IndexBuilder.add(string);
	}

	/**
	 * Appends all macros to the TextParts parameter, verifying each macro's
	 * concrete type, ownership, and acyclicity before adding it as a child.
	 */
	public void addAllTextParts(
			List<Macro> macros) {
		if (macros == null) {
			throw ObjectMacroException.parameterNull("TextParts");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		int i = 0;
		for (Macro macro : macros) {
			if (macro == null) {
				throw ObjectMacroException.macroNull(i, "TextParts");
			}
			if (getMacros() != macro.getMacros()) {
				throw ObjectMacroException.diffMacros();
			}
			verifyTypeTextParts(macro);
			this.list_TextParts.add(macro);
			this.children.add(macro);
			Macro.cycleDetector.detectCycle(this, macro);
			i++;
		}
	}

	// Accepts only the four part types below; any other macro type raises via
	// the InternalsInitializer's default behavior.
	void verifyTypeTextParts(
			Macro macro) {
		macro.apply(new InternalsInitializer("TextParts") {

			@Override
			void setStringPart(
					MStringPart mStringPart) {
			}

			@Override
			void setParamInsertPart(
					MParamInsertPart mParamInsertPart) {
			}

			@Override
			void setEolPart(
					MEolPart mEolPart) {
			}

			@Override
			void setInsertMacroPart(
					MInsertMacroPart mInsertMacroPart) {
			}
		});
	}

	public void addTextParts(
			MStringPart macro) {
		if (macro == null) {
			throw ObjectMacroException.parameterNull("TextParts");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		if (getMacros() != macro.getMacros()) {
			throw ObjectMacroException.diffMacros();
		}
		this.list_TextParts.add(macro);
		this.children.add(macro);
		Macro.cycleDetector.detectCycle(this, macro);
	}

	public void addTextParts(
			MParamInsertPart macro) {
		if (macro == null) {
			throw ObjectMacroException.parameterNull("TextParts");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		if (getMacros() != macro.getMacros()) {
			throw ObjectMacroException.diffMacros();
		}
		this.list_TextParts.add(macro);
		this.children.add(macro);
		Macro.cycleDetector.detectCycle(this, macro);
	}

	public void addTextParts(
			MEolPart macro) {
		if (macro == null) {
			throw ObjectMacroException.parameterNull("TextParts");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		if (getMacros() != macro.getMacros()) {
			throw ObjectMacroException.diffMacros();
		}
		this.list_TextParts.add(macro);
		this.children.add(macro);
		Macro.cycleDetector.detectCycle(this, macro);
	}

	public void addTextParts(
			MInsertMacroPart macro) {
		if (macro == null) {
			throw ObjectMacroException.parameterNull("TextParts");
		}
		if (this.cacheBuilder != null) {
			throw ObjectMacroException
					.cannotModify(this.getClass().getSimpleName());
		}
		if (getMacros() != macro.getMacros()) {
			throw ObjectMacroException.diffMacros();
		}
		this.list_TextParts.add(macro);
		this.children.add(macro);
		Macro.cycleDetector.detectCycle(this, macro);
	}

	/** Registers the ParamName value for one expansion context. */
	void setParamName(
			Context context,
			StringValue value) {
		if (value == null) {
			throw new RuntimeException("value cannot be null here");
		}
		this.list_ParamName.put(context, value);
	}

	private String buildDirectiveName() {
		StringBuilder sb = new StringBuilder();
		List<String> strings = this.list_DirectiveName;
		int i = 0;
		int nb_strings = strings.size();
		for (String string : strings) {
			sb.append(string);
			i++;
		}
		return sb.toString();
	}

	private String buildIndexBuilder() {
		StringBuilder sb = new StringBuilder();
		List<String> strings = this.list_IndexBuilder;
		int i = 0;
		int nb_strings = strings.size();
		for (String string : strings) {
			sb.append(string);
			i++;
		}
		return sb.toString();
	}

	/** Expands every TextParts macro, joining them with the configured separator. */
	private String buildTextParts() {
		StringBuilder sb = new StringBuilder();
		Context local_context = this.TextPartsContext;
		List<Macro> macros = this.list_TextParts;
		int i = 0;
		int nb_macros = macros.size();
		String expansion = null;
		if (this.TextPartsSeparator == null) {
			initTextPartsDirectives();
		}
		for (Macro macro : macros) {
			expansion = macro.build(local_context);
			expansion = this.TextPartsSeparator.apply(i, expansion, nb_macros);
			sb.append(expansion);
			i++;
		}
		return sb.toString();
	}

	private String buildParamName(
			Context context) {
		StringValue stringValue = this.list_ParamName.get(context);
		return stringValue.build();
	}

	StringValue getDirectiveName() {
		return this.DirectiveNameValue;
	}

	StringValue getIndexBuilder() {
		return this.IndexBuilderValue;
	}

	MacroValue getTextParts() {
		return this.TextPartsValue;
	}

	private StringValue getParamName(
			Context context) {
		return this.list_ParamName.get(context);
	}

	private void initTextPartsInternals(
			Context context) {
		for (Macro macro : this.list_TextParts) {
			macro.apply(new InternalsInitializer("TextParts") {

				@Override
				void setStringPart(
						MStringPart mStringPart) {
				}

				@Override
				void setParamInsertPart(
						MParamInsertPart mParamInsertPart) {
				}

				@Override
				void setEolPart(
						MEolPart mEolPart) {
				}

				@Override
				void setInsertMacroPart(
						MInsertMacroPart mInsertMacroPart) {
				}
			});
		}
	}

	private void initDirectiveNameDirectives() {
	}

	private void initIndexBuilderDirectives() {
	}

	// TextParts expansions are separated by a line separator.
	private void initTextPartsDirectives() {
		StringBuilder sb1 = new StringBuilder();
		sb1.append(LINE_SEPARATOR);
		this.TextPartsSeparator = new DSeparator(sb1.toString());
		this.TextPartsValue.setSeparator(this.TextPartsSeparator);
	}

	@Override
	void apply(
			InternalsInitializer internalsInitializer) {
		internalsInitializer.setNewDirective(this);
	}

	/**
	 * Expands this macro for the given context, caching the result. A cached
	 * entry with a null expansion indicates a cycle still being expanded.
	 */
	@Override
	String build(
			Context context) {
		CacheBuilder cache_builder = this.cacheBuilders.get(context);
		if (cache_builder == null) {
			cache_builder = new CacheBuilder();
		}
		else if (cache_builder.getExpansion() == null) {
			throw new InternalException("Cycle detection detected lately");
		}
		else {
			return cache_builder.getExpansion();
		}
		this.cacheBuilders.put(context, cache_builder);
		List<String> indentations = new LinkedList<>();

		initTextPartsInternals(context);

		initDirectiveNameDirectives();
		initIndexBuilderDirectives();
		initTextPartsDirectives();

		StringBuilder sb0 = new StringBuilder();

		sb0.append("StringBuilder sb");
		sb0.append(buildIndexBuilder());
		sb0.append(" = new StringBuilder();");
		sb0.append(LINE_SEPARATOR);
		sb0.append(buildTextParts());
		sb0.append(LINE_SEPARATOR);
		sb0.append("this.");
		sb0.append(buildParamName(context));
		sb0.append(buildDirectiveName());
		sb0.append(" = new D");
		sb0.append(buildDirectiveName());
		sb0.append("(sb");
		sb0.append(buildIndexBuilder());
		sb0.append(".toString());");
		sb0.append(LINE_SEPARATOR);
		sb0.append("this.");
		sb0.append(buildParamName(context));
		sb0.append("Value.set");
		sb0.append(buildDirectiveName());
		sb0.append("(this.");
		sb0.append(buildParamName(context));
		sb0.append(buildDirectiveName());
		sb0.append(");");

		cache_builder.setExpansion(sb0.toString());
		return sb0.toString();
	}

	private void setMacros(
			Macros macros) {
		if (macros == null) {
			throw new InternalException("macros cannot be null");
		}
		this.macros = macros;
	}
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.mapred;

import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.w3c.dom.Text;
import org.xml.sax.SAXException;

/**
 * Maintains a hierarchy of pools.
 */
public class PoolManager {
  public static final Log LOG = LogFactory.getLog("org.apache.hadoop.mapred.PoolManager");

  /** Time to wait between checks of the allocation file */
  public static final long ALLOC_RELOAD_INTERVAL = 10 * 1000;

  /**
   * Time to wait after the allocation has been modified before reloading it
   * (this is done to prevent loading a file that hasn't been fully written).
   */
  public static final long ALLOC_RELOAD_WAIT = 5 * 1000;

  // Map and reduce minimum allocations for each pool
  private Map<String, Integer> mapAllocs = new HashMap<String, Integer>();
  private Map<String, Integer> reduceAllocs = new HashMap<String, Integer>();

  // Max concurrent running jobs for each pool and for each user; in addition,
  // for users that have no max specified, we use the userMaxJobsDefault.
  private Map<String, Integer> poolMaxJobs = new HashMap<String, Integer>();
  private Map<String, Integer> userMaxJobs = new HashMap<String, Integer>();
  private int userMaxJobsDefault = Integer.MAX_VALUE;

  private String allocFile; // Path to XML file containing allocations
  private String poolNameProperty; // Jobconf property to use for determining a
                                   // job's pool name (default:
                                   // mapred.job.queue.name)

  private Map<String, Pool> pools = new HashMap<String, Pool>();

  private long lastReloadAttempt; // Last time we tried to reload the pools file
  private long lastSuccessfulReload; // Last time we successfully reloaded pools
  private boolean lastReloadAttemptFailed = false;

  public PoolManager(Configuration conf) throws IOException, SAXException,
      AllocationConfigurationException, ParserConfigurationException {
    this.poolNameProperty = conf.get("mapred.fairscheduler.poolnameproperty",
        "mapred.job.queue.name");
    this.allocFile = conf.get("mapred.fairscheduler.allocation.file");
    if (allocFile == null) {
      LOG.warn("No mapred.fairscheduler.allocation.file given in jobconf - "
          + "the fair scheduler will not use any queues.");
    }
    reloadAllocs();
    lastSuccessfulReload = System.currentTimeMillis();
    lastReloadAttempt = System.currentTimeMillis();
    // Create the default pool so that it shows up in the web UI
    getPool(Pool.DEFAULT_POOL_NAME);
  }

  /**
   * Get a pool by name, creating it if necessary
   */
  public synchronized Pool getPool(String name) {
    Pool pool = pools.get(name);
    if (pool == null) {
      pool = new Pool(name);
      pools.put(name, pool);
    }
    return pool;
  }

  /**
   * Reload allocations file if it hasn't been loaded in a while
   */
  public void reloadAllocsIfNecessary() {
    long time = System.currentTimeMillis();
    if (time > lastReloadAttempt + ALLOC_RELOAD_INTERVAL) {
      lastReloadAttempt = time;
      try {
        File file = new File(allocFile);
        long lastModified = file.lastModified();
        // Only reload if the file changed since the last successful reload AND
        // has been stable for ALLOC_RELOAD_WAIT (avoids reading partial writes).
        if (lastModified > lastSuccessfulReload
            && time > lastModified + ALLOC_RELOAD_WAIT) {
          reloadAllocs();
          lastSuccessfulReload = time;
          lastReloadAttemptFailed = false;
        }
      } catch (Exception e) {
        // Throwing the error further out here won't help - the RPC thread
        // will catch it and report it in a loop. Instead, just log it and
        // hope somebody will notice from the log.
        // We log the error only on the first failure so we don't fill up the
        // JobTracker's log with these messages.
        if (!lastReloadAttemptFailed) {
          LOG.error("Failed to reload allocations file - "
              + "will use existing allocations.", e);
        }
        lastReloadAttemptFailed = true;
      }
    }
  }

  /**
   * Updates the allocation list from the allocation config file. The file is
   * an XML document with a top-level &lt;allocations&gt; element containing
   * &lt;pool name="..."&gt; elements (with optional minMaps, minReduces and
   * maxRunningJobs children), &lt;user name="..."&gt; elements (with an
   * optional maxRunningJobs child) and a &lt;userMaxJobsDefault&gt; element.
   * Unrecognized elements are logged and skipped.
   *
   * @throws IOException
   *           if the config file cannot be read.
   * @throws AllocationConfigurationException
   *           if allocations are invalid.
   * @throws ParserConfigurationException
   *           if XML parser is misconfigured.
   * @throws SAXException
   *           if config file is malformed.
   */
  public void reloadAllocs() throws IOException, ParserConfigurationException,
      SAXException, AllocationConfigurationException {
    if (allocFile == null)
      return;
    // Create some temporary hashmaps to hold the new allocs, and we only save
    // them in our fields if we have parsed the entire allocs file
    // successfully.
    Map<String, Integer> mapAllocs = new HashMap<String, Integer>();
    Map<String, Integer> reduceAllocs = new HashMap<String, Integer>();
    Map<String, Integer> poolMaxJobs = new HashMap<String, Integer>();
    Map<String, Integer> userMaxJobs = new HashMap<String, Integer>();
    int userMaxJobsDefault = Integer.MAX_VALUE;

    // Remember all pool names so we can display them on web UI, etc.
    List<String> poolNamesInAllocFile = new ArrayList<String>();

    // Read and parse the allocations file.
    DocumentBuilderFactory docBuilderFactory = DocumentBuilderFactory.newInstance();
    docBuilderFactory.setIgnoringComments(true);
    DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
    Document doc = builder.parse(new File(allocFile));
    Element root = doc.getDocumentElement();
    if (!"allocations".equals(root.getTagName()))
      throw new AllocationConfigurationException("Bad allocations file: "
          + "top-level element not <allocations>");
    NodeList elements = root.getChildNodes();
    for (int i = 0; i < elements.getLength(); i++) {
      Node node = elements.item(i);
      if (!(node instanceof Element))
        continue;
      Element element = (Element) node;
      if ("pool".equals(element.getTagName())) {
        String poolName = element.getAttribute("name");
        poolNamesInAllocFile.add(poolName);
        NodeList fields = element.getChildNodes();
        for (int j = 0; j < fields.getLength(); j++) {
          Node fieldNode = fields.item(j);
          if (!(fieldNode instanceof Element))
            continue;
          Element field = (Element) fieldNode;
          if ("minMaps".equals(field.getTagName())) {
            String text = ((Text) field.getFirstChild()).getData().trim();
            int val = Integer.parseInt(text);
            mapAllocs.put(poolName, val);
          } else if ("minReduces".equals(field.getTagName())) {
            String text = ((Text) field.getFirstChild()).getData().trim();
            int val = Integer.parseInt(text);
            reduceAllocs.put(poolName, val);
          } else if ("maxRunningJobs".equals(field.getTagName())) {
            String text = ((Text) field.getFirstChild()).getData().trim();
            int val = Integer.parseInt(text);
            poolMaxJobs.put(poolName, val);
          }
        }
      } else if ("user".equals(element.getTagName())) {
        String userName = element.getAttribute("name");
        NodeList fields = element.getChildNodes();
        for (int j = 0; j < fields.getLength(); j++) {
          Node fieldNode = fields.item(j);
          if (!(fieldNode instanceof Element))
            continue;
          Element field = (Element) fieldNode;
          if ("maxRunningJobs".equals(field.getTagName())) {
            String text = ((Text) field.getFirstChild()).getData().trim();
            int val = Integer.parseInt(text);
            userMaxJobs.put(userName, val);
          }
        }
      } else if ("userMaxJobsDefault".equals(element.getTagName())) {
        String text = ((Text) element.getFirstChild()).getData().trim();
        int val = Integer.parseInt(text);
        userMaxJobsDefault = val;
      } else {
        LOG.warn("Bad element in allocations file: " + element.getTagName());
      }
    }

    // Commit the reload; also create any pool defined in the alloc file
    // if it does not already exist, so it can be displayed on the web UI.
    synchronized (this) {
      this.mapAllocs = mapAllocs;
      this.reduceAllocs = reduceAllocs;
      this.poolMaxJobs = poolMaxJobs;
      this.userMaxJobs = userMaxJobs;
      this.userMaxJobsDefault = userMaxJobsDefault;
      for (String name : poolNamesInAllocFile) {
        getPool(name);
      }
    }
  }

  /**
   * Get the allocation for a particular pool
   */
  public int getAllocation(String pool, TaskType taskType) {
    Map<String, Integer> allocationMap = (taskType == TaskType.MAP ? mapAllocs
        : reduceAllocs);
    Integer alloc = allocationMap.get(pool);
    // NOTE(review): method continues past the end of this chunk.
    return (alloc == null ?
0 : alloc); } /** * Add a job in the appropriate pool */ public synchronized void addJob(JobInProgress job) { getPool(getPoolName(job)).addJob(job); } /** * Remove a job */ public synchronized void removeJob(JobInProgress job) { getPool(getPoolName(job)).removeJob(job); } /** * Change the pool of a particular job */ public synchronized void setPool(JobInProgress job, String pool) { removeJob(job); job.getJobConf().set(poolNameProperty, pool); addJob(job); } /** * Get a collection of all pools */ public synchronized Collection<Pool> getPools() { return pools.values(); } /** * Get the pool name for a JobInProgress from its configuration. This uses * the "project" property in the jobconf by default, or the property set * with "mapred.fairscheduler.poolnameproperty". */ public String getPoolName(JobInProgress job) { JobConf conf = job.getJobConf(); return conf.get(poolNameProperty, Pool.DEFAULT_POOL_NAME).trim(); } /** * Get all pool names that have been seen either in the allocation file or * in a MapReduce job. */ public synchronized Collection<String> getPoolNames() { List<String> list = new ArrayList<String>(); for (Pool pool : getPools()) { list.add(pool.getName()); } Collections.sort(list); return list; } public int getUserMaxJobs(String user) { if (userMaxJobs.containsKey(user)) { return userMaxJobs.get(user); } else { return userMaxJobsDefault; } } public int getPoolMaxJobs(String pool) { if (poolMaxJobs.containsKey(pool)) { return poolMaxJobs.get(pool); } else { return Integer.MAX_VALUE; } } }
package awana.database;

import java.awt.Color;
import javax.swing.DefaultListModel;
import javax.swing.JOptionPane;
import javax.swing.ListModel;
import javax.swing.event.ListDataEvent;
import javax.swing.event.ListDataListener;

/**
 * Main directory window: shows an alphabetically sorted, searchable list of
 * record listings on the left and the selected record's data in a tabbed pane
 * on the right, with New/Delete buttons.
 *
 * @author Renlar <liddev.com>
 */
public class DirectoryPage extends javax.swing.JFrame implements ListDataListener {

    private DatabaseWrapper databaseWrapper;   // connection to the backing database
    private Record selectedRecord;             // record currently shown in recordData, or null
    private DefaultListModel<Listing> masterListModel;  // all listings, kept sorted
    private DefaultListModel<Listing> searchListModel;  // filtered view shown while searching

    /**
     * Creates new form DirectoryPage
     *
     * @param databaseWrapper the connection to the database;
     */
    public DirectoryPage(DatabaseWrapper databaseWrapper) {
        this.databaseWrapper = databaseWrapper;
        initComponents();
        searchListModel = masterListModel;
    }

    /**
     * This method is called from within the constructor to initialize the form. WARNING: Do NOT modify this code. The
     * content of this method is always regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {

        recordData = new javax.swing.JTabbedPane();
        recordScrollPane = new javax.swing.JScrollPane();
        recordItemList = new javax.swing.JList();
        newRecord = new javax.swing.JButton();
        deleteRecord = new javax.swing.JButton();
        searchBox = new javax.swing.JTextField();

        setDefaultCloseOperation(javax.swing.WindowConstants.EXIT_ON_CLOSE);
        setTitle("Awana Database");

        recordScrollPane.setHorizontalScrollBarPolicy(javax.swing.ScrollPaneConstants.HORIZONTAL_SCROLLBAR_NEVER);
        recordScrollPane.setName(""); // NOI18N

        recordItemList.setModel(getListModel());
        recordItemList.setSelectionMode(javax.swing.ListSelectionModel.SINGLE_SELECTION);
        recordItemList.setName(""); // NOI18N
        recordItemList.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                listingSelected(evt);
            }
        });
        recordItemList.addMouseMotionListener(new java.awt.event.MouseMotionAdapter() {
            public void mouseDragged(java.awt.event.MouseEvent evt) {
                listingSelected(evt);
            }
        });
        recordScrollPane.setViewportView(recordItemList);

        newRecord.setText("New");
        newRecord.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                newRecordHandler(evt);
            }
        });

        deleteRecord.setText("Delete");
        deleteRecord.addMouseListener(new java.awt.event.MouseAdapter() {
            public void mouseClicked(java.awt.event.MouseEvent evt) {
                deleteRecordHandler(evt);
            }
        });

        searchBox.setForeground(new java.awt.Color(150, 150, 150));
        searchBox.setText("Search");
        searchBox.setName(""); // NOI18N
        searchBox.setPreferredSize(new java.awt.Dimension(200, 25));
        searchBox.addFocusListener(new java.awt.event.FocusAdapter() {
            public void focusGained(java.awt.event.FocusEvent evt) {
                searchBoxFocusGainedHandler(evt);
            }
            public void focusLost(java.awt.event.FocusEvent evt) {
                searchBoxFocusLostHandler(evt);
            }
        });
        searchBox.addKeyListener(new java.awt.event.KeyAdapter() {
            public void keyReleased(java.awt.event.KeyEvent evt) {
                searchBoxKeyReleased(evt);
            }
        });

        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(newRecord, javax.swing.GroupLayout.PREFERRED_SIZE, 90, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(deleteRecord, javax.swing.GroupLayout.PREFERRED_SIZE, 100, javax.swing.GroupLayout.PREFERRED_SIZE))
                    .addComponent(searchBox, javax.swing.GroupLayout.PREFERRED_SIZE, 200, javax.swing.GroupLayout.PREFERRED_SIZE)
                    .addComponent(recordScrollPane, javax.swing.GroupLayout.PREFERRED_SIZE, 200, javax.swing.GroupLayout.PREFERRED_SIZE))
                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
                .addComponent(recordData, javax.swing.GroupLayout.DEFAULT_SIZE, 570, Short.MAX_VALUE)
                .addContainerGap())
        );
        layout.setVerticalGroup(
            layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
            .addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
                .addContainerGap()
                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                    .addComponent(recordData)
                    .addGroup(layout.createSequentialGroup()
                        .addComponent(searchBox, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addComponent(recordScrollPane, javax.swing.GroupLayout.DEFAULT_SIZE, 368, Short.MAX_VALUE)
                        .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                            .addComponent(newRecord)
                            .addComponent(deleteRecord))))
                .addContainerGap())
        );

        recordScrollPane.getAccessibleContext().setAccessibleName("");

        pack();
    }// </editor-fold>//GEN-END:initComponents

    // Saves the current record, creates a fresh one and displays it.
    private void newRecordHandler(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_newRecordHandler
        saveCurrentRecord();
        newRecord();
        updateRecordData();
    }//GEN-LAST:event_newRecordHandler

    // Confirms and deletes the selected listing from the database and list.
    private void deleteRecordHandler(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_deleteRecordHandler
        Listing delete = getSelectedListing();
        // BUG FIX: the null check must come before building the confirmation
        // message; the original called delete.getFullNameLastFirst() first,
        // so with no selection it threw NullPointerException and the guard
        // below was unreachable.
        if (delete == null) {
            String msgNoRecordSelected = "No Record Selected.";
            JOptionPane.showMessageDialog(this, msgNoRecordSelected, "Null", JOptionPane.YES_NO_OPTION);
            return;
        }
        String[] confirmDeleteOptions = {"Delete", "Cancel"};
        String msgConfirmDelete = "<html>Are You sure you want to delete,</html>\n<html>"
                + delete.getFullNameLastFirst()
                + "</html>.\n<html><b>This can not be undone.</b></html>";
        int choice = JOptionPane.showOptionDialog(this, msgConfirmDelete, "Confirm Delete",
                JOptionPane.YES_NO_OPTION, JOptionPane.WARNING_MESSAGE, null,
                confirmDeleteOptions, confirmDeleteOptions[1]);
        if (choice == JOptionPane.YES_OPTION) {
            databaseWrapper.deleteListing(delete);
            removeListing(delete);
        }
        clearRecordData();
    }//GEN-LAST:event_deleteRecordHandler

    // Clears the "Search" placeholder text when the search box gains focus.
    private void searchBoxFocusGainedHandler(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_searchBoxFocusGainedHandler
        searchBox.setForeground(java.awt.Color.BLACK);
        if (searchBox.getText().equals("Search")) {
            searchBox.setText("");
        }
    }//GEN-LAST:event_searchBoxFocusGainedHandler

    // Restores the grayed-out "Search" placeholder when focus leaves an empty box.
    private void searchBoxFocusLostHandler(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_searchBoxFocusLostHandler
        searchBox.setForeground(Color.GRAY);
        if (searchBox.getText() == null || searchBox.getText().isEmpty()) {
            searchBox.setText("Search");
        }
    }//GEN-LAST:event_searchBoxFocusLostHandler

    // Loads the clicked/dragged-over listing's record, saving the previous one first.
    private void listingSelected(java.awt.event.MouseEvent evt) {//GEN-FIRST:event_listingSelected
        if (recordItemList.getSelectedValue() != null) {
            Listing newSelection = (Listing) recordItemList.getSelectedValue();
            if (selectedRecord == null || selectedRecord.getID() != newSelection.getID()) {
                saveCurrentRecord();
                updateRecordData();
            }
        }
    }//GEN-LAST:event_listingSelected

    // Incrementally filters the listing as the user types. A backspace/delete
    // re-filters from the full master list (the result set may grow); other
    // keys narrow the current result set.
    private void searchBoxKeyReleased(java.awt.event.KeyEvent evt) {//GEN-FIRST:event_searchBoxKeyReleased
        String s = searchBox.getText();
        char pressed = evt.getKeyChar();
        // BUG FIX: was s.equalsIgnoreCase(null), which can never be true;
        // a plain null/empty check is what was intended.
        if (s == null || s.isEmpty()) {
            searchListModel = masterListModel;
        } else if (pressed == '\b' || pressed == (char) 127) {
            searchListModel = searchRecords(masterListModel, s);
        } else {
            searchListModel = searchRecords(searchListModel, s);
        }
        recordItemList.setModel(searchListModel);
    }//GEN-LAST:event_searchBoxKeyReleased

    /** Persists the currently displayed record (if any) and refreshes its listing. */
    public void saveCurrentRecord() {
        if (selectedRecord != null) {
            databaseWrapper.saveRecord(selectedRecord);
            updateListings();
        }
    }

    /** Re-inserts the selected record's listing at its sorted position if its name changed. */
    public void updateListings() {
        int index = masterListModel.indexOf(selectedRecord.createListing());
        // BUG FIX: guard against indexOf returning -1 (listing not found);
        // the original called remove(-1), throwing ArrayIndexOutOfBoundsException.
        if (index >= 0 && nameChanged()) {
            masterListModel.remove(index);
            addListing(selectedRecord.createListing());
        }
    }

    /**
     * Returns true if the selected record's first or last name differs from
     * its listing in the master list. Returns false if the listing cannot be
     * found in the master list.
     */
    public boolean nameChanged() {
        int index = masterListModel.indexOf(selectedRecord.createListing());
        if (index < 0) {
            // BUG FIX: previously get(-1) threw when the listing was absent.
            return false;
        }
        Listing l = masterListModel.get(index);
        if (!stringsEqual(l.getFirstName(), selectedRecord.get("First Name").getData())) {
            return true;
        } else if (!stringsEqual(l.getLastName(), selectedRecord.get("Last Name").getData())) {
            return true;
        }
        return false;
    }

    /** Null-safe string equality: two nulls are equal, one null is not. */
    public boolean stringsEqual(String a, String b) {
        if (b == null && a == null) {
            return true;
        } else if (b == null || a == null) {
            return false;
        } else {
            return a.equals(b);
        }
    }

    /** Replaces the tabbed pane's contents with the selected listing's record. */
    public void updateRecordData() {
        clearRecordData();
        Listing selection = (Listing) recordItemList.getSelectedValue();
        // BUG FIX: guard against no selection; the original dereferenced the
        // selected value unconditionally and threw NullPointerException.
        if (selection == null) {
            return;
        }
        selectedRecord = databaseWrapper.getRecord(selection.getID());
        selectedRecord.draw(recordData);
    }

    /** Clears the record display and drops the current selection. */
    public void clearRecordData() {
        selectedRecord = null;
        recordData.removeAll();
    }

    /** Removes a listing from the master list. */
    public void removeListing(Listing r) {
        masterListModel.removeElement(r);
    }

    /** Creates a new record in the database, lists it and selects it. */
    public void newRecord() {
        Record s = databaseWrapper.newRecord();
        Listing l = s.createListing();
        addListing(l);
        selectListing(l);
    }

    /** Inserts a listing at its alphabetical position and resets the search view. */
    public void addListing(Listing listing) {
        int insertLocation = getInsertLocation(listing);
        masterListModel.insertElementAt(listing, insertLocation);
        searchListModel = masterListModel;
    }

    /**
     * Binary-search-style scan for the alphabetical insert position of a
     * listing in the (sorted) master list.
     * NOTE(review): the increment never drops below 1, so for some inputs
     * this probes neighboring slots rather than converging in strict
     * O(log n); it terminates via the boundary checks — verify against edge
     * cases before relying on exact positions.
     */
    public int getInsertLocation(Listing listing) {
        int loc = 0;
        boolean notFound = true;
        if (masterListModel.size() > 0) {
            loc = (masterListModel.size() - 1) / 2;
            int increment = loc;
            while (notFound) {
                increment /= 2;
                if (increment == 0) {
                    increment++;
                }
                int compair = masterListModel.get(loc).compairName(listing);
                int compairBelow = -1;
                if (loc > 0 && loc < masterListModel.size()) {
                    compairBelow = masterListModel.get(loc - 1).compairName(listing);
                }
                if (compair == 0) {
                    notFound = false;
                } else if (compair == 1 && compairBelow == -1) {
                    notFound = false;
                } else if (compair == 1) {
                    loc -= increment;
                } else if (compair == -1) {
                    loc += increment;
                }
                if (loc == masterListModel.size()) {
                    notFound = false;
                }
            }
        }
        return loc;
    }

    // Sorts the list alphabetically in place using quicksort.
    private void SortRecordsAlphabeticlyQuickSort(DefaultListModel<Listing> list) {
        if (!list.isEmpty()) {
            quickSortAlphabeticly(list, 0, list.size() - 1);
        }
    }

    // Recursive quicksort over [left, right].
    private void quickSortAlphabeticly(DefaultListModel<Listing> list, int left, int right) {
        int index = quickSortPartition(list, left, right);
        if (left < index - 1) {
            quickSortAlphabeticly(list, left, index - 1);
        }
        if (index < right) {
            quickSortAlphabeticly(list, index, right);
        }
    }

    // Hoare-style partition around the middle element; returns the split index.
    private int quickSortPartition(DefaultListModel<Listing> list, int left, int right) {
        int i = left, j = right;
        Listing tmp1, tmp2;
        Listing pivot = list.get((left + right) / 2);
        while (i <= j) {
            while (list.get(i).compairName(pivot) == -1) {
                i++;
            }
            while (list.get(j).compairName(pivot) == 1) {
                j--;
            }
            if (i <= j) {
                // Swap elements i and j via remove/add (DefaultListModel has no set-swap).
                tmp1 = list.get(i);
                tmp2 = list.get(j);
                list.remove(i);
                list.add(i, tmp2);
                list.remove(j);
                list.add(j, tmp1);
                i++;
                j--;
            }
        }
        return i;
    }

    /** Selects and scrolls to the given listing in the list component. */
    public void selectListing(Listing l) {
        recordItemList.setSelectedValue(l, true);
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton deleteRecord;
    private javax.swing.JButton newRecord;
    private javax.swing.JTabbedPane recordData;
    private javax.swing.JList recordItemList;
    private javax.swing.JScrollPane recordScrollPane;
    private javax.swing.JTextField searchBox;
    // End of variables declaration//GEN-END:variables

    // Loads all listings from the database, sorts them and registers this
    // frame as a listener for list-model changes.
    private ListModel getListModel() {
        masterListModel = databaseWrapper.getRecordListingsAsDefaultListModel();
        masterListModel.addListDataListener(this);
        SortRecordsAlphabeticlyQuickSort(masterListModel);
        return masterListModel;
    }

    // Currently selected listing, or null if nothing is selected.
    private Listing getSelectedListing() {
        return (Listing) recordItemList.getSelectedValue();
    }

    // Returns the subset of searchSet whose first or last name contains text
    // (case-sensitive substring match).
    private DefaultListModel<Listing> searchRecords(DefaultListModel<Listing> searchSet, String text) {
        DefaultListModel<Listing> resultSet = new DefaultListModel<Listing>();
        if (searchSet.isEmpty()) {
            return resultSet;
        }
        int counter = 0;
        while (counter < searchSet.size()) {
            Listing testee = searchSet.get(counter);
            if (testee.getFirstName() != null && testee.getLastName() != null
                    && (testee.getLastName().contains(text) || testee.getFirstName().contains(text))) {
                resultSet.addElement(testee);
            }
            counter++;
        }
        return resultSet;
    }

    //TODO: put searching entries and loading data in seperate threads from application to eliminate temperary locking of application.
    @Override
    public void intervalAdded(ListDataEvent e) {
        //do-Nothing
    }

    @Override
    public void intervalRemoved(ListDataEvent e) {
        //do-Nothing
    }

    @Override
    public void contentsChanged(ListDataEvent e) {
        //do-Nothing
    }
}
/**
 * EMF-generated implementation of the CIM InfWork {@code Appointment} model
 * object. All members marked {@code @generated} are produced by the EMF code
 * generator and will be overwritten on regeneration; do not hand-edit them.
 */
package CIM.IEC61970.Informative.InfWork.impl;

import CIM.IEC61968.Common.DateTimeInterval;
import CIM.IEC61968.Common.StreetAddress;
import CIM.IEC61970.Informative.InfCommon.impl.ScheduledEventImpl;
import CIM.IEC61970.Informative.InfERPSupport.ErpPerson;
import CIM.IEC61970.Informative.InfERPSupport.InfERPSupportPackage;
import CIM.IEC61970.Informative.InfOperations.CallBack;
import CIM.IEC61970.Informative.InfOperations.InfOperationsPackage;
import CIM.IEC61970.Informative.InfWork.Appointment;
import CIM.IEC61970.Informative.InfWork.InfWorkPackage;

import java.util.Collection;

import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;

import org.eclipse.emf.common.util.EList;

import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;

import org.eclipse.emf.ecore.impl.ENotificationImpl;

import org.eclipse.emf.ecore.util.EObjectWithInverseResolvingEList;
import org.eclipse.emf.ecore.util.InternalEList;

/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Appointment</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 *   <li>{@link CIM.IEC61970.Informative.InfWork.impl.AppointmentImpl#getRemark <em>Remark</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfWork.impl.AppointmentImpl#getAddress <em>Address</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfWork.impl.AppointmentImpl#getCallBack <em>Call Back</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfWork.impl.AppointmentImpl#getMeetingInterval <em>Meeting Interval</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfWork.impl.AppointmentImpl#getErpPersons <em>Erp Persons</em>}</li>
 *   <li>{@link CIM.IEC61970.Informative.InfWork.impl.AppointmentImpl#isCallAhead <em>Call Ahead</em>}</li>
 * </ul>
 *
 * @generated
 */
public class AppointmentImpl extends ScheduledEventImpl implements Appointment {
	/**
	 * The default value of the '{@link #getRemark() <em>Remark</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getRemark()
	 * @generated
	 * @ordered
	 */
	protected static final String REMARK_EDEFAULT = null;

	/**
	 * The cached value of the '{@link #getRemark() <em>Remark</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getRemark()
	 * @generated
	 * @ordered
	 */
	protected String remark = REMARK_EDEFAULT;

	/**
	 * The cached value of the '{@link #getAddress() <em>Address</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getAddress()
	 * @generated
	 * @ordered
	 */
	protected StreetAddress address;

	/**
	 * The cached value of the '{@link #getCallBack() <em>Call Back</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getCallBack()
	 * @generated
	 * @ordered
	 */
	protected CallBack callBack;

	/**
	 * The cached value of the '{@link #getMeetingInterval() <em>Meeting Interval</em>}' reference.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getMeetingInterval()
	 * @generated
	 * @ordered
	 */
	protected DateTimeInterval meetingInterval;

	/**
	 * The cached value of the '{@link #getErpPersons() <em>Erp Persons</em>}' reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getErpPersons()
	 * @generated
	 * @ordered
	 */
	protected EList<ErpPerson> erpPersons;

	/**
	 * The default value of the '{@link #isCallAhead() <em>Call Ahead</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #isCallAhead()
	 * @generated
	 * @ordered
	 */
	protected static final boolean CALL_AHEAD_EDEFAULT = false;

	/**
	 * The cached value of the '{@link #isCallAhead() <em>Call Ahead</em>}' attribute.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #isCallAhead()
	 * @generated
	 * @ordered
	 */
	protected boolean callAhead = CALL_AHEAD_EDEFAULT;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected AppointmentImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return InfWorkPackage.Literals.APPOINTMENT;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getRemark() {
		return remark;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setRemark(String newRemark) {
		String oldRemark = remark;
		remark = newRemark;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, InfWorkPackage.APPOINTMENT__REMARK, oldRemark, remark));
	}

	/**
	 * <!-- begin-user-doc -->
	 * Resolves and returns the address, replacing a stale proxy (and firing a
	 * RESOLVE notification) when the referenced object lives in another resource.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public StreetAddress getAddress() {
		if (address != null && address.eIsProxy()) {
			InternalEObject oldAddress = (InternalEObject)address;
			address = (StreetAddress)eResolveProxy(oldAddress);
			if (address != oldAddress) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, InfWorkPackage.APPOINTMENT__ADDRESS, oldAddress, address));
			}
		}
		return address;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Returns the raw (possibly unresolved proxy) value without proxy resolution.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public StreetAddress basicGetAddress() {
		return address;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setAddress(StreetAddress newAddress) {
		StreetAddress oldAddress = address;
		address = newAddress;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, InfWorkPackage.APPOINTMENT__ADDRESS, oldAddress, address));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CallBack getCallBack() {
		if (callBack != null && callBack.eIsProxy()) {
			InternalEObject oldCallBack = (InternalEObject)callBack;
			callBack = (CallBack)eResolveProxy(oldCallBack);
			if (callBack != oldCallBack) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, InfWorkPackage.APPOINTMENT__CALL_BACK, oldCallBack, callBack));
			}
		}
		return callBack;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public CallBack basicGetCallBack() {
		return callBack;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the callBack field and queues the SET notification on the chain
	 * without maintaining the inverse reference (used during inverse handshakes).
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public NotificationChain basicSetCallBack(CallBack newCallBack, NotificationChain msgs) {
		CallBack oldCallBack = callBack;
		callBack = newCallBack;
		if (eNotificationRequired()) {
			ENotificationImpl notification = new ENotificationImpl(this, Notification.SET, InfWorkPackage.APPOINTMENT__CALL_BACK, oldCallBack, newCallBack);
			if (msgs == null) msgs = notification; else msgs.add(notification);
		}
		return msgs;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Sets the callBack reference, keeping the bidirectional
	 * CallBack.Appointments inverse in sync via eInverseRemove/eInverseAdd.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setCallBack(CallBack newCallBack) {
		if (newCallBack != callBack) {
			NotificationChain msgs = null;
			if (callBack != null)
				msgs = ((InternalEObject)callBack).eInverseRemove(this, InfOperationsPackage.CALL_BACK__APPOINTMENTS, CallBack.class, msgs);
			if (newCallBack != null)
				msgs = ((InternalEObject)newCallBack).eInverseAdd(this, InfOperationsPackage.CALL_BACK__APPOINTMENTS, CallBack.class, msgs);
			msgs = basicSetCallBack(newCallBack, msgs);
			if (msgs != null) msgs.dispatch();
		}
		else if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, InfWorkPackage.APPOINTMENT__CALL_BACK, newCallBack, newCallBack));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DateTimeInterval getMeetingInterval() {
		if (meetingInterval != null && meetingInterval.eIsProxy()) {
			InternalEObject oldMeetingInterval = (InternalEObject)meetingInterval;
			meetingInterval = (DateTimeInterval)eResolveProxy(oldMeetingInterval);
			if (meetingInterval != oldMeetingInterval) {
				if (eNotificationRequired())
					eNotify(new ENotificationImpl(this, Notification.RESOLVE, InfWorkPackage.APPOINTMENT__MEETING_INTERVAL, oldMeetingInterval, meetingInterval));
			}
		}
		return meetingInterval;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public DateTimeInterval basicGetMeetingInterval() {
		return meetingInterval;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setMeetingInterval(DateTimeInterval newMeetingInterval) {
		DateTimeInterval oldMeetingInterval = meetingInterval;
		meetingInterval = newMeetingInterval;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, InfWorkPackage.APPOINTMENT__MEETING_INTERVAL, oldMeetingInterval, meetingInterval));
	}

	/**
	 * <!-- begin-user-doc -->
	 * Lazily creates the many-to-many erpPersons list; the list implementation
	 * maintains the ErpPerson.Appointments inverse automatically.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public EList<ErpPerson> getErpPersons() {
		if (erpPersons == null) {
			erpPersons = new EObjectWithInverseResolvingEList.ManyInverse<ErpPerson>(ErpPerson.class, this, InfWorkPackage.APPOINTMENT__ERP_PERSONS, InfERPSupportPackage.ERP_PERSON__APPOINTMENTS);
		}
		return erpPersons;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isCallAhead() {
		return callAhead;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setCallAhead(boolean newCallAhead) {
		boolean oldCallAhead = callAhead;
		callAhead = newCallAhead;
		if (eNotificationRequired())
			eNotify(new ENotificationImpl(this, Notification.SET, InfWorkPackage.APPOINTMENT__CALL_AHEAD, oldCallAhead, callAhead));
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public NotificationChain eInverseAdd(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case InfWorkPackage.APPOINTMENT__CALL_BACK:
				if (callBack != null)
					msgs = ((InternalEObject)callBack).eInverseRemove(this, InfOperationsPackage.CALL_BACK__APPOINTMENTS, CallBack.class, msgs);
				return basicSetCallBack((CallBack)otherEnd, msgs);
			case InfWorkPackage.APPOINTMENT__ERP_PERSONS:
				return ((InternalEList<InternalEObject>)(InternalEList<?>)getErpPersons()).basicAdd(otherEnd, msgs);
		}
		return super.eInverseAdd(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case InfWorkPackage.APPOINTMENT__CALL_BACK:
				return basicSetCallBack(null, msgs);
			case InfWorkPackage.APPOINTMENT__ERP_PERSONS:
				return ((InternalEList<?>)getErpPersons()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective accessor used by the EMF framework; featureID selects the feature.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case InfWorkPackage.APPOINTMENT__REMARK:
				return getRemark();
			case InfWorkPackage.APPOINTMENT__ADDRESS:
				if (resolve) return getAddress();
				return basicGetAddress();
			case InfWorkPackage.APPOINTMENT__CALL_BACK:
				if (resolve) return getCallBack();
				return basicGetCallBack();
			case InfWorkPackage.APPOINTMENT__MEETING_INTERVAL:
				if (resolve) return getMeetingInterval();
				return basicGetMeetingInterval();
			case InfWorkPackage.APPOINTMENT__ERP_PERSONS:
				return getErpPersons();
			case InfWorkPackage.APPOINTMENT__CALL_AHEAD:
				return isCallAhead();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reflective mutator used by the EMF framework.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case InfWorkPackage.APPOINTMENT__REMARK:
				setRemark((String)newValue);
				return;
			case InfWorkPackage.APPOINTMENT__ADDRESS:
				setAddress((StreetAddress)newValue);
				return;
			case InfWorkPackage.APPOINTMENT__CALL_BACK:
				setCallBack((CallBack)newValue);
				return;
			case InfWorkPackage.APPOINTMENT__MEETING_INTERVAL:
				setMeetingInterval((DateTimeInterval)newValue);
				return;
			case InfWorkPackage.APPOINTMENT__ERP_PERSONS:
				getErpPersons().clear();
				getErpPersons().addAll((Collection<? extends ErpPerson>)newValue);
				return;
			case InfWorkPackage.APPOINTMENT__CALL_AHEAD:
				setCallAhead((Boolean)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Resets a feature to its generated default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case InfWorkPackage.APPOINTMENT__REMARK:
				setRemark(REMARK_EDEFAULT);
				return;
			case InfWorkPackage.APPOINTMENT__ADDRESS:
				setAddress((StreetAddress)null);
				return;
			case InfWorkPackage.APPOINTMENT__CALL_BACK:
				setCallBack((CallBack)null);
				return;
			case InfWorkPackage.APPOINTMENT__MEETING_INTERVAL:
				setMeetingInterval((DateTimeInterval)null);
				return;
			case InfWorkPackage.APPOINTMENT__ERP_PERSONS:
				getErpPersons().clear();
				return;
			case InfWorkPackage.APPOINTMENT__CALL_AHEAD:
				setCallAhead(CALL_AHEAD_EDEFAULT);
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * Reports whether a feature currently differs from its default value.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case InfWorkPackage.APPOINTMENT__REMARK:
				return REMARK_EDEFAULT == null ? remark != null : !REMARK_EDEFAULT.equals(remark);
			case InfWorkPackage.APPOINTMENT__ADDRESS:
				return address != null;
			case InfWorkPackage.APPOINTMENT__CALL_BACK:
				return callBack != null;
			case InfWorkPackage.APPOINTMENT__MEETING_INTERVAL:
				return meetingInterval != null;
			case InfWorkPackage.APPOINTMENT__ERP_PERSONS:
				return erpPersons != null && !erpPersons.isEmpty();
			case InfWorkPackage.APPOINTMENT__CALL_AHEAD:
				return callAhead != CALL_AHEAD_EDEFAULT;
		}
		return super.eIsSet(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public String toString() {
		if (eIsProxy()) return super.toString();

		StringBuffer result = new StringBuffer(super.toString());
		result.append(" (remark: ");
		result.append(remark);
		result.append(", callAhead: ");
		result.append(callAhead);
		result.append(')');
		return result.toString();
	}

} //AppointmentImpl
/*
 * Copyright 2010-2013 Ning, Inc.
 *
 * Ning licenses this file to you under the Apache License, version 2.0
 * (the "License"); you may not use this file except in compliance with the
 * License.  You may obtain a copy of the License at:
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.  See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package org.killbill.billing.junction.plumbing.billing;

import java.math.BigDecimal;
import java.util.Iterator;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.UUID;

import javax.annotation.Nullable;

import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.mockito.Mockito;
import org.testng.Assert;
import org.testng.annotations.Test;

import org.killbill.billing.account.api.Account;
import org.killbill.billing.catalog.DefaultPrice;
import org.killbill.billing.catalog.MockInternationalPrice;
import org.killbill.billing.catalog.MockPlan;
import org.killbill.billing.catalog.MockPlanPhase;
import org.killbill.billing.catalog.api.BillingPeriod;
import org.killbill.billing.catalog.api.Currency;
import org.killbill.billing.catalog.api.PhaseType;
import org.killbill.billing.catalog.api.Plan;
import org.killbill.billing.catalog.api.PlanPhase;
import org.killbill.billing.subscription.api.SubscriptionBaseTransitionType;
import org.killbill.billing.subscription.api.SubscriptionBase;
import org.killbill.billing.junction.JunctionTestSuiteNoDB;
import org.killbill.billing.mock.MockAccountBuilder;
import org.killbill.billing.junction.BillingEvent;

/**
 * Tests the total ordering of {@link BillingEvent}s inside a {@link TreeSet}:
 * ordering by subscription id, effective date, transition type and total
 * ordering number, including the interaction with overdue
 * (START/END_BILLING_DISABLED) events.
 */
public class TestDefaultBillingEvent extends JunctionTestSuiteNoDB {

    private static final UUID ID_ZERO = new UUID(0L, 0L);
    private static final UUID ID_ONE = new UUID(0L, 1L);
    private static final UUID ID_TWO = new UUID(0L, 2L);

    @Test(groups = "fast")
    public void testEntitlementEventsHappeningAtTheSameTimeAsOverdueEvents() throws Exception {
        final BillingEvent event0 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.START_BILLING_DISABLED);
        final BillingEvent event1 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);
        final BillingEvent event2 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:05.000Z"), SubscriptionBaseTransitionType.CHANGE);
        final BillingEvent event3 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:05.000Z"), SubscriptionBaseTransitionType.END_BILLING_DISABLED);

        final SortedSet<BillingEvent> set = new TreeSet<BillingEvent>();
        set.add(event0);
        set.add(event1);
        set.add(event2);
        set.add(event3);

        // CREATE sorts before the simultaneous START_BILLING_DISABLED;
        // END_BILLING_DISABLED sorts before the simultaneous CHANGE.
        verifyOrdering(set, event1, event0, event3, event2);
    }

    @Test(groups = "fast")
    public void testEdgeCaseAllEventsHappenAtTheSameTime() throws Exception {
        final BillingEvent event0 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.START_BILLING_DISABLED);
        final BillingEvent event1 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE, 1);
        final BillingEvent event2 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.CHANGE, 2);
        // Note the time delta here. Having a blocking duration of zero and events at the same time won't work as the backing tree set does local
        // comparisons (and not global), making the END_BILLING_DISABLED start the first one in the set
        final BillingEvent event3 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:05.000Z"), SubscriptionBaseTransitionType.END_BILLING_DISABLED);

        final SortedSet<BillingEvent> set = new TreeSet<BillingEvent>();
        set.add(event0);
        set.add(event1);
        set.add(event2);
        set.add(event3);

        verifyOrdering(set, event1, event2, event0, event3);
    }

    @Test(groups = "fast")
    public void testEventOrderingSubscription() {
        // Same date and type: ordering falls back to the subscription id.
        final BillingEvent event0 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);
        final BillingEvent event1 = createEvent(subscription(ID_ONE), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);
        final BillingEvent event2 = createEvent(subscription(ID_TWO), new DateTime("2012-01-31T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);

        final SortedSet<BillingEvent> set = new TreeSet<BillingEvent>();
        set.add(event2);
        set.add(event1);
        set.add(event0);

        verifyOrdering(set, event0, event1, event2);
    }

    @Test(groups = "fast")
    public void testEventOrderingDate() {
        // Same subscription and type: ordering follows the effective date.
        final BillingEvent event0 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);
        final BillingEvent event1 = createEvent(subscription(ID_ZERO), new DateTime("2012-02-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);
        final BillingEvent event2 = createEvent(subscription(ID_ZERO), new DateTime("2012-03-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);

        final SortedSet<BillingEvent> set = new TreeSet<BillingEvent>();
        set.add(event2);
        set.add(event1);
        set.add(event0);

        verifyOrdering(set, event0, event1, event2);
    }

    @Test(groups = "fast")
    public void testEventTotalOrdering() {
        // Same subscription and date: the totalOrdering number breaks the tie.
        final BillingEvent event0 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE, 1L);
        final BillingEvent event1 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CANCEL, 2L);
        final BillingEvent event2 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-01T00:02:04.000Z"), SubscriptionBaseTransitionType.RE_CREATE, 3L);

        final SortedSet<BillingEvent> set = new TreeSet<BillingEvent>();
        set.add(event2);
        set.add(event1);
        set.add(event0);

        verifyOrdering(set, event0, event1, event2);
    }

    @Test(groups = "fast")
    public void testEventOrderingMix() {
        // Mixed subscriptions, dates and types.
        final BillingEvent event0 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CREATE);
        final BillingEvent event1 = createEvent(subscription(ID_ZERO), new DateTime("2012-01-02T00:02:04.000Z"), SubscriptionBaseTransitionType.CHANGE);
        final BillingEvent event2 = createEvent(subscription(ID_ONE), new DateTime("2012-01-01T00:02:04.000Z"), SubscriptionBaseTransitionType.CANCEL);

        final SortedSet<BillingEvent> set = new TreeSet<BillingEvent>();
        set.add(event2);
        set.add(event1);
        set.add(event0);

        verifyOrdering(set, event0, event1, event2);
    }

    @Test(groups = "fast")
    public void testToString() throws Exception {
        // Simple test to ensure we have an easy to read toString representation
        final BillingEvent event = createEvent(subscription(ID_ZERO), new DateTime("2012-01-01T00:02:04.000Z", DateTimeZone.UTC), SubscriptionBaseTransitionType.CREATE);
        Assert.assertEquals(event.toString(), "DefaultBillingEvent{type=CREATE, effectiveDate=2012-01-01T00:02:04.000Z, planPhaseName=Test-trial, subscriptionId=00000000-0000-0000-0000-000000000000, totalOrdering=1}");
    }

    /**
     * Asserts that iterating {@code events} yields exactly {@code expectedOrder},
     * factoring out the iterate-and-compare boilerplate shared by all the
     * ordering tests above.
     */
    private void verifyOrdering(final SortedSet<BillingEvent> events, final BillingEvent... expectedOrder) {
        Assert.assertEquals(events.size(), expectedOrder.length);
        final Iterator<BillingEvent> it = events.iterator();
        for (final BillingEvent expected : expectedOrder) {
            Assert.assertEquals(it.next(), expected);
        }
    }

    /** Creates a billing event with the default totalOrdering of 1. */
    private BillingEvent createEvent(final SubscriptionBase sub, final DateTime effectiveDate, final SubscriptionBaseTransitionType type) {
        return createEvent(sub, effectiveDate, type, 1L);
    }

    /** Creates a billing event on a mock monthly trial plan phase. */
    private BillingEvent createEvent(final SubscriptionBase sub, final DateTime effectiveDate, final SubscriptionBaseTransitionType type, final long totalOrdering) {
        final int billCycleDay = 1;

        final Plan shotgun = new MockPlan();
        final PlanPhase shotgunMonthly = createMockMonthlyPlanPhase(null, BigDecimal.ZERO, PhaseType.TRIAL);

        return new DefaultBillingEvent(sub, effectiveDate,
                                       true, shotgun, shotgunMonthly,
                                       BigDecimal.ZERO, null, Currency.USD, BillingPeriod.NO_BILLING_PERIOD,
                                       billCycleDay, "Test Event 1", totalOrdering, type, DateTimeZone.UTC);
    }

    private MockPlanPhase createMockMonthlyPlanPhase(@Nullable final BigDecimal recurringRate,
                                                     final BigDecimal fixedRate,
                                                     final PhaseType phaseType) {
        return new MockPlanPhase(new MockInternationalPrice(new DefaultPrice(recurringRate, Currency.USD)),
                                 new MockInternationalPrice(new DefaultPrice(fixedRate, Currency.USD)),
                                 BillingPeriod.MONTHLY, phaseType);
    }

    /** Returns a Mockito mock whose {@code getId()} answers the given id. */
    private SubscriptionBase subscription(final UUID id) {
        final SubscriptionBase subscription = Mockito.mock(SubscriptionBase.class);
        Mockito.when(subscription.getId()).thenReturn(id);
        return subscription;
    }
}
package org.ongawa.peru.chlorination.gui.manage;

import java.awt.Desktop;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.Locale;
import java.util.ResourceBundle;

import javax.management.loading.ClassLoaderRepository;

import org.ongawa.peru.chlorination.HelpStage;
import org.ongawa.peru.chlorination.MainApp;
import org.ongawa.peru.chlorination.gui.ClAlert;
import org.ongawa.peru.chlorination.logic.DataCalculator;
import org.ongawa.peru.chlorination.logic.DataLoader;
import org.ongawa.peru.chlorination.logic.DataValidator;
import org.ongawa.peru.chlorination.modules.reports.ManagementReport;
import org.ongawa.peru.chlorination.persistence.DataSourceFactory;
import org.ongawa.peru.chlorination.persistence.IDataSource;
import org.ongawa.peru.chlorination.persistence.elements.ChlorineCalculation;
import org.ongawa.peru.chlorination.persistence.elements.Community;
import org.ongawa.peru.chlorination.persistence.elements.SubBasin;
import org.ongawa.peru.chlorination.persistence.elements.WaterSystem;

import com.itextpdf.text.DocumentException;
import com.sun.javafx.image.impl.ByteIndexed.Getter;

import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.Button;
import javafx.scene.control.Label;
import javafx.scene.control.TextArea;
import javafx.scene.control.TextField;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.event.ActionEvent;

/**
 * JavaFX controller for the chlorination calculation window: loads the last
 * stored calculation for the selected water system, validates the user's
 * inputs, runs the chlorination computation and optionally persists/prints
 * the result.
 */
public class ChlorinationWindow implements Initializable {

    /* User inputs */
    @FXML
    private TextField naturalCaudal;
    @FXML
    private TextField chlorableCaudal;
    @FXML
    private TextField clPurity;
    @FXML
    private TextField tankVolume;
    @FXML
    private TextField rechargeTime;
    @FXML
    private TextField dripTime;
    @FXML
    private TextField clDemand;
    @FXML
    private TextField clPrice;
    @FXML
    private Button saveButton;
    @FXML
    private Button printButton;

    /* Results */
    @FXML
    private Label clKgQuin;
    @FXML
    private Label clKgMonth;
    @FXML
    private Label costQuin;
    @FXML
    private Label costMonth;
    @FXML
    private Label dripMin;
    @FXML
    private Label dripMlMin;

    private String basin;
    private String town;
    private String systemName;
    private double cost;
    private int familiesCount;
    private int inhabitants;
    private WaterSystem waterSystem;
    private ChlorineCalculation chlorineCalculation;

    @Override
    public void initialize(URL location, ResourceBundle resources) {
        try {
            // Load the data from a previous calculation, if possible
            IDataSource ds = DataSourceFactory.getInstance().getDefaultDataSource();
            this.waterSystem = DataLoader.getDataLoader().getSelectedWaterSystem();
            this.familiesCount = this.waterSystem.getFamiliesNum();
            this.inhabitants = this.waterSystem.getPopulation();
            ChlorineCalculation lastCalculation = ds.getLastChlorineCalculation(waterSystem);
            if (lastCalculation != null) {
                // NOTE(review): "%1$,.2f" inserts locale grouping separators
                // (e.g. "1,234.50"); Double.valueOf cannot parse those back
                // when the fields are validated later — confirm locale handling.
                this.naturalCaudal.textProperty().set(String.format("%1$,.2f", lastCalculation.getNaturalFlow()));
                this.chlorableCaudal.textProperty().set(String.format("%1$,.2f", lastCalculation.getChlorinatedFlow()));
                this.clPurity.textProperty().set(String.format("%1$,.2f", lastCalculation.getChlorinePureness()));
                this.tankVolume.textProperty().set(String.format("%1$,.2f", lastCalculation.getTankVolume()));
                this.rechargeTime.textProperty().set(String.format("%1$,.2f", lastCalculation.getReloadTime()));
                this.dripTime.textProperty().set(String.format("%1$,.2f", lastCalculation.getDrippingHoursPerDay()));
                this.clDemand.textProperty().set(String.format("%1$,.2f", lastCalculation.getChlorineDemand()));
                this.clPrice.textProperty().set(String.format("%1$,.2f", lastCalculation.getChlorinePrice()));
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        this.naturalCaudal.textProperty().addListener(new ChangeListener<String>() {
            @Override
            public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
                System.out.println("Old: " + oldValue);
                System.out.println("New" + newValue);
            }
        });

        this.chlorableCaudal.textProperty().addListener(new ChangeListener<String>() {
            @Override
            public void changed(ObservableValue<? extends String> observable, String oldValue, String newValue) {
                // I can only chlorate a caudal smaller than the natural one.
                // NOTE(review): Double.valueOf throws NumberFormatException on
                // empty/non-numeric text; JavaFX swallows listener exceptions,
                // but the red/black styling is then skipped — confirm intent.
                if (Double.valueOf(naturalCaudal.textProperty().get()) < Double.valueOf(newValue)) {
                    chlorableCaudal.setStyle("-fx-text-fill: red;");
                    // TODO: Add a text alert
                } else {
                    chlorableCaudal.setStyle("-fx-text-fill: black;");
                }
            }
        });
    }

    public String getBasin() {
        return basin;
    }

    public void setBasin(String basin) {
        this.basin = basin;
    }

    public String getTown() {
        return town;
    }

    public void setTown(String town) {
        this.town = town;
    }

    public void setWaterSystem(WaterSystem ws) {
        this.waterSystem = ws;
    }

    public String getSystemName() {
        return systemName;
    }

    public void setSystemName(String systemName) {
        this.systemName = systemName;
    }

    public TextField getNaturalCaudal() {
        return naturalCaudal;
    }

    public void setNaturalCaudal(TextField naturalCaudal) {
        this.naturalCaudal = naturalCaudal;
    }

    public void setFamiliesCount(int familiesCount) {
        this.familiesCount = familiesCount;
    }

    public int getFamiliesCount() {
        return this.familiesCount;
    }

    public int getInhabitants() {
        return inhabitants;
    }

    public void setInhabitants(int inhabitants) {
        this.inhabitants = inhabitants;
    }

    /**
     * Opens the context-help window whose FXML file is named after the id of
     * the button that fired the event.
     */
    public void triggerInfo(ActionEvent event) throws Exception {
        String fxmlFile = "/fxml/helps/" + ((Button) event.getTarget()).getId() + ".fxml";
        HelpStage help = new HelpStage(fxmlFile);
        // Create the loader and get the root node from the .fxml file
        // describing the scene
        FXMLLoader loader = new FXMLLoader();
        Parent rootNode = (Parent) loader.load(getClass().getResourceAsStream(fxmlFile));
        // Create the scene (maybe get the size from the stage?
        // Only after the .show(): stage.getWidth()
        Scene scene = new Scene(rootNode, 700, 600);
        scene.getStylesheets().add("/styles/styles.css");
        // Set max size
        // help.setMaxHeight(700);
        help.setMaxWidth(1000);
        help.setTitle("Ayuda");
        help.setScene(scene);
        help.show();
    }

    /* General methods */

    /**
     * Checks the values, and shows a popup is the data in the fields is not
     * valid.
     *
     * @return an error message describing the first failed check, or the
     *         empty string when every field is valid
     */
    public String isDataValid() {
        ArrayList<String> fieldValues = new ArrayList<String>();
        fieldValues.add(this.naturalCaudal.getText());
        fieldValues.add(this.chlorableCaudal.getText());
        fieldValues.add(this.clPurity.getText());
        fieldValues.add(this.tankVolume.getText());
        fieldValues.add(this.rechargeTime.getText());
        fieldValues.add(this.dripTime.getText());
        fieldValues.add(this.clDemand.getText());
        fieldValues.add(this.clPrice.getText());

        // Format errors first: the numeric range checks below can only run on
        // parseable values.
        String formatError = DataValidator.checkChlorinationData(fieldValues);
        if (!formatError.isEmpty())
            return formatError;

        // BUGFIX: the original returned when the error string was EMPTY
        // ("length() < 1"), i.e. it reported success after the first passing
        // check and never ran the remaining validators, while real errors
        // were discarded. Each validator's message is now returned only when
        // it is non-empty.
        String errorString = DataValidator.checkCaud(Double.valueOf(this.naturalCaudal.getText()));
        if (!errorString.isEmpty())
            return errorString;
        errorString = DataValidator.checkCaud(Double.valueOf(this.chlorableCaudal.getText()));
        if (!errorString.isEmpty())
            return errorString;
        errorString = DataValidator.checkHoraGote(Double.valueOf(this.dripTime.getText()));
        if (!errorString.isEmpty())
            return errorString;
        errorString = DataValidator.checkTiemRecar(Double.valueOf(this.rechargeTime.getText()));
        if (!errorString.isEmpty())
            return errorString;
        errorString = DataValidator.checkPure(Double.valueOf(this.clPurity.getText()));
        if (!errorString.isEmpty())
            return errorString;
        errorString = DataValidator.checkPrecClor(Double.valueOf(this.clPrice.getText()));
        if (!errorString.isEmpty())
            return errorString;

        return "";
    }

    /**
     * Validates the inputs, runs the chlorination computation, fills in the
     * result labels and builds the {@link ChlorineCalculation} to be saved.
     * On validation failure an alert with the error message is shown instead.
     */
    public void triggerCalculation() {
        String errorMessage = isDataValid();
        if (errorMessage.isEmpty()) {
            double[] clResults = DataCalculator.chlorination(this.chlorableCaudal.getText(), this.clPurity.getText(),
                    this.tankVolume.getText(), this.rechargeTime.getText(), this.dripTime.getText(),
                    this.clDemand.getText(), this.clPrice.getText());
            this.clKgQuin.setText(String.format("%1$,.2f", clResults[1]) + " kg/periodo");
            // TODO: Change the string to trecarga
            this.clKgMonth.setText(String.format("%1$,.2f", clResults[2]) + " kg/mes");
            this.costQuin.setText(String.format("%1$,.2f", clResults[6]) + " soles/periodo");
            this.costMonth.setText(String.format("%1$,.2f", clResults[7]) + " soles/mes");
            this.dripMin.setText(String.format("%1$,.2f", clResults[5]) + " gotas/min");
            this.dripMlMin.setText(String.format("%1$,.2f", clResults[4]) + " ml/min");

            // Create the calculation
            Timestamp now = new Timestamp(System.currentTimeMillis());
            this.chlorineCalculation = new ChlorineCalculation(now, this.waterSystem);
            // Set type as Solid for now
            this.chlorineCalculation.setChlorineType("Polvo"); // TODO: Put this on a selector.
            // Set number of families
            this.chlorineCalculation.setFamiliesNum(this.familiesCount);
            this.chlorineCalculation.setPopulation(this.inhabitants);
            this.chlorineCalculation.setNaturalFlow(Double.valueOf(this.naturalCaudal.getText()));
            this.chlorineCalculation.setChlorinatedFlow(Double.valueOf(this.chlorableCaudal.getText()));
            this.chlorineCalculation.setChlorinePureness(Double.valueOf(this.clPurity.getText()));
            this.chlorineCalculation.setTankVolume(Double.valueOf(this.tankVolume.getText()));
            this.chlorineCalculation.setReloadTime(Double.valueOf(this.rechargeTime.getText()));
            this.chlorineCalculation.setDrippingHoursPerDay(Double.valueOf(this.dripTime.getText()));
            System.out.println(Double.valueOf(this.clDemand.getText()));
            this.chlorineCalculation.setChlorineDemand(Double.valueOf(this.clDemand.getText()));
            this.chlorineCalculation.setChlorineDosePerFortnight(clResults[1]);
            this.chlorineCalculation.setChlorineDosePerMonth(clResults[2]);
            this.chlorineCalculation.setDrippingFlowInMl(clResults[4]);
            this.chlorineCalculation.setDrippingFlowInDrops(clResults[5]);
            this.chlorineCalculation.setChlorinePrice(Double.valueOf(this.clPrice.getText()));
            this.chlorineCalculation.setDate(now);
            this.chlorineCalculation.setChlorinationCostPeriod(clResults[6]);
            this.chlorineCalculation.setChlorinationCostMonth(clResults[7]);

            // Enable save
            this.saveButton.setDisable(false);
        } else {
            ClAlert alert = new ClAlert(errorMessage);
            try {
                alert.show();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }

    /** Persists the last computed calculation and enables printing. */
    public void triggerSave() {
        try {
            IDataSource ds = DataSourceFactory.getInstance().getDefaultDataSource();
            ds.addChlorineCalculation(chlorineCalculation);
            // Enable the printing
            this.printButton.setDisable(false);
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }

    /**
     * Asks the user for a destination file, writes the management report
     * there and opens it with the platform's default viewer.
     */
    public void triggerPrint() {
        // TODO: Print the results
        Stage stage = new Stage();
        FileChooser fileChooser = new FileChooser();
        File file = fileChooser.showSaveDialog(stage);
        if (file != null) {
            try {
                ManagementReport mreport = new ManagementReport(this.waterSystem, file, new Locale("es", "ES"), "");
                mreport.createReport();
                // Open the file with the default editor
                Thread t = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            Desktop.getDesktop().open(file);
                        } catch (IOException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }
                    }
                });
                t.start();
            } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IOException
                    | DocumentException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
    }

    /** Navigates back to the previous scene, pushing this one onto the future stack. */
    public void triggerBack() {
        // Add future
        Scene current = MainApp.getStage().getScene();
        MainApp.pushFuture(this.getClass().getSimpleName(), current);
        Scene scene = MainApp.popHistory();
        if (scene != null)
            MainApp.getStage().setScene(scene);
    }
}
/**
 * Copyright 2014-2020 IBM Corp.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 **/
package com.ibm.datapower.er;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.Map;
import java.util.zip.GZIPInputStream;

import javax.xml.parsers.ParserConfigurationException;

import org.apache.james.mime4j.parser.Field;
import org.apache.james.mime4j.parser.MimeTokenStream;
import org.xml.sax.SAXException;

/**
 * Processor Engine for Error Reports. Allows easy association of customized
 * processors for each and all parts of an Error Report.
 *
 * <p>Sample usage - no custom processors, does nothing.
 * <pre>
 *  ReportProcessor p = new ReportProcessor();
 *  p.load("errorreport.txt.gz");
 *  p.setOutputStream(System.out);
 *  p.run();
 * </pre>
 *
 * <p>Another sample - HTML processor set as default
 * <pre>
 *  ReportProcessor p = new ReportProcessor();
 *  p.load("errorreport.txt.gz");
 *  p.addPartsProcessor(new PartsProcessorsHTML(),null);
 *  p.setOutputStream(System.out);
 *  p.run();
 * </pre>
 *
 */
public class ReportProcessor {

    /**
     * Installs a custom processor for one or all types of MIME parts.
     *
     * If a null Content-ID is provided, the custom processor will be set
     * as the default processor. If a given part does not contain an associated
     * custom processor, the default processor is used.
     *
     * @param proc object instance to process MIME part(s)
     * @param cid MIME part Content-ID to be associate (or null)
     */
    void addPartsProcessor(IPartsProcessor proc, String cid) {
        if (cid == null || cid.isEmpty()) {
            mDefaultPartProcessor = proc;
        } else {
            mPartProcessors.put(cid, proc);
        }
    }

    /**
     * Installs a custom preprocessor for the entire report.
     * Must be called before {@link #run()}.
     *
     * @param ppproc pre/post processor supplying the output stream and hooks
     */
    void setPrePostProcessor(PrePostReportProcessor ppproc) {
        mPrePostProcessor = ppproc;
    }

    /**
     * Prepares the Error Report as an InputStream for processing.
     * A ".gz" suffix (case-insensitive) selects transparent gzip decoding.
     *
     * @param filename path of the (optionally gzipped) error report
     * @throws IOException if the file cannot be opened
     */
    void load(String filename) throws IOException {
        mReport = new FileInputStream(filename);
        if (filename.toLowerCase().endsWith(".gz"))
            mReport = new GZIPInputStream(mReport);
    }

    /**
     * Sets the output stream that will contain the processed Error Report.
     *
     * @param stream destination for processed output
     */
    void setOutputStream(OutputStream stream) {
        mOutputStream = stream;
    }

    /**
     * Sets the filename used by the pre/post processor to derive the
     * output stream in {@link #run()}.
     *
     * @param filename output file name
     */
    void setFilename(String filename) {
        mFilename = filename;
    }

    /**
     * Kicks off the Processor Engine.
     *
     * <p>Internal details:
     * The anatomy of a DataPower Error Report:
     * <pre>
     * T_START_MESSAGE      -&gt; Stream reading started
     *
     * T_START_HEADER       -&gt; Message part header started
     * T_FIELD (*)          -&gt; top part fields (for example MIME-Version)
     * T_END_HEADER
     *
     * T_START_MULTIPART    -&gt; A multipart body is being parsed
     * T_PREAMBLE           -&gt; Preamble Contents (body of the first part, before first boundary - "DataPower Error Report for domain default"
     *
     * T_START_BODYPART     -&gt; Beginning of a body part
     * T_START_HEADER       -&gt; Message part header started
     * T_FIELD (*)          -&gt; multipart part fields (Content-Type, Content-transfer-encoding, Content-ID)
     * T_END_HEADER
     * T_BODY               -&gt; Part itself
     * T_END_BODYPART
     *
     * T_EPILOGUE           -&gt; Multipart Epilogue (getInputStream) (empty in ErrorReport)
     *
     * T_END_MULTIPART
     *
     * T_END_MESSAGE        -&gt; Stream at the end of the message
     * T_END_OF_STREAM      -&gt; End of File
     * </pre>
     * @throws Exception
     * @throws IllegalStateException if no PrePostReportProcessor was installed
     *
     * @see http://james.apache.org/mime4j/apidocs/org/apache/james/mime4j/parser/MimeTokenStream.html
     */
    void run() throws Exception {
        // Fail fast with a descriptive message instead of an NPE below.
        if (mPrePostProcessor == null) {
            throw new IllegalStateException("setPrePostProcessor(...) must be called before run()");
        }

        int state;
        MimeTokenStream mtstream = null;
        HashMap<String, String> headers = null;
        Field field = null;
        IPartInfo partInfo = null;
        PrintWriter sysout = null;

        // do the preprocessing
        mOutputStream = mPrePostProcessor.getOutputStreamFromFilename(mFilename);
        // NOTE(review): sysout is flushed in process() but never closed here;
        // closing it would also close mOutputStream (possibly System.out),
        // so ownership is left to the caller -- confirm.
        sysout = new PrintWriter(mOutputStream);
        mPrePostProcessor.preProcess();

        // creates a MIME4J stream to parse all parts into tokens
        mtstream = new MimeTokenStream();
        mtstream.parse(mReport);

        // run through parsed tokens
        for (state = mtstream.getState();
             state != MimeTokenStream.T_END_OF_STREAM;
             state = mtstream.next()) {
            switch (state) {
            case MimeTokenStream.T_BODY:
                partInfo = new ReportProcessorPartInfo(IPartInfo.MIME_BODYPART, headers, mtstream.getInputStream(), mERDetails);
                process(partInfo, sysout);
                headers = null;
                partInfo = null;
                break;
            case MimeTokenStream.T_FIELD:
                // accumulate header fields for the upcoming body part
                field = mtstream.getField();
                headers.put(field.getName(), field.getBody());
                field = null;
                break;
            case MimeTokenStream.T_START_HEADER:
                headers = new HashMap<String, String>();
                break;
            case MimeTokenStream.T_END_HEADER:
                break;
            case MimeTokenStream.T_PREAMBLE:
                partInfo = new ReportProcessorPartInfo(IPartInfo.MIME_PREAMBLE, headers, mtstream.getInputStream(), mERDetails);
                process(partInfo, sysout);
                headers = null;
                partInfo = null;
                break;
            case MimeTokenStream.T_EPILOGUE:
                partInfo = new ReportProcessorPartInfo(IPartInfo.MIME_EPILOGUE, headers, mtstream.getInputStream(), mERDetails);
                process(partInfo, sysout);
                headers = null;
                partInfo = null;
                break;
            default:
            }
        }

        mPrePostProcessor.postProcess();
    }

    /**
     * Locates and invokes the proper IPartsProcessor to taken on processing
     * of this MIME part
     * @param mimePart MIME document part information
     * @param writer output print writer
     * @throws IOException failure to read MIME part or write to output writer.
     * @throws ParserConfigurationException
     * @throws SAXException
     */
    private void process(IPartInfo mimePart, PrintWriter writer)
            throws IOException, SAXException, ParserConfigurationException {
        String contentID = mimePart.getContentID();
        IPartsProcessor partProcessor = null;

        if (contentID != null) {
            // the firmware-version part is wrapped so its contents can be
            // interpreted while being read
            if (contentID.equals("<FirmwareVersion@datapower.ibm.com>")) {
                FirmwareInputStream fis = new FirmwareInputStream(mimePart.getBodyStream(), mERDetails);
                mimePart.setInputStream(fis);
            }
            partProcessor = mPartProcessors.get(contentID);
        }

        if (partProcessor == null) {
            partProcessor = mDefaultPartProcessor;
        }

        try {
            partProcessor.process(mimePart, writer);
        } finally {
            writer.flush();
        }
    }

    public ReportProcessor() {
        // initialize
        mReport = null;
        mPartProcessors = new HashMap<String, IPartsProcessor>();
        mERDetails = new ErrorReportDetails();

        // instantiate the default parts processor: discards part bodies, but
        // still pulls a chunk from the firmware-version part so the wrapping
        // stream sees its data
        mDefaultPartProcessor = new IPartsProcessor() {
            public void process(IPartInfo mimePart, PrintWriter writer) throws IOException {
                String contentID = mimePart.getContentID();
                if (contentID != null) {
                    if (contentID.equals("<FirmwareVersion@datapower.ibm.com>")) {
                        InputStream is = mimePart.getBodyStream();
                        byte[] b = new byte[4096];
                        // Single read, result intentionally discarded;
                        // presumably one chunk suffices to trigger the
                        // FirmwareInputStream side effects -- TODO confirm.
                        if (is.read(b) < 0) {
                            // empty part: nothing to consume
                        }
                    }
                }
            }
        };
    }

    // gzip-aware input stream of the loaded report, null until load()
    private InputStream mReport = null;
    // fallback processor used when no Content-ID-specific one is registered
    private IPartsProcessor mDefaultPartProcessor = null;
    private ErrorReportDetails mERDetails = null;
    // Content-ID -> processor registry (was a raw HashMap)
    private Map<String, IPartsProcessor> mPartProcessors;
    private OutputStream mOutputStream = null;
    private String mFilename = null;
    private PrePostReportProcessor mPrePostProcessor = null;
}
package cl.uchile.dcc.blabel.lean; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.TreeSet; import java.util.concurrent.Callable; import java.util.logging.Logger; import org.semanticweb.yars.nx.BNode; import org.semanticweb.yars.nx.Node; import org.semanticweb.yars.nx.NodeComparator; import org.semanticweb.yars.nx.Nodes; import org.semanticweb.yars.stats.Count; import cl.uchile.dcc.blabel.lean.GraphLeaning.GraphLeaningResult; import cl.uchile.dcc.blabel.lean.util.Bindings; import cl.uchile.dcc.blabel.lean.util.Edge; import cl.uchile.dcc.blabel.lean.util.NodeBindCount; import cl.uchile.dcc.blabel.lean.util.NodeBindCountPair; import cl.uchile.dcc.blabel.lean.util.NodeEdges; import cl.uchile.dcc.blabel.lean.util.PatternSelectivityEstimate; import cl.uchile.dcc.blabel.lean.util.VariableSelectivityEstimate; public abstract class GraphLeaning implements Callable<GraphLeaningResult>{ protected final Collection<Node[]> data; // filtering trivial non-lean blank nodes protected TreeSet<Node[]> filteredData; // trivial non-lean bnode map protected HashMap<BNode,Node> nlbnodeMap; // data where subject and object are bnodes protected Collection<Node[]> bnodeData; // query bnodes (connected, non ground) protected Set<BNode> queryBnodes; public static final Logger LOG = Logger.getLogger(GraphLeaning.class.getName()); // maps node to all incoming and outgoing edges protected Map<Node,NodeEdges> nodeToAllEdges; // maps incoming/outgoing edges to node protected Map<Edge,TreeSet<NodeEdges>> anyEdgeToNodes; // maps node to incoming and outgoing // ground edges protected Map<Node,NodeEdges> nodeToGroundEdges; // maps incoming/outgoing ground edge to node protected Map<Edge,TreeSet<NodeEdges>> groundEdgeToNodes; // all blank nodes protected TreeSet<BNode> bnodes; // based on ground edges, bnodes can only // be 
mapped to the nodes here // (if bnode not in map, no restriction) protected Map<BNode,Set<Node>> candidates; // P-O-S index protected Map<Node,Map<Node,Set<Node>>> posIndex; // P-S-O index protected Map<Node,Map<Node,Set<Node>>> psoIndex; // blank nodes that can only be // mapped to themselves based on ground edges protected TreeSet<BNode> fixedBnodes; // cardinalities for predicates protected Count<Node> predCard; // number of joins performed [for statistics] protected long joins = 0; // dummy blank node used for // ground edges where value is a // blank node public static final BNode DUMMY = new BNode("dummy"); // randomise bindings [ONLY USEFUL FOR BENCHMARKING] protected boolean randomiseBindings = false; public GraphLeaning(Collection<Node[]> data){ this(data,false); } /** * Randomising bindings only useful for benchmarking. * By default, algorithm chooses best bindings to try first. * * @param data * @param randomiseBindings set to true to randomise order bindings are visited */ public GraphLeaning(Collection<Node[]> data, boolean randomiseBindings){ this.data = data; this.randomiseBindings = randomiseBindings; bnodeData = new ArrayList<Node[]>(); queryBnodes = new HashSet<BNode>(); fixedBnodes = new TreeSet<BNode>(); } public GraphLeaningResult call() throws InterruptedException { return lean(); } private GraphLeaningResult lean() throws InterruptedException { // first we recursively remove all blank nodes where there // is another term with a superset of (exact) edges Collection<Node[]> inputData = data; nlbnodeMap = new HashMap<BNode,Node>(); int prevNlbnodes = 0; do{ // previous number of non lean bnodes prevNlbnodes = nlbnodeMap.size(); // index exact edges indexAllEdges(inputData); if(bnodes.size()==0){ // if there are no blank nodes (remaining) // we can return the lean graph GraphLeaningResult glr = new GraphLeaningResult(inputData); glr.setCoreMap(nlbnodeMap); return glr; } // removes blank nodes whose edges are a subset // (or equal, if multiple) 
another term filterTrivialNonLeanBnodes(inputData); inputData = filteredData; } while(prevNlbnodes!=nlbnodeMap.size()); // afterwards, for the remaining blank nodes // we will build a set of candidates based on // ground information; at the same time, we // will fix blank nodes with unique ground // information. int prevGroundBnodes = 0; do{ // we will do this iteratively since fixing // a blank node may lead to another blank node // being fixed prevGroundBnodes = fixedBnodes.size(); indexGroundEdges(filteredData); findGroundCandidates(); } while(fixedBnodes.size()!=prevGroundBnodes); // this will be the witness homomorphism/mapping HashMap<BNode,Node> coreMap = new HashMap<BNode,Node>(); if(!fixedBnodes.isEmpty()){ // we can create a map from fixed bnodes to // themselves to start with for(BNode b:fixedBnodes){ coreMap.put(b, b); } // we ignore the non-lean bnodes for the moment // since we will work with the filtered data } //fixedBnodes are like constants if(fixedBnodes.size() == bnodes.size()){ // if all bnodes are fixed due to having unique // ground information ... 
nothing to lean GraphLeaningResult glr = new GraphLeaningResult(filteredData); glr.setCoreMap(coreMap); if(!nlbnodeMap.isEmpty()){ // just add the non-lean blank nodes to the mapping // if any and map bnodes to themselves glr.coreMap.putAll(nlbnodeMap); } return glr; } // all we are left to take care of now are the // remaining connected blank nodes that are // not fixed indexBNodeGraph(); GraphLeaningResult glr = null; // glr.setCoreMap(coreMap); // we have connected blank nodes if(!bnodeData.isEmpty()){ // create a query (triples with connected non-fixed bnodes // created by indexBnodeGraph()) ArrayList<Node[]> query = new ArrayList<Node[]>(bnodeData); // and order by selectivity estimates ArrayList<Node[]> orderedQuery = orderPatterns(query); // now find a solution (e.g., using BFS or DFS) for // the query against the filtered graph GraphLeaningResult glrConnected = getCore(orderedQuery,coreMap); if(glrConnected.coreMap==null){ // if there's no proper homomorphism // filtered input is lean glr = new GraphLeaningResult(filteredData); // map blank nodes to themselves for(BNode qb:queryBnodes){ coreMap.put(qb, qb); } glr.setCoreMap(coreMap); } else{ // otherwise we found a proper homomoprhism // so let's map the data and set the mapping glr = new GraphLeaningResult(glrConnected.leanData); glr.setCoreMap(glrConnected.coreMap); } // add some other statistics if(glrConnected!=null){ glr.joins = glrConnected.joins; glr.depth = glrConnected.depth; glr.solCount = glrConnected.solCount; } } else{ // actually we should have processed all unconnected // bnodes by now ... 
so something is wrong
			LOG.warning("No unconnected bnodes but some bnodes not fixed or non-lean?");
		}
		
		// we could ignore the trivial non-lean bnodes until now,
		// but let's add them back in at the end
		// to complete the witness mapping
		if(!nlbnodeMap.isEmpty()){
			if(glr.coreMap==null)
				glr.coreMap = nlbnodeMap;
			else{
				glr.coreMap.putAll(nlbnodeMap);
				// may need to compute the transitive closure again
				// since if a maps to b in non-lean bnodes and b
				// maps to c in connected homomorphism, we would like
				// to map a to c
				glr.coreMap = transitiveClosure(glr.coreMap);
			}
		}
		
		return glr;
	}
	
	/**
	 * Applies a blank-node mapping to a collection of triples.
	 * 
	 * Only the subject (position 0) and object (position 2) of each
	 * triple are rewritten through the map; the predicate is copied
	 * unchanged. If the map is null or maps every key to itself,
	 * the triples are copied as-is.
	 * 
	 * @param data input triples (arrays are copied, not mutated)
	 * @param map mapping from blank nodes to their replacement terms
	 * @return mapped triples, deduplicated and ordered by NodeComparator.NC
	 * @throws InterruptedException if the calling thread is interrupted
	 */
	public static TreeSet<Node[]> mapData(Collection<Node[]> data, Map<BNode,Node> map) throws InterruptedException{
		TreeSet<Node[]> leanData = new TreeSet<Node[]>(NodeComparator.NC);
		boolean identity = isIdentityMap(map);
		for(Node[] triple:data){
			if (Thread.interrupted()) {
				throw new InterruptedException();
			}
			Node[] leanTriple = new Node[triple.length];
			System.arraycopy(triple, 0, leanTriple, 0, triple.length);
			if(!identity){
				// rewrite subject and object only; predicates are
				// never blank nodes in this pipeline
				leanTriple[0] = getMappedNode(triple[0], map);
				leanTriple[2] = getMappedNode(triple[2], map);
			}
			leanData.add(leanTriple);
		}
		return leanData;
	}
	
	/**
	 * Returns true iff the map is null or every key maps to itself
	 * (i.e., applying it would be a no-op).
	 */
	private static boolean isIdentityMap(Map<BNode, Node> map) {
		if(map==null)
			return true;
		for(Map.Entry<BNode, Node> e:map.entrySet()){
			if(!e.getKey().equals(e.getValue()))
				return false;
		}
		return true;
	}
	
	/**
	 * Looks up the image of a node under the homomorphism: blank nodes
	 * are replaced by their mapped value; any other term (or a blank
	 * node absent from the map) is returned unchanged.
	 */
	private static Node getMappedNode(Node n, Map<BNode,Node> homo){
		if(n instanceof BNode){
			Node m = homo.get((BNode)n);
			if(m==null){
				// an unconnected blank node
				// we will deal with this at
				// the end
				return n;
			}
			return m;
		}
		return n;
	}
	
	/**
	 * Indexes the filtered data for query evaluation: builds POS and PSO
	 * indexes and predicate cardinalities over all triples, and collects
	 * into bnodeData/queryBnodes the triples whose subject AND object are
	 * both non-fixed blank nodes (the "connected" part of the graph).
	 */
	private void indexBNodeGraph() throws InterruptedException {
		predCard = new Count<Node>();
		posIndex = new HashMap<Node,Map<Node,Set<Node>>>();
		psoIndex = new HashMap<Node,Map<Node,Set<Node>>>();
		queryBnodes = new TreeSet<BNode>();
		for(Node[] triple:filteredData){
			if (Thread.interrupted()) {
				throw new InterruptedException();
			}
			if(triple[0] instanceof BNode && !fixedBnodes.contains(triple[0]) && triple[2]
instanceof BNode && !fixedBnodes.contains(triple[2])){
				// subject and object are both non-fixed bnodes:
				// part of the connected query we must solve later
				bnodeData.add(triple);
				queryBnodes.add((BNode)triple[0]);
				queryBnodes.add((BNode)triple[2]);
			}
			// index in POS and PSO order for lookups during binding
			indexBNodeEdge(new Node[]{triple[1],triple[2],triple[0]},posIndex);
			indexBNodeEdge(new Node[]{triple[1],triple[0],triple[2]},psoIndex);
			predCard.add(triple[1]);
		}
	}
	
	/**
	 * Removes "trivially" non-lean blank nodes: a blank node whose edge
	 * set is a subset of another node's edge set can be mapped to that
	 * node. Populates nlbnodeMap with the witness mapping and rebuilds
	 * filteredData without triples mentioning the removed blank nodes.
	 */
	private void filterTrivialNonLeanBnodes(Collection<Node[]> data) throws InterruptedException{
		// this stores blank nodes that have the same edge set
		// only necessary to compute the mapping
		HashMap<BNode,TreeSet<BNode>> partition = new HashMap<BNode,TreeSet<BNode>>();
		
		// we will map bnodes to the nodes that witness
		// their non-leanness
		HashMap<BNode,Node> map = new HashMap<BNode,Node>();
		for(BNode bnode:bnodes){
			if (Thread.interrupted()) {
				throw new InterruptedException();
			}
			TreeSet<BNode> part = partition.get(bnode);
			if(part!=null){
				// we already found a blank node with an
				// equal set of edges
				continue;
			}
			NodeEdges edges = nodeToAllEdges.get(bnode);
			if(edges!=null){
				// find most selective edge (smallest candidate set
				// of nodes sharing that edge)
				TreeSet<NodeEdges> min = null;
				for(Edge e: edges.getEdges()){
					TreeSet<NodeEdges> ne = anyEdgeToNodes.get(e);
					if(min == null || ne.size() < min.size()){
						min = ne;
					}
					if(ne.size()==1){
						// node can only be mapped to itself
						break;
					}
				}
				// nodes other than itself that has a superset of edges
				if(min.size()!=1){
					// check each to make sure
					// blank node has subset of edges
					for(NodeEdges ne: min){
						if(!ne.getNode().equals(bnode)){
							ArrayList<TreeSet<Edge>> diff = diff(edges.getEdges(),ne.getEdges());
							if(diff.get(0).isEmpty()){
								// has a subset of edges: ne covers edges
								if(diff.get(1).isEmpty()){
									// the sets are equal: ne equals edges
									// if the node in question is
									// IRI or seen blank node,
									// current blank node is redundant
									if(!(ne.getNode() instanceof BNode)){
										map.put(bnode, ne.getNode());
										break;
									} else {
										// node is a blank node, we add it
										// to the partition of bnodes with
										// equal edges
										part = new TreeSet<BNode>();
										part.add(bnode);
										part.add((BNode)ne.getNode());
										partition.put(bnode,part);
										partition.put((BNode)ne.getNode(),part);
									}
								} else {
									// ne proper superset of edges
									// current blank node is redundant
									map.put(bnode, ne.getNode());
									break;
								}
							}
						}
					}
				}
			}
		}
		
		// for all blank nodes with same edges
		// chose first blank node to remain
		// but only if not in nonLeanBnodes
		// (the first will also be the first
		// encountered above ... we have the same
		// iteration order ... so we know it
		// will be mapped if possible)
		for(TreeSet<BNode> parts:partition.values()){
			BNode first = parts.pollFirst();
			Node mapped = map.get(first);
			if(mapped==null){
				mapped = first;
			}
			for(BNode rest : parts){
				map.put(rest,mapped);
			}
		}
		
		// now we compute the transitive closure of the
		// map to make sure we map to the final value
		// and not one that is non lean
		//
		// note we should not have cycles since all bnodes
		// with equal edges have been mapped to a single value
		// (... if we have cycles, this will loop forever)
		if(!map.isEmpty()){
			nlbnodeMap.putAll(map);
			nlbnodeMap = transitiveClosure(nlbnodeMap);
		}
		
		// drop the removed bnodes and every triple that mentions them
		bnodes.removeAll(nlbnodeMap.keySet());
		filteredData = new TreeSet<Node[]>(NodeComparator.NC);
		for(Node[] triple:data){
			if((!(triple[0] instanceof BNode) || !nlbnodeMap.containsKey(triple[0]))
					&& (!(triple[2] instanceof BNode) || !nlbnodeMap.containsKey(triple[2]))){
				filteredData.add(triple);
			}
		}
	}
	
	/**
	 * If a maps to b and b maps to c, output will map a to c.
	 * 
	 * NOTE: ASSUMES NO CYCLES, OTHERWISE THIS WILL NOT TERMINATE CORRECTLY!!!!
 *
	 * @param map mapping whose chains should be collapsed (must be acyclic)
	 * @return a map where every key points at the final value of its chain
	 * @throws InterruptedException if the calling thread is interrupted
	 */
	private static HashMap<BNode,Node> transitiveClosure(HashMap<BNode,Node> map) throws InterruptedException{
		boolean changed;
		int iters = 0;
		do{
			if (Thread.interrupted()) {
				throw new InterruptedException();
			}
			changed = false;
			HashMap<BNode,Node> nextMap = new HashMap<BNode,Node>();
			for(Map.Entry<BNode, Node> m: map.entrySet()){
				if(m.getValue() instanceof BNode){
					BNode v = (BNode) m.getValue();
					Node mv = map.get(v);
					if(mv!=null && !mv.equals(m.getValue())){
						// follow one step of the chain a -> b -> c
						nextMap.put(m.getKey(), mv);
						changed = true;
						continue;
					}
				}
				nextMap.put(m.getKey(),m.getValue());
			}
			map = nextMap;
			
			// without cycles, this should never happen:
			// an acyclic chain can be at most map.size() long
			iters++;
			if(iters>map.size()){
				LOG.warning("Found a map with cycles!!! "+map);
				return map;
			}
		} while(changed);
		return map;
	}
	
	/**
	 * Indexes every triple's edges (predicate + neighbouring term, with
	 * direction) against both of its end-points: nodeToAllEdges maps a
	 * node to all its incident edges, anyEdgeToNodes is the inverse.
	 * Also collects all blank nodes appearing in subject/object position.
	 */
	private void indexAllEdges(Collection<Node[]> data) throws InterruptedException{
		nodeToAllEdges = new HashMap<Node,NodeEdges>();
		anyEdgeToNodes = new HashMap<Edge,TreeSet<NodeEdges>>();
		bnodes = new TreeSet<BNode>();
		for(Node[] triple:data){
			if (Thread.interrupted()) {
				throw new InterruptedException();
			}
			if(triple.length<3){
				LOG.warning("Not a triple: "+Nodes.toN3(triple));
			} else {
				if(triple[0] instanceof BNode){
					bnodes.add((BNode) triple[0]);
				}
				// from the object's perspective this is an inbound edge
				Edge inEdge = new Edge(triple[1],triple[0],false);
				indexEdge(triple[2],inEdge,nodeToAllEdges,anyEdgeToNodes);
				
				if(triple[2] instanceof BNode){
					bnodes.add((BNode) triple[2]);
				}
				// from the subject's perspective this is an outbound edge
				Edge outEdge = new Edge(triple[1],triple[2],true);
				indexEdge(triple[0],outEdge,nodeToAllEdges,anyEdgeToNodes);
			}
		}
	}
	
	/**
	 * Like indexAllEdges but only records edges whose far end is ground
	 * (an IRI/literal or an already-fixed blank node); DUMMY edges are
	 * added per predicate so nodes can also be compared by the
	 * predicates they use. Partially visible here - continues below.
	 */
	private void indexGroundEdges(Collection<Node[]> data) throws InterruptedException{
		nodeToGroundEdges = new HashMap<Node,NodeEdges>();
		groundEdgeToNodes = new HashMap<Edge,TreeSet<NodeEdges>>();
		for(Node[] triple: data){
			if (Thread.interrupted()) {
				throw new InterruptedException();
			}
			if(triple.length<3){
				LOG.warning("Not a triple: "+Nodes.toN3(triple));
			} else {
				if(!(triple[0] instanceof BNode && !fixedBnodes.contains(triple[0]))){
					// term triple[0]
is ground Edge edge = new Edge(triple[1],triple[0],false); indexEdge(triple[2],edge,nodeToGroundEdges,groundEdgeToNodes); } // will be used to check blank nodes // with unique set of predicates Edge dummyOut = new Edge(triple[1],DUMMY,false); indexEdge(triple[2],dummyOut,nodeToGroundEdges,groundEdgeToNodes); if(!(triple[2] instanceof BNode && !fixedBnodes.contains(triple[2]))){ // term triple[2] is ground Edge edge = new Edge(triple[1],triple[2],true); indexEdge(triple[0],edge,nodeToGroundEdges,groundEdgeToNodes); } Edge dummyIn = new Edge(triple[1],DUMMY,true); indexEdge(triple[0],dummyIn,nodeToGroundEdges,groundEdgeToNodes); } } } private void findGroundCandidates() throws InterruptedException{ candidates = new HashMap<BNode,Set<Node>>(); for(BNode bnode:bnodes){ if (Thread.interrupted()) { throw new InterruptedException(); } if(!fixedBnodes.contains(bnode)){ NodeEdges edges = nodeToGroundEdges.get(bnode); if(edges!=null){ // find most selective edge TreeSet<NodeEdges> min = null; for(Edge e: edges.getEdges()){ TreeSet<NodeEdges> ne = groundEdgeToNodes.get(e); if(min == null || ne.size() < min.size()){ min = ne; } if(ne.size()==1){ // node can only be mapped to itself break; } } // nodes other than itself to which // the bnode can be mapped TreeSet<Node> cans = new TreeSet<Node>(); if(min.size()!=1){ // check each to make sure // blank node has subset of edges for(NodeEdges ne: min){ if(!ne.getNode().equals(bnode)){ ArrayList<TreeSet<Edge>> diff = diff(edges.getEdges(),ne.getEdges()); if(diff.get(0).isEmpty()){ // all edges contained in ne cans.add(ne.getNode()); } } } } if(cans.isEmpty()){ fixedBnodes.add(bnode); } // add node itself back in cans.add(bnode); candidates.put(bnode,cans); } } } } /** * Will return a list of two sets where the first set has the elements * of a not in b, and the second set has the elements of b not in a. * * Assumes sorting is equal (and no dupes). * * @param a * @param b * @return */ public static <E extends Comparable<? 
super E>> ArrayList<TreeSet<E>> diff(TreeSet<E> a, TreeSet<E> b){ ArrayList<TreeSet<E>> diff = new ArrayList<TreeSet<E>>(); diff.add(new TreeSet<E>()); diff.add(new TreeSet<E>()); Iterator<E> aiter = a.iterator(); Iterator<E> biter = b.iterator(); E anext = null; E bnext = null; if(aiter.hasNext() && biter.hasNext()){ anext = aiter.next(); bnext = biter.next(); while(aiter.hasNext() && biter.hasNext()){ int comp = anext.compareTo(bnext); if(comp<0){ diff.get(0).add(anext); anext = aiter.next(); } else if(comp>0){ diff.get(1).add(bnext); bnext = biter.next(); } else{ anext = aiter.next(); bnext = biter.next(); } } int comp = anext.compareTo(bnext); // we are at the end of one set if(comp<0){ // if a is less, we can add a diff.get(0).add(anext); // we may be able to add b if a // is finished or a does not contain b later if(!aiter.hasNext() || !a.contains(bnext)) diff.get(1).add(bnext); } else if(comp>0){ // likewise swapping b and a diff.get(1).add(bnext); if(!biter.hasNext() || !b.contains(anext)) diff.get(0).add(anext); } } // we need to add all the elements of the // remaining set if(aiter.hasNext()){ while(aiter.hasNext()){ diff.get(0).add(aiter.next()); } // more efficient to try remove once // than do linear checks if(bnext!=null) diff.get(0).remove(bnext); } if(biter.hasNext()){ while(biter.hasNext()){ diff.get(1).add(biter.next()); } // more efficient to try remove once // than do linear checks if(anext!=null) diff.get(1).remove(anext); } return diff; } protected Bindings getBindings(Node[] current, HashMap<BNode,Node> partialSol) throws InterruptedException{ return getBindings(current, partialSol, null); } protected Bindings getBindings(Node[] current, HashMap<BNode,Node> partialSol, Count<Node> timesBound) throws InterruptedException{ if (Thread.interrupted()) { throw new InterruptedException(); } Node[] bound = new Node[current.length]; System.arraycopy(current, 0, bound, 0, bound.length); bound[0] = partialSol.get(current[0]); bound[2] = 
partialSol.get(current[2]); // these are the new blank nodes variables that // will be bound ArrayList<BNode> mask = new ArrayList<BNode>(); // bindings in same order as the mask ArrayList<Node[]> bindings = new ArrayList<Node[]>(); if(bound[0]==null && bound[2]==null){ // variable bnode in subject and object boolean sameVars = current[0].equals(current[2]); mask.add((BNode)current[0]); if(!sameVars){ mask.add((BNode)current[2]); } ArrayList<NodeBindCountPair> orderedBindings = new ArrayList<NodeBindCountPair>(); Map<Node,Set<Node>> osEdges = posIndex.get(current[1]); for(Node o : osEdges.keySet()){ // check that o is compatible with ground data if(compatibleWithGroundEdges(current[2],o)){ // number of times o value is already bound int boundO = getIntegerCount(o,timesBound); NodeBindCount nbco = new NodeBindCount(o,boundO,o.equals(current[2])); for(Node s : osEdges.get(o)){ // if o and s blank nodes are the same, binding // must be the same if(!sameVars || s.equals(o)){ // check that s is compatible with ground data if(compatibleWithGroundEdges(current[0],s)){ // number of times s value is already bound int boundS = getIntegerCount(s,timesBound); NodeBindCount nbcs = new NodeBindCount(s,boundS,s.equals(current[0])); orderedBindings.add(new NodeBindCountPair(nbcs,nbco)); } } } } } if(!orderedBindings.isEmpty()){ // will select mappings with ground terms // and then terms most frequently bound // and then non-self matches Collections.sort(orderedBindings); for(NodeBindCountPair nbcp : orderedBindings){ if(sameVars){ bindings.add(new Node[]{nbcp.getSubject().getNode()}); } else{ bindings.add(new Node[]{nbcp.getSubject().getNode(),nbcp.getObject().getNode()}); } } } else{ // dead end binding return new Bindings(mask,null); } } else if(bound[0]!=null && bound[2]!=null){ if(!getVals(bound[1],bound[0],psoIndex).contains(bound[2])){ // dead end binding return new Bindings(mask,null); } bindings.add(new Node[]{}); } else { int b = (bound[0]!=null) ? 
0 : 2; int ub = (bound[0]!=null) ? 2 : 0; Map<Node,Map<Node,Set<Node>>> index = (bound[0]!=null) ? psoIndex : posIndex; mask.add((BNode)current[ub]); Set<Node> vals = getVals(bound[1],bound[b],index); ArrayList<NodeBindCount> newBindings = new ArrayList<NodeBindCount>(); if(vals!=null){ for(Node v:vals){ // check that v is compatible with ground data if(compatibleWithGroundEdges(current[ub],v)){ // number of times value is already bound int boundV = getIntegerCount(v,timesBound); NodeBindCount nbc = new NodeBindCount(v,boundV,v.equals(current[ub])); newBindings.add(nbc); } } } if(!newBindings.isEmpty()){ Collections.sort(newBindings); for(NodeBindCount nbc:newBindings){ bindings.add(new Node[]{nbc.getNode()}); } } else { // dead end binding return new Bindings(mask,null); } } // only for benchmark purposes to see // benefit of ordering bindings in DFS // SHOULD NOT BE SET OTHERWISE if(randomiseBindings){ Collections.shuffle(bindings); } return new Bindings(mask,bindings); } private static void indexEdge(Node n, Edge e, Map<Node, NodeEdges> nodeToGroundEdges, Map<Edge, TreeSet<NodeEdges>> groundEdgeToNodes){ NodeEdges edges = nodeToGroundEdges.get(n); if(edges==null){ edges = new NodeEdges(n); nodeToGroundEdges.put(n, edges); } edges.addEdge(e); TreeSet<NodeEdges> nodes = groundEdgeToNodes.get(e); if(nodes==null){ nodes = new TreeSet<NodeEdges>(); groundEdgeToNodes.put(e, nodes); } nodes.add(edges); } /** * * @param edge is POS or PSO order * @param map to index in */ private boolean indexBNodeEdge(Node[] edge, Map<Node,Map<Node,Set<Node>>> map){ Map<Node,Set<Node>> edges = map.get(edge[0]); if(edges==null){ edges = new HashMap<Node,Set<Node>>(); map.put(edge[0], edges); } Set<Node> vals = edges.get(edge[1]); if(vals==null){ vals = new HashSet<Node>(); edges.put(edge[1],vals); } return vals.add(edge[2]); } private ArrayList<Node[]> orderPatterns(ArrayList<Node[]> patterns){ // order patterns by selectivity ArrayList<PatternSelectivityEstimate> ordered = new 
ArrayList<PatternSelectivityEstimate>(); // keep track of variable selectivities as well Map<BNode,VariableSelectivityEstimate> vestimates = new HashMap<BNode,VariableSelectivityEstimate>(); for(Node[] pattern:patterns){ int pcard = predCard.get(pattern[1]); int scard = getSelectivityEstimate((BNode)pattern[0],pattern[1],psoIndex,vestimates); int ocard = getSelectivityEstimate((BNode)pattern[2],pattern[1],psoIndex,vestimates); ordered.add(new PatternSelectivityEstimate(pattern,scard,pcard,ocard)); } Collections.sort(ordered); // index patterns by variables HashMap<BNode,TreeSet<PatternSelectivityEstimate>> varToPattern = new HashMap<BNode,TreeSet<PatternSelectivityEstimate>>(); for(PatternSelectivityEstimate pse: ordered){ mapVarToPattern((BNode)pse.getPattern()[0],pse,varToPattern); mapVarToPattern((BNode)pse.getPattern()[2],pse,varToPattern); } // select patterns grouped by variables first, // ordered by selectivity second ArrayList<Node[]> rawOrdered = new ArrayList<Node[]>(); TreeSet<VariableSelectivityEstimate> queue = new TreeSet<VariableSelectivityEstimate>(); TreeSet<Node[]> done = new TreeSet<Node[]>(NodeComparator.NC); HashSet<Node> varsDone = new HashSet<Node>(); while(rawOrdered.size()<patterns.size()){ if(queue.isEmpty()){ // get the most selective pattern // not yet added PatternSelectivityEstimate first = null; boolean added = false; do { first = ordered.remove(0); if(added = done.add(first.getPattern())){ rawOrdered.add(first.getPattern()); addNewBNodesFromPattern(first.getPattern(),vestimates,varsDone,queue); } } while(!added); } else{ // get the most selective variable // already seen VariableSelectivityEstimate vse = queue.pollFirst(); TreeSet<PatternSelectivityEstimate> pses = varToPattern.get(vse.getVariable()); for(PatternSelectivityEstimate pse:pses){ // add all the unseen patterns in order // of selectivity for that variable if(done.add(pse.getPattern())){ rawOrdered.add(pse.getPattern()); 
addNewBNodesFromPattern(pse.getPattern(),vestimates,varsDone,queue);
					}
				}
			}
		}
		
		return rawOrdered;
	}
	
	/**
	 * Enqueues the subject and object variables of a pattern (positions
	 * 0 and 2) if they have not been seen before.
	 */
	private void addNewBNodesFromPattern(Node[] pattern,
			Map<BNode, VariableSelectivityEstimate> vestimates,
			HashSet<Node> varsDone,
			TreeSet<VariableSelectivityEstimate> queue) {
		addNewBNodeFromPattern(pattern[0],vestimates,varsDone,queue);
		addNewBNodeFromPattern(pattern[2],vestimates,varsDone,queue);
	}
	
	/**
	 * Adds the variable's selectivity estimate to the queue the first
	 * time the variable is encountered.
	 */
	private void addNewBNodeFromPattern(Node node,
			Map<BNode, VariableSelectivityEstimate> vestimates,
			HashSet<Node> varsDone,
			TreeSet<VariableSelectivityEstimate> queue) {
		if(varsDone.add(node)){
			queue.add(vestimates.get(node));
		}
	}
	
	/**
	 * Estimates (and caches) the cardinality for a variable: the number
	 * of distinct terms indexed under the predicate, capped by the size
	 * of the variable's ground candidate set when one is known. Updates
	 * the running per-variable minimum in vestimates.
	 */
	private int getSelectivityEstimate(BNode var, Node pred,
			Map<Node, Map<Node, Set<Node>>> index,
			Map<BNode, VariableSelectivityEstimate> vestimates) {
		int card = index.get(pred).size();
		Set<Node> cands = candidates.get(var);
		if(cands != null){
			card = Math.min(card,cands.size());
		}
		VariableSelectivityEstimate vse = vestimates.get(var);
		if(vse==null){
			vse = new VariableSelectivityEstimate(var,card);
			vestimates.put(var, vse);
		} else{
			vse.updateCardinality(card);
		}
		return vse.getCard();
	}
	
	/**
	 * Adds the pattern estimate to the variable's bucket, creating the
	 * bucket on first use.
	 * 
	 * @return true if the pattern was not already indexed for the variable
	 */
	private static boolean mapVarToPattern(BNode node, PatternSelectivityEstimate pse,
			HashMap<BNode, TreeSet<PatternSelectivityEstimate>> varToPattern) {
		TreeSet<PatternSelectivityEstimate> ts = varToPattern.get(node);
		if(ts == null){
			ts = new TreeSet<PatternSelectivityEstimate>();
			varToPattern.put(node, ts);
		}
		return ts.add(pse);
	}
	
	/**
	 * Gets the homomorphism with the fewest blank nodes guaranteed.
	 * Implemented by subclasses (e.g., with BFS or DFS search).
	 * 
	 * @param query ordered triple patterns over connected non-fixed bnodes
	 * @param coreMap partial mapping to extend (fixed bnodes map to themselves)
	 * @return the leaning result with the witness mapping
	 * @throws InterruptedException if the calling thread is interrupted
	 */
	protected abstract GraphLeaningResult getCore(ArrayList<Node[]> query, HashMap<BNode, Node> coreMap) throws InterruptedException;
	
	/**
	 * Returns the blank nodes appearing as VALUES of the partial
	 * solution (i.e., blank nodes already used as binding targets).
	 */
	public static HashSet<BNode> getBNodeBindings(HashMap<BNode, Node> partialSol) {
		HashSet<BNode> bnodes = new HashSet<BNode>();
		for(Node n:partialSol.values()){
			if(n instanceof BNode){
				bnodes.add((BNode)n);
			}
		}
		return bnodes;
	}
	
	/**
	 * Checks a candidate binding against the ground-candidate sets:
	 * a variable may be bound to itself, to anything when it has no
	 * recorded candidate set, or to a member of its candidate set.
	 */
	protected boolean compatibleWithGroundEdges(Node var, Node bind){
		if(var ==
bind)
			return true;
		Set<Node> cands = candidates.get(var);
		if(cands==null || cands.contains(bind)){
			return true;
		}
		return false;
	}
	
	/**
	 * Null-safe count lookup: returns 0 when the count map is null or
	 * the node has no recorded count.
	 */
	public static int getIntegerCount(Node n, Count<Node> counts){
		if(counts==null)
			return 0;
		Integer co = counts.get(n);
		return (co==null) ? 0 : co;
	}
	
	/**
	 * Two-level index lookup: index[pred][node], or null if either
	 * level is missing.
	 */
	public static <E> E getVals(Node pred, Node node, Map<Node, Map<Node,E>> index) {
		Map<Node,E> edges = index.get(pred);
		if(edges!=null){
			return edges.get(node);
		}
		return null;
	}
	
	/**
	 * Serialises the triples in N3 syntax, one triple per line
	 * (used for logging/toString).
	 */
	protected static String toN3(Collection<Node[]> data){
		StringBuilder sb = new StringBuilder();
		for(Node[] t:data){
			sb.append(Nodes.toN3(t)+"\n");
		}
		return sb.toString();
	}
	
	/**
	 * Result of a leaning run: the lean data, the witness mapping, and
	 * some search statistics (joins performed, maximum depth, number of
	 * solutions visited).
	 */
	public static class GraphLeaningResult{
		// the (possibly reduced) data after leaning
		protected Collection<Node[]> leanData;
		// witness homomorphism from blank nodes to the terms they map to
		protected HashMap<BNode,Node> coreMap;
		// number of joins performed during the search
		protected long joins;
		// maximum search depth reached
		protected int depth;
		// number of (partial) solutions found
		protected long solCount;
		
		GraphLeaningResult(Collection<Node[]> leanData){
			this.leanData = leanData;
		}
		
		public Collection<Node[]> getLeanData(){
			return leanData;
		}
		
		public String toString(){
			return toN3(leanData);
		}
		
		/**
		 * 
		 * @return core solution mapping, or null if nothing changes (only automorphisms found)
		 */
		public Map<BNode, Node> getCoreMap() {
			return coreMap;
		}
		
		public void setCoreMap(HashMap<BNode, Node> coreMap) {
			this.coreMap = coreMap;
		}
		
		public long getJoins() {
			return joins;
		}
		
		public void setJoins(long joins) {
			this.joins = joins;
		}
		
		public int getDepth() {
			return depth;
		}
		
		public void setDepth(int depth) {
			this.depth = depth;
		}
		
		public long getSolutionCount() {
			return solCount;
		}
		
		public void setSolutionCount(long solCount) {
			this.solCount = solCount;
		}
	}
}
/* * Copyright (c) 2015 EMC Corporation * All Rights Reserved */ package com.emc.vipr.client.core; import static com.emc.vipr.client.core.util.ResourceUtils.defaultList; import java.net.URI; import java.util.List; import com.emc.storageos.model.BulkIdParam; import com.emc.storageos.model.NamedRelatedResourceRep; import com.emc.storageos.model.SnapshotList; import com.emc.storageos.model.TaskList; import com.emc.storageos.model.block.BlockConsistencyGroupSnapshotCreate; import com.emc.storageos.model.block.BlockSnapshotBulkRep; import com.emc.storageos.model.block.BlockSnapshotRestRep; import com.emc.storageos.model.block.VolumeDeleteTypeEnum; import com.emc.storageos.model.block.VolumeFullCopyCreateParam; import com.emc.storageos.model.block.VolumeSnapshotParam; import com.emc.storageos.model.block.export.ITLRestRep; import com.emc.storageos.model.block.export.ITLRestRepList; import com.emc.vipr.client.Task; import com.emc.vipr.client.Tasks; import com.emc.vipr.client.ViPRCoreClient; import com.emc.vipr.client.core.filters.ResourceFilter; import com.emc.vipr.client.core.impl.PathConstants; import com.emc.vipr.client.impl.RestClient; /** * Block snapshot resources. 
 * <p>
 * Base URL: <tt>/block/snapshots</tt>
 */
public class BlockSnapshots extends ProjectResources<BlockSnapshotRestRep> implements TaskResources<BlockSnapshotRestRep> {
    /**
     * Creates a block snapshot resource client.
     *
     * @param parent
     *            the parent ViPR core client.
     * @param client
     *            the low-level REST client used for all calls.
     */
    public BlockSnapshots(ViPRCoreClient parent, RestClient client) {
        super(parent, client, BlockSnapshotRestRep.class, PathConstants.BLOCK_SNAPSHOT_URL);
    }

    /** {@inheritDoc} Overridden to narrow the return type to {@link BlockSnapshots}. */
    @Override
    public BlockSnapshots withInactive(boolean inactive) {
        return (BlockSnapshots) super.withInactive(inactive);
    }

    /** {@inheritDoc} Overridden to narrow the return type to {@link BlockSnapshots}. */
    @Override
    public BlockSnapshots withInternal(boolean internal) {
        return (BlockSnapshots) super.withInternal(internal);
    }

    /** Fetches block snapshots in bulk for the given IDs. */
    @Override
    protected List<BlockSnapshotRestRep> getBulkResources(BulkIdParam input) {
        BlockSnapshotBulkRep response = client.post(BlockSnapshotBulkRep.class, input, getBulkUrl());
        return defaultList(response.getBlockSnapshots());
    }

    /**
     * Gets a list of block snapshot references from the given URL (path + args).
     *
     * @param path
     *            the path to get.
     * @param args
     *            the arguments for the path.
     * @return the list of snapshot references.
     */
    protected List<NamedRelatedResourceRep> getList(String path, Object... args) {
        SnapshotList response = client.get(SnapshotList.class, path, args);
        return defaultList(response.getSnapList());
    }

    @Override
    public Tasks<BlockSnapshotRestRep> getTasks(URI id) {
        return doGetTasks(id);
    }

    @Override
    public Task<BlockSnapshotRestRep> getTask(URI id, URI taskId) {
        return doGetTask(id, taskId);
    }

    /**
     * Begins activating a given block snapshot by ID.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/activate</tt>
     *
     * @param id
     *            the ID of the snapshot to activate.
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> activate(URI id) {
        return postTask(getIdUrl() + "/activate", id);
    }

    /**
     * Begins restoring a given block snapshot by ID.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/restore</tt>
     *
     * @param id
     *            the ID of the snapshot to restore.
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> restore(URI id) {
        return postTask(getIdUrl() + "/restore", id);
    }

    /**
     * Begins creating group synchronization between
     * volume group and snapshot group.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/start</tt>
     *
     * @param id
     *            the ID of the snapshot.
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> start(URI id) {
        return postTask(getIdUrl() + "/start", id);
    }

    /**
     * Begins deactivating a given block snapshot by ID (full delete).
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/deactivate</tt>
     *
     * @param id
     *            the ID of the snapshot to deactivate.
     *
     * @return a task for monitoring the progress of the operation.
     */
    public Tasks<BlockSnapshotRestRep> deactivate(URI id) {
        return deactivate(id, VolumeDeleteTypeEnum.FULL);
    }

    /**
     * Begins deactivating a given block snapshot by ID.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/deactivate</tt>
     *
     * @param id
     *            the ID of the snapshot to deactivate.
     *
     * @param type
     *            {@code FULL} or {@code VIPR_ONLY}
     *
     * @return a task for monitoring the progress of the operation.
     */
    public Tasks<BlockSnapshotRestRep> deactivate(URI id, VolumeDeleteTypeEnum type) {
        // deletion type is passed as a query parameter, not in the body
        URI uri = client.uriBuilder(getDeactivateUrl()).queryParam("type", type).build(id);
        TaskList tasks = client.postURI(TaskList.class, uri);
        return new Tasks<>(client, tasks.getTaskList(), resourceClass);
    }

    /**
     * Gets the list of exports (initiator-target-lun) for a given block snapshot by ID.
     * <p>
     * API Call: <tt>GET /block/snapshots/{id}/exports</tt>
     *
     * @param id
     *            the ID of the snapshot.
     * @return the list of exports for a snapshot.
     */
    public List<ITLRestRep> listExports(URI id) {
        ITLRestRepList response = client.get(ITLRestRepList.class, getIdUrl() + "/exports", id);
        return defaultList(response.getExportList());
    }

    /**
     * Gets the URL for looking up block snapshots by volume: <tt>/block/volumes/{volumeId}/protection/snapshots</tt>
     *
     * @return the URL for snapshots by volume.
     */
    protected String getByVolumeUrl() {
        return PathConstants.BLOCK_VOLUMES_URL + "/{volumeId}/protection/snapshots";
    }

    /**
     * Lists the block snapshots for a given block volume.
     * <p>
     * API Call: <tt>GET /block/volumes/{volumeId}/protection/snapshots</tt>
     *
     * @param volumeId
     *            the ID of the block volume.
     * @return the list of snapshot references for the volume.
     */
    public List<NamedRelatedResourceRep> listByVolume(URI volumeId) {
        return getList(getByVolumeUrl(), volumeId);
    }

    /**
     * Gets the block snapshots for a given block volume.
     *
     * @param volumeId
     *            the ID of the block volume.
     * @return the list of block snapshots for the volume.
     *
     * @see #listByVolume(URI)
     * @see #getByRefs(java.util.Collection)
     */
    public List<BlockSnapshotRestRep> getByVolume(URI volumeId) {
        return getByVolume(volumeId, null);
    }

    /**
     * Gets the block snapshots for a given block volume, optionally filtering the results.
     *
     * @param volumeId
     *            the ID of the block volume.
     * @param filter
     *            the filter to apply (may be null, for no filtering).
     * @return the list of block snapshots for the volume.
     *
     * @see #listByVolume(URI)
     * @see #getByRefs(java.util.Collection, ResourceFilter)
     */
    public List<BlockSnapshotRestRep> getByVolume(URI volumeId, ResourceFilter<BlockSnapshotRestRep> filter) {
        List<NamedRelatedResourceRep> refs = listByVolume(volumeId);
        return getByRefs(refs, filter);
    }

    /**
     * Begins creating a snapshot (or snapshots) of a given block volume by ID.
     * <p>
     * API Call: <tt>POST /block/volumes/{volumeId}/protection/snapshots</tt>
     *
     * @param volumeId
     *            the ID of the block volume to snapshot.
     * @param input
     *            the snapshot configuration.
     * @return tasks for monitoring the progress each snapshot creation.
     */
    public Tasks<BlockSnapshotRestRep> createForVolume(URI volumeId, VolumeSnapshotParam input) {
        return postTasks(input, getByVolumeUrl(), volumeId);
    }

    /**
     * Gets the URL for listing block snapshots by consistency group:
     * <tt>/block/consistency-groups/{consistencyGroupId}/protection/snapshots</tt>
     *
     * @return the URL for listing by consistency group.
     */
    protected String getByConsistencyGroupUrl() {
        return PathConstants.BLOCK_CONSISTENCY_GROUP_URL + "/{consistencyGroupId}/protection/snapshots";
    }

    /**
     * Lists the block snapshots for a consistency group by ID.
     * <p>
     * API Call: <tt>GET /block/consistency-groups/{consistencyGroupId}/protection/snapshots</tt>
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @return the list of block snapshot references.
     */
    public List<NamedRelatedResourceRep> listByConsistencyGroup(URI consistencyGroupId) {
        return getList(getByConsistencyGroupUrl(), consistencyGroupId);
    }

    /**
     * Gets the block snapshots for a consistency group by ID.
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @return the list of consistency groups.
     *
     * @see #listByConsistencyGroup(URI)
     * @see #getByRefs(java.util.Collection)
     */
    public List<BlockSnapshotRestRep> getByConsistencyGroup(URI consistencyGroupId) {
        return getByConsistencyGroup(consistencyGroupId, null);
    }

    /**
     * Gets the block snapshots for a consistency group by ID, optionally filtering the results.
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @param filter
     *            the filter to apply (may be null, for no filtering).
     * @return the list of consistency groups.
     *
     * @see #listByConsistencyGroup(URI)
     * @see #getByRefs(java.util.Collection, ResourceFilter)
     */
    public List<BlockSnapshotRestRep> getByConsistencyGroup(URI consistencyGroupId,
            ResourceFilter<BlockSnapshotRestRep> filter) {
        List<NamedRelatedResourceRep> refs = listByConsistencyGroup(consistencyGroupId);
        return getByRefs(refs, filter);
    }

    /**
     * Begins creating a block snapshot for the given consistency group by ID.
     * <p>
     * API Call: <tt>POST /block/consistency-groups/{consistencyGroupId}/protection/snapshots</tt>
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @param input
     *            the create configuration.
     * @return tasks for monitoring the progress of the block snapshot creation.
     */
    public Tasks<BlockSnapshotRestRep> createForConsistencyGroup(URI consistencyGroupId,
            BlockConsistencyGroupSnapshotCreate input) {
        return postTasks(input, getByConsistencyGroupUrl(), consistencyGroupId);
    }

    /**
     * Gets a particular block snapshot for a given consistency group by ID.
     * <p>
     * API Call: <tt>GET /block/consistency-groups/{consistencyGroupId}/protection/snapshots/{id}</tt>
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @param id
     *            the ID of the block snapshot.
     * @return the block snapshot.
     */
    public BlockSnapshotRestRep getForConsistencyGroup(URI consistencyGroupId, URI id) {
        return client.get(BlockSnapshotRestRep.class, getByConsistencyGroupUrl() + "/{id}", consistencyGroupId, id);
    }

    /**
     * Begins activating a block snapshot for a given consistency group by ID.
     * <p>
     * API Call: <tt>POST /block/consistency-groups/{consistencyGroupId}/protection/snapshots/{id}/activate</tt>
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @param id
     *            the ID of the block snapshot to activate.
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> activateForConsistencyGroup(URI consistencyGroupId, URI id) {
        return postTask(getByConsistencyGroupUrl() + "/{id}/activate", consistencyGroupId, id);
    }

    /**
     * Begins deactivating a block snapshot for a consistency group by ID.
     * <p>
     * API Call: <tt>POST /block/consistency-groups/{consistencyGroupId}/protection/snapshots/{id}/deactivate</tt>
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @param id
     *            the ID of the block snapshot to deactivate.
     * @return tasks for monitoring the progress of the operation.
     */
    public Tasks<BlockSnapshotRestRep> deactivateForConsistencyGroup(URI consistencyGroupId, URI id) {
        return postTasks(getByConsistencyGroupUrl() + "/{id}/deactivate", consistencyGroupId, id);
    }

    /**
     * Begins restoring a block snapshot for a consistency group by ID.
     * <p>
     * API Call: <tt>POST /block/consistency-groups/{consistencyGroupId}/protection/snapshots/{id}/restore</tt>
     *
     * @param consistencyGroupId
     *            the ID of the consistency group.
     * @param id
     *            the ID of the block snapshot to restore.
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> restoreForConsistencyGroup(URI consistencyGroupId, URI id) {
        return postTask(getByConsistencyGroupUrl() + "/{id}/restore", consistencyGroupId, id);
    }

    /**
     * Creates a new VPLEX volume using the target volume associated with
     * BlockSnapshot instance with the passed id as the source side
     * backend volume for the VPLEX volume.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/expose</tt>
     *
     * @param id the URI of the block snapshot
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> expose(URI id) {
        return postTask(getIdUrl() + "/expose", id);
    }

    /**
     * Begins creating full copies of a given block snapshot by ID.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/protection/full-copies</tt>
     *
     * @param id
     *            the ID of the block snapshot to copy.
     * @param input
     *            the full copy configuration.
     * @return tasks for monitoring the progress of the operation.
     */
    public Tasks<BlockSnapshotRestRep> createFullCopy(URI id, VolumeFullCopyCreateParam input) {
        return postTasks(input, getFullCopyUrl(), id);
    }

    /**
     * Gets the base URL for full copies for a single block snapshot: <tt>/block/snapshots/{id}/protection/full-copies</tt>
     *
     * @return the base full copy URL.
     */
    protected String getFullCopyUrl() {
        return getIdUrl() + "/protection/full-copies";
    }

    /**
     * Begins resynchronizing a block snapshot of the given block volume.
     * <p>
     * API Call: <tt>POST /block/snapshots/{id}/resynchronize</tt>
     *
     * @param snapshotId
     *            the ID of the block snapshot to resynchronize.
     * @return a task for monitoring the progress of the operation.
     */
    public Task<BlockSnapshotRestRep> resynchronizeBlockSnapshot(URI snapshotId) {
        return postTask(getIdUrl() + "/resynchronize", snapshotId);
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.activemq.artemis.utils;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ReadOnlyBufferException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

import io.netty.util.internal.PlatformDependent;
import org.jboss.logging.Logger;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

/**
 * Unit tests for {@link ByteUtil}: hex/string conversions, text-size parsing,
 * hash codes, buffer zeroing and primitive/byte-array conversions.
 */
public class ByteUtilTest {

   // Made final: the logger is assigned once and never reassigned.
   private static final Logger log = Logger.getLogger(ByteUtilTest.class);

   @Test
   public void testBytesToString() {
      byte[] byteArray = new byte[]{0, 1, 2, 3};
      testEquals("0001 0203", ByteUtil.bytesToHex(byteArray, 2));
      testEquals("00 01 02 03", ByteUtil.bytesToHex(byteArray, 1));
      testEquals("000102 03", ByteUtil.bytesToHex(byteArray, 3));
   }

   @Test
   public void testNonASCII() {
      assertEquals("aA", ByteUtil.toSimpleString(new byte[]{97, 0, 65, 0}));
      assertEquals(ByteUtil.NON_ASCII_STRING, ByteUtil.toSimpleString(new byte[]{0, 97, 0, 65}));

      log.debug(ByteUtil.toSimpleString(new byte[]{0, 97, 0, 65}));
   }

   @Test
   public void testMaxString() {
      byte[] byteArray = new byte[20 * 1024];
      log.debug(ByteUtil.maxString(ByteUtil.bytesToHex(byteArray, 2), 150));
   }

   // Helper that fails with both values in the message instead of just "expected/actual".
   void testEquals(String string1, String string2) {
      if (!string1.equals(string2)) {
         Assert.fail("String are not the same:=" + string1 + "!=" + string2);
      }
   }

   @Test
   public void testTextBytesToLongBytes() {
      long[] factor = new long[]{1, 5, 10};
      String[] type = new String[]{"", "b", "k", "m", "g"};
      long[] size = new long[]{1, 1, 1024, 1024 * 1024, 1024 * 1024 * 1024};

      for (int i = 0; i < 3; i++) {
         for (int j = 0; j < type.length; j++) {
            // With and without a separating space, lower and upper case.
            assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + type[j]));
            assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + " " + type[j]));
            assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + type[j].toUpperCase()));
            assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + " " + type[j].toUpperCase()));
            if (j >= 2) {
               // "kb"/"mb"/"gb" style suffixes.
               assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + type[j] + "b"));
               assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + " " + type[j] + "b"));
               assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + type[j].toUpperCase() + "B"));
               assertEquals(factor[i] * size[j], ByteUtil.convertTextBytes(factor[i] + " " + type[j].toUpperCase() + "B"));
            }
         }
      }
   }

   @Test
   public void testUnsafeUnalignedByteArrayHashCode() {
      Assume.assumeTrue(PlatformDependent.hasUnsafe());
      Assume.assumeTrue(PlatformDependent.isUnaligned());
      // Expected hash codes differ by native byte order.
      Map<byte[], Integer> map = new LinkedHashMap<>();
      map.put(new byte[0], 1);
      map.put(new byte[]{1}, 32);
      map.put(new byte[]{2}, 33);
      map.put(new byte[]{0, 1}, 962);
      map.put(new byte[]{1, 2}, 994);
      if (ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN) {
         map.put(new byte[]{0, 1, 2, 3, 4, 5}, 63504931);
         map.put(new byte[]{6, 7, 8, 9, 0, 1}, -1603953111);
         map.put(new byte[]{-1, -1, -1, (byte) 0xE1}, 1);
      } else {
         map.put(new byte[]{0, 1, 2, 3, 4, 5}, 1250309600);
         map.put(new byte[]{6, 7, 8, 9, 0, 1}, -417148442);
         map.put(new byte[]{-1, -1, -1, (byte) 0xE1}, -503316450);
      }
      for (Map.Entry<byte[], Integer> e : map.entrySet()) {
         assertEquals("input = " + Arrays.toString(e.getKey()), e.getValue().intValue(), ByteUtil.hashCode(e.getKey()));
      }
   }

   @Test
   public void testNoUnsafeAlignedByteArrayHashCode() {
      Assume.assumeFalse(PlatformDependent.hasUnsafe());
      Assume.assumeFalse(PlatformDependent.isUnaligned());
      // Without unsafe/unaligned access the hash must match Arrays.hashCode.
      ArrayList<byte[]> inputs = new ArrayList<>();
      inputs.add(new byte[0]);
      inputs.add(new byte[]{1});
      inputs.add(new byte[]{2});
      inputs.add(new byte[]{0, 1});
      inputs.add(new byte[]{1, 2});
      inputs.add(new byte[]{0, 1, 2, 3, 4, 5});
      inputs.add(new byte[]{6, 7, 8, 9, 0, 1});
      inputs.add(new byte[]{-1, -1, -1, (byte) 0xE1});
      inputs.forEach(input -> assertEquals("input = " + Arrays.toString(input), Arrays.hashCode(input), ByteUtil.hashCode(input)));
   }

   // Rewritten from try/catch(Exception)+instanceof to the @Test(expected=...) idiom
   // already used by the other negative tests in this class; this also fails the test
   // if a *different* exception type is thrown, which the old instanceof check hid.
   @Test(expected = IllegalArgumentException.class)
   public void testTextBytesToLongBytesNegative() {
      ByteUtil.convertTextBytes("x");
   }

   /**
    * Returns a copy of the buffer content after {@code offset + bytes},
    * or {@code null} when nothing remains (also covers from > capacity).
    */
   private static byte[] duplicateRemaining(ByteBuffer buffer, int offset, int bytes) {
      final int end = offset + bytes;
      final int expectedRemaining = buffer.capacity() - end;
      //it is handling the case of <0 just to allow from to > capacity
      if (expectedRemaining <= 0) {
         return null;
      }
      final byte[] remaining = new byte[expectedRemaining];
      final ByteBuffer duplicate = buffer.duplicate();
      duplicate.clear().position(end);
      duplicate.get(remaining, 0, expectedRemaining);
      return remaining;
   }

   /**
    * Returns a copy of the buffer content before {@code offset},
    * or {@code null} when offset <= 0.
    */
   private static byte[] duplicateBefore(ByteBuffer buffer, int offset) {
      if (offset <= 0) {
         return null;
      }
      final int size = Math.min(buffer.capacity(), offset);
      final byte[] remaining = new byte[size];
      final ByteBuffer duplicate = buffer.duplicate();
      duplicate.clear();
      duplicate.get(remaining, 0, size);
      return remaining;
   }

   /**
    * Zeroes [offset, offset + bytes) via {@link ByteUtil#zeros} and verifies that
    * position/limit are untouched and content outside the range is preserved.
    */
   private static void shouldZeroesByteBuffer(ByteBuffer buffer, int offset, int bytes) {
      final byte[] originalBefore = duplicateBefore(buffer, offset);
      final byte[] originalRemaining = duplicateRemaining(buffer, offset, bytes);
      final int position = buffer.position();
      final int limit = buffer.limit();
      ByteUtil.zeros(buffer, offset, bytes);
      Assert.assertEquals(position, buffer.position());
      Assert.assertEquals(limit, buffer.limit());
      final byte[] zeros = new byte[bytes];
      final byte[] content = new byte[bytes];
      final ByteBuffer duplicate = buffer.duplicate();
      duplicate.clear().position(offset);
      duplicate.get(content, 0, bytes);
      Assert.assertArrayEquals(zeros, content);
      if (originalRemaining != null) {
         final byte[] remaining = new byte[duplicate.remaining()];
         //duplicate position has been moved of bytes
         duplicate.get(remaining);
         Assert.assertArrayEquals(originalRemaining, remaining);
      }
      if (originalBefore != null) {
         final byte[] before = new byte[offset];
         //duplicate position has been moved of bytes: need to reset it
         duplicate.position(0);
         duplicate.get(before);
         Assert.assertArrayEquals(originalBefore, before);
      }
   }

   // Fills [offset, offset + length) with value using absolute puts; made static
   // since it reads no instance state.
   private static ByteBuffer fill(ByteBuffer buffer, int offset, int length, byte value) {
      for (int i = 0; i < length; i++) {
         buffer.put(offset + i, value);
      }
      return buffer;
   }

   @Test
   public void shouldZeroesDirectByteBuffer() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 32;
      final int offset = 1;
      final ByteBuffer buffer = ByteBuffer.allocateDirect(capacity);
      try {
         fill(buffer, 0, capacity, one);
         shouldZeroesByteBuffer(buffer, offset, bytes);
      } finally {
         // Direct buffers are released eagerly to avoid relying on GC.
         if (PlatformDependent.hasUnsafe()) {
            PlatformDependent.freeDirectBuffer(buffer);
         }
      }
   }

   @Test
   public void shouldZeroesLimitedDirectByteBuffer() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 32;
      final int offset = 1;
      final ByteBuffer buffer = ByteBuffer.allocateDirect(capacity);
      try {
         fill(buffer, 0, capacity, one);
         // zeros must operate on capacity, not the (zero) limit.
         buffer.limit(0);
         shouldZeroesByteBuffer(buffer, offset, bytes);
      } finally {
         if (PlatformDependent.hasUnsafe()) {
            PlatformDependent.freeDirectBuffer(buffer);
         }
      }
   }

   @Test
   public void shouldZeroesHeapByteBuffer() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 32;
      final int offset = 1;
      final ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      shouldZeroesByteBuffer(buffer, offset, bytes);
   }

   @Test
   public void shouldZeroesLimitedHeapByteBuffer() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 32;
      final int offset = 1;
      final ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      // zeros must operate on capacity, not the (zero) limit.
      buffer.limit(0);
      shouldZeroesByteBuffer(buffer, offset, bytes);
   }

   @Test(expected = ReadOnlyBufferException.class)
   public void shouldFailWithReadOnlyHeapByteBuffer() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 32;
      final int offset = 1;
      ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      buffer = buffer.asReadOnlyBuffer();
      shouldZeroesByteBuffer(buffer, offset, bytes);
   }

   @Test(expected = IndexOutOfBoundsException.class)
   public void shouldFailIfOffsetIsGreaterOrEqualHeapByteBufferCapacity() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 0;
      final int offset = 64;
      ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      try {
         shouldZeroesByteBuffer(buffer, offset, bytes);
      } catch (IndexOutOfBoundsException expectedEx) {
         //verify that the buffer hasn't changed
         final byte[] originalContent = duplicateRemaining(buffer, 0, 0);
         final byte[] expectedContent = new byte[capacity];
         Arrays.fill(expectedContent, one);
         Assert.assertArrayEquals(expectedContent, originalContent);
         throw expectedEx;
      }
   }

   @Test(expected = IllegalArgumentException.class)
   public void shouldFailIfOffsetIsNegative() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 1;
      final int offset = -1;
      ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      try {
         shouldZeroesByteBuffer(buffer, offset, bytes);
      } catch (IndexOutOfBoundsException expectedEx) {
         //verify that the buffer hasn't changed
         final byte[] originalContent = duplicateRemaining(buffer, 0, 0);
         final byte[] expectedContent = new byte[capacity];
         Arrays.fill(expectedContent, one);
         Assert.assertArrayEquals(expectedContent, originalContent);
         throw expectedEx;
      }
   }

   @Test(expected = IllegalArgumentException.class)
   public void shouldFailIfBytesIsNegative() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = -1;
      final int offset = 0;
      ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      try {
         shouldZeroesByteBuffer(buffer, offset, bytes);
      } catch (IndexOutOfBoundsException expectedEx) {
         //verify that the buffer hasn't changed
         final byte[] originalContent = duplicateRemaining(buffer, 0, 0);
         final byte[] expectedContent = new byte[capacity];
         Arrays.fill(expectedContent, one);
         Assert.assertArrayEquals(expectedContent, originalContent);
         throw expectedEx;
      }
   }

   @Test(expected = IndexOutOfBoundsException.class)
   public void shouldFailIfExceedingHeapByteBufferCapacity() {
      final byte one = (byte) 1;
      final int capacity = 64;
      final int bytes = 65;
      final int offset = 1;
      ByteBuffer buffer = ByteBuffer.allocate(capacity);
      fill(buffer, 0, capacity, one);
      try {
         shouldZeroesByteBuffer(buffer, offset, bytes);
      } catch (IndexOutOfBoundsException expectedEx) {
         //verify that the buffer hasn't changed
         final byte[] originalContent = duplicateRemaining(buffer, 0, 0);
         final byte[] expectedContent = new byte[capacity];
         Arrays.fill(expectedContent, one);
         Assert.assertArrayEquals(expectedContent, originalContent);
         throw expectedEx;
      }
   }

   @Test
   public void testIntToBytes() {
      internalIntToBytesTest(RandomUtil.randomInt(), null);
      internalIntToBytesTest(0, new byte[]{0, 0, 0, 0});
      internalIntToBytesTest(-1, new byte[]{(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
      internalIntToBytesTest(Integer.MIN_VALUE, new byte[]{(byte) 0x80, (byte) 0x00, (byte) 0x00, (byte) 0x00});
      internalIntToBytesTest(Integer.MAX_VALUE, new byte[]{(byte) 0x7F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
   }

   /**
    * Round-trips an int through intToBytes/bytesToInt, cross-checking against
    * ByteBuffer's big-endian encoding and an optional hand-written expectation.
    */
   private void internalIntToBytesTest(int intValue, byte[] manualExpect) {
      byte[] expected = ByteBuffer.allocate(4).putInt(intValue).array();
      byte[] actual = ByteUtil.intToBytes(intValue);
      if (manualExpect != null) {
         Assert.assertEquals(4, manualExpect.length);
         assertArrayEquals(manualExpect, actual);
      }
      assertArrayEquals(expected, actual);
      assertEquals(intValue, ByteUtil.bytesToInt(expected));
      assertEquals(intValue, ByteUtil.bytesToInt(actual));
   }

   @Test
   public void testLongToBytes() {
      internalLongToBytesTest(RandomUtil.randomLong(), null);
      internalLongToBytesTest(0, new byte[]{0, 0, 0, 0, 0, 0, 0, 0});
      internalLongToBytesTest(-1, new byte[]{(byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
      internalLongToBytesTest(Long.MIN_VALUE, new byte[]{(byte) 0x80, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00, (byte) 0x00});
      internalLongToBytesTest(Long.MAX_VALUE, new byte[]{(byte) 0x7F, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF});
   }

   /**
    * Round-trips a long through longToBytes/bytesToLong, cross-checking against
    * ByteBuffer's big-endian encoding and an optional hand-written expectation.
    */
   private void internalLongToBytesTest(long longValue, byte[] manualExpected) {
      byte[] expected = ByteBuffer.allocate(8).putLong(longValue).array();
      byte[] actual = ByteUtil.longToBytes(longValue);
      if (manualExpected != null) {
         Assert.assertEquals(8, manualExpected.length);
         assertArrayEquals(manualExpected, actual);
      }
      assertArrayEquals(expected, actual);
      assertEquals(longValue, ByteUtil.bytesToLong(expected));
      assertEquals(longValue, ByteUtil.bytesToLong(actual));
   }

   @Test
   public void testDoubleLongToBytes() {
      long randomLong1 = RandomUtil.randomLong();
      long randomLong2 = RandomUtil.randomLong();
      ByteBuffer buffer = ByteBuffer.allocate(16);
      buffer.putLong(randomLong1);
      buffer.putLong(randomLong2);
      byte[] assertContent = buffer.array();
      byte[] convertedContent = ByteUtil.doubleLongToBytes(randomLong1, randomLong2);
      assertArrayEquals(assertContent, convertedContent);
   }
}
// Copyright 2014 The Bazel Authors. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.cpp; import static com.google.devtools.build.lib.rules.cpp.CcLibraryHelper.SourceCategory; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Iterables; import com.google.common.collect.Maps; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.analysis.AnalysisEnvironment; import com.google.devtools.build.lib.analysis.FileProvider; import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.TransitiveInfoCollection; import com.google.devtools.build.lib.cmdline.Label; import com.google.devtools.build.lib.cmdline.PackageIdentifier; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.packages.BuildType; import com.google.devtools.build.lib.packages.RuleClass; import com.google.devtools.build.lib.rules.apple.Platform; import com.google.devtools.build.lib.rules.cpp.CcToolchainFeatures.FeatureConfiguration; import com.google.devtools.build.lib.rules.cpp.CppConfiguration.DynamicMode; import 
com.google.devtools.build.lib.rules.cpp.CppConfiguration.HeadersCheckingMode; import com.google.devtools.build.lib.rules.cpp.LinkerInputs.LibraryToLink; import com.google.devtools.build.lib.rules.test.InstrumentedFilesCollector; import com.google.devtools.build.lib.rules.test.InstrumentedFilesCollector.LocalMetadataCollector; import com.google.devtools.build.lib.rules.test.InstrumentedFilesProvider; import com.google.devtools.build.lib.rules.test.InstrumentedFilesProviderImpl; import com.google.devtools.build.lib.shell.ShellUtils; import com.google.devtools.build.lib.syntax.Type; import com.google.devtools.build.lib.util.FileType; import com.google.devtools.build.lib.util.Pair; import com.google.devtools.build.lib.util.Preconditions; import com.google.devtools.build.lib.vfs.PathFragment; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.regex.Pattern; import java.util.regex.PatternSyntaxException; /** * Common parts of the implementation of cc rules. */ public final class CcCommon { private static final String NO_COPTS_ATTRIBUTE = "nocopts"; /** * Collects all metadata files generated by C++ compilation actions that output the .o files * on the input. */ private static final LocalMetadataCollector CC_METADATA_COLLECTOR = new LocalMetadataCollector() { @Override public void collectMetadataArtifacts(Iterable<Artifact> objectFiles, AnalysisEnvironment analysisEnvironment, NestedSetBuilder<Artifact> metadataFilesBuilder) { for (Artifact artifact : objectFiles) { Action action = analysisEnvironment.getLocalGeneratingAction(artifact); if (action instanceof CppCompileAction) { addOutputs(metadataFilesBuilder, action, CppFileTypes.COVERAGE_NOTES); } } } }; /** * Features we request to enable unless a rule explicitly doesn't support them. 
*/ private static final ImmutableSet<String> DEFAULT_FEATURES = ImmutableSet.of( CppRuleClasses.MODULE_MAPS, CppRuleClasses.MODULE_MAP_HOME_CWD, CppRuleClasses.HEADER_MODULE_INCLUDES_DEPENDENCIES, CppRuleClasses.INCLUDE_PATHS); /** C++ configuration */ private final CppConfiguration cppConfiguration; private final RuleContext ruleContext; public CcCommon(RuleContext ruleContext) { this.ruleContext = ruleContext; this.cppConfiguration = ruleContext.getFragment(CppConfiguration.class); } /** * Returns our own linkopts from the rule attribute. This determines linker * options to use when building this target and anything that depends on it. */ public ImmutableList<String> getLinkopts() { Preconditions.checkState(hasAttribute("linkopts", Type.STRING_LIST)); List<String> ourLinkopts = ruleContext.attributes().get("linkopts", Type.STRING_LIST); List<String> result = new ArrayList<>(); if (ourLinkopts != null) { boolean allowDashStatic = !cppConfiguration.forceIgnoreDashStatic() && (cppConfiguration.getDynamicMode() != DynamicMode.FULLY); for (String linkopt : ourLinkopts) { if (linkopt.equals("-static") && !allowDashStatic) { continue; } CppHelper.expandAttribute(ruleContext, result, "linkopts", linkopt, true); } } if (Platform.isApplePlatform(cppConfiguration.getTargetCpu()) && result.contains("-static")) { ruleContext.attributeError( "linkopts", "Apple builds do not support statically linked binaries"); } return ImmutableList.copyOf(result); } public ImmutableList<String> getCopts() { Preconditions.checkState(hasAttribute("copts", Type.STRING_LIST)); // TODO(bazel-team): getAttributeCopts should not tokenize the strings. Make a warning for now. 
List<String> tokens = new ArrayList<>(); for (String str : ruleContext.attributes().get("copts", Type.STRING_LIST)) { tokens.clear(); try { ShellUtils.tokenize(tokens, str); if (tokens.size() > 1) { ruleContext.attributeWarning("copts", "each item in the list should contain only one option"); } } catch (ShellUtils.TokenizationException e) { // ignore, the error is reported in the getAttributeCopts call } } Pattern nocopts = getNoCopts(ruleContext); if (nocopts != null && nocopts.matcher("-Wno-future-warnings").matches()) { ruleContext.attributeWarning("nocopts", "Regular expression '" + nocopts.pattern() + "' is too general; for example, it matches " + "'-Wno-future-warnings'. Thus it might *re-enable* compiler warnings we wish to " + "disable globally. To disable all compiler warnings, add '-w' to copts instead"); } return ImmutableList.<String>builder() .addAll(getPackageCopts(ruleContext)) .addAll(CppHelper.getAttributeCopts(ruleContext, "copts")) .build(); } private boolean hasAttribute(String name, Type<?> type) { return ruleContext.attributes().has(name, type); } /** * Collects all .dwo artifacts in this target's transitive closure. */ public static DwoArtifactsCollector collectTransitiveDwoArtifacts( RuleContext ruleContext, CcCompilationOutputs compilationOutputs) { ImmutableList.Builder<TransitiveInfoCollection> deps = ImmutableList.<TransitiveInfoCollection>builder(); deps.addAll(ruleContext.getPrerequisites("deps", Mode.TARGET)); if (ruleContext.attributes().has("malloc", BuildType.LABEL)) { deps.add(CppHelper.mallocForTarget(ruleContext)); } return compilationOutputs == null // Possible in LIPO collection mode (see initializationHook). ? 
DwoArtifactsCollector.emptyCollector() : DwoArtifactsCollector.transitiveCollector(compilationOutputs, deps.build()); } public TransitiveLipoInfoProvider collectTransitiveLipoLabels(CcCompilationOutputs outputs) { if (CppHelper.getFdoSupport(ruleContext).getFdoRoot() == null || !cppConfiguration.isLipoContextCollector()) { return TransitiveLipoInfoProvider.EMPTY; } NestedSetBuilder<IncludeScannable> scannableBuilder = NestedSetBuilder.stableOrder(); CppHelper.addTransitiveLipoInfoForCommonAttributes(ruleContext, outputs, scannableBuilder); return new TransitiveLipoInfoProvider(scannableBuilder.build()); } /** * Returns a list of ({@link Artifact}, {@link Label}) pairs. Each pair represents an input * source file and the label of the rule that generates it (or the label of the source file * itself if it is an input file). */ List<Pair<Artifact, Label>> getSources() { Map<Artifact, Label> map = Maps.newLinkedHashMap(); Iterable<FileProvider> providers = ruleContext.getPrerequisites("srcs", Mode.TARGET, FileProvider.class); for (FileProvider provider : providers) { for (Artifact artifact : provider.getFilesToBuild()) { // TODO(bazel-team): We currently do not produce an error for duplicate headers and other // non-source artifacts with different labels, as that would require cleaning up the code // base without significant benefit; we should eventually make this consistent one way or // the other. 
Label oldLabel = map.put(artifact, provider.getLabel()); boolean isHeader = CppFileTypes.CPP_HEADER.matches(artifact.getExecPath()); if (!isHeader && SourceCategory.CC_AND_OBJC.getSourceTypes().matches(artifact.getExecPathString()) && oldLabel != null && !oldLabel.equals(provider.getLabel())) { ruleContext.attributeError("srcs", String.format( "Artifact '%s' is duplicated (through '%s' and '%s')", artifact.getExecPathString(), oldLabel, provider.getLabel())); } } } ImmutableList.Builder<Pair<Artifact, Label>> result = ImmutableList.builder(); for (Map.Entry<Artifact, Label> entry : map.entrySet()) { result.add(Pair.of(entry.getKey(), entry.getValue())); } return result.build(); } /** * Returns the files from headers and does some sanity checks. Note that this method reports * warnings to the {@link RuleContext} as a side effect, and so should only be called once for any * given rule. */ public static List<Pair<Artifact, Label>> getHeaders(RuleContext ruleContext) { Map<Artifact, Label> map = Maps.newLinkedHashMap(); for (TransitiveInfoCollection target : ruleContext.getPrerequisitesIf("hdrs", Mode.TARGET, FileProvider.class)) { FileProvider provider = target.getProvider(FileProvider.class); for (Artifact artifact : provider.getFilesToBuild()) { if (CppRuleClasses.DISALLOWED_HDRS_FILES.matches(artifact.getFilename())) { ruleContext.attributeWarning("hdrs", "file '" + artifact.getFilename() + "' from target '" + target.getLabel() + "' is not allowed in hdrs"); continue; } Label oldLabel = map.put(artifact, provider.getLabel()); if (oldLabel != null && !oldLabel.equals(provider.getLabel())) { ruleContext.attributeWarning( "hdrs", String.format( "Artifact '%s' is duplicated (through '%s' and '%s')", artifact.getExecPathString(), oldLabel, provider.getLabel())); } } } ImmutableList.Builder<Pair<Artifact, Label>> result = ImmutableList.builder(); for (Map.Entry<Artifact, Label> entry : map.entrySet()) { result.add(Pair.of(entry.getKey(), entry.getValue())); } return 
result.build(); } /** * Returns the files from headers and does some sanity checks. Note that this method reports * warnings to the {@link RuleContext} as a side effect, and so should only be called once for any * given rule. */ public List<Pair<Artifact, Label>> getHeaders() { return getHeaders(ruleContext); } private static ImmutableList<String> getPackageCopts(RuleContext ruleContext) { List<String> unexpanded = ruleContext.getRule().getPackage().getDefaultCopts(); return ImmutableList.copyOf(CppHelper.expandMakeVariables(ruleContext, "copts", unexpanded)); } Pattern getNoCopts() { return getNoCopts(ruleContext); } /** * Returns nocopts pattern built from the make variable expanded nocopts * attribute. */ private static Pattern getNoCopts(RuleContext ruleContext) { Pattern nocopts = null; if (ruleContext.getRule().isAttrDefined(NO_COPTS_ATTRIBUTE, Type.STRING)) { String nocoptsAttr = ruleContext.expandMakeVariables(NO_COPTS_ATTRIBUTE, ruleContext.attributes().get(NO_COPTS_ATTRIBUTE, Type.STRING)); try { nocopts = Pattern.compile(nocoptsAttr); } catch (PatternSyntaxException e) { ruleContext.attributeError(NO_COPTS_ATTRIBUTE, "invalid regular expression '" + nocoptsAttr + "': " + e.getMessage()); } } return nocopts; } // TODO(bazel-team): calculating nocopts every time is not very efficient, // fix this after the rule migration. The problem is that in some cases we call this after // the RCT is created (so RuleContext is not accessible), in some cases during the creation. // It would probably make more sense to use TransitiveInfoProviders. /** * Returns true if the rule context has a nocopts regex that matches the given value, false * otherwise. */ static boolean noCoptsMatches(String option, RuleContext ruleContext) { Pattern nocopts = getNoCopts(ruleContext); return nocopts == null ? false : nocopts.matcher(option).matches(); } private static final String DEFINES_ATTRIBUTE = "defines"; /** * Returns a list of define tokens from "defines" attribute. 
* * <p>We tokenize the "defines" attribute, to ensure that the handling of * quotes and backslash escapes is consistent Bazel's treatment of the "copts" attribute. * * <p>But we require that the "defines" attribute consists of a single token. */ public List<String> getDefines() { List<String> defines = new ArrayList<>(); for (String define : ruleContext.attributes().get(DEFINES_ATTRIBUTE, Type.STRING_LIST)) { List<String> tokens = new ArrayList<>(); try { ShellUtils.tokenize(tokens, ruleContext.expandMakeVariables(DEFINES_ATTRIBUTE, define)); if (tokens.size() == 1) { defines.add(tokens.get(0)); } else if (tokens.isEmpty()) { ruleContext.attributeError(DEFINES_ATTRIBUTE, "empty definition not allowed"); } else { ruleContext.attributeError(DEFINES_ATTRIBUTE, "definition contains too many tokens (found " + tokens.size() + ", expecting exactly one)"); } } catch (ShellUtils.TokenizationException e) { ruleContext.attributeError(DEFINES_ATTRIBUTE, e.getMessage()); } } return defines; } /** * Determines a list of loose include directories that are only allowed to be referenced when * headers checking is {@link HeadersCheckingMode#LOOSE} or {@link HeadersCheckingMode#WARN}. */ List<PathFragment> getLooseIncludeDirs() { List<PathFragment> result = new ArrayList<>(); // The package directory of the rule contributes includes. Note that this also covers all // non-subpackage sub-directories. PathFragment rulePackage = ruleContext.getLabel().getPackageIdentifier().getPathFragment(); result.add(rulePackage); // Gather up all the dirs from the rule's srcs as well as any of the srcs outputs. if (hasAttribute("srcs", BuildType.LABEL_LIST)) { for (FileProvider src : ruleContext.getPrerequisites("srcs", Mode.TARGET, FileProvider.class)) { PathFragment packageDir = src.getLabel().getPackageIdentifier().getPathFragment(); for (Artifact a : src.getFilesToBuild()) { result.add(packageDir); // Attempt to gather subdirectories that might contain include files. 
result.add(a.getRootRelativePath().getParentDirectory());
        }
      }
    }
    // Add in any 'includes' attribute values as relative path fragments
    if (ruleContext.getRule().isAttributeValueExplicitlySpecified("includes")) {
      PathFragment packageFragment = ruleContext.getLabel().getPackageIdentifier()
          .getPathFragment();
      // For now, anything with an 'includes' needs a blanket declaration
      result.add(packageFragment.getRelative("**"));
    }
    return result;
  }

  /**
   * Returns the directories from the 'includes' attribute that should be passed to the compiler
   * as system include directories (-isystem). Empty unless the rule explicitly sets 'includes'
   * AND the configuration opts into -isystem handling.
   */
  List<PathFragment> getSystemIncludeDirs() {
    // Add in any 'includes' attribute values as relative path fragments
    if (!ruleContext.getRule().isAttributeValueExplicitlySpecified("includes")
        || !cppConfiguration.useIsystemForIncludes()) {
      return ImmutableList.of();
    }
    return getIncludeDirsFromIncludesAttribute();
  }

  /**
   * Returns the directories from the 'includes' attribute that should be passed as plain -I
   * include directories. Complement of {@link #getSystemIncludeDirs}: exactly one of the two is
   * non-empty when 'includes' is set, depending on useIsystemForIncludes().
   */
  List<PathFragment> getIncludeDirs() {
    if (!ruleContext.getRule().isAttributeValueExplicitlySpecified("includes")
        || cppConfiguration.useIsystemForIncludes()) {
      return ImmutableList.of();
    }
    return getIncludeDirsFromIncludesAttribute();
  }

  /**
   * Expands and validates each entry of the 'includes' attribute. Entries are resolved relative
   * to the rule's package; for every valid entry both the source-tree path and the corresponding
   * genfiles path are added to the result. Absolute paths are skipped with a warning; paths
   * escaping the execution root or resolving to the workspace root raise attribute errors.
   */
  private List<PathFragment> getIncludeDirsFromIncludesAttribute() {
    List<PathFragment> result = new ArrayList<>();
    PackageIdentifier packageIdentifier = ruleContext.getLabel().getPackageIdentifier();
    PathFragment packageFragment = packageIdentifier.getPathFragment();
    for (String includesAttr : ruleContext.attributes().get("includes", Type.STRING_LIST)) {
      includesAttr = ruleContext.expandMakeVariables("includes", includesAttr);
      if (includesAttr.startsWith("/")) {
        ruleContext.attributeWarning("includes",
            "ignoring invalid absolute path '" + includesAttr + "'");
        continue;
      }
      PathFragment includesPath = packageFragment.getRelative(includesAttr).normalize();
      if (!includesPath.isNormalized()) {
        ruleContext.attributeError("includes",
            "Path references a path above the execution root.");
      }
      if (includesPath.segmentCount() == 0) {
        ruleContext.attributeError(
            "includes",
            "'"
                + includesAttr
                + "' resolves to the workspace root, which would allow this rule and all of its "
                + "transitive dependents to include any file in your workspace. Please include only"
                + " what you need");
      } else if (!includesPath.startsWith(packageFragment)) {
        ruleContext.attributeWarning(
            "includes",
            "'"
                + includesAttr
                + "' resolves to '"
                + includesPath
                + "' not below the relative path of its package '"
                + packageFragment
                + "'. This will be an error in the future");
        // TODO(janakr): Add a link to a page explaining the problem and fixes?
      } else if (packageIdentifier.getRepository().isMain()
          && !includesPath.startsWith(RuleClass.THIRD_PARTY_PREFIX)) {
        ruleContext.attributeWarning(
            "includes",
            "'"
                + includesAttr
                + "' resolves to '"
                + includesPath
                + "' not in '"
                + RuleClass.THIRD_PARTY_PREFIX
                + "'. This will be an error in the future");
      }
      result.add(includesPath);
      result.add(ruleContext.getConfiguration().getGenfilesFragment().getRelative(includesPath));
    }
    return result;
  }

  /**
   * Collects compilation prerequisite artifacts.
   */
  static NestedSet<Artifact> collectCompilationPrerequisites(
      RuleContext ruleContext, CppCompilationContext context) {
    // TODO(bazel-team): Use context.getCompilationPrerequisites() instead; note that this will
    // need cleaning up the prerequisites, as the compilation context currently collects them
    // transitively (to get transitive headers), but source files are not transitive compilation
    // prerequisites.
    NestedSetBuilder<Artifact> prerequisites = NestedSetBuilder.stableOrder();
    if (ruleContext.attributes().has("srcs", BuildType.LABEL_LIST)) {
      for (FileProvider provider :
          ruleContext.getPrerequisites("srcs", Mode.TARGET, FileProvider.class)) {
        prerequisites.addAll(
            FileType.filter(
                provider.getFilesToBuild(), SourceCategory.CC_AND_OBJC.getSourceTypes()));
      }
    }
    prerequisites.addTransitive(context.getDeclaredIncludeSrcs());
    prerequisites.addTransitive(context.getAdditionalInputs(CppHelper.usePic(ruleContext, false)));
    return prerequisites.build();
  }

  /**
   * Replaces shared library artifact with mangled symlink and creates related
   * symlink action. For artifacts that should retain filename (e.g. libraries
   * with SONAME tag), link is created to the parent directory instead.
   *
   * This action is performed to minimize number of -rpath entries used during
   * linking process (by essentially "collecting" as many shared libraries as
   * possible in the single directory), since we will be paying quadratic price
   * for each additional entry on the -rpath.
   *
   * @param library Shared library artifact that needs to be mangled
   * @param preserveName true if filename should be preserved, false - mangled.
   * @return mangled symlink artifact.
   */
  public LibraryToLink getDynamicLibrarySymlink(Artifact library, boolean preserveName) {
    return SolibSymlinkAction.getDynamicLibrarySymlink(
        ruleContext, library, preserveName, true, ruleContext.getConfiguration());
  }

  /**
   * Returns any linker scripts found in the dependencies of the rule.
   */
  Iterable<Artifact> getLinkerScripts() {
    return FileType.filter(ruleContext.getPrerequisiteArtifacts("deps", Mode.TARGET).list(),
        CppFileTypes.LINKER_SCRIPT);
  }

  /**
   * Provides support for instrumentation.
   */
  public InstrumentedFilesProvider getInstrumentedFilesProvider(Iterable<Artifact> files,
      boolean withBaselineCoverage) {
    // A LIPO context collector build contributes no instrumentation of its own.
    return cppConfiguration.isLipoContextCollector()
        ? InstrumentedFilesProviderImpl.EMPTY
        : InstrumentedFilesCollector.collect(
            ruleContext,
            CppRuleClasses.INSTRUMENTATION_SPEC,
            CC_METADATA_COLLECTOR,
            files,
            withBaselineCoverage);
  }

  /**
   * Creates the feature configuration for a given rule.
   *
   * @param ruleContext the context of the rule we want the feature configuration for.
   * @param ruleSpecificRequestedFeatures features that will be requested, and thus be always
   * enabled if the toolchain supports them.
   * @param ruleSpecificUnsupportedFeatures features that are not supported in the current context.
   * @return the feature configuration for the given {@code ruleContext}.
   */
  public static FeatureConfiguration configureFeatures(
      RuleContext ruleContext,
      Set<String> ruleSpecificRequestedFeatures,
      Set<String> ruleSpecificUnsupportedFeatures,
      CcToolchainProvider toolchain) {
    // Build the set of features that must NOT be enabled for this rule.
    ImmutableSet.Builder<String> unsupportedFeaturesBuilder = ImmutableSet.builder();
    unsupportedFeaturesBuilder.addAll(ruleSpecificUnsupportedFeatures);
    if (!toolchain.supportsHeaderParsing()) {
      // TODO(bazel-team): Remove once supports_header_parsing has been removed from the
      // cc_toolchain rule.
      unsupportedFeaturesBuilder.add(CppRuleClasses.PARSE_HEADERS);
      unsupportedFeaturesBuilder.add(CppRuleClasses.PREPROCESS_HEADERS);
    }
    if (toolchain.getCppCompilationContext().getCppModuleMap() == null) {
      unsupportedFeaturesBuilder.add(CppRuleClasses.MODULE_MAPS);
    }
    Set<String> unsupportedFeatures = unsupportedFeaturesBuilder.build();
    // Request compilation mode + defaults + rule-level features, minus the unsupported ones.
    ImmutableSet.Builder<String> requestedFeatures = ImmutableSet.builder();
    for (String feature :
        Iterables.concat(
            ImmutableSet.of(toolchain.getCompilationMode().toString()),
            DEFAULT_FEATURES,
            ruleContext.getFeatures())) {
      if (!unsupportedFeatures.contains(feature)) {
        requestedFeatures.add(feature);
      }
    }
    requestedFeatures.addAll(ruleSpecificRequestedFeatures);
    FeatureConfiguration configuration =
        toolchain.getFeatures().getFeatureConfiguration(requestedFeatures.build());
    // An unsupported feature that the toolchain turns on anyway is a toolchain bug; surface it.
    for (String feature : unsupportedFeatures) {
      if (configuration.isEnabled(feature)) {
        ruleContext.ruleError("The C++ toolchain '"
            + ruleContext.getPrerequisite(":cc_toolchain", Mode.TARGET).getLabel()
            + "' unconditionally implies feature '" + feature
            + "', which is unsupported by this rule. "
            + "This is most likely a misconfiguration in the C++ toolchain.");
      }
    }
    return configuration;
  }

  /**
   * Creates a feature configuration for a given rule.
   *
   * @param ruleContext the context of the rule we want the feature configuration for.
   * @param toolchain the toolchain we want the feature configuration for.
   * @return the feature configuration for the given {@code ruleContext}.
   */
  public static FeatureConfiguration configureFeatures(
      RuleContext ruleContext, CcToolchainProvider toolchain) {
    return configureFeatures(
        ruleContext, ImmutableSet.<String>of(), ImmutableSet.<String>of(), toolchain);
  }

  /**
   * Creates a feature configuration for a given rule.
   *
   * @param ruleContext the context of the rule we want the feature configuration for.
   * @return the feature configuration for the given {@code ruleContext}.
   */
  public static FeatureConfiguration configureFeatures(RuleContext ruleContext) {
    return configureFeatures(ruleContext, CppHelper.getToolchain(ruleContext));
  }
}
package liquibase.util;

import liquibase.ExtensibleObject;

import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.regex.Pattern;

/**
 * Various utility methods for working with strings.
 */
public class StringUtil {

    // Matches any string that contains at least one ASCII upper/lower case letter.
    private static final Pattern upperCasePattern = Pattern.compile(".*[A-Z].*");
    private static final Pattern lowerCasePattern = Pattern.compile(".*[a-z].*");
    // Shared RNG for randomIdentifer(); SecureRandom so identifiers are unpredictable.
    private static final SecureRandom rnd = new SecureRandom();

    /**
     * Returns the trimmed (left and right) version of the input string. If null is passed, an empty string is returned.
     *
     * @param string the input string to trim
     * @return the trimmed string, or an empty string if the input was null.
     */
    public static String trimToEmpty(String string) {
        if (string == null) {
            return "";
        }
        return string.trim();
    }

    /**
     * Returns the trimmed (left and right) form of the input string. If the string is empty after trimming (or null
     * was passed in the first place), null is returned, i.e. the input string is reduced to nothing.
     *
     * @param string the string to trim
     * @return the trimmed string or null
     */
    public static String trimToNull(String string) {
        if (string == null) {
            return null;
        }
        String returnString = string.trim();
        if (returnString.isEmpty()) {
            return null;
        } else {
            return returnString;
        }
    }

    /**
     * Removes any comments from multiple line SQL using {@link #stripComments(String)}
     * and then extracts each individual statement using {@link #splitSQL(String, String)}.
     *
     * @param multiLineSQL A String containing all the SQL statements
     * @param stripComments If true then comments will be stripped, if false then they will be left in the code
     * @param splitStatements if true, the input is split into one array entry per statement; if false the
     *        result is effectively the whole input (minus stripped comments) as a single entry
     * @param endDelimiter the statement delimiter; null falls back to the standard ";" / "go" / "/" handling
     * @return the individual statements, trimmed, with empty statements dropped
     */
    public static String[] processMutliLineSQL(String multiLineSQL, boolean stripComments, boolean splitStatements, String endDelimiter) {

        // Tokenize the SQL; comments are kept or dropped depending on stripComments.
        StringClauses parsed = SqlParser.parse(multiLineSQL, true, !stripComments);

        List<String> returnArray = new ArrayList<>();

        StringBuilder currentString = new StringBuilder();
        String previousPiece = null;
        boolean previousDelimiter = false;
        List<Object> parsedArray = Arrays.asList(parsed.toArray(true));
        for (Object piece : mergeTokens(parsedArray, endDelimiter)) {
            if (splitStatements && (piece instanceof String) && isDelimiter((String) piece, previousPiece, endDelimiter)) {
                // Delimiter reached: flush the statement accumulated so far.
                String trimmedString = StringUtil.trimToNull(currentString.toString());
                if (trimmedString != null) {
                    returnArray.add(trimmedString);
                }
                currentString = new StringBuilder();
                previousDelimiter = true;
            } else {
                if (!previousDelimiter || (StringUtil.trimToNull((String) piece) != null)) { //don't include whitespace after a delimiter
                    if ((currentString.length() > 0) || (StringUtil.trimToNull((String) piece) != null)) { //don't include whitespace before the statement
                        currentString.append(piece);
                    }
                }
                previousDelimiter = false;
            }
            previousPiece = (String) piece;
        }

        // Flush the trailing statement (input need not end with a delimiter).
        String trimmedString = StringUtil.trimToNull(currentString.toString());
        if (trimmedString != null) {
            returnArray.add(trimmedString);
        }

        return returnArray.toArray(new String[returnArray.size()]);
    }

    /**
     * Delimiters like "//" may span multiple tokens. Look for them and combine them
     */
    private static List<Object> mergeTokens(List<Object> parsedArray, String endDelimiter) {
        if (endDelimiter == null) {
            return parsedArray;
        }

        List<Object> returnList = new ArrayList<>();
        // Tokens collected so far that form a prefix of endDelimiter.
        List<String> possibleMerge = new ArrayList<>();
        for (Object obj : parsedArray) {
            if (possibleMerge.size() == 0) {
                if ((obj instanceof String) && endDelimiter.startsWith((String) obj)) {
                    possibleMerge.add((String) obj);
                } else {
                    returnList.add(obj);
                }
            } else {
                String possibleMergeString = StringUtil.join(possibleMerge, "") + obj.toString();
                if (endDelimiter.equals(possibleMergeString)) {
                    // Complete delimiter assembled from several tokens: emit it as one token.
                    returnList.add(possibleMergeString);
                    possibleMerge.clear();
                } else if (endDelimiter.startsWith(possibleMergeString)) {
                    possibleMerge.add(obj.toString());
                } else {
                    // Prefix match failed: flush the buffered tokens unchanged.
                    returnList.addAll(possibleMerge);
                    returnList.add(obj);
                    possibleMerge.clear();
                }
            }
        }

        return returnList;
    }

    /**
     * Returns true if the input is a delimiter in one of the popular RDBMSs. Recognized delimiters are: semicolon (;),
     * a slash (as the only content) or the word GO (as the only content).
     *
     * @param piece the input line to test
     * @param previousPiece the characters in the input stream that came before piece
     * @param endDelimiter the configured end delimiter; single characters are compared literally, longer
     *        values are treated as a (lower-cased) regular expression — NOTE(review): inferred from the
     *        matches() calls below, confirm against callers
     */
    protected static boolean isDelimiter(String piece, String previousPiece, String endDelimiter) {
        if (endDelimiter == null) {
            // Default delimiters: ";" anywhere; "go" or "/" only at the start of a line.
            return ";".equals(piece) || (("go".equalsIgnoreCase(piece) || "/".equals(piece)) && ((previousPiece == null) || previousPiece.endsWith("\n")));
        } else {
            if (endDelimiter.length() == 1) {
                if ("/".equals(endDelimiter)) {
                    if (previousPiece != null && !previousPiece.endsWith("\n")) {
                        // don't count /'s the are there for comments or other purposes besides delimiters
                        return false;
                    }
                }
                // NOTE(review): the toLowerCase() calls are redundant with equalsIgnoreCase.
                return piece.toLowerCase().equalsIgnoreCase(endDelimiter.toLowerCase());
            } else {
                // Multi-character delimiter: interpreted as a regular expression (case-folded).
                return piece.toLowerCase().matches(endDelimiter.toLowerCase()) || (previousPiece + piece).toLowerCase().matches("[\\s\n\r]*" + endDelimiter.toLowerCase());
            }
        }
    }

    /**
     * Splits a candidate multi-line SQL statement along ;'s and "go"'s.
     */
    public static String[] splitSQL(String multiLineSQL, String endDelimiter) {
        return processMutliLineSQL(multiLineSQL, false, true, endDelimiter);
    }

    /**
     * Searches through a String which contains SQL code and strips out
     * any comments that are between \/**\/ or anything that matches
     * SP--SP<text>\n (to support the ANSI standard commenting of --
     * at the end of a line).
     *
     * @return The String without the comments in
     */
    public static String stripComments(String multiLineSQL) {
        return SqlParser.parse(multiLineSQL, true, false).toString().trim();
    }

    /** Joins the formatted elements of the array with the delimiter; returns null for a null array. */
    public static String join(Object[] array, String delimiter, StringUtilFormatter formatter) {
        if (array == null) {
            return null;
        }
        return join(Arrays.asList(array), delimiter, formatter);
    }

    /** Joins the array elements with the delimiter. */
    public static String join(String[] array, String delimiter) {
        return join(Arrays.asList(array), delimiter);
    }

    /** Joins the collection elements (via toString) with the delimiter. */
    public static String join(Collection<String> collection, String delimiter) {
        return join(collection, delimiter, new ToStringFormatter());
    }

    /**
     * Joins the formatted elements of the collection with the delimiter.
     * Returns null for a null collection and "" for an empty one.
     */
    public static String join(Collection collection, String delimiter, StringUtilFormatter formatter) {
        if (collection == null) {
            return null;
        }
        if (collection.isEmpty()) {
            return "";
        }
        StringBuilder buffer = new StringBuilder();
        for (Object val : collection) {
            buffer.append(formatter.toString(val)).append(delimiter);
        }
        // Drop the trailing delimiter appended by the loop.
        String returnString = buffer.toString();
        return returnString.substring(0, returnString.length() - delimiter.length());
    }

    /** As {@link #join(Collection, String, StringUtilFormatter)}, optionally sorting the formatted values. */
    public static String join(Collection collection, String delimiter, StringUtilFormatter formatter, boolean sorted) {
        if (sorted) {
            TreeSet<String> sortedSet = new TreeSet<>();
            for (Object obj : collection) {
                sortedSet.add(formatter.toString(obj));
            }
            return join(sortedSet, delimiter);
        }
        return join(collection, delimiter, formatter);
    }

    /** As {@link #join(Collection, String)}, optionally sorting the values. Note: sorting deduplicates (TreeSet). */
    public static String join(Collection<String> collection, String delimiter, boolean sorted) {
        if (sorted) {
            return join(new TreeSet<>(collection), delimiter);
        } else {
            return join(collection, delimiter);
        }
    }

    /** Joins the map as "key=value" pairs with the delimiter. */
    public static String join(Map map, String delimiter) {
        return join(map, delimiter, new ToStringFormatter());
    }

    /** Joins the map as "key=formattedValue" pairs with the delimiter. */
    public static String join(Map map, String delimiter, StringUtilFormatter formatter) {
        List<String> list = new ArrayList<>();
        for (Map.Entry entry : (Set<Map.Entry>) map.entrySet()) {
            list.add(entry.getKey().toString() + "=" + formatter.toString(entry.getValue()));
        }
        return join(list, delimiter);
    }

    /** Joins the object's attributes as sorted "name=value" pairs with the delimiter. */
    public static String join(ExtensibleObject extensibleObject, String delimiter) {
        return join(extensibleObject, delimiter, new ToStringFormatter());
    }

    /** Joins the object's attributes as sorted "name=formattedValue" pairs, skipping null-formatted values. */
    public static String join(ExtensibleObject extensibleObject, String delimiter, StringUtilFormatter formatter) {
        List<String> list = new ArrayList<>();
        for (String attribute : new TreeSet<>(extensibleObject.getAttributes())) {
            String formattedValue = formatter.toString(extensibleObject.get(attribute, Object.class));
            if (formattedValue != null) {
                list.add(attribute + "=" + formattedValue);
            }
        }
        return join(list, delimiter);
    }

    /** Splits on the regex and trims each resulting piece; returns null for null input. */
    public static List<String> splitAndTrim(String s, String regex) {
        if (s == null) {
            return null;
        }
        List<String> returnList = new ArrayList<>();
        for (String string : s.split(regex)) {
            returnList.add(string.trim());
        }

        return returnList;
    }

    /** Returns the string repeated the given number of times ("" for times <= 0). */
    public static String repeat(String string, int times) {
        StringBuilder result = new StringBuilder(string.length() * times);
        for (int i = 0; i < times; i++) {
            result.append(string);
        }
        return result.toString();
    }

    /** Joins boxed integers with the delimiter; returns null for a null array. NPE on null elements. */
    public static String join(Integer[] array, String delimiter) {
        if (array == null) {
            return null;
        }

        int[] ints = new int[array.length];
        for (int i = 0; i < ints.length; i++) {
            ints[i] = array[i];
        }
        return StringUtil.join(ints, delimiter);
    }

    /** Joins ints with the delimiter; null array yields null, empty array yields "". */
    public static String join(int[] array, String delimiter) {
        if (array == null) {
            return null;
        }
        if (array.length == 0) {
            return "";
        }
        StringBuilder buffer = new StringBuilder();
        for (int val : array) {
            buffer.append(val).append(delimiter);
        }

        // Drop the trailing delimiter appended by the loop.
        String returnString = buffer.toString();
        return returnString.substring(0, returnString.length() - delimiter.length());
    }

    /** Indents every line of the string by 4 spaces. */
    public static String indent(String string) {
        return indent(string, 4);
    }

    /** Indents every line of the string by the given number of spaces. */
    public static String indent(String string, int padding) {
        String pad = StringUtil.repeat(" ", padding);
        return pad + (string.replaceAll("\n", "\n" + pad));
    }

    /** Lower-cases the first character. Throws on an empty string. */
    public static String lowerCaseFirst(String string) {
        return string.substring(0, 1).toLowerCase() + string.substring(1);
    }

    /** Upper-cases the first character. Throws on an empty string. */
    public static String upperCaseFirst(String string) {
        return string.substring(0, 1).toUpperCase() + string.substring(1);
    }

    /** True if the string contains at least one ASCII uppercase letter. */
    public static boolean hasUpperCase(String string) {
        return upperCasePattern.matcher(string).matches();
    }

    /** True if the string contains at least one ASCII lowercase letter. */
    public static boolean hasLowerCase(String string) {
        return lowerCasePattern.matcher(string).matches();
    }

    /** Converts CRLF and lone CR line endings to LF; null-safe. */
    public static String standardizeLineEndings(String string) {
        if (string == null) {
            return null;
        }
        return string.replace("\r\n", "\n").replace("\r", "\n");
    }

    /** True if every character of the string is 7-bit ASCII; null is considered ASCII. */
    public static boolean isAscii(String string) {
        if (string == null) {
            return true;
        }
        for (char c : string.toCharArray()) {
            if (!isAscii(c)) {
                return false;
            }
        }
        return true;
    }

    /**
     * Returns true if ch is a "7-bit-clean" ASCII character (ordinal number < 128).
     *
     * @param ch the character to test
     * @return true if 7 bit-clean, false otherwise.
     */
    public static boolean isAscii(char ch) {
        return ch < 128;
    }

    /**
     * Escapes only non-ASCII characters (> 0x7F) as numeric HTML entities.
     * NOTE(review): '&', '<' and '>' are deliberately left unescaped here.
     */
    public static String escapeHtml(String str) {
        StringBuilder out = new StringBuilder();
        int len = str.length();
        for (int i = 0; i < len; i++) {
            char c = str.charAt(i);
            if (c > 0x7F) {
                out.append("&#");
                out.append(Integer.toString(c, 10));
                out.append(';');
            } else {
                out.append(c);
            }
        }
        return out.toString();
    }

    /**
     * Adds spaces to the right of the input value until the string has reached the given length. Nothing is done
     * if the string already has the given length or if the string is even longer.
     *
     * @param value The string to pad (if necessary)
     * @param length the desired length
     * @return the input string, padded if necessary.
     */
    public static String pad(String value, int length) {
        value = StringUtil.trimToEmpty(value);
        if (value.length() >= length) {
            return value;
        }

        return value + StringUtil.repeat(" ", length - value.length());
    }

    /**
     * Adds spaces to the left of the input value until the string has reached the given length. Nothing is done
     * if the string already has the given length or if the string is even longer.
     *
     * @param value The string to pad (if necessary)
     * @param length the desired length
     * @return the input string, padded if necessary.
     */
    public static String leftPad(String value, int length) {
        value = StringUtil.trimToEmpty(value);
        if (value.length() >= length) {
            return value;
        }

        return StringUtil.repeat(" ", length - value.length()) + value;
    }

    /**
     * Returns true if the input string is the empty string (null-safe).
     *
     * @param value String to be checked
     * @return true if String is null or empty
     */
    public static boolean isEmpty(String value) {
        return (value == null) || value.isEmpty();
    }

    /**
     * Returns true if the input string is NOT the empty string. If the string is null, false is returned.
     *
     * @param value String to be checked
     * @return true if string is not null and not empty (length > 0)
     */
    public static boolean isNotEmpty(String value) {
        return !isEmpty(value);
    }

    /**
     * Checks if <code>value</code> starts with <code>startsWith</code>.
     *
     * @param value the string to inspect
     * @param startsWith the prefix to look for
     * @return true if <code>value</code> starts with <code>startsWith</code>, otherwise false. If any of arguments is null returns false
     */
    public static boolean startsWith(String value, String startsWith) {
        if ((value == null) || (startsWith == null)) {
            return false;
        }

        return value.startsWith(startsWith);
    }

    /**
     * Returns true if the given string only consists of whitespace characters (null-safe)
     *
     * @param string the string to test
     * @return true if the string is null or only consists of whitespaces.
     */
    public static boolean isWhitespace(CharSequence string) {
        if (string == null) {
            return true;
        }
        return StringUtil.trimToNull(string.toString()) == null;
    }

    /**
     * Compares a minimum version number given in string form (only the first three parts are considered) with a
     * candidate version given as the three ints major, minor and patch.
     *
     * @param minimumVersion The minimum version that is required, given as a string with up to 3 parts, e.g. "7.4" or "9.6.3"
     * @param candidateMajor the version number to be tested, major part
     * @param candidateMinor the version number to be tested, minor part
     * @param candidatePatch the version number to be tested, patch part
     * @return true if candidateMajor.candidateMinor.candidatePatch >= minimumVersion or false if not
     */
    public static boolean isMinimumVersion(String minimumVersion, int candidateMajor,
                                           int candidateMinor, int candidatePatch) {
        // NOTE(review): non-numeric version parts will throw NumberFormatException here.
        String[] parts = minimumVersion.split("\\.", 3);
        int minMajor = Integer.parseInt(parts[0]);
        int minMinor = (parts.length > 1) ? Integer.parseInt(parts[1]) : 0;
        int minPatch = (parts.length > 2) ? Integer.parseInt(parts[2]) : 0;

        if (minMajor > candidateMajor) {
            return false;
        }

        if ((minMajor == candidateMajor) && (minMinor > candidateMinor)) {
            return false;
        }

        return !((minMajor == candidateMajor) && (minMinor == candidateMinor) && (minPatch > candidatePatch));
    }

    /** Truncates the string to maxLength characters, replacing the tail with "..." when truncating. */
    public static String limitSize(String string, int maxLength) {
        if (string.length() > maxLength) {
            return string.substring(0, maxLength - 3) + "...";
        }
        return string;
    }

    /**
     * Produce a random identifer of the given length, consisting only of uppercase letters.
     *
     * @param len desired length of the string
     * @return an identifier of the desired length
     */
    public static String randomIdentifer(int len) {
        final String AB = "ABCDEFGHIJKLMNOPQRSTUVWXYZ";

        StringBuilder sb = new StringBuilder(len);
        for (int i = 0; i < len; i++)
            sb.append(AB.charAt(rnd.nextInt(AB.length())));
        return sb.toString();
    }

    /** Converts a value of the given type to its string representation for joining. */
    public interface StringUtilFormatter<Type> {
        String toString(Type obj);
    }

    /** Formatter that simply calls toString(); null maps to null. */
    public static class ToStringFormatter implements StringUtilFormatter {
        @Override
        public String toString(Object obj) {
            if (obj == null) {
                return null;
            }
            return obj.toString();
        }
    }

    /** Formatter with special handling for classes (name), arrays and collections (bracketed lists). */
    public static class DefaultFormatter implements StringUtilFormatter {
        @Override
        public String toString(Object obj) {
            if (obj == null) {
                return null;
            } else if (obj instanceof Class) {
                return ((Class) obj).getName();
            } else if (obj instanceof Object[]) {
                if (((Object[]) obj).length == 0) {
                    return null;
                } else {
                    return "[" + StringUtil.join((Object[]) obj, ", ", this) + "]";
                }
            } else if (obj instanceof Collection) {
                if (((Collection) obj).size() == 0) {
                    return null;
                } else {
                    return "[" + StringUtil.join((Collection) obj, ", ", this) + "]";
                }
            }
            return obj.toString();
        }
    }

    /**
     * Returns if two strings are equal, ignoring:
     * <ul>
     * <li>case (uppercase/lowercase)</li>
     * <li>difference between null, and empty string, and a string that only has spaces</li>
     * </ul>
     *
     * @param s1 the first String to compare (or null)
     * @param s2 the second String to compare (or null)
     * @return true if the Strings are equal by the above criteria, false in all other cases
     */
    public static boolean equalsIgnoreCaseAndEmpty(String s1, String s2) {
        String clean1 = trimToNull(s1);
        String clean2 = trimToNull(s2);
        if (clean1 == null && clean2 == null) {
            return true;
        } else {
            // Both cannot be null at this point
            if (clean1 == null || clean2 == null) {
                return false;
            }
        }
        return clean1.equalsIgnoreCase(clean2);
    }

    /**
     * Trims {@link Character#isWhitespace(char) whitespace} characters from the
     * end of specified
<code>string</code> * @param string String to trim * @return new String without the whitespace at the end */ public static String trimRight(String string) { int i = string.length()-1; while (i >= 0 && Character.isWhitespace(string.charAt(i))) { i--; } return string.substring(0,i+1); } /** * * @param sqlString * @return the last block comment from a Sql string if any */ public static String getLastBlockComment(String sqlString) { if (isEmpty(sqlString) || sqlString.length() < 4) { return null; } StringBuilder reversedSqlStringBuilder = new StringBuilder(sqlString).reverse(); String reversedString = reversedSqlStringBuilder.toString(); int idxClosingLastChar = -1, idxOpeningFirstChar = -1; for (int i = 0; i < reversedString.length(); i++) { if (idxClosingLastChar < 0) { // we have not found the start of the pair (reversed) yet) char c = reversedString.charAt(i); if (c == '/') { // check the second one char s = reversedString.charAt(i + 1); if (s == '*') { idxClosingLastChar = i; } } else if (!Character.isWhitespace(c)){ // does not look like it ends with block comment, return null return null; } } else { // look for closing pair (reversed) char c = reversedString.charAt(i); if (c == '/') { // check the previous one char s = reversedString.charAt(i - 1); char e = reversedString.charAt(i + 1); // if it was not escaped if (s == '*' && e != '\\') { idxOpeningFirstChar = i; break; } } } } // reverse the index to get the start of the last comment block int idxOfLastBlockComment = sqlString.length() - (idxOpeningFirstChar + 1); return sqlString.substring(idxOfLastBlockComment); } /** * * @param sqlString * @return the last line comment from a Sql string if any */ public static String getLastLineComment(String sqlString) { if (isEmpty(sqlString) || sqlString.length() < 2) { return null; } boolean startOfNewLine = false; int idxOfDoubleDash = -1; for (int i = 0; i < sqlString.length(); i++) { char c = sqlString.charAt(i); // we have not found the start of the line comment yet 
if (c == '-') { // check the next one char s = sqlString.charAt(i + 1); if (s == '-') { if (idxOfDoubleDash < 0) { idxOfDoubleDash = i; } startOfNewLine = false; } } else if (!Character.isWhitespace(c)) { if (startOfNewLine) { // new line started and we found some other character, reset the index, idxOfDoubleDash = -1; } } else if (c == '\r' || c == '\n') { // new line found startOfNewLine = true; } } if (idxOfDoubleDash < 0) { return null; } return sqlString.substring(idxOfDoubleDash); } /** * Strips the comments and whitespaces from the end of given sql string. * @param sqlString * @return */ public static String stripSqlCommentsAndWhitespacesFromTheEnd(String sqlString) { if (isEmpty(sqlString)) { return sqlString; } StringBuilder str = new StringBuilder(sqlString); boolean strModified = true; while (strModified) { strModified = false; // first check for last block comment // since line comments could be inside block comments, we want to // remove them first. String lastBlockComment = getLastBlockComment(str.toString()); if (isNotEmpty(lastBlockComment)) { str.setLength(str.length() - lastBlockComment.length()); // we just modified the end of the string, // do another loop to check for next block or line comments strModified = true; } // now check for the line comments String lastLineComment = getLastLineComment(str.toString()); if (isNotEmpty(lastLineComment)) { str.setLength(str.length() - lastLineComment.length()); // we just modified the end of the string, // do another loop to check for next block or line comments strModified = true; } } return trimRight(str.toString()); } /** * From commonslang3 -> StringUtil * <p>Gets a substring from the specified String avoiding exceptions.</p> * * <p>A negative start position can be used to start/end {@code n} * characters from the end of the String.</p> * * <p>The returned substring starts with the character in the {@code start} * position and ends before the {@code end} position. 
 All position counting is
     * zero-based -- i.e., to start at the beginning of the string use
     * {@code start = 0}. Negative start and end positions can be used to
     * specify offsets relative to the end of the String.</p>
     *
     * <p>If {@code start} is not strictly to the left of {@code end}, ""
     * is returned.</p>
     *
     * <pre>
     * StringUtil.substring(null, *, *) = null
     * StringUtil.substring("", * , *) = "";
     * StringUtil.substring("abc", 0, 2) = "ab"
     * StringUtil.substring("abc", 2, 0) = ""
     * StringUtil.substring("abc", 2, 4) = "c"
     * StringUtil.substring("abc", 4, 6) = ""
     * StringUtil.substring("abc", 2, 2) = ""
     * StringUtil.substring("abc", -2, -1) = "b"
     * StringUtil.substring("abc", -4, 2) = "ab"
     * </pre>
     *
     * @param str the String to get the substring from, may be null
     * @param start the position to start from, negative means
     * count back from the end of the String by this many characters
     * @param end the position to end at (exclusive), negative means
     * count back from the end of the String by this many characters
     * @return substring from start position to end position,
     * {@code null} if null String input
     */
    public static String substring(final String str, int start, int end) {
        if (str == null) {
            return null;
        }

        // handle negatives
        if (end < 0) {
            end = str.length() + end; // remember end is negative
        }
        if (start < 0) {
            start = str.length() + start; // remember start is negative
        }

        // check length next
        if (end > str.length()) {
            end = str.length();
        }

        // if start is greater than end, return ""
        if (start > end) {
            return "";
        }

        // clamp any still-negative offsets (inputs beyond the string's start) to 0
        if (start < 0) {
            start = 0;
        }
        if (end < 0) {
            end = 0;
        }

        return str.substring(start, end);
    }

    //from https://stackoverflow.com/a/48588062/45756
    // Escapes a string for use as XML element content: '&', '<', '>' become entities,
    // illegal XML characters become U+FFFD, and all non-ASCII code points (including
    // surrogate pairs, which are decoded first) become numeric character references.
    public static String escapeXml(CharSequence s) {
        StringBuilder sb = new StringBuilder();
        int len = s.length();
        for (int i = 0; i < len; i++) {
            int c = s.charAt(i);
            if (c >= 0xd800 && c <= 0xdbff && i + 1 < len) {
                c = ((c - 0xd7c0) << 10) | (s.charAt(++i) & 0x3ff); // UTF16 decode
            }
            if (c < 0x80) { // ASCII range: test most common case first
                if (c < 0x20 && (c != '\t' && c != '\r' && c != '\n')) {
                    // Illegal XML character, even encoded. Skip or substitute
                    sb.append("&#xfffd;"); // Unicode replacement character
                } else {
                    switch (c) {
                        case '&': sb.append("&amp;"); break;
                        case '>': sb.append("&gt;"); break;
                        case '<': sb.append("&lt;"); break;
                        // Uncomment next two if encoding for an XML attribute
                        // case '\'' sb.append("&apos;"); break;
                        // case '\"' sb.append("&quot;"); break;
                        // Uncomment next three if you prefer, but not required
                        // case '\n' sb.append("&#10;"); break;
                        // case '\r' sb.append("&#13;"); break;
                        // case '\t' sb.append("&#9;"); break;
                        default: sb.append((char) c);
                    }
                }
            } else if ((c >= 0xd800 && c <= 0xdfff) || c == 0xfffe || c == 0xffff) {
                // Illegal XML character, even encoded. Skip or substitute
                sb.append("&#xfffd;"); // Unicode replacement character
            } else {
                sb.append("&#x");
                sb.append(Integer.toHexString(c));
                sb.append(';');
            }
        }
        return sb.toString();
    }

    /**
     * Concatenates the addition string to the baseString string, adjusting the case of "addition" to match the base string.
     * If the string is all caps, append addition in all caps. If all lower case, append in all lower case. If baseString is mixed case, make no changes to addition.
     */
    public static String concatConsistentCase(String baseString, String addition) {
        boolean hasLowerCase = hasLowerCase(baseString);
        boolean hasUpperCase = hasUpperCase(baseString);
        if ((hasLowerCase && hasUpperCase) || (!hasLowerCase && !hasUpperCase)) { //mixed case || no letters
            return baseString + addition;
        } else if (hasLowerCase) {
            return baseString + addition.toLowerCase();
        } else {
            return baseString + addition.toUpperCase();
        }
    }
}
/* * www.javagl.de - JglTF * * Copyright 2015-2016 Marco Hutter - http://www.javagl.de * * Permission is hereby granted, free of charge, to any person * obtaining a copy of this software and associated documentation * files (the "Software"), to deal in the Software without * restriction, including without limitation the rights to use, * copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following * conditions: * * The above copyright notice and this permission notice shall be * included in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR * OTHER DEALINGS IN THE SOFTWARE. 
*/ package de.javagl.jgltf.browser; import java.io.Closeable; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.nio.ByteBuffer; import java.text.NumberFormat; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Objects; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.LongConsumer; import java.util.logging.Logger; import de.javagl.jgltf.model.io.Buffers; import de.javagl.jgltf.model.io.GltfAsset; import de.javagl.jgltf.model.io.GltfAssetReader; import de.javagl.jgltf.model.io.GltfReference; import de.javagl.jgltf.model.io.IO; import de.javagl.jgltf.model.io.JsonError; import de.javagl.jgltf.model.io.ProgressInputStream; import de.javagl.swing.tasks.ProgressListener; import de.javagl.swing.tasks.executors.GenericProgressTask; import de.javagl.swing.tasks.executors.ObservableExecutorService; import de.javagl.swing.tasks.executors.ObservableExecutors; /** * A reader that loads {@link GltfAsset} using multiple threads, * and publishes progress information. 
 */
class GltfAssetReaderThreaded
{
    /**
     * The logger used in this class
     */
    private static final Logger logger =
        Logger.getLogger(GltfAssetReaderThreaded.class.getName());

    /**
     * The {@link GltfAssetReader} that does the initial read step
     */
    private final GltfAssetReader gltfAssetReader;

    /**
     * The list of {@link ProgressListener}s that have been attached to
     * this reader
     */
    private final List<ProgressListener> progressListeners;

    /**
     * A {@link ProgressListener} that will forward all information to
     * the registered {@link #progressListeners}
     */
    private final ProgressListener forwardingProgressListener =
        new ProgressListener()
    {
        @Override
        public void progressChanged(double progress)
        {
            fireProgressChanged(progress);
        }

        @Override
        public void messageChanged(String message)
        {
            fireMessageChanged(message);
        }
    };

    /**
     * The number of threads, as given in the constructor
     */
    private final int numThreads;

    /**
     * The {@link ObservableExecutorService} that processes the loading tasks
     */
    private ObservableExecutorService observableExecutorService;

    /**
     * The URI that the data is currently read from
     */
    private URI uri;

    /**
     * The {@link GltfAsset} that is currently being read
     */
    private GltfAsset gltfAsset;

    /**
     * Creates a threaded glTF data reader
     *
     * @param numThreads The number of threads to use. If this is less than
     * 0, then a cached thread pool will be used, taking as many threads as
     * necessary. If this is 0, then the number of threads will be the same
     * as the number of available processors.
     */
    GltfAssetReaderThreaded(int numThreads)
    {
        this.numThreads = numThreads;
        // CopyOnWriteArrayList: listeners may be added/removed while events are dispatched
        this.progressListeners = new CopyOnWriteArrayList<ProgressListener>();
        this.gltfAssetReader = new GltfAssetReader();
    }

    /**
     * Set the given consumer to receive {@link JsonError}s that may
     * occur when a glTF is read
     *
     * @param jsonErrorConsumer The {@link JsonError} consumer
     */
    void setJsonErrorConsumer(
        Consumer<? super JsonError> jsonErrorConsumer)
    {
        gltfAssetReader.setJsonErrorConsumer(jsonErrorConsumer);
    }

    /**
     * Creates the executor service for the given number of threads, as
     * given in the constructor.
     *
     * @param numThreads The number of threads
     * @return The executor service
     */
    private static ObservableExecutorService createObservableExecutorService(
        int numThreads)
    {
        ObservableExecutorService executorService = null;
        if (numThreads < 0)
        {
            executorService = ObservableExecutors.newCachedThreadPool();
        }
        else if (numThreads == 0)
        {
            executorService = ObservableExecutors.newFixedThreadPool(
                Runtime.getRuntime().availableProcessors());
        }
        else
        {
            executorService = ObservableExecutors.newFixedThreadPool(numThreads);
        }
        // Let idle worker threads die quickly instead of lingering
        executorService.setKeepAliveTime(3, TimeUnit.SECONDS);
        executorService.allowCoreThreadTimeOut(true);
        return executorService;
    }

    /**
     * Cancel all tasks that are currently executed
     */
    void cancel()
    {
        if (observableExecutorService != null)
        {
            observableExecutorService.shutdownNow();
            // Drop the service: a fresh one is created lazily on the next read
            observableExecutorService = null;
        }
    }

    /**
     * Returns the {@link ObservableExecutorService} that executes the
     * loading tasks. (Note that the instance that is returned here
     * may change when {@link #cancel()} was called between two calls
     * to this method)
     *
     * @return The {@link ObservableExecutorService}
     */
    final ObservableExecutorService getObservableExecutorService()
    {
        if (observableExecutorService == null)
        {
            observableExecutorService =
                createObservableExecutorService(numThreads);
        }
        return observableExecutorService;
    }

    /**
     * Add the given progress listener to be informed about changes in
     * the loading progress
     *
     * @param progressListener The {@link ProgressListener} to add
     */
    final void addProgressListener(ProgressListener progressListener)
    {
        if (progressListener != null)
        {
            this.progressListeners.add(progressListener);
        }
    }

    /**
     * Remove the given progress listener
     *
     * @param progressListener The {@link ProgressListener} to remove
     */
    final void removeProgressListener(ProgressListener progressListener)
    {
        this.progressListeners.remove(progressListener);
    }

    /**
     * Dispatches the given message to all {@link ProgressListener}s
     *
     * @param message The message
     */
    private void fireMessageChanged(String message)
    {
        for (ProgressListener progressListener : progressListeners)
        {
            progressListener.messageChanged(message);
        }
    }

    /**
     * Dispatches the given progress to all {@link ProgressListener}s
     *
     * @param progress The progress
     */
    private void fireProgressChanged(double progress)
    {
        for (ProgressListener progressListener : progressListeners)
        {
            progressListener.progressChanged(progress);
        }
    }

    /**
     * Read the {@link GltfAsset} from the given URI
     *
     * @param uri The URI
     * @return The {@link GltfAsset}
     * @throws IOException If an IO error occurs
     */
    GltfAsset readGltfAsset(URI uri) throws IOException
    {
        Objects.requireNonNull(uri, "The URI may not be null");
        this.uri = uri;
        Callable<Void> gltfAssetLoadingTask = createGltfAssetLoadingTask();
        fireMessageChanged("Loading glTF asset");
        Throwable gltfLoadingError = ExecutorServiceUtils.invokeAll(
            getObservableExecutorService(),
            progress -> fireProgressChanged(progress),
Collections.singletonList(gltfAssetLoadingTask)); if (gltfLoadingError != null) { throw new IOException(gltfLoadingError); } if (gltfAsset == null) { throw new IOException("Could not load glTF asset"); } List<Callable<Void>> loadingTasks = new ArrayList<Callable<Void>>(); List<GltfReference> references = gltfAsset.getReferences(); for (GltfReference reference : references) { Callable<Void> loadingTask = createReferenceLoadingTask(reference); loadingTasks.add(loadingTask); } // Schedule the loading tasks fireMessageChanged("Loading references"); Throwable uriLoadingError = ExecutorServiceUtils.invokeAll( getObservableExecutorService(), progress -> fireProgressChanged(progress), loadingTasks); if (uriLoadingError != null) { throw new IOException(uriLoadingError); } // Clean up and return the result fireMessageChanged("Done"); GltfAsset result = gltfAsset; setGltfAsset(null); return result; } /** * Set the {@link GltfAsset} that is currently being read * * @param gltfAsset The {@link GltfAsset} */ private void setGltfAsset(GltfAsset gltfAsset) { this.gltfAsset = gltfAsset; } /** * Create the task for loading the {@link GltfAsset} from the current URI * * @return The task * @throws IOException If the task cannot be created */ @SuppressWarnings("resource") private Callable<Void> createGltfAssetLoadingTask() throws IOException { InputStream uriInputStream = uri.toURL().openStream(); ProgressInputStream progressInputStream = new ProgressInputStream(uriInputStream); Callable<Void> basicTask = () -> { // Only one glTF is loaded at a time. 
Forward the progress // information to the progress listener LongConsumer progressForwarder = createTotalNumBytesReadConsumer(uri, forwardingProgressListener); progressInputStream.addTotalNumBytesReadConsumer( progressForwarder); GltfAsset loadedGltfAsset = gltfAssetReader.readWithoutReferences(progressInputStream); setGltfAsset(loadedGltfAsset); tryClose(progressInputStream); return null; }; String description = "glTF data from " + IO.extractFileName(uri); GenericProgressTask<Void> loadingTask = new GenericProgressTask<Void>(description); loadingTask.setCallable(basicTask); attach(progressInputStream, loadingTask); return loadingTask; } /** * Create the task for loading the data of the given {@link GltfReference} * * @param reference The {@link GltfReference} * @return The task * @throws IOException If the task cannot be created */ @SuppressWarnings("resource") private Callable<Void> createReferenceLoadingTask(GltfReference reference) throws IOException { String name = reference.getName(); String uriString = reference.getUri(); URI baseUri = IO.getParent(uri); URI absoluteUri = IO.makeAbsolute(baseUri, uriString); InputStream inputStream = IO.createInputStream(absoluteUri); ProgressInputStream progressInputStream = new ProgressInputStream(inputStream); Callable<Void> basicTask = () -> { try { load(reference, progressInputStream); } finally { tryClose(progressInputStream); } return null; }; String taskName = name + " from "; if (IO.isDataUriString(uriString)) { taskName += "data URI"; } else { taskName += uriString; } GenericProgressTask<Void> loadingTask = new GenericProgressTask<Void>(taskName); loadingTask.setCallable(basicTask); attach(progressInputStream, loadingTask); return loadingTask; } /** * Load the data of the given {@link GltfReference} from the given * input stream. The caller is responsible for closing the stream. 
* * @param reference The {@link GltfReference} * @param inputStream The input stream * @throws IOException If an IO error occurs */ private static void load(GltfReference reference, InputStream inputStream) throws IOException { String name = reference.getName(); Consumer<ByteBuffer> target = reference.getTarget(); logger.fine("Reading " + name); byte data[] = IO.readStream(inputStream); ByteBuffer byteBuffer = Buffers.create(data); logger.fine("Reading " + name + " DONE"); target.accept(byteBuffer); } /** * Try to close the given closeable, and print a warning if this should * ever cause an exception. * * @param closeable The closeable */ private static void tryClose(Closeable closeable) { try { closeable.close(); } catch (IOException e) { logger.warning( "Could not close " + closeable + " : "+ e.getMessage()); } } /** * Connect the progress of reading from the given stream to the given * task. The progress from the stream will afterwards be forwarded * to all progress listeners that are attached to the given task * * @param progressInputStream The {@link ProgressInputStream} * @param progressTask The {@link GenericProgressTask} */ private static void attach( ProgressInputStream progressInputStream, GenericProgressTask<?> progressTask) { ProgressListener progressListener = progressTask.getDispatchingProgressListener(); progressInputStream.addTotalNumBytesReadConsumer(t -> { String numBytesString = NumberFormat.getNumberInstance().format(t); String message = " (" + numBytesString + " bytes)"; progressListener.messageChanged(message); }); } /** * Create a consumer for the number of bytes that have been read from * the given URI, forwarding the information as a progress value in * [0.0, 1.0] to the given progress listener. If the total size can not * be obtained from the given URI, then no information will be forwarded. 
* * @param uri The URI * @param progressListener The progress listener * @return The consumer */ private static LongConsumer createTotalNumBytesReadConsumer( URI uri, ProgressListener progressListener) { long contentLength = IO.getContentLength(uri); if (contentLength <= 0) { return t -> { progressListener.progressChanged(-1.0); }; } return totalNumBytesRead -> { double progress = (double)totalNumBytesRead / contentLength; progressListener.progressChanged( Math.max(0.0, Math.min(1.0, progress))); }; } }
/*
 * Licensed to Metamarkets Group Inc. (Metamarkets) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. Metamarkets licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package io.druid.metadata;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Suppliers;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import io.druid.java.util.emitter.EmittingLogger;
import io.druid.java.util.common.Intervals;
import io.druid.java.util.common.StringUtils;
import io.druid.segment.TestHelper;
import io.druid.server.metrics.NoopServiceEmitter;
import io.druid.timeline.DataSegment;
import io.druid.timeline.partition.NoneShardSpec;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;

import java.io.IOException;

/**
 * Tests for {@code SQLMetadataSegmentManager}, using an in-process Derby
 * database (provided by {@link TestDerbyConnector.DerbyConnectorRule}).
 * Each test starts from a segment table pre-populated with two "wikipedia"
 * segments (see {@link #setUp()}).
 */
public class SQLMetadataSegmentManagerTest
{
  // Creates a fresh Derby instance (and metadata table config) per test
  @Rule
  public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule =
      new TestDerbyConnector.DerbyConnectorRule();

  private SQLMetadataSegmentManager manager;
  private SQLMetadataSegmentPublisher publisher;
  private final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();

  // First pre-published test segment (March 2012 interval)
  private final DataSegment segment1 = new DataSegment(
      "wikipedia",
      Intervals.of("2012-03-15T00:00:00.000/2012-03-16T00:00:00.000"),
      "2012-03-16T00:36:30.848Z",
      ImmutableMap.<String, Object>of(
          "type", "s3_zip",
          "bucket", "test",
          "key", "wikipedia/index/y=2012/m=03/d=15/2012-03-16T00:36:30.848Z/0/index.zip"
      ),
      ImmutableList.of("dim1", "dim2", "dim3"),
      ImmutableList.of("count", "value"),
      NoneShardSpec.instance(),
      0,
      1234L
  );

  // Second pre-published test segment (January 2012 interval, i.e. the
  // earlier of the two intervals)
  private final DataSegment segment2 = new DataSegment(
      "wikipedia",
      Intervals.of("2012-01-05T00:00:00.000/2012-01-06T00:00:00.000"),
      "2012-01-06T22:19:12.565Z",
      ImmutableMap.<String, Object>of(
          "type", "s3_zip",
          "bucket", "test",
          "key", "wikipedia/index/y=2012/m=01/d=05/2012-01-06T22:19:12.565Z/0/index.zip"
      ),
      ImmutableList.of("dim1", "dim2", "dim3"),
      ImmutableList.of("count", "value"),
      NoneShardSpec.instance(),
      0,
      1234L
  );

  /**
   * Creates the manager and publisher against the per-test Derby instance,
   * creates the segment table, and publishes the two fixture segments.
   */
  @Before
  public void setUp() throws Exception
  {
    TestDerbyConnector connector = derbyConnectorRule.getConnector();
    manager = new SQLMetadataSegmentManager(
        jsonMapper,
        Suppliers.ofInstance(new MetadataSegmentManagerConfig()),
        derbyConnectorRule.metadataTablesConfigSupplier(),
        connector
    );

    publisher = new SQLMetadataSegmentPublisher(
        jsonMapper,
        derbyConnectorRule.metadataTablesConfigSupplier().get(),
        connector
    );

    connector.createSegmentTable();

    publisher.publishSegment(segment1);
    publisher.publishSegment(segment2);
  }

  /**
   * Stops the manager if a test left it running, so background polling
   * does not leak into the next test.
   */
  @After
  public void teardown()
  {
    if (manager.isStarted()) {
      manager.stop();
    }
  }

  /**
   * Polling should surface both published segments under the single
   * "wikipedia" datasource.
   */
  @Test
  public void testPoll()
  {
    manager.start();
    manager.poll();
    Assert.assertTrue(manager.isStarted());
    Assert.assertEquals(
        ImmutableList.of("wikipedia"),
        manager.getAllDatasourceNames()
    );
    Assert.assertEquals(
        ImmutableSet.of(segment1, segment2),
        ImmutableSet.copyOf(manager.getInventoryValue("wikipedia").getSegments())
    );
  }

  // NOTE(review): "Curropted" in this method name is a typo for
  // "Corrupted"; left as-is here since renaming changes the reported
  // test name.
  @Test
  public void testPollWithCurroptedSegment()
  {
    //create a corrupted segment entry in segments table, which tests
    //that overall loading of segments from database continues to work
    //even in one of the entries are corrupted.
    publisher.publishSegment(
        "corrupt-segment-id",
        "corrupt-datasource",
        "corrupt-create-date",
        "corrupt-start-date",
        "corrupt-end-date",
        true,
        "corrupt-version",
        true,
        StringUtils.toUtf8("corrupt-payload")
    );

    // The manager emits an alert for the corrupt row; register a no-op
    // emitter so the alert does not fail the test
    EmittingLogger.registerEmitter(new NoopServiceEmitter());
    manager.start();
    manager.poll();
    Assert.assertTrue(manager.isStarted());
    // Only the valid "wikipedia" datasource should survive the poll
    Assert.assertEquals(
        "wikipedia",
        Iterables.getOnlyElement(manager.getInventory()).getName()
    );
  }

  /**
   * Removing the datasource marks its segments unused; the unused-interval
   * query should then return them, earliest interval first, respecting the
   * requested limit.
   */
  @Test
  public void testGetUnusedSegmentsForInterval() throws Exception
  {
    manager.start();
    manager.poll();
    Assert.assertTrue(manager.isStarted());
    Assert.assertTrue(manager.removeDatasource("wikipedia"));

    // limit 1: only the earliest interval (segment2, January) is returned
    Assert.assertEquals(
        ImmutableList.of(segment2.getInterval()),
        manager.getUnusedSegmentIntervals("wikipedia", Intervals.of("1970/3000"), 1)
    );

    // limit 5: both intervals, still ordered earliest-first
    Assert.assertEquals(
        ImmutableList.of(segment2.getInterval(), segment1.getInterval()),
        manager.getUnusedSegmentIntervals("wikipedia", Intervals.of("1970/3000"), 5)
    );
  }

  /**
   * A datasource published after the last poll is not yet in the in-memory
   * inventory, but can still be removed from the metadata store.
   */
  @Test
  public void testRemoveDataSource() throws IOException
  {
    manager.start();
    manager.poll();
    Assert.assertTrue(manager.isStarted());

    final String newDataSource = "wikipedia2";
    final DataSegment newSegment = new DataSegment(
        newDataSource,
        Intervals.of("2017-10-15T00:00:00.000/2017-10-16T00:00:00.000"),
        "2017-10-15T20:19:12.565Z",
        ImmutableMap.of(
            "type", "s3_zip",
            "bucket", "test",
            "key", "wikipedia2/index/y=2017/m=10/d=15/2017-10-16T20:19:12.565Z/0/index.zip"
        ),
        ImmutableList.of("dim1", "dim2", "dim3"),
        ImmutableList.of("count", "value"),
        NoneShardSpec.instance(),
        0,
        1234L
    );

    publisher.publishSegment(newSegment);

    // Not polled yet, so not in the inventory...
    Assert.assertNull(manager.getInventoryValue(newDataSource));
    // ...but removal against the metadata store still succeeds
    Assert.assertTrue(manager.removeDatasource(newDataSource));
  }

  /**
   * Same as {@link #testRemoveDataSource()}, but removing a single segment
   * by identifier instead of the whole datasource.
   */
  @Test
  public void testRemoveDataSegment() throws IOException
  {
    manager.start();
    manager.poll();
    Assert.assertTrue(manager.isStarted());

    final String newDataSource = "wikipedia2";
    final DataSegment newSegment = new DataSegment(
        newDataSource,
        Intervals.of("2017-10-15T00:00:00.000/2017-10-16T00:00:00.000"),
        "2017-10-15T20:19:12.565Z",
        ImmutableMap.of(
            "type", "s3_zip",
            "bucket", "test",
            "key", "wikipedia2/index/y=2017/m=10/d=15/2017-10-16T20:19:12.565Z/0/index.zip"
        ),
        ImmutableList.of("dim1", "dim2", "dim3"),
        ImmutableList.of("count", "value"),
        NoneShardSpec.instance(),
        0,
        1234L
    );

    publisher.publishSegment(newSegment);

    Assert.assertNull(manager.getInventoryValue(newDataSource));
    Assert.assertTrue(manager.removeSegment(newDataSource, newSegment.getIdentifier()));
  }

  /**
   * Repeated start/stop cycles must be safe (the coordinator can gain and
   * lose leadership multiple times).
   */
  @Test
  public void testStopAndStart()
  {
    // Simulate successive losing and getting the coordinator leadership
    manager.start();
    manager.stop();
    manager.start();
    manager.stop();
  }
}
package org.nd4j.linalg.api.parallel.tasks.cpu.accumulation;

import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.api.ops.Accumulation;
import org.nd4j.linalg.api.ops.executioner.OpExecutionerUtil;
import org.nd4j.linalg.api.parallel.tasks.BaseTask;
import org.nd4j.linalg.api.parallel.tasks.Task;
import org.nd4j.linalg.api.parallel.tasks.TaskExecutorProvider;
import org.nd4j.linalg.api.parallel.tasks.cpu.BaseCPUTask;
import org.nd4j.linalg.api.shape.tensor.TensorCalculator;
import org.nd4j.linalg.api.shape.tensor.TensorCalculatorFactory;
import org.nd4j.linalg.factory.Nd4j;
import org.nd4j.linalg.util.ArrayUtil;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Future;
import java.util.concurrent.RecursiveAction;
import java.util.concurrent.RecursiveTask;

/**
 * CPU task that executes an {@link Accumulation} op along one or more
 * dimensions of the input array. The work is decomposed into one sub-task
 * per tensor along the given dimensions, either iteratively (via the task
 * executor, see {@link #call()}) or recursively (via fork/join, see
 * {@link #compute()}).
 */
public class CPUAccumulationAlongDimensionTask extends BaseCPUTask<INDArray> {

    /** The accumulation op to execute */
    protected final Accumulation op;
    /** The (normalized, non-negative) dimensions to accumulate along */
    protected final int[] dimensions;
    /** Sub-tasks created by {@link #call()}, one per tensor */
    protected List<Task<Double>> subTasks;

    /**
     * Creates a task that accumulates along the given dimensions.
     *
     * @param op The accumulation op
     * @param parallelThreshold The threshold for parallel decomposition
     * @param dimensions The dimensions; negative values are interpreted
     * relative to the rank of {@code op.x()} (numpy-style)
     */
    public CPUAccumulationAlongDimensionTask(Accumulation op, int parallelThreshold, int... dimensions) {
        super(op, parallelThreshold);
        // Defensive copy before normalizing negative indices: the original
        // code mutated the caller's (varargs) array in place
        int[] dims = dimensions.clone();
        for (int i = 0; i < dims.length; i++) {
            if (dims[i] < 0)
                dims[i] += op.x().rank();
        }
        this.op = op;
        this.dimensions = dims;
    }

    /**
     * Blocks until the result is available. Handles both execution styles:
     * if the fork/join path produced a result directly, it is returned;
     * otherwise the per-tensor sub-task results are assembled here.
     *
     * @return The accumulation result array (also set as {@code op.z()})
     */
    @Override
    public INDArray blockUntilComplete() {
        if (future == null) {
            //invokeAsync() not called?
            invokeAsync();
        }
        INDArray ret;
        try {
            ret = future.get();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
        if (ret != null) {
            // Column-accumulation of a matrix: reshape to a column vector
            if (dimensions.length == 1 && dimensions[0] == 1 && op.x().isMatrix())
                ret = ret.reshape(ret.length(), 1);
            return ret;    //ForkJoin
        }
        //ExecutorService: assemble one scalar result per sub-task
        int[] retShape = ArrayUtil.removeIndex(op.x().shape(), dimensions);
        if (dimensions.length == 1 && dimensions[0] == 1 && op.x().isMatrix())
            retShape = new int[]{op.x().length(), 1};
        INDArray out = Nd4j.create(retShape);
        int i = 0;
        for (Task<Double> task : subTasks) {
            out.putScalar(i++, task.blockUntilComplete());
        }
        op.setZ(out);
        return out;
    }

    /**
     * Iterative decomposition (executor-service path): launches one
     * asynchronous sub-task per tensor along the dimensions. Returns
     * {@code null}; the result is assembled in
     * {@link #blockUntilComplete()}.
     */
    @Override
    public INDArray call() {
        //Callable: Iterative decomposition
        int nTensors = op.x().tensorssAlongDimension(dimensions);
        subTasks = new ArrayList<>(nTensors);
        for (int i = 0; i < nTensors; i++) {
            Task<Double> task = new OpForDimTask(i);
            task.invokeAsync();
            subTasks.add(task);
        }
        return null;
    }

    /**
     * Recursive (fork/join) decomposition. For a single dimension on a
     * non-pass-through op, a specialized 1d action is used; otherwise one
     * forked sub-task per tensor is joined into the result.
     *
     * @return The accumulation result array (also set as {@code op.z()})
     */
    @Override
    public INDArray compute() {
        //Fork Join: Recursive decomposition
        if (dimensions.length == 1 && !op.isPassThrough()) {
            TensorCalculator tCalcx = TensorCalculatorFactory.getTensorCalculator(op.x(), dimensions[0]);
            TensorCalculator tCalcy;
            if (op.y() != null)
                tCalcy = TensorCalculatorFactory.getTensorCalculator(op.y(), dimensions[0]);
            else
                tCalcy = null;
            int[] retShape = ArrayUtil.removeIndex(op.x().shape(), dimensions);
            INDArray out = Nd4j.create(retShape);
            RecursiveAction action = new CPUAccumulations1dAction(op, threshold, tCalcx,
                    tCalcy, 0, tCalcx.getNumTensors() - 1, out);
            action.invoke();
            op.setZ(out);
            return out;
        } else {
            int nTensors = op.x().tensorssAlongDimension(dimensions);
            List<RecursiveTask<Double>> subTasks = new ArrayList<>(nTensors);
            for (int i = 0; i < nTensors; i++) {
                RecursiveTask<Double> task = new OpForDimTaskFJ(i);
                task.fork();
                subTasks.add(task);
            }
            int[] retShape = ArrayUtil.removeIndex(op.x().shape(), dimensions);
            INDArray out = Nd4j.create(retShape);
            int i = 0;
            for (RecursiveTask<Double> task : subTasks) {
                out.putScalar(i++, task.join());
            }
            op.setZ(out);
            return out;
        }
    }

    /**
     * Executor-service sub-task: runs the accumulation op on a single
     * tensor (index {@code tensorNum}) along the dimensions.
     */
    private class OpForDimTask extends BaseTask<Double> {
        private int tensorNum;
        private BaseCPUTask<Double> subTask;
        private Future<Double> future;

        public OpForDimTask(int tensorNum) {
            this.tensorNum = tensorNum;
        }

        @Override
        public void invokeAsync() {
            this.future = TaskExecutorProvider.getTaskExecutor().executeAsync(this);
        }

        @Override
        public Double blockUntilComplete() {
            try {
                // Wait for call() to finish, so that subTask is assigned
                future.get();
            } catch (Exception e) {
                throw new RuntimeException(e);
            }
            return subTask.blockUntilComplete();
        }

        @Override
        public Double call() {
            Accumulation opOnDimension = (Accumulation) op.opForDimension(tensorNum, dimensions);
            INDArray x2 = opOnDimension.x();
            INDArray y2 = opOnDimension.y();
            // Choose the direct path when memory layout permits it
            boolean canDoDirectly;
            if (y2 == null)
                canDoDirectly = OpExecutionerUtil.canDoOpDirectly(x2);
            else
                canDoDirectly = OpExecutionerUtil.canDoOpDirectly(x2, y2);
            if (canDoDirectly) {
                subTask = new CPUAccumulationTask(opOnDimension, threshold, true);
            } else {
                subTask = new CPUAccumulationViaTensorTask(opOnDimension, threshold, true);
            }
            subTask.invokeAsync();
            return null;
        }
    }

    /**
     * Fork/join sub-task: runs the accumulation op on a single tensor
     * (index {@code tensorNum}) along the dimensions. Executed via
     * fork()/join(); the Task interface methods are largely unused here.
     */
    private class OpForDimTaskFJ extends RecursiveTask<Double> implements Task<Double> {
        private int tensorNum;
        private BaseCPUTask<Double> subTask;
        // NOTE(review): assigned in invokeAsync() but never read; the
        // fork/join path uses fork()/join() instead
        private Future<Double> future;

        public OpForDimTaskFJ(int tensorNum) {
            this.tensorNum = tensorNum;
        }

        @Override
        public Double invokeBlocking() {
            invokeAsync();
            return blockUntilComplete();
        }

        @Override
        public void invokeAsync() {
            this.future = TaskExecutorProvider.getTaskExecutor().executeAsync(this);
        }

        @Override
        public Double blockUntilComplete() {
            // NOTE(review): always returns null; callers on the fork/join
            // path obtain the result via join() instead
            return null;
        }

        @Override
        public Double call() {
            //Callable (should never be called)
            throw new RuntimeException("Callable.call() called as part of ForkJoin task");
        }

        @Override
        protected Double compute() {
            //Fork join
            Accumulation opOnDimension = (Accumulation) op.opForDimension(tensorNum, dimensions);
            INDArray x2 = opOnDimension.x();
            INDArray y2 = opOnDimension.y();
            boolean canDoDirectly;
            if (y2 == null)
                canDoDirectly = OpExecutionerUtil.canDoOpDirectly(x2);
            else
                canDoDirectly = OpExecutionerUtil.canDoOpDirectly(x2, y2);
            if (canDoDirectly) {
                subTask = new CPUAccumulationTask(opOnDimension, threshold, true);
            } else {
                subTask = new CPUAccumulationViaTensorTask(opOnDimension, threshold, true);
            }
            return subTask.invoke();
        }
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.curator;

import org.apache.curator.drivers.EventTrace;
import org.apache.curator.drivers.TracerDriver;
import org.apache.curator.utils.DebugUtils;
import org.apache.curator.utils.ThreadUtils;
import org.apache.zookeeper.KeeperException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.concurrent.Callable;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;

/**
 * <p>Mechanism to perform an operation on Zookeeper that is safe against
 * disconnections and "recoverable" errors.</p>
 *
 * <p>
 * When an exception occurs during the operation, the RetryLoop processes it,
 * consults the current retry policy, and either allows another attempt or
 * re-throws the exception.
 * </p>
 *
 * Canonical usage:<br>
 * <pre>
 * RetryLoop retryLoop = client.newRetryLoop();
 * while ( retryLoop.shouldContinue() )
 * {
 *     try
 *     {
 *         // do your work
 *         ZooKeeper      zk = client.getZooKeeper();    // it's important to re-get the ZK instance in case there was an error and the instance was re-created
 *
 *         retryLoop.markComplete();
 *     }
 *     catch ( Exception e )
 *     {
 *         retryLoop.takeException(e);
 *     }
 * }
 * </pre>
 */
public class RetryLoop
{
    private final Logger                        log = LoggerFactory.getLogger(getClass());
    private final long                          startTimeMs = System.currentTimeMillis();
    private final RetryPolicy                   retryPolicy;
    private final AtomicReference<TracerDriver> tracer;

    private boolean     isDone = false;
    private int         retryCount = 0;

    private static final RetrySleeper sleeper = new RetrySleeper()
    {
        @Override
        public void sleepFor(long time, TimeUnit unit) throws InterruptedException
        {
            unit.sleep(time);
        }
    };

    /**
     * Returns the default retry sleeper
     *
     * @return sleeper
     */
    public static RetrySleeper getDefaultRetrySleeper()
    {
        return sleeper;
    }

    /**
     * Convenience utility: creates a retry loop calling the given proc and retrying if needed
     *
     * @param client Zookeeper
     * @param proc procedure to call with retry
     * @param <T> return type
     * @return procedure result
     * @throws Exception any non-retriable errors
     */
    public static<T> T      callWithRetry(CuratorZookeeperClient client, Callable<T> proc) throws Exception
    {
        T           value = null;
        RetryLoop   loop = client.newRetryLoop();
        while ( loop.shouldContinue() )
        {
            try
            {
                client.internalBlockUntilConnectedOrTimedOut();
                value = proc.call();
                loop.markComplete();
            }
            catch ( Exception e )
            {
                ThreadUtils.checkInterrupted(e);
                loop.takeException(e);
            }
        }
        return value;
    }

    RetryLoop(RetryPolicy retryPolicy, AtomicReference<TracerDriver> tracer)
    {
        this.retryPolicy = retryPolicy;
        this.tracer = tracer;
    }

    /**
     * If true is returned, make an attempt at the operation
     *
     * @return true/false
     */
    public boolean      shouldContinue()
    {
        return !isDone;
    }

    /**
     * Call this when your operation has successfully completed
     */
    public void     markComplete()
    {
        isDone = true;
    }

    /**
     * Utility - return true if the given Zookeeper result code is retry-able
     *
     * @param rc result code
     * @return true/false
     */
    public static boolean      shouldRetry(int rc)
    {
        if ( rc == KeeperException.Code.CONNECTIONLOSS.intValue() )
        {
            return true;
        }
        if ( rc == KeeperException.Code.OPERATIONTIMEOUT.intValue() )
        {
            return true;
        }
        if ( rc == KeeperException.Code.SESSIONMOVED.intValue() )
        {
            return true;
        }
        return rc == KeeperException.Code.SESSIONEXPIRED.intValue();
    }

    /**
     * Utility - return true if the given exception is retry-able
     *
     * @param exception exception to check
     * @return true/false
     */
    public static boolean      isRetryException(Throwable exception)
    {
        if ( !(exception instanceof KeeperException) )
        {
            return false;
        }
        KeeperException keeperException = (KeeperException)exception;
        return shouldRetry(keeperException.code().intValue());
    }

    /**
     * Pass any caught exceptions here
     *
     * @param exception the exception
     * @throws Exception if not retry-able or the retry policy returned negative
     */
    public void         takeException(Exception exception) throws Exception
    {
        // Non-retriable exceptions are always rethrown immediately
        if ( !isRetryException(exception) )
        {
            throw exception;
        }

        debugLog("Retry-able exception received", exception);

        // Note: allowRetry() may sleep via the sleeper, and retryCount is
        // incremented on every consultation of the policy
        boolean allowed = retryPolicy.allowRetry(retryCount++, System.currentTimeMillis() - startTimeMs, sleeper);
        if ( allowed )
        {
            new EventTrace("retries-allowed", tracer.get()).commit();
            debugLog("Retrying operation", null);
            return;
        }

        new EventTrace("retries-disallowed", tracer.get()).commit();
        debugLog("Retry policy not allowing retry", null);
        throw exception;
    }

    /**
     * Logs the given message at debug level, unless connection-issue
     * logging has been suppressed via the debug system property
     *
     * @param message the message to log
     * @param t optional associated throwable (may be null)
     */
    private void debugLog(String message, Throwable t)
    {
        if ( !Boolean.getBoolean(DebugUtils.PROPERTY_DONT_LOG_CONNECTION_ISSUES) )
        {
            if ( t != null )
            {
                log.debug(message, t);
            }
            else
            {
                log.debug(message);
            }
        }
    }
}
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution

// Automatically generated by LOXI from template of_class.java
// Do not modify

package org.projectfloodlight.openflow.protocol.ver15;

import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;

// OpenFlow 1.5 (wire version 6) Big Switch Networks experimenter stats request
// carrying a single port number. On the wire it is a STATS_REQUEST (type 18)
// with statsType EXPERIMENTER (0xffff), experimenter id 0x5c16c7 and subtype 8,
// fixed total length 28 bytes. Immutable value class; generated code — change
// the LoxiGen templates and regenerate rather than editing by hand.
class OFBsnPortCounterStatsRequestVer15 implements OFBsnPortCounterStatsRequest {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnPortCounterStatsRequestVer15.class);
    // version: 1.5
    final static byte WIRE_VERSION = 6;
    final static int LENGTH = 28;

    private final static long DEFAULT_XID = 0x0L;
    private final static Set<OFStatsRequestFlags> DEFAULT_FLAGS = ImmutableSet.<OFStatsRequestFlags>of();
    private final static OFPort DEFAULT_PORT_NO = OFPort.ANY;

    // OF message fields
    private final long xid;
    private final Set<OFStatsRequestFlags> flags;
    private final OFPort portNo;

    // Immutable default instance
    final static OFBsnPortCounterStatsRequestVer15 DEFAULT = new OFBsnPortCounterStatsRequestVer15(
        DEFAULT_XID, DEFAULT_FLAGS, DEFAULT_PORT_NO
    );

    // package private constructor - used by readers, builders, and factory
    OFBsnPortCounterStatsRequestVer15(long xid, Set<OFStatsRequestFlags> flags, OFPort portNo) {
        if(flags == null) {
            throw new NullPointerException("OFBsnPortCounterStatsRequestVer15: property flags cannot be null");
        }
        if(portNo == null) {
            throw new NullPointerException("OFBsnPortCounterStatsRequestVer15: property portNo cannot be null");
        }
        this.xid = xid;
        this.flags = flags;
        this.portNo = portNo;
    }

    // Accessors for OF message fields
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_15;
    }

    @Override
    public OFType getType() {
        return OFType.STATS_REQUEST;
    }

    @Override
    public long getXid() {
        return xid;
    }

    @Override
    public OFStatsType getStatsType() {
        return OFStatsType.EXPERIMENTER;
    }

    @Override
    public Set<OFStatsRequestFlags> getFlags() {
        return flags;
    }

    @Override
    public long getExperimenter() {
        // BSN experimenter id (fixed on the wire)
        return 0x5c16c7L;
    }

    @Override
    public long getSubtype() {
        return 0x8L;
    }

    @Override
    public OFPort getPortNo() {
        return portNo;
    }

    // Returns a builder pre-populated with this message's field values.
    public OFBsnPortCounterStatsRequest.Builder createBuilder() {
        return new BuilderWithParent(this);
    }

    // Builder backed by an existing message: unset fields fall back to the parent's values.
    static class BuilderWithParent implements OFBsnPortCounterStatsRequest.Builder {
        final OFBsnPortCounterStatsRequestVer15 parentMessage;

        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsRequestFlags> flags;
        private boolean portNoSet;
        private OFPort portNo;

        BuilderWithParent(OFBsnPortCounterStatsRequestVer15 parentMessage) {
            this.parentMessage = parentMessage;
        }

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REQUEST;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsRequestFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x8L;
        }

        @Override
        public OFPort getPortNo() {
            return portNo;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }

        @Override
        public OFBsnPortCounterStatsRequest build() {
            long xid = this.xidSet ? this.xid : parentMessage.xid;
            Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : parentMessage.flags;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            OFPort portNo = this.portNoSet ? this.portNo : parentMessage.portNo;
            if(portNo == null)
                throw new NullPointerException("Property portNo must not be null");

            return new OFBsnPortCounterStatsRequestVer15(
                    xid,
                    flags,
                    portNo
                );
        }
    }

    // Builder from scratch: unset fields fall back to the DEFAULT_* constants.
    static class Builder implements OFBsnPortCounterStatsRequest.Builder {
        // OF message fields
        private boolean xidSet;
        private long xid;
        private boolean flagsSet;
        private Set<OFStatsRequestFlags> flags;
        private boolean portNoSet;
        private OFPort portNo;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_15;
        }

        @Override
        public OFType getType() {
            return OFType.STATS_REQUEST;
        }

        @Override
        public long getXid() {
            return xid;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }

        @Override
        public OFStatsType getStatsType() {
            return OFStatsType.EXPERIMENTER;
        }

        @Override
        public Set<OFStatsRequestFlags> getFlags() {
            return flags;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setFlags(Set<OFStatsRequestFlags> flags) {
            this.flags = flags;
            this.flagsSet = true;
            return this;
        }

        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }

        @Override
        public long getSubtype() {
            return 0x8L;
        }

        @Override
        public OFPort getPortNo() {
            return portNo;
        }

        @Override
        public OFBsnPortCounterStatsRequest.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }

        @Override
        public OFBsnPortCounterStatsRequest build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            Set<OFStatsRequestFlags> flags = this.flagsSet ? this.flags : DEFAULT_FLAGS;
            if(flags == null)
                throw new NullPointerException("Property flags must not be null");
            OFPort portNo = this.portNoSet ? this.portNo : DEFAULT_PORT_NO;
            if(portNo == null)
                throw new NullPointerException("Property portNo must not be null");

            return new OFBsnPortCounterStatsRequestVer15(
                    xid,
                    flags,
                    portNo
                );
        }
    }

    final static Reader READER = new Reader();

    // Deserializes a message from the wire; returns null (with readerIndex reset)
    // when the buffer does not yet contain the full 28-byte message.
    static class Reader implements OFMessageReader<OFBsnPortCounterStatsRequest> {
        @Override
        public OFBsnPortCounterStatsRequest readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 6
            byte version = bb.readByte();
            if(version != (byte) 0x6)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_15(6), got="+version);
            // fixed value property type == 18
            byte type = bb.readByte();
            if(type != (byte) 0x12)
                throw new OFParseError("Wrong type: Expected=OFType.STATS_REQUEST(18), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 28)
                throw new OFParseError("Wrong length: Expected=28(28), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property statsType == 65535
            short statsType = bb.readShort();
            if(statsType != (short) 0xffff)
                throw new OFParseError("Wrong statsType: Expected=OFStatsType.EXPERIMENTER(65535), got="+statsType);
            Set<OFStatsRequestFlags> flags = OFStatsRequestFlagsSerializerVer15.readFrom(bb);
            // pad: 4 bytes
            bb.skipBytes(4);
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x8L
            int subtype = bb.readInt();
            if(subtype != 0x8)
                throw new OFParseError("Wrong subtype: Expected=0x8L(0x8L), got="+subtype);
            OFPort portNo = OFPort.read4Bytes(bb);

            OFBsnPortCounterStatsRequestVer15 bsnPortCounterStatsRequestVer15 = new OFBsnPortCounterStatsRequestVer15(
                    xid,
                    flags,
                    portNo
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnPortCounterStatsRequestVer15);
            return bsnPortCounterStatsRequestVer15;
        }
    }

    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }

    final static OFBsnPortCounterStatsRequestVer15Funnel FUNNEL = new OFBsnPortCounterStatsRequestVer15Funnel();

    // Feeds the message's canonical wire representation into a Guava hash sink.
    // NOTE(review): the xid is funneled as a long here while the Writer emits it
    // as a 4-byte int — this matches the generator's output for all classes.
    static class OFBsnPortCounterStatsRequestVer15Funnel implements Funnel<OFBsnPortCounterStatsRequestVer15> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnPortCounterStatsRequestVer15 message, PrimitiveSink sink) {
            // fixed value property version = 6
            sink.putByte((byte) 0x6);
            // fixed value property type = 18
            sink.putByte((byte) 0x12);
            // fixed value property length = 28
            sink.putShort((short) 0x1c);
            sink.putLong(message.xid);
            // fixed value property statsType = 65535
            sink.putShort((short) 0xffff);
            OFStatsRequestFlagsSerializerVer15.putTo(message.flags, sink);
            // skip pad (4 bytes)
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x8L
            sink.putInt(0x8);
            message.portNo.putTo(sink);
        }
    }

    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }

    final static Writer WRITER = new Writer();

    // Serializes the message to the wire (always exactly 28 bytes).
    static class Writer implements OFMessageWriter<OFBsnPortCounterStatsRequestVer15> {
        @Override
        public void write(ByteBuf bb, OFBsnPortCounterStatsRequestVer15 message) {
            // fixed value property version = 6
            bb.writeByte((byte) 0x6);
            // fixed value property type = 18
            bb.writeByte((byte) 0x12);
            // fixed value property length = 28
            bb.writeShort((short) 0x1c);
            bb.writeInt(U32.t(message.xid));
            // fixed value property statsType = 65535
            bb.writeShort((short) 0xffff);
            OFStatsRequestFlagsSerializerVer15.writeTo(bb, message.flags);
            // pad: 4 bytes
            bb.writeZero(4);
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x8L
            bb.writeInt(0x8);
            message.portNo.write4Bytes(bb);
        }
    }

    @Override
    public String toString() {
        StringBuilder b = new StringBuilder("OFBsnPortCounterStatsRequestVer15(");
        b.append("xid=").append(xid);
        b.append(", ");
        b.append("flags=").append(flags);
        b.append(", ");
        b.append("portNo=").append(portNo);
        b.append(")");
        return b.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        OFBsnPortCounterStatsRequestVer15 other = (OFBsnPortCounterStatsRequestVer15) obj;

        if( xid != other.xid)
            return false;
        if (flags == null) {
            if (other.flags != null)
                return false;
        } else if (!flags.equals(other.flags))
            return false;
        if (portNo == null) {
            if (other.portNo != null)
                return false;
        } else if (!portNo.equals(other.portNo))
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        // NOTE(review): this first line discards the initial `result` (no `result +`
        // term); it is what the generator emits for every message class, so all
        // instances hash consistently — fix in the LoxiGen template, not here.
        result = prime * (int) (xid ^ (xid >>> 32));
        result = prime * result + ((flags == null) ? 0 : flags.hashCode());
        result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
        return result;
    }
}
/* * Copyright (C) 2015 Twitter, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package com.twitter.sdk.android.core; import androidx.annotation.NonNull; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.twitter.sdk.android.core.internal.TwitterApi; import com.twitter.sdk.android.core.internal.network.OkHttpClientHelper; import com.twitter.sdk.android.core.models.BindingValues; import com.twitter.sdk.android.core.models.BindingValuesAdapter; import com.twitter.sdk.android.core.models.SafeListAdapter; import com.twitter.sdk.android.core.models.SafeMapAdapter; import com.twitter.sdk.android.core.services.AccountService; import com.twitter.sdk.android.core.services.CollectionService; import com.twitter.sdk.android.core.services.ConfigurationService; import com.twitter.sdk.android.core.services.FavoriteService; import com.twitter.sdk.android.core.services.ListService; import com.twitter.sdk.android.core.services.MediaService; import com.twitter.sdk.android.core.services.SearchService; import com.twitter.sdk.android.core.services.StatusesService; import java.util.concurrent.ConcurrentHashMap; import okhttp3.OkHttpClient; import retrofit2.Retrofit; import retrofit2.converter.gson.GsonConverterFactory; /** * A class to allow authenticated access to Twitter API endpoints. 
* Can be extended to provided additional endpoints by extending and providing Retrofit API * interfaces to {@link com.twitter.sdk.android.core.TwitterApiClient#getService(Class)} */ public class TwitterApiClient { @NonNull final ConcurrentHashMap<Class, Object> services; @NonNull final Retrofit retrofit; /** * Constructs Guest Session based TwitterApiClient. */ public TwitterApiClient() { this(OkHttpClientHelper.getOkHttpClient( TwitterCore.getInstance().getGuestSessionProvider()), new TwitterApi()); } /** * Constructs Guest Session based TwitterApiClient, with custom http client. * * The custom http client can be constructed with {@link okhttp3.Interceptor}, and other * optional params provided in {@link okhttp3.OkHttpClient}. */ public TwitterApiClient(@NonNull OkHttpClient client) { this(OkHttpClientHelper.getCustomOkHttpClient( client, TwitterCore.getInstance().getGuestSessionProvider()), new TwitterApi()); } /** * Constructs User Session based TwitterApiClient. */ public TwitterApiClient(@NonNull TwitterSession session) { this(OkHttpClientHelper.getOkHttpClient( session, TwitterCore.getInstance().getAuthConfig()), new TwitterApi()); } /** * Constructs User Session based TwitterApiClient, with custom http client. * * The custom http client can be constructed with {@link okhttp3.Interceptor}, and other * optional params provided in {@link okhttp3.OkHttpClient}. 
*/ public TwitterApiClient(@NonNull TwitterSession session, @NonNull OkHttpClient client) { this(OkHttpClientHelper.getCustomOkHttpClient( client, session, TwitterCore.getInstance().getAuthConfig()), new TwitterApi()); } TwitterApiClient(@NonNull OkHttpClient client, @NonNull TwitterApi twitterApi) { this.services = buildConcurrentMap(); this.retrofit = buildRetrofit(client, twitterApi); } @NonNull private Retrofit buildRetrofit(@NonNull OkHttpClient httpClient, @NonNull TwitterApi twitterApi) { return new Retrofit.Builder() .client(httpClient) .baseUrl(twitterApi.getBaseHostUrl()) .addConverterFactory(GsonConverterFactory.create(buildGson())) .build(); } private Gson buildGson() { return new GsonBuilder() .registerTypeAdapterFactory(new SafeListAdapter()) .registerTypeAdapterFactory(new SafeMapAdapter()) .registerTypeAdapter(BindingValues.class, new BindingValuesAdapter()) .create(); } @NonNull private ConcurrentHashMap<Class, Object> buildConcurrentMap() { return new ConcurrentHashMap<>(); } /** * @return {@link com.twitter.sdk.android.core.services.AccountService} to access TwitterApi */ public AccountService getAccountService() { return getService(AccountService.class); } /** * @return {@link com.twitter.sdk.android.core.services.FavoriteService} to access TwitterApi */ public FavoriteService getFavoriteService() { return getService(FavoriteService.class); } /** * @return {@link com.twitter.sdk.android.core.services.StatusesService} to access TwitterApi */ public StatusesService getStatusesService() { return getService(StatusesService.class); } /** * @return {@link com.twitter.sdk.android.core.services.SearchService} to access TwitterApi */ public SearchService getSearchService() { return getService(SearchService.class); } /** * @return {@link com.twitter.sdk.android.core.services.ListService} to access TwitterApi */ public ListService getListService() { return getService(ListService.class); } /** * Use CollectionTimeline directly, CollectionService is expected 
to change. * @return {@link CollectionService} to access TwitterApi */ public CollectionService getCollectionService() { return getService(CollectionService.class); } /** * @return {@link com.twitter.sdk.android.core.services.ConfigurationService} to access TwitterApi */ public ConfigurationService getConfigurationService() { return getService(ConfigurationService.class); } /** * @return {@link com.twitter.sdk.android.core.services.MediaService} to access Twitter API * upload endpoints. */ public MediaService getMediaService() { return getService(MediaService.class); } /** * Converts Retrofit style interface into instance for API access * * @param cls Retrofit style interface * @return instance of cls */ @SuppressWarnings("unchecked") protected <T> T getService(Class<T> cls) { if (!services.contains(cls)) { services.putIfAbsent(cls, retrofit.create(cls)); } return (T) services.get(cls); } }
/* * Copyright (c) 2008-2021, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.scheduledexecutor.impl; import com.hazelcast.cluster.Member; import com.hazelcast.config.Config; import com.hazelcast.core.HazelcastInstance; import com.hazelcast.core.HazelcastInstanceAware; import com.hazelcast.map.IMap; import com.hazelcast.partition.PartitionAware; import com.hazelcast.scheduledexecutor.AutoDisposableTask; import com.hazelcast.scheduledexecutor.IScheduledExecutorService; import com.hazelcast.scheduledexecutor.IScheduledFuture; import com.hazelcast.scheduledexecutor.NamedTask; import com.hazelcast.scheduledexecutor.StatefulTask; import com.hazelcast.test.AssertTask; import com.hazelcast.test.HazelcastTestSupport; import com.hazelcast.test.TestHazelcastInstanceFactory; import java.io.Serializable; import java.util.List; import java.util.Map; import java.util.concurrent.Callable; import java.util.concurrent.CountDownLatch; import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; import static java.lang.System.currentTimeMillis; import static java.lang.Thread.sleep; /** * Common methods used in ScheduledExecutorService tests. 
*/ public class ScheduledExecutorServiceTestSupport extends HazelcastTestSupport { public IScheduledExecutorService getScheduledExecutor(HazelcastInstance[] instances, String name) { return instances[0].getScheduledExecutorService(name); } int getPartitionIdFromPartitionAwareTask(HazelcastInstance instance, PartitionAware task) { return instance.getPartitionService().getPartition(task.getPartitionKey()).getPartitionId(); } protected HazelcastInstance[] createClusterWithCount(int count) { return createClusterWithCount(count, new Config()); } protected HazelcastInstance[] createClusterWithCount(int count, Config config) { TestHazelcastInstanceFactory factory = createHazelcastInstanceFactory(); HazelcastInstance[] instances = factory.newInstances(config, count); waitAllForSafeState(instances); return instances; } int countScheduledTasksOn(IScheduledExecutorService scheduledExecutorService) { Map<Member, List<IScheduledFuture<Double>>> allScheduled = scheduledExecutorService.getAllScheduledFutures(); int total = 0; for (Member member : allScheduled.keySet()) { total += allScheduled.get(member).size(); } return total; } static class StatefulRunnableTask implements Runnable, Serializable, HazelcastInstanceAware, StatefulTask<String, Integer> { final String latchName; final String runCounterName; final String loadCounterName; int status = 0; transient HazelcastInstance instance; StatefulRunnableTask(String runsCountLatchName, String runCounterName, String loadCounterName) { this.latchName = runsCountLatchName; this.runCounterName = runCounterName; this.loadCounterName = loadCounterName; } @Override public void run() { status++; instance.getCPSubsystem().getAtomicLong(runCounterName).set(status); instance.getCPSubsystem().getCountDownLatch(latchName).countDown(); } @Override public void load(Map<String, Integer> snapshot) { status = snapshot.get("status"); instance.getCPSubsystem().getAtomicLong(loadCounterName).incrementAndGet(); } @Override public void save(Map<String, 
Integer> snapshot) { snapshot.put("status", status); } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } static class ICountdownLatchCallableTask implements Callable<Double>, Serializable, HazelcastInstanceAware { final String initLatchName; final String waitLatchName; final String doneLatchName; transient HazelcastInstance instance; ICountdownLatchCallableTask(String initLatchName, String waitLatchName, String doneLatchName) { this.initLatchName = initLatchName; this.waitLatchName = waitLatchName; this.doneLatchName = doneLatchName; } @Override public Double call() { instance.getCPSubsystem().getCountDownLatch(initLatchName).countDown(); assertOpenEventually(instance.getCPSubsystem().getCountDownLatch(waitLatchName)); instance.getCPSubsystem().getCountDownLatch(doneLatchName).countDown(); return 77 * 2.2; } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } static class ICountdownLatchMapIncrementCallableTask implements Runnable, Serializable, HazelcastInstanceAware { final String startedLatch; final String finishedLatch; final String waitAfterStartLatch; final String runEntryCounterName; final String mapName; transient HazelcastInstance instance; ICountdownLatchMapIncrementCallableTask(String mapName, String runEntryCounterName, String startedLatch, String finishedLatch, String waitAfterStartLatch) { this.mapName = mapName; this.runEntryCounterName = runEntryCounterName; this.startedLatch = startedLatch; this.finishedLatch = finishedLatch; this.waitAfterStartLatch = waitAfterStartLatch; } @Override public void run() { instance.getCPSubsystem().getAtomicLong(runEntryCounterName).incrementAndGet(); instance.getCPSubsystem().getCountDownLatch(startedLatch).countDown(); try { instance.getCPSubsystem().getCountDownLatch(waitAfterStartLatch).await(1, TimeUnit.MINUTES); } catch (InterruptedException e) { e.printStackTrace(); } 
IMap<String, Integer> map = instance.getMap(mapName); if (map.get("foo") == 1) { map.put("foo", 2); } instance.getCPSubsystem().getCountDownLatch(finishedLatch).countDown(); } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } static class ICountdownLatchRunnableTask implements Runnable, Serializable, HazelcastInstanceAware { final String[] runsCountDownLatchNames; transient HazelcastInstance instance; ICountdownLatchRunnableTask(String... runsCountDownLatchNames) { this.runsCountDownLatchNames = runsCountDownLatchNames; } @Override public void run() { for (String runsCounterLatchName : runsCountDownLatchNames) { instance.getCPSubsystem().getCountDownLatch(runsCounterLatchName).countDown(); } } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } static class HotLoopBusyTask implements Runnable, HazelcastInstanceAware, Serializable { private final String runFinishedLatchName; private transient HazelcastInstance instance; HotLoopBusyTask(String runFinishedLatchName) { this.runFinishedLatchName = runFinishedLatchName; } @Override public void run() { long start = currentTimeMillis(); while (true) { try { sleep(5000); if (currentTimeMillis() - start >= 30000) { instance.getCPSubsystem().getCountDownLatch(runFinishedLatchName).countDown(); break; } } catch (InterruptedException e) { // ignore } } } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } static class PlainCallableTask implements Callable<Double>, Serializable { private int delta = 0; PlainCallableTask() { } PlainCallableTask(int delta) { this.delta = delta; } @Override public Double call() throws Exception { return calculateResult(delta); } public static double calculateResult(int delta) { return 5 * 5.0 + delta; } } static class EchoTask implements Runnable, Serializable { EchoTask() { } @Override public 
void run() { System.out.println("Echo ...cho ...oo ..o"); } } static class OneSecondSleepingTask implements Runnable, Serializable { OneSecondSleepingTask() { } @Override public void run() { sleepSeconds(1); } } static class CountableRunTask implements Runnable, Serializable { private final CountDownLatch progress; private final Semaphore suspend; CountableRunTask(CountDownLatch progress, Semaphore suspend) { this.progress = progress; this.suspend = suspend; } @Override public void run() { progress.countDown(); if (progress.getCount() == 0) { try { suspend.acquire(); } catch (InterruptedException e) { Thread.interrupted(); } } } } static class ErroneousCallableTask implements Callable<Double>, Serializable, HazelcastInstanceAware { private String completionLatchName; private transient HazelcastInstance instance; ErroneousCallableTask() { } ErroneousCallableTask(String completionLatchName) { this.completionLatchName = completionLatchName; } @Override public Double call() throws Exception { try { throw new IllegalStateException("Erroneous task"); } finally { if (completionLatchName != null) { instance.getCPSubsystem().getCountDownLatch(completionLatchName).countDown(); } } } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } static class ErroneousRunnableTask implements Runnable, Serializable { @Override public void run() { throw new IllegalStateException("Erroneous task"); } } static class PlainInstanceAwareRunnableTask implements Runnable, Serializable, HazelcastInstanceAware { private final String latchName; private transient HazelcastInstance instance; PlainInstanceAwareRunnableTask(String latchName) { this.latchName = latchName; } @Override public void run() { this.instance.getCPSubsystem().getCountDownLatch(latchName).countDown(); } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { instance = hazelcastInstance; } } static class PlainPartitionAwareCallableTask 
implements Callable<Double>, Serializable, PartitionAware<String> { @Override public Double call() throws Exception { return 5 * 5.0; } @Override public String getPartitionKey() { return "TestKey"; } } static class PlainPartitionAwareRunnableTask implements Runnable, Serializable, PartitionAware<String>, HazelcastInstanceAware { private final String latchName; private transient HazelcastInstance instance; PlainPartitionAwareRunnableTask(String latchName) { this.latchName = latchName; } @Override public void run() { this.instance.getCPSubsystem().getCountDownLatch(latchName).countDown(); } @Override public String getPartitionKey() { return "TestKey"; } @Override public void setHazelcastInstance(HazelcastInstance hazelcastInstance) { this.instance = hazelcastInstance; } } public static class HazelcastInstanceAwareRunnable implements Callable<Boolean>, HazelcastInstanceAware, Serializable, NamedTask { private transient volatile HazelcastInstance instance; private final String name; HazelcastInstanceAwareRunnable(String name) { this.name = name; } @Override public void setHazelcastInstance(final HazelcastInstance instance) { this.instance = instance; } @Override public String getName() { return name; } @Override public Boolean call() { return (instance != null); } } public static class AutoDisposableCallable implements Callable<Boolean>, AutoDisposableTask { @Override public Boolean call() { return true; } } public static class NamedCallable implements Callable<Boolean>, NamedTask, Serializable { public static final String NAME = "NAMED-CALLABLE"; @Override public Boolean call() { return true; } @Override public String getName() { return NAME; } } public static class AllTasksRunningWithinNumOfNodes implements AssertTask { private final IScheduledExecutorService scheduler; private final int expectedNodesWithTasks; AllTasksRunningWithinNumOfNodes(IScheduledExecutorService scheduler, int expectedNodesWithTasks) { this.scheduler = scheduler; this.expectedNodesWithTasks = 
expectedNodesWithTasks; } @Override public void run() throws Exception { int actualNumOfNodesWithTasks = 0; Map<Member, List<IScheduledFuture<Object>>> allScheduledFutures = scheduler.getAllScheduledFutures(); for (Member member : allScheduledFutures.keySet()) { if (!allScheduledFutures.get(member).isEmpty()) { actualNumOfNodesWithTasks++; } } if (actualNumOfNodesWithTasks != expectedNodesWithTasks) { throw new IllegalStateException("Actual nodes with tasks: " + actualNumOfNodesWithTasks + ". " + "Expected: " + expectedNodesWithTasks); } for (List<IScheduledFuture<Object>> futures : allScheduledFutures.values()) { for (IScheduledFuture future : futures) { if (future.isCancelled()) { throw new IllegalStateException("Scheduled task: " + future.getHandler().getTaskName() + " is cancelled."); } else if (future.getStats().getTotalRuns() == 0) { throw new AssertionError(); } } } } } }
package org.fluentd.logger.sender;

import org.fluentd.logger.util.MockFluentd;
import org.fluentd.logger.util.MockFluentd.MockProcess;
import org.junit.Test;
import org.msgpack.MessagePack;
import org.msgpack.unpacker.Unpacker;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.BufferedInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.net.Socket;
import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

// Integration tests for RawSocketSender: each test starts an in-process
// MockFluentd server, emits events over a real socket, and verifies the
// msgpack-decoded events the server received.
public class TestRawSocketSender {

    // Emits two records with distinct tags and verifies both arrive with
    // their payloads intact.
    @Test
    public void testNormal01() throws Exception {
        // start mock fluentd
        int port = MockFluentd.randomPort();
        final List<Event> elist = new ArrayList<Event>();
        MockFluentd fluentd = new MockFluentd(port, new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    // read events until the client closes the connection (EOFException)
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elist.add(e);
                    }
                    //socket.close();
                } catch (EOFException e) { // ignore
                }
            }
        });
        fluentd.start();
        fluentd.waitUntilReady();

        // start senders
        Sender sender = new RawSocketSender("localhost", port);
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("t1k1", "t1v1");
        data.put("t1k2", "t1v2");
        sender.emit("tag.label1", data);

        Map<String, Object> data2 = new HashMap<String, Object>();
        data2.put("t2k1", "t2v1");
        data2.put("t2k2", "t2v2");
        sender.emit("tag.label2", data2);

        // close sender sockets
        sender.close();

        // wait for unpacking event data on fluentd
        Thread.sleep(2000);

        // close mock server sockets
        fluentd.close();

        // check data
        assertEquals(2, elist.size());
        {
            Event e = elist.get(0);
            assertEquals("tag.label1", e.tag);
            assertEquals("t1v1", e.data.get("t1k1"));
            assertEquals("t1v2", e.data.get("t1k2"));
        }
        {
            Event e = elist.get(1);
            assertEquals("tag.label2", e.tag);
            assertEquals("t2v1", e.data.get("t2k1"));
            assertEquals("t2v2", e.data.get("t2k2"));
        }
    }

    // Emits 10000 records and verifies the server received the same count
    // (payload contents are not checked here).
    @Test
    public void testNormal02() throws Exception {
        // start mock fluentd
        int port = MockFluentd.randomPort(); // Use a random port available
        final List<Event> elist = new ArrayList<Event>();
        MockFluentd fluentd = new MockFluentd(port, new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elist.add(e);
                    }
                    //socket.close();
                } catch (EOFException e) { // ignore
                }
            }
        });
        fluentd.start();
        fluentd.waitUntilReady();

        // start senders
        Sender sender = new RawSocketSender("localhost", port);
        int count = 10000;
        for (int i = 0; i < count; i++) {
            // NOTE(review): "tag:i" is a literal string, not "tag:" + i —
            // presumably intentional since only the event count is asserted; confirm.
            String tag = "tag:i";
            Map<String, Object> record = new HashMap<String, Object>();
            record.put("i", i);
            record.put("n", "name:" + i);
            sender.emit(tag, record);
        }

        // close sender sockets
        sender.close();

        // wait for unpacking event data on fluentd
        Thread.sleep(2000);

        // close mock server sockets
        fluentd.close();

        // check data
        assertEquals(count, elist.size());
    }

    // Creates a sender before its server is up (so initial connect fails and
    // events are buffered), then starts two servers/senders and verifies both
    // received their full 10000 events.
    @Test
    public void testNormal03() throws Exception {
        // start mock fluentds
        // NOTE(review): raw List[] (arrays of generified types can't be created
        // directly); elements are ArrayList<Event>.
        final MockFluentd[] fluentds = new MockFluentd[2];
        final List[] elists = new List[2];
        final int[] ports = new int[2];
        ports[0] = MockFluentd.randomPort();

        RawSocketSender rawSocketSender = new RawSocketSender("localhost", ports[0]); // it should be failed to connect to fluentd

        elists[0] = new ArrayList<Event>();
        fluentds[0] = new MockFluentd(ports[0], new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elists[0].add(e);
                    }
                    //socket.close();
                } catch (EOFException e) { // ignore
                }
            }
        });
        fluentds[0].start();
        fluentds[0].waitUntilReady();

        ports[1] = MockFluentd.randomPort();
        elists[1] = new ArrayList<Event>();
        fluentds[1] = new MockFluentd(ports[1], new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elists[1].add(e);
                    }
                    //socket.close();
                } catch (EOFException e) { // ignore
                }
            }
        });
        fluentds[1].start();
        fluentds[1].waitUntilReady();

        // start senders
        Sender[] senders = new Sender[2];
        int[] counts = new int[2];
        senders[0] = rawSocketSender;
        counts[0] = 10000;
        for (int i = 0; i < counts[0]; i++) {
            // NOTE(review): literal "tag:i" again — see testNormal02.
            String tag = "tag:i";
            Map<String, Object> record = new HashMap<String, Object>();
            record.put("i", i);
            record.put("n", "name:" + i);
            senders[0].emit(tag, record);
        }
        senders[1] = new RawSocketSender("localhost", ports[1]);
        counts[1] = 10000;
        for (int i = 0; i < counts[1]; i++) {
            String tag = "tag:i";
            Map<String, Object> record = new HashMap<String, Object>();
            record.put("i", i);
            record.put("n", "name:" + i);
            senders[1].emit(tag, record);
        }

        // close sender sockets
        senders[0].close();
        senders[1].close();

        // wait for unpacking event data on fluentd
        Thread.sleep(2000);

        // close mock server sockets
        fluentds[0].close();
        fluentds[1].close();

        // check data
        assertEquals(counts[0], elists[0].size());
        assertEquals(counts[1], elists[1].size());
    }

    // Verifies the 200ms connect timeout fires when connecting to an
    // unroutable TEST-NET-1 address (192.0.2.1, RFC 5737) instead of hanging.
    @Test
    public void testTimeout() throws InterruptedException {
        final AtomicBoolean socketFinished = new AtomicBoolean(false);
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.execute(new Runnable() {
            @Override
            public void run() {
                RawSocketSender socketSender = null;
                try {
                    // try to connect to test network
                    socketSender = new RawSocketSender("192.0.2.1", 24224, 200, 8 * 1024);
                } finally {
                    if (socketSender != null) {
                        socketSender.close();
                    }
                    socketFinished.set(true);
                }
            }
        });
        // NOTE(review): busy-wait (yield loop) until the connect attempt finishes;
        // relies on the 200ms timeout above to terminate.
        while(!socketFinished.get())
            Thread.yield();
        assertTrue(socketFinished.get());
        executor.shutdownNow();
    }

    // Kills the server-side sockets mid-stream and verifies the sender buffers
    // the failed event and resends it after reconnecting (4 events total).
    @Test
    public void testBufferingAndResending() throws InterruptedException, IOException {
        final ConcurrentLinkedQueue<Event> readEvents = new ConcurrentLinkedQueue<Event>();
        final CountDownLatch countDownLatch = new CountDownLatch(4);
        int port = MockFluentd.randomPort();
        MockProcess mockProcess = new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                try {
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        readEvents.add(e);
                        countDownLatch.countDown();
                    }
                } catch (EOFException e) {
                    // e.printStackTrace();
                }
            }
        };
        MockFluentd fluentd = new MockFluentd(port, mockProcess);
        fluentd.start();
        fluentd.waitUntilReady();

        Sender sender = new RawSocketSender("localhost", port);
        assertFalse(sender.isConnected());
        Map<String, Object> data = new HashMap<String, Object>();
        data.put("key0", "v0");
        sender.emit("tag0", data);
        assertTrue(sender.isConnected());

        // close fluentd to make the next sending failed
        TimeUnit.MILLISECONDS.sleep(500);
        fluentd.closeClientSockets();
        TimeUnit.MILLISECONDS.sleep(500);

        data = new HashMap<String, Object>();
        data.put("key0", "v1");
        sender.emit("tag0", data);
        assertFalse(sender.isConnected());

        // wait to avoid the suppression of reconnection
        TimeUnit.MILLISECONDS.sleep(500);

        data = new HashMap<String, Object>();
        data.put("key0", "v2");
        sender.emit("tag0", data);

        data = new HashMap<String, Object>();
        data.put("key0", "v3");
        sender.emit("tag0", data);

        countDownLatch.await(500, TimeUnit.MILLISECONDS);

        sender.close();
        fluentd.close();

        // all four events must arrive, in order, including the buffered "v1"
        assertEquals(4, readEvents.size());

        Event event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v0"));

        event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v1"));

        event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v2"));

        event = readEvents.poll();
        assertEquals("tag0", event.tag);
        assertEquals(1, event.data.size());
        assertTrue(event.data.keySet().contains("key0"));
        assertTrue(event.data.values().contains("v3"));
    }

    // Fills the sender's buffer while the server is down, then starts the
    // server and checks reconnection/resend behavior. (Continues beyond this view.)
    @Test
    public void testReconnectAfterBufferFull() throws Exception {
        final CountDownLatch bufferFull = new CountDownLatch(1);

        // start mock fluentd
        int port = MockFluentd.randomPort(); // Use a random port available
        final List<Event> elist = new ArrayList<Event>();
        final MockFluentd fluentd = new MockFluentd(port, new MockFluentd.MockProcess() {
            public void process(MessagePack msgpack, Socket socket) throws IOException {
                try {
                    BufferedInputStream in = new BufferedInputStream(socket.getInputStream());
                    Unpacker unpacker = msgpack.createUnpacker(in);
                    while (true) {
                        Event e = unpacker.read(Event.class);
                        elist.add(e);
                    }
                } catch (EOFException e) { // ignore
                } finally {
                    socket.close();
                }
            }
        });

        // delay the server start until the sender's buffer has filled up
        ExecutorService executor = Executors.newSingleThreadExecutor();
        executor.execute(new Runnable() {
            @Override
            public void run() {
                try {
                    bufferFull.await(20, TimeUnit.SECONDS);
                    fluentd.start();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        });

        // start senders
        Sender sender = new RawSocketSender("localhost", port);
        String tag = "tag";
        int i;
        for (i = 0; i < 1000000; i++) { // Enough to fill the sender's buffer
            Map<String, Object> record
= new HashMap<String, Object>(); record.put("num", i); record.put("str", "name" + i); if (bufferFull.getCount() > 0) { // Fill the sender's buffer if (!sender.emit(tag, record)) { // Buffer full. Need to recover the fluentd bufferFull.countDown(); Thread.sleep(2000); } } else { // Flush the sender's buffer after the fluentd starts sender.emit(tag, record); break; } } // close sender sockets sender.close(); // wait for unpacking event data on fluentd Thread.sleep(2000); // close mock server sockets fluentd.close(); // check data assertEquals(0, bufferFull.getCount()); assertEquals(i, elist.size()); } @Test public void testBufferOverflow() throws Exception { // start mock fluentd int port = MockFluentd.randomPort(); MockFluentd fluentd = new MockFluentd(port, new MockFluentd.MockProcess() { public void process(MessagePack msgpack, Socket socket) throws IOException { BufferedInputStream in = new BufferedInputStream(socket.getInputStream()); try { Unpacker unpacker = msgpack.createUnpacker(in); while (true) { unpacker.read(Event.class); } //socket.close(); } catch (EOFException e) { // ignore } } }); fluentd.start(); // start senders Sender sender = new RawSocketSender("localhost", port, 3000, 256); Map<String, Object> data = new HashMap<String, Object>(); data.put("large", randomString(512)); boolean success = sender.emit("tag.label1", data); assertFalse(success); // close sender sockets sender.close(); // close mock server sockets fluentd.close(); } private static final String CHARS = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ "; private String randomString(int len) { StringBuilder sb = new StringBuilder(len); Random rnd = new Random(); for (int i = 0; i < len; i++) { if (i != 0 && i % 128 == 0) { sb.append("\r\n"); } sb.append(CHARS.charAt(rnd.nextInt(CHARS.length()))); } return sb.toString(); } }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.plugins.index.lucene.directory; import static com.google.common.collect.Lists.newArrayList; import static com.google.common.collect.Sets.newHashSet; import static org.apache.commons.io.FileUtils.ONE_GB; import static org.apache.commons.io.FileUtils.ONE_MB; import static org.apache.jackrabbit.JcrConstants.JCR_DATA; import static org.apache.jackrabbit.oak.InitialContent.INITIAL_CONTENT; import static org.apache.jackrabbit.oak.api.Type.BINARIES; import static org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants.INDEX_DATA_CHILD_NAME; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.PROP_BLOB_SIZE; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.PROP_UNIQUE_KEY; import static org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory.UNIQUE_KEY_SIZE; import static org.hamcrest.CoreMatchers.containsString; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.ByteArrayInputStream; import 
java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import org.apache.commons.io.IOUtils; import org.apache.commons.io.input.NullInputStream; import org.apache.jackrabbit.oak.api.Blob; import org.apache.jackrabbit.oak.api.Type; import org.apache.jackrabbit.oak.plugins.index.lucene.IndexDefinition; import org.apache.jackrabbit.oak.plugins.index.lucene.LuceneIndexConstants; import org.apache.jackrabbit.oak.plugins.index.lucene.directory.ActiveDeletedBlobCollectorFactory; import org.apache.jackrabbit.oak.plugins.index.lucene.directory.BlobFactory; import org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakDirectory; import org.apache.jackrabbit.oak.plugins.index.lucene.directory.OakIndexFile; import org.apache.jackrabbit.oak.plugins.memory.ArrayBasedBlob; import org.apache.jackrabbit.oak.plugins.memory.EmptyNodeState; import org.apache.jackrabbit.oak.plugins.memory.PropertyStates; import org.apache.jackrabbit.oak.segment.SegmentNodeStore; import org.apache.jackrabbit.oak.segment.SegmentNodeStoreBuilders; import org.apache.jackrabbit.oak.segment.SegmentTestConstants; import org.apache.jackrabbit.oak.segment.file.FileStore; import org.apache.jackrabbit.oak.segment.file.FileStoreBuilder; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.spi.state.NodeState; import org.apache.jackrabbit.oak.spi.state.ReadOnlyBuilder; import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; import org.apache.lucene.store.IOContext; import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.IndexOutput; import 
org.apache.lucene.store.InputStreamDataInput;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;

/**
 * Tests for {@link OakDirectory}: blob-chunked reads/writes, directory
 * listings, close semantics, large files, and error-message contents.
 */
public class OakDirectoryTest {
    @Rule
    public TemporaryFolder tempFolder = new TemporaryFolder(new File("target"));

    private Random rnd = new Random();

    private NodeState root = INITIAL_CONTENT;

    private NodeBuilder builder = root.builder();

    // Spans two full default-size blobs plus a random partial third chunk.
    int fileSize = IndexDefinition.DEFAULT_BLOB_SIZE * 2 + rnd.nextInt(1000);

    @Test
    public void writes_DefaultSetup() throws Exception{
        Directory dir = createDir(builder, false, "/foo");
        assertWrites(dir, IndexDefinition.DEFAULT_BLOB_SIZE);
    }

    @Test
    public void writes_CustomBlobSize() throws Exception{
        builder.setProperty(LuceneIndexConstants.BLOB_SIZE, 300);
        Directory dir = createDir(builder, false, "/foo");
        assertWrites(dir, 300);
    }

    @Test
    public void testCompatibility() throws Exception{
        builder.setProperty(LuceneIndexConstants.BLOB_SIZE, OakIndexFile.DEFAULT_BLOB_SIZE);
        Directory dir = createDir(builder, false, "/foo");
        byte[] data = assertWrites(dir, OakIndexFile.DEFAULT_BLOB_SIZE);

        NodeBuilder testNode = builder.child(INDEX_DATA_CHILD_NAME).child("test");
        //Remove the size property to simulate old behaviour
        testNode.removeProperty(PROP_BLOB_SIZE);

        //Read should still work even if the size property is removed
        IndexInput i = dir.openInput("test", IOContext.DEFAULT);
        assertEquals(fileSize, i.length());

        byte[] result = new byte[fileSize];
        i.readBytes(result, 0, result.length);

        assertTrue(Arrays.equals(data, result));
    }

    @Test //OAK-2388
    public void testOverflow() throws Exception{
        Directory dir = createDir(builder, false, "/foo");
        NodeBuilder file = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt");
        int blobSize = 32768;
        int dataSize = 90844;
        file.setProperty(OakDirectory.PROP_BLOB_SIZE, blobSize);
        List<? super Blob> blobs = new ArrayList<Blob>(dataSize);
        for (int i = 0; i < dataSize; i++) {
            blobs.add(new ArrayBasedBlob(new byte[0]));
        }
        file.setProperty(PropertyStates.createProperty("jcr:data", blobs, Type.BINARIES));

        IndexInput input = dir.openInput("test.txt", IOContext.DEFAULT);
        // blobSize * (dataSize - 1) exceeds Integer.MAX_VALUE — length()
        // must compute in long arithmetic, not overflow int
        assertEquals((long) blobSize * (dataSize - 1), input.length());
    }

    @Test
    public void saveListing() throws Exception{
        builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true);
        Directory dir = createDir(builder, false, "/foo");
        Set<String> fileNames = newHashSet();
        for (int i = 0; i < 10; i++) {
            String fileName = "foo" + i;
            createFile(dir, fileName);
            fileNames.add(fileName);
        }
        dir.close();

        // reopen read-only: persisted listing must match what was written
        dir = createDir(builder, true, "/foo");
        assertEquals(fileNames, newHashSet(dir.listAll()));
    }

    @Test
    public void skipSaveListingIfUnchanged() throws Exception{
        builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true);
        Directory dir = createDir(builder, false, "/foo");
        Set<String> fileNames = newHashSet();
        for (int i = 0; i < 10; i++) {
            String fileName = "foo" + i;
            createFile(dir, fileName);
            fileNames.add(fileName);
        }
        dir.close();

        // ReadOnlyBuilder throws on any write, so closing must not try
        // to re-save an unchanged listing
        dir = createDir(new ReadOnlyBuilder(builder.getNodeState()), false, "/foo");
        Set<String> files = newHashSet(dir.listAll());
        dir.close();
        assertEquals(fileNames, files);
    }

    // OAK-6562
    @Test
    public void createOutputReInitsFile() throws Exception {
        builder.setProperty(LuceneIndexConstants.SAVE_DIR_LISTING, true);
        Directory dir = createDir(builder, false, "/foo");

        final String fileName = "foo";
        dir.createOutput(fileName, IOContext.DEFAULT);
        String firstUniqueKey = builder.getChildNode(INDEX_DATA_CHILD_NAME)
                .getChildNode(fileName).getString(PROP_UNIQUE_KEY);

        dir.createOutput(fileName, IOContext.DEFAULT);
        String secondUniqueKey = builder.getChildNode(INDEX_DATA_CHILD_NAME)
                .getChildNode(fileName).getString(PROP_UNIQUE_KEY);
        assertFalse("Unique key must change on re-incarnating output with same name",
                firstUniqueKey.equals(secondUniqueKey));
    }

    /**
     * Writes {@link #fileSize} random bytes to "test", reads them back, and
     * verifies the stored blob-size property and first-blob length.
     *
     * @return the random data that was written (for further checks).
     */
    byte[] assertWrites(Directory dir, int blobSize) throws IOException {
        byte[] data = randomBytes(fileSize);
        IndexOutput o = dir.createOutput("test", IOContext.DEFAULT);
        o.writeBytes(data, data.length);
        o.close();

        assertTrue(dir.fileExists("test"));
        assertEquals(fileSize, dir.fileLength("test"));

        IndexInput i = dir.openInput("test", IOContext.DEFAULT);
        assertEquals(fileSize, i.length());

        byte[] result = new byte[fileSize];
        i.readBytes(result, 0, result.length);

        assertTrue(Arrays.equals(data, result));

        NodeBuilder testNode = builder.child(INDEX_DATA_CHILD_NAME).child("test");
        assertEquals(blobSize, testNode.getProperty(PROP_BLOB_SIZE).getValue(Type.LONG).longValue());

        // each stored blob carries the unique key appended to the payload
        List<Blob> blobs = newArrayList(testNode.getProperty(JCR_DATA).getValue(BINARIES));
        assertEquals(blobSize + UNIQUE_KEY_SIZE, blobs.get(0).length());
        return data;
    }

    // Creates a file with 1..1000 random bytes; returns the size written.
    private int createFile(Directory dir, String fileName) throws IOException {
        int size = rnd.nextInt(1000) + 1;
        byte[] data = randomBytes(size);
        IndexOutput o = dir.createOutput(fileName, IOContext.DEFAULT);
        o.writeBytes(data, data.length);
        o.close();
        return size;
    }

    private OakDirectory createDir(NodeBuilder builder, boolean readOnly, String indexPath){
        return new OakDirectory(builder,
                new IndexDefinition(root, builder.getNodeState(), indexPath), readOnly);
    }

    byte[] randomBytes(int size) {
        byte[] data = new byte[size];
        rnd.nextBytes(data);
        return data;
    }

    @Test
    public void testCloseOnOriginalIndexInput() throws Exception {
        Directory dir = createDir(builder, false, "/foo");
        NodeBuilder file = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt");
        int dataSize = 1024;
        List<? super Blob> blobs = new ArrayList<Blob>(dataSize);
        for (int i = 0; i < dataSize; i++) {
            blobs.add(new ArrayBasedBlob(new byte[0]));
        }
        file.setProperty(PropertyStates.createProperty("jcr:data", blobs, Type.BINARIES));
        IndexInput input = dir.openInput("test.txt", IOContext.DEFAULT);
        input.close();
        assertClosed(input);
    }

    @Test
    public void testCloseOnClonedIndexInputs() throws Exception {
        Directory dir = createDir(builder, false, "/foo");
        NodeBuilder file = builder.child(INDEX_DATA_CHILD_NAME).child("test.txt");
        int dataSize = 1024;
        List<? super Blob> blobs = new ArrayList<Blob>(dataSize);
        for (int i = 0; i < dataSize; i++) {
            blobs.add(new ArrayBasedBlob(new byte[0]));
        }
        file.setProperty(PropertyStates.createProperty("jcr:data", blobs, Type.BINARIES));
        IndexInput input = dir.openInput("test.txt", IOContext.DEFAULT);
        IndexInput clone1 = input.clone();
        IndexInput clone2 = input.clone();
        input.close();
        // closing the original must also invalidate its clones
        assertClosed(input);
        assertClosed(clone1);
        assertClosed(clone2);
    }

    // Every read/seek method of a closed IndexInput must throw
    // AlreadyClosedException.
    private void assertClosed(IndexInput input) throws IOException {
        try {
            input.length();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.seek(0);
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.getFilePointer();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readInt();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readShort();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readLong();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readByte();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readString();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readStringSet();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readStringStringMap();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readVInt();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readVLong();
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readBytes(null, 0, 0);
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
        try {
            input.readBytes(null, 0, 0, false);
            fail("cannot use IndexInput once closed");
        } catch (AlreadyClosedException e) {
            // expected exception
        }
    }

    @Test
    public void largeFile() throws Exception{
        // BlackHoleBlobStore discards writes so a >2GB file is cheap to create
        FileStore store = FileStoreBuilder.fileStoreBuilder(tempFolder.getRoot())
                .withMemoryMapping(false)
                .withBlobStore(new BlackHoleBlobStore())
                .build();
        SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(store).build();
        IndexDefinition defn = new IndexDefinition(INITIAL_CONTENT, EmptyNodeState.EMPTY_NODE, "/foo");
        Directory directory = new OakDirectory(nodeStore.getRoot().builder(), defn, false);

        long expectedSize = ONE_GB * 2 + ONE_MB;
        String fileName = "test";
        writeFile(directory, fileName, expectedSize);
        assertEquals(expectedSize, directory.fileLength(fileName));

        IndexInput input = directory.openInput(fileName, IOContext.DEFAULT);
        readInputToEnd(expectedSize, input);
        store.close();
    }

    @Test
    public void dirNameInExceptionMessage() throws Exception{
        String indexPath = "/foo/bar";
        Directory dir = createDir(builder, false, indexPath);
        // missing file: exception must mention the index path
        try {
            dir.openInput("foo.txt", IOContext.DEFAULT);
            fail();
        } catch (IOException e){
            assertThat(e.getMessage(), containsString(indexPath));
        }

        int fileSize = createFile(dir, "test.txt");

        // seek past EOF: same requirement
        IndexInput in = dir.openInput("test.txt", IOContext.DEFAULT);
        try {
            in.seek(fileSize + 1);
            fail();
        } catch (IOException e){
            assertThat(e.getMessage(), containsString(indexPath));
        }

        // read past EOF: same requirement
        IndexInput in2 = dir.openInput("test.txt", IOContext.DEFAULT);
        try {
            byte[] data = new byte[fileSize + 1];
            in2.readBytes(data, 0, fileSize + 1);
            fail();
        } catch (IOException e){
            assertThat(e.getMessage(), containsString(indexPath));
        }
    }

    @Test
    public void dirNameInException_Writes() throws Exception{
        FailOnDemandBlobStore blobStore = new FailOnDemandBlobStore();
        FileStore store = FileStoreBuilder.fileStoreBuilder(tempFolder.getRoot())
                .withMemoryMapping(false)
                .withBlobStore(blobStore)
                .build();
        SegmentNodeStore nodeStore = SegmentNodeStoreBuilders.builder(store).build();

        String indexPath = "/foo/bar";

        // blob must be big enough to go to the blob store (not inlined)
        int minFileSize = SegmentTestConstants.MEDIUM_LIMIT;
        int blobSize = minFileSize + 1000;

        builder = nodeStore.getRoot().builder();
        builder.setProperty(LuceneIndexConstants.BLOB_SIZE, blobSize);
        Directory dir = createDir(builder, false, indexPath);

        // failure during writeBytes: message must carry path and file name
        blobStore.startFailing();
        IndexOutput o = dir.createOutput("test1.txt", IOContext.DEFAULT);
        try{
            o.writeBytes(randomBytes(blobSize + 10), blobSize + 10);
            fail();
        } catch (IOException e){
            assertThat(e.getMessage(), containsString(indexPath));
            assertThat(e.getMessage(), containsString("test1.txt"));
        }

        blobStore.reset();

        // failure during flush: same requirement
        IndexOutput o3 = dir.createOutput("test3.txt", IOContext.DEFAULT);
        o3.writeBytes(randomBytes(minFileSize), minFileSize);

        blobStore.startFailing();
        try{
            o3.flush();
            fail();
        } catch (IOException e){
            assertThat(e.getMessage(), containsString(indexPath));
            assertThat(e.getMessage(), containsString("test3.txt"));
        }
        store.close();
    }

    @Test
    public void readOnlyDirectory() throws Exception{
        Directory dir = new OakDirectory(new ReadOnlyBuilder(builder.getNodeState()),
                new IndexDefinition(root, builder.getNodeState(), "/foo"), true);
        assertEquals(0, dir.listAll().length);
    }

    @Test
    public void testDirty() throws Exception{
        OakDirectory dir = createDir(builder, false, "/foo");
        assertFalse(dir.isDirty());
        createFile(dir, "a");
        assertTrue(dir.isDirty());
        dir.close();

        // reads must not mark the directory dirty; deletes must
        dir = createDir(builder, false, "/foo");
        assertFalse(dir.isDirty());
        dir.openInput("a", IOContext.DEFAULT);
        assertFalse(dir.isDirty());

        dir.deleteFile("a");
        assertTrue(dir.isDirty());
        dir.close();
    }

    // OAK-6503
    @Test
    public void dontMarkNonBlobStoreBlobsAsDeleted() throws Exception{
        final String deletedBlobId = "blobIdentifier";
        final String blobIdToString = "NeverEver-Ever-Ever-ShouldThisBeMarkedAsDeleted";
        final int fileSize = 1;
        // toggles whether created blobs expose a content identity
        final AtomicBoolean identifiableBlob = new AtomicBoolean(false);

        IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo");
        BlobFactory factory = new BlobFactory() {
            @Override
            public Blob createBlob(InputStream in) throws IOException {
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                IOUtils.copy(in, out);
                byte[] data = out.toByteArray();
                return new ArrayBasedBlob(data) {
                    @Override
                    public String getContentIdentity() {
                        return identifiableBlob.get()?deletedBlobId:null;
                    }

                    @Override
                    public String toString() {
                        return blobIdToString;
                    }
                };
            }
        };

        OakDirectory dir = new OakDirectory(builder, INDEX_DATA_CHILD_NAME, def, false,
                factory, new ActiveDeletedBlobCollectorFactory.BlobDeletionCallback() {
            @Override
            public void deleted(String blobId, Iterable<String> ids) {
                assertEquals("Only blobs with content identity must be reported as deleted",
                        deletedBlobId, blobId);
            }

            @Override
            public void commitProgress(IndexProgress indexProgress) {
            }
        });
        writeFile(dir, "file1", fileSize);
        writeFile(dir, "file2", fileSize);

        // file1's blobs have no content identity — must NOT be reported
        dir.deleteFile("file1");

        // file2's blobs do — the callback asserts only that id is reported
        identifiableBlob.set(true);
        dir.deleteFile("file2");

        dir.close();
    }

    @Test
    public void blobFactory() throws Exception {
        final AtomicInteger numBlobs = new AtomicInteger();
        final int fileSize = 1024;
        IndexDefinition def = new IndexDefinition(root, builder.getNodeState(), "/foo");
        BlobFactory factory = new BlobFactory() {
            @Override
            public Blob createBlob(InputStream in) throws IOException {
                ByteArrayOutputStream out = new ByteArrayOutputStream();
                IOUtils.copy(in, out);
                byte[] data = out.toByteArray();
                // payload plus appended unique key arrives in one blob
                assertEquals(fileSize + UNIQUE_KEY_SIZE, data.length);
                numBlobs.incrementAndGet();
                return new ArrayBasedBlob(data);
            }
        };
        OakDirectory dir = new OakDirectory(builder, INDEX_DATA_CHILD_NAME, def, false, factory);
        numBlobs.set(0);
        writeFile(dir, "file", fileSize);
        assertEquals(1, numBlobs.get());
        dir.close();
    }

    @Test
    public void fileLength() throws Exception {
        final int fileSize = 1024;
        final String fileName = "file";
        OakDirectory dir = createDir(builder, false, "/foo");
        writeFile(dir, fileName, fileSize);
        assertEquals(fileSize, dir.fileLength(fileName));
        try {
            dir.fileLength("unknown");
            fail("must throw FileNotFoundException");
        } catch (FileNotFoundException expected) {
            // expected
        }
        dir.close();
    }

    // Drains exactly expectedSize bytes from input in 16KB chunks;
    // readBytes throws if the input runs short.
    private static void readInputToEnd(long expectedSize, IndexInput input) throws IOException {
        int COPY_BUFFER_SIZE = 16384;
        byte[] copyBuffer = new byte[(int) ONE_MB];
        long left = expectedSize;
        while (left > 0) {
            final int toCopy;
            if (left > COPY_BUFFER_SIZE) {
                toCopy = COPY_BUFFER_SIZE;
            } else {
                toCopy = (int) left;
            }
            input.readBytes(copyBuffer, 0, toCopy);
            left -= toCopy;
        }
    }

    // Writes `size` zero bytes to fileName via a NullInputStream.
    private static void writeFile(Directory directory, String fileName, long size) throws Exception{
        IndexOutput o = directory.createOutput(fileName, IOContext.DEFAULT);
        o.copyBytes(new InputStreamDataInput(new NullInputStream(size)), size);
        o.close();
    }

    /**
     * Blob store that discards block writes (all test content is zero bytes)
     * and memoizes a single blob id to skip repeated digest computation.
     */
    private static class BlackHoleBlobStore extends MemoryBlobStore {
        private String blobId;
        private byte[] data;

        @Override
        protected synchronized void storeBlock(byte[] digest, int level, byte[] data) {
            //Eat up all the writes
        }

        @Override
        public String writeBlob(InputStream in) throws IOException {
            //Avoid expensive digest calculation as all content is 0 byte. So memorize
            //the id if same content is passed
            if (blobId == null) {
                data = IOUtils.toByteArray(in);
                blobId = super.writeBlob(new ByteArrayInputStream(data));
                return blobId;
            } else {
                byte[] bytes = IOUtils.toByteArray(in);
                if (Arrays.equals(data, bytes)) {
                    return blobId;
                }
                return super.writeBlob(new ByteArrayInputStream(bytes));
            }
        }

        @Override
        protected byte[] readBlockFromBackend(BlockId id) {
            return data;
        }
    }

    /**
     * Blob store whose writeBlob can be toggled to fail, for exercising
     * error paths in OakDirectory's write code.
     */
    private static class FailOnDemandBlobStore extends MemoryBlobStore {
        private boolean fail;

        @Override
        public String writeBlob(InputStream in) throws IOException {
            if (fail) {
                throw new IOException("Failing on demand");
            }
            return super.writeBlob(in);
        }

        public void startFailing(){
            fail = true;
        }

        public void reset(){
            fail = false;
        }
    }
}
/* * The MIT License * * Copyright 2017 gburdell. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package gblib; import static gblib.Util.abnormalExit; import static gblib.Util.invariant; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Timestamp; import java.util.Map; import java.sql.Types; import java.text.DateFormat; import java.text.ParseException; import java.text.SimpleDateFormat; import java.util.Collections; import java.util.Date; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.List; /** * Top-level interface to facilitate (more) convenient manipulation of * databases. 
*
* @author kpfalzer
*/
public class Model {

    /** Supplier of JDBC connections for this model. */
    public static interface IConnection {
        public Connection getConnection();
    }

    /**
     * Binds this model to a table: resolves the table's real name and
     * discovers its columns/types (see {@link #setup()}).
     *
     * @param tblName table name (case-insensitive; canonicalized at setup).
     * @param conn connection supplier.
     * @param zeroOnNull insert 0/0.0 for null numeric values instead of failing.
     * @throws SQLException on any metadata query failure.
     */
    public Model(String tblName, IConnection conn, boolean zeroOnNull) throws SQLException {
        m_tblName = tblName;
        m_conn = conn;
        m_zeroOnNull = zeroOnNull;
        setup();
    }

    public Model(String tblName, IConnection conn) throws SQLException {
        this(tblName, conn, true);
    }

    // When true, null numeric values are inserted as 0 (see insert()).
    private final boolean m_zeroOnNull;

    // @TBL@ is substituted with the table name by subTable().
    private static final String LOCK_TABLE = "LOCK TABLES @TBL@ WRITE";
    private static final String UNLOCK_TABLE = "UNLOCK TABLES";

    public String subTable(String s) {
        return s.replace("@TBL@", getTableName());
    }

    // NOTE(review): lockTable disables auto-commit; unlockTable restores it.
    // Callers must pair these or the connection is left in manual-commit mode.
    public void lockTable(Connection conn) throws SQLException {
        conn.setAutoCommit(false);
        PreparedStatement stmt = Model.getPreparedStatement(conn, subTable(LOCK_TABLE));
        invariant(!stmt.execute());
    }

    public void unlockTable(Connection conn) throws SQLException {
        invariant(!createStatement(conn).execute(UNLOCK_TABLE));
        conn.setAutoCommit(true);
    }

    // NOTE(review): Statement/ResultSet are not closed here; they are only
    // released when the caller closes the connection.
    public long getMaxID(Connection conn) throws SQLException {
        String stmt = "SELECT MAX(ID) FROM " + getTableName();
        ResultSet rs = createStatement(conn).executeQuery(stmt);
        boolean b = rs.first();
        assert b;
        long id = rs.getLong(1);
        return id;
    }

    /**
     * Map a DATA_TYPE name (as reported by information_schema) to a
     * java.sql.Types constant. Asserts on unknown types and then
     * returns -1 (when assertions are disabled).
     */
    public static int getSqlType(String type) {
        int r = -1;
        switch (type) {
            case "LONG VARCHAR":
            case "VARCHAR":
            case "TEXT":
                r = Types.LONGVARCHAR;
                break;
            case "TIMESTAMP":
            case "DATETIME":
                r = Types.TIMESTAMP;
                break;
            case "INTEGER":
            case "INT":
                r = Types.INTEGER;
                break;
            case "REAL":
            case "FLOAT":
                r = Types.REAL;
                break;
            case "CHAR":
                r = Types.CHAR;
                break;
            case "BIT":
            case "TINYINT":
                r = Types.BIT;
                break;
            default:
                assert false;
        }
        return r;
    }

    private final IConnection m_conn;

    private Connection getConnection() {
        return m_conn.getConnection();
    }

    public String getTableName() {
        return m_tblName;
    }

    /**
     * Get column names in order (except ID).
     *
     * @return ordered column names.
     */
    public List<String> getColumnNames() {
        return m_colNames;
    }

    private static final String TABLE_COLTYPES
            = "select distinct column_name, data_type from information_schema.columns where table_name = ?";
    private static final String GET_TABLE_NAMES
            = "select distinct table_name from information_schema.tables";

    /**
     * Set the correct case-sensitive table name. In (real) *Nix, the table name
     * is case-sensitive; but in MacOSX/Windows it is not.
     */
    private void setTableName() throws SQLException {
        boolean done = false;
        try (Connection conn = getConnection()) {
            ResultSet rs = conn.createStatement().executeQuery(GET_TABLE_NAMES);
            while (!done && rs.next()) {
                String realTblName = rs.getString(1);
                if (m_tblName.equalsIgnoreCase(realTblName)) {
                    m_tblName = realTblName;
                    done = true;
                }
            }
            invariant(done);
        }
    }

    /**
     * One-time initialization: canonicalize the table name, load column
     * names/types (in declaration order) into m_colInfo, and build the
     * INSERT prepared-statement text (all columns except ID).
     */
    private void setup() throws SQLException {
        if (null == m_colInfo) {
            setTableName();
            int coli = 1;
            try (Connection conn = getConnection()) {
                PreparedStatement stmt = getPreparedStatement(conn, TABLE_COLTYPES);
                stmt.setString(1, m_tblName);
                ResultSet rs = stmt.executeQuery();
                String colNm, typeNm;
                while (rs.next()) {
                    colNm = rs.getString("COLUMN_NAME").toUpperCase();
                    typeNm = rs.getString("DATA_TYPE").toUpperCase();
                    if (null == m_colInfo) {
                        m_colInfo = new LinkedHashMap<>(); //keep insert/key order
                    }
                    assert !m_colInfo.containsKey(colNm);
                    m_colInfo.put(colNm, new PosType(coli, typeNm));
                    coli++;
                }
                m_colNames = new LinkedList<>(m_colInfo.keySet());
                invariant(m_colNames.remove("ID"));
                StringBuilder bld = new StringBuilder("INSERT INTO ");
                bld
                        .append(getTableName())
                        .append(" (")
                        .append(Util.toCSV(getColumnNames()))
                        .append(") VALUES (")
                        .append(Util.toCSV(Util.replicate("?", getColumnNames().size())))
                        .append(")");
                m_insertStmt = bld.toString();
            }
        }
    }

    /**
     * Get index of ID column.
     *
     * @return column index.
     */
    private int getIdCol() {
        return m_colInfo.get("ID").v1;
    }

    /**
     * Since we build up insert prepared statement without an ID, we need to
     * offset for cols occuring after ID.
     *
     * @param col column index to offset.
     * @return position to update in prepared statement.
     */
    private int getInsertIndex(int col) {
        if (col > getIdCol()) {
            col--;
        }
        return col;
    }

    public static java.sql.Date getCurrentTime() {
        return new java.sql.Date(System.currentTimeMillis());
    }

    public static Timestamp getCurrentTimeStamp() {
        return asTimestamp(getCurrentTime());
    }

    public static Timestamp asTimestamp(Date date) {
        return new Timestamp(date.getTime());
    }

    /**
     * Create PreparedStatement to insert values into table.
     *
     * @param conn database connection.
     * @param items Map of Object values by key aligned with column names.
     * @return completed PreparedStatement to insert.
     * @throws SQLException
     */
    public PreparedStatement insert(Connection conn, Map<String, Object> items) throws SQLException {
        PreparedStatement stmt = getPreparedStatement(conn, m_insertStmt);
        invariant(getColumnNames().size() == items.size()); //dont count ID
        for (String colNm : items.keySet()) {
            PosType pt = m_colInfo.get(colNm.toUpperCase());
            invariant(null != pt);
            Object val = items.get(colNm);
            // statement position skips the ID column
            int stmtPos = getInsertIndex(pt.v1);
            switch (pt.v2) {
                case Types.INTEGER:
                    Long lng;
                    if ((null == val) && m_zeroOnNull) {
                        lng = 0L;
                    } else if (val instanceof Number) {
                        Number n = Util.downCast(val);
                        lng = n.longValue();
                    } else {
                        //NOTE: m_zeroOnNull mitigates null here...
                        lng = Long.parseLong(val.toString());
                    }
                    stmt.setLong(stmtPos, lng);
                    break;
                case Types.LONGVARCHAR:
                case Types.CHAR:
                    stmt.setString(stmtPos, val.toString());
                    break;
                case Types.TIMESTAMP:
                    try {
                        // null timestamp defaults to "now"
                        Date date = (null != val) ? DATE_FMT.parse(val.toString()) : getCurrentTime();
                        stmt.setTimestamp(stmtPos, asTimestamp(date));
                    } catch (ParseException ex) {
                        abnormalExit(ex);
                    }
                    break;
                case Types.BIT:
                    Boolean bitv = (null != val) ? Boolean.parseBoolean(val.toString()) : false;
                    stmt.setBoolean(stmtPos, bitv);
                    break;
                case Types.REAL:
                    Double dbl;
                    if ((null == val) && m_zeroOnNull) {
                        dbl = 0.0;
                    } else if (val instanceof Number) {
                        Number n = Util.downCast(val);
                        dbl = n.doubleValue();
                    } else {
                        //NOTE: m_zeroOnNull mitigates null here...
                        dbl = Double.parseDouble(val.toString());
                    }
                    stmt.setDouble(stmtPos, dbl);
                    break;
                default:
                    invariant(false);
            }
        }
        return stmt;
    }

    //2017-02-26 15:03:52 -0800
    // NOTE(review): SimpleDateFormat is not thread-safe; this shared static
    // instance is unsafe if insert() is called from multiple threads —
    // consider java.time.DateTimeFormatter or per-call instances.
    private static final DateFormat DATE_FMT = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss Z");

    public static Statement createStatement(Connection conn) throws SQLException {
        return createStatement(conn, true);
    }

    // Scroll-insensitive statement; updatable result sets by default.
    public static Statement createStatement(Connection conn, boolean updatable) throws SQLException {
        return conn.createStatement(
                ResultSet.TYPE_SCROLL_INSENSITIVE,
                (updatable) ? ResultSet.CONCUR_UPDATABLE : ResultSet.CONCUR_READ_ONLY);
    }

    public static PreparedStatement getPreparedStatement(Connection conn, String stmt) throws SQLException {
        return getPreparedStatement(conn, stmt, true);
    }

    public static PreparedStatement getPreparedStatement(Connection conn, String stmt, boolean updatable) throws SQLException {
        return conn.prepareStatement(
                stmt,
                ResultSet.TYPE_SCROLL_INSENSITIVE,
                (updatable) ? ResultSet.CONCUR_UPDATABLE : ResultSet.CONCUR_READ_ONLY);
    }

    /**
     * Get column type.
     *
     * @param colnm column name.
     * @return sql.Types value for valid colnm, else NULL (type).
     */
    public int getColType(String colnm) {
        colnm = colnm.toUpperCase();
        return (m_colInfo.containsKey(colnm)) ? m_colInfo.get(colnm).v2 : Types.NULL;
    }

    /**
     * Wrap value as Java representation of SqlType.
     */
    public static class SqlVal {

        public SqlVal(char s[]) {
            m_sqlType = Types.CHAR;
            m_val = s;
        }

        public SqlVal(String s) {
            m_sqlType = Types.LONGVARCHAR;
            m_val = s;
        }

        public void set(String s) {
            assert (m_sqlType == Types.LONGVARCHAR || m_sqlType == Types.CHAR);
            m_val = s;
        }

        public SqlVal(float s) {
            m_sqlType = Types.REAL;
            m_val = s;
        }

        // Reads column `col` using the type reported by the result set metadata.
        public SqlVal(final ResultSet rs, int col) throws SQLException {
            m_sqlType = rs.getMetaData().getColumnType(col);
            switch (m_sqlType) {
                case Types.INTEGER:
                    m_val = rs.getLong(col);
                    break;
                case Types.LONGVARCHAR:
                case Types.CHAR:
                    m_val = rs.getString(col);
                    break;
                case Types.TIMESTAMP:
                    m_val = rs.getTimestamp(col);
                    break;
                case Types.REAL:
                    m_val = rs.getFloat(col);
                    break;
                default:
                    assert false;
            }
        }

        // Reads named column using a caller-supplied java.sql.Types value.
        public SqlVal(final ResultSet rs, String col, int sqlType) throws SQLException {
            m_sqlType = sqlType;
            switch (m_sqlType) {
                case Types.INTEGER:
                    m_val = rs.getLong(col);
                    break;
                case Types.LONGVARCHAR:
                case Types.CHAR:
                    m_val = rs.getString(col);
                    break;
                case Types.TIMESTAMP:
                    m_val = rs.getTimestamp(col);
                    break;
                case Types.REAL:
                    m_val = rs.getFloat(col);
                    break;
                default:
                    assert false;
            }
        }

        public Long asLong() {
            return (Long) m_val;
        }

        public Timestamp asTimeStamp() {
            return (Timestamp) m_val;
        }

        public String asString() {
            return (String) m_val;
        }

        public Float asFloat() {
            return (Float) m_val;
        }

        public final static String EMPTY = "";

        @Override
        public String toString() {
            return (m_val != null) ? m_val.toString() : EMPTY;
        }

        public int getType() {
            return m_sqlType;
        }

        private final int m_sqlType;
        private Object m_val = false;
    }

    /**
     * Column index and type.
     */
    public static class PosType extends Pair<Integer, Integer> {

        public PosType(int col, String type) {
            super(col, getSqlType(type));
        }
    }

    // INSERT statement text built by setup(); columns except ID.
    private String m_insertStmt;
    // Column name (upper-cased) -> (1-based position, java.sql.Types).
    private Map<String, PosType> m_colInfo = null;
    private List<String> m_colNames = Util.emptyUnmodifiableList();
    private String m_tblName;
}
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.codeInsight.folding.impl; import com.intellij.codeInsight.folding.CodeFoldingManager; import com.intellij.codeInsight.hint.EditorFragmentComponent; import com.intellij.openapi.application.ApplicationManager; import com.intellij.openapi.components.ProjectComponent; import com.intellij.openapi.editor.Document; import com.intellij.openapi.editor.Editor; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.editor.FoldRegion; import com.intellij.openapi.editor.event.EditorMouseEvent; import com.intellij.openapi.editor.event.EditorMouseEventArea; import com.intellij.openapi.editor.event.EditorMouseMotionAdapter; import com.intellij.openapi.editor.ex.DocumentBulkUpdateListener; import com.intellij.openapi.editor.ex.EditorEx; import com.intellij.openapi.editor.ex.FoldingModelEx; import com.intellij.openapi.fileEditor.impl.text.CodeFoldingState; import com.intellij.openapi.project.DumbAwareRunnable; import com.intellij.openapi.project.Project; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.util.Key; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.WriteExternalException; import com.intellij.psi.PsiDocumentManager; import com.intellij.psi.PsiElement; import com.intellij.psi.PsiFile; import com.intellij.ui.LightweightHint; import com.intellij.util.containers.WeakList; import org.jdom.Element; 
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.awt.*;
import java.awt.event.MouseEvent;

/**
 * Project-level implementation of {@link CodeFoldingManager}: keeps per-document
 * folding state, restores it when editors open, and shows a lightweight preview
 * hint when the mouse hovers a folding anchor whose element start is scrolled
 * out of view.
 */
public class CodeFoldingManagerImpl extends CodeFoldingManager implements ProjectComponent {
  private final Project myProject;

  // Documents that carry a DocumentFoldingInfo in user data; weak so closed
  // documents can be collected.
  private final WeakList<Document> myDocumentsWithFoldingInfo = new WeakList<Document>();

  // Per-document saved folding info (this component instance's key).
  private final Key<DocumentFoldingInfo> FOLDING_INFO_IN_DOCUMENT_KEY = Key.create("FOLDING_INFO_IN_DOCUMENT_KEY");
  // Marker: fold regions have been computed for this document at least once.
  private static final Key<Boolean> FOLDING_STATE_INFO_IN_DOCUMENT_KEY = Key.create("FOLDING_STATE_IN_DOCUMENT");

  CodeFoldingManagerImpl(Project project) {
    myProject = project;
    // Drop cached folding info when a bulk document update begins.
    project.getMessageBus().connect().subscribe(DocumentBulkUpdateListener.TOPIC, new DocumentBulkUpdateListener.Adapter() {
      public void updateStarted(final Document doc) {
        resetFoldingInfo(doc);
      }
    });
  }

  @NotNull
  public String getComponentName() {
    return "CodeFoldingManagerImpl";
  }

  public void initComponent() { }

  public void disposeComponent() {
    // Clear our user data from every document we touched.
    for (Document document : myDocumentsWithFoldingInfo) {
      if (document != null) {
        document.putUserData(FOLDING_INFO_IN_DOCUMENT_KEY, null);
      }
    }
  }

  public void projectOpened() {
    final EditorMouseMotionAdapter myMouseMotionListener = new EditorMouseMotionAdapter() {
      LightweightHint myCurrentHint = null;
      FoldRegion myCurrentFold = null;

      public void mouseMoved(EditorMouseEvent e) {
        if (myProject.isDisposed()) return;
        // Only react in the folding outline gutter area.
        if (e.getArea() != EditorMouseEventArea.FOLDING_OUTLINE_AREA) return;
        LightweightHint hint = null;
        try {
          Editor editor = e.getEditor();
          if (PsiDocumentManager.getInstance(myProject).isUncommited(editor.getDocument())) return;

          MouseEvent mouseEvent = e.getMouseEvent();
          FoldRegion fold = ((EditorEx)editor).getGutterComponentEx().findFoldingAnchorAt(mouseEvent.getX(), mouseEvent.getY());
          if (fold == null) return;
          // Same fold as last time and hint still showing: keep it.
          if (fold == myCurrentFold && myCurrentHint != null) {
            hint = myCurrentHint;
            return;
          }

          TextRange psiElementRange = EditorFoldingInfo.get(editor).getPsiElementRange(fold);
          if (psiElementRange == null) return;

          int textOffset = psiElementRange.getStartOffset();
          Point foldStartXY = editor.visualPositionToXY(editor.offsetToVisualPosition(textOffset));
          Rectangle visibleArea = editor.getScrollingModel().getVisibleArea();
          // Only show a preview when the element start is above the viewport.
          if (visibleArea.y > foldStartXY.y) {
            if (myCurrentHint != null) {
              myCurrentHint.hide();
              myCurrentHint = null;
            }

            TextRange textRange = new TextRange(textOffset, fold.getStartOffset());
            hint = EditorFragmentComponent.showEditorFragmentHint(editor, textRange, true, true);
            myCurrentFold = fold;
            myCurrentHint = hint;
          }
        }
        finally {
          // Any early return above leaves hint == null: tear down stale state.
          if (hint == null) {
            if (myCurrentHint != null) {
              myCurrentHint.hide();
              myCurrentHint = null;
            }
            myCurrentFold = null;
          }
        }
      }
    };

    StartupManager.getInstance(myProject).registerPostStartupActivity(new DumbAwareRunnable() {
      public void run() {
        EditorFactory.getInstance().getEventMulticaster().addEditorMouseMotionListener(myMouseMotionListener, myProject);
      }
    });
  }

  @Override
  public void releaseFoldings(Editor editor) {
    final Project project = editor.getProject();
    if (project != null && !project.equals(myProject)) return;

    Document document = editor.getDocument();
    PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
    if (file == null || !file.getViewProvider().isPhysical() || !file.isValid()) return;
    PsiDocumentManager.getInstance(myProject).commitDocument(document);

    // Save folding state only when this is the last editor on the document.
    Editor[] otherEditors = EditorFactory.getInstance().getEditors(document, myProject);
    if (otherEditors.length == 0) {
      getDocumentFoldingInfo(document).loadFromEditor(editor);
    }
    EditorFoldingInfo.get(editor).dispose();
  }

  @Override
  public void buildInitialFoldings(final Editor editor) {
    final Project project = editor.getProject();
    if (project == null || !project.equals(myProject)) return;

    final Document document = editor.getDocument();
    //Do not save/restore folding for code fragments
    final PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(document);
    if (file == null ||
        !file.getViewProvider().isPhysical() && !ApplicationManager.getApplication().isUnitTestMode()) return;

    if (!((FoldingModelEx)editor.getFoldingModel()).isFoldingEnabled()) return;
    if (project.isDisposed() || editor.isDisposed() || !file.isValid()) return;

    PsiDocumentManager.getInstance(myProject).commitDocument(document);

    Runnable operation = new Runnable() {
      public void run() {
        // Compute fold regions (quick pass, default-collapsed states applied).
        Runnable runnable = updateFoldRegions(editor, true, true);
        if (runnable != null) {
          runnable.run();
        }

        DocumentFoldingInfo documentFoldingInfo = getDocumentFoldingInfo(document);
        // Prefer the live state of another open editor on the same document.
        Editor[] editors = EditorFactory.getInstance().getEditors(document, myProject);
        for (Editor otherEditor : editors) {
          if (otherEditor == editor) continue;
          documentFoldingInfo.loadFromEditor(otherEditor);
          break;
        }
        documentFoldingInfo.setToEditor(editor);

        documentFoldingInfo.clear();
      }
    };
    editor.getFoldingModel().runBatchFoldingOperationDoNotCollapseCaret(operation);
  }

  public void projectClosed() { }

  public FoldRegion findFoldRegion(@NotNull Editor editor, int startOffset, int endOffset) {
    return FoldingUtil.findFoldRegion(editor, startOffset, endOffset);
  }

  public FoldRegion[] getFoldRegionsAtOffset(@NotNull Editor editor, int offset) {
    return FoldingUtil.getFoldRegionsAtOffset(editor, offset);
  }

  public void updateFoldRegions(@NotNull Editor editor) {
    updateFoldRegions(editor, false);
  }

  public void updateFoldRegions(Editor editor, boolean quick) {
    PsiDocumentManager.getInstance(myProject).commitDocument(editor.getDocument());
    Runnable runnable = updateFoldRegions(editor, false, quick);
    if (runnable != null) {
      runnable.run();
    }
  }

  @Override
  public void forceDefaultState(@NotNull final Editor editor) {
    PsiDocumentManager.getInstance(myProject).commitDocument(editor.getDocument());
    Runnable runnable = updateFoldRegions(editor, true, false);
    if (runnable != null) {
      runnable.run();
    }

    // Re-apply the language's default collapsed/expanded state to every region.
    final FoldRegion[] regions = editor.getFoldingModel().getAllFoldRegions();
    editor.getFoldingModel().runBatchFoldingOperation(new Runnable() {
      public void run() {
        EditorFoldingInfo foldingInfo = EditorFoldingInfo.get(editor);
        for (FoldRegion region : regions) {
          PsiElement element = foldingInfo.getPsiElement(region);
          if (element != null) {
            region.setExpanded(!FoldingPolicy.isCollapseByDefault(element));
          }
        }
      }
    });
  }

  @Nullable
  public Runnable updateFoldRegionsAsync(@NotNull Editor editor, boolean firstTime) {
    return updateFoldRegions(editor, firstTime, false);
  }

  @Nullable
  private Runnable updateFoldRegions(Editor editor, boolean applyDefaultState, boolean quick) {
    PsiFile file = PsiDocumentManager.getInstance(myProject).getPsiFile(editor.getDocument());
    if (file != null) {
      // Mark the document so resetFoldingInfo knows state was computed.
      editor.getDocument().putUserData(FOLDING_STATE_INFO_IN_DOCUMENT_KEY, Boolean.TRUE);
      return FoldingUpdate.updateFoldRegions(editor, file, applyDefaultState, quick);
    }
    else {
      return null;
    }
  }

  public CodeFoldingState saveFoldingState(@NotNull Editor editor) {
    DocumentFoldingInfo info = getDocumentFoldingInfo(editor.getDocument());
    info.loadFromEditor(editor);
    return info;
  }

  public void restoreFoldingState(@NotNull Editor editor, @NotNull CodeFoldingState state) {
    ((DocumentFoldingInfo)state).setToEditor(editor);
  }

  public void writeFoldingState(@NotNull CodeFoldingState state, @NotNull Element element) throws WriteExternalException {
    ((DocumentFoldingInfo)state).writeExternal(element);
  }

  public CodeFoldingState readFoldingState(@NotNull Element element, @NotNull Document document) {
    DocumentFoldingInfo info = getDocumentFoldingInfo(document);
    info.readExternal(element);
    return info;
  }

  // Get-or-create the saved folding info attached to the document.
  private DocumentFoldingInfo getDocumentFoldingInfo(Document document) {
    DocumentFoldingInfo info = document.getUserData(FOLDING_INFO_IN_DOCUMENT_KEY);
    if (info == null) {
      info = new DocumentFoldingInfo(myProject, document);
      document.putUserData(FOLDING_INFO_IN_DOCUMENT_KEY, info);
      myDocumentsWithFoldingInfo.add(document);
    }
    return info;
  }

  // Clear cached folding info in all editors of a document (e.g. before a
  // bulk update invalidates offsets).
  private static void resetFoldingInfo(@NotNull final Document document) {
    final Boolean foldingInfoStatus = document.getUserData(FOLDING_STATE_INFO_IN_DOCUMENT_KEY);
    if (Boolean.TRUE.equals(foldingInfoStatus)) {
      final Editor[] editors = EditorFactory.getInstance().getEditors(document);
      for(Editor editor:editors) {
        EditorFoldingInfo.resetInfo(editor);
      }
      document.putUserData(FOLDING_STATE_INFO_IN_DOCUMENT_KEY, null);
    }
  }
}
/* * Copyright 2009-2020 Aarhus University * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package dk.brics.tajs.typescript; import dk.au.cs.casa.typescript.types.BooleanLiteral; import dk.au.cs.casa.typescript.types.InterfaceType; import dk.au.cs.casa.typescript.types.IntersectionType; import dk.au.cs.casa.typescript.types.NumberLiteral; import dk.au.cs.casa.typescript.types.Signature; import dk.au.cs.casa.typescript.types.SimpleType; import dk.au.cs.casa.typescript.types.SimpleTypeKind; import dk.au.cs.casa.typescript.types.StringLiteral; import dk.au.cs.casa.typescript.types.Type; import dk.au.cs.casa.typescript.types.UnionType; import dk.brics.tajs.analysis.PropVarOperations; import dk.brics.tajs.analysis.Solver; import dk.brics.tajs.analysis.js.Filtering; import dk.brics.tajs.lattice.FunctionTypeSignatures; import dk.brics.tajs.lattice.ObjectLabel; import dk.brics.tajs.lattice.PKey; import dk.brics.tajs.lattice.Restriction; import dk.brics.tajs.lattice.UnknownValueResolver; import dk.brics.tajs.lattice.Value; import dk.brics.tajs.util.Lists; import dk.brics.tajs.util.Pair; import dk.brics.tajs.util.Triple; import org.apache.log4j.Logger; import java.util.List; import java.util.Map; import java.util.Set; import static dk.brics.tajs.util.Collections.newList; import static dk.brics.tajs.util.Collections.newSet; /** * Type filtering using TypeScript types (assuming that the types are correct). 
*/
public class TypeFiltering {

    private static Logger log = Logger.getLogger(TypeFiltering.class);

//    static {
//        LogManager.getLogger(TypeFiltering.class).setLevel(Level.DEBUG);
//    }

    private Filtering filtering;

    private Solver.SolverInterface c;

    private PropVarOperations pv;

    public TypeFiltering(Solver.SolverInterface c) {
        this.c = c;
        filtering = c.getAnalysis().getFiltering();
        pv = c.getAnalysis().getPropVarOperations();
    }

    /**
     * Applies type filtering on module.exports according to the type (if non-null).
     */
    public void assumeModuleType(Type t, Value module) {
        if (t != null) {
            if (module.isMaybePrimitive()) {
                log.info("Expected abstract object as module, was " + module);
            }
            assumeObjectPropertyHasType(module.getObjectLabels(), PKey.StringPKey.make("exports"), t, newSet());
        }
    }

    /**
     * Applies type filtering on a function argument value.
     *
     * @param v value of the i'th argument.
     * @param signatures function type signatures from the TypeScript declaration.
     * @param i zero-based parameter index.
     * @return the (possibly restricted) argument value.
     */
    public Value assumeParameterType(Value v, FunctionTypeSignatures signatures, int i) {
        // TODO: any other important fields of signatures.getSignatures() or can they safely be ignored? (same for assumeReturnValueType)
        if (!signatures.isAny() && signatures.getSignatures().size() == 1) {
            List<Signature.Parameter> ps = signatures.getSignatures().iterator().next().getParameters();
            if (i < ps.size()) {
                Value res = assumeValueHasType(v, ps.get(i).getType(), newSet());
                if (log.isDebugEnabled() && c.isScanning() && res.isNone()) // TODO: move to monitoring?
                    log.debug("Function argument filtered to bottom at " + c.getNode().getSourceLocation());
                return res;
            }
            // TODO: filter away the entire function if i >= ps.size()?
        } else {
            log.debug("No type filtering implemented for function signatures " + signatures); // TODO: handle multiple signatures, like union
        }
        return v;
    }

    /**
     * Applies type filtering on a function return value.
     *
     * @param v the raw return value.
     * @param signatures function type signatures from the TypeScript declaration.
     * @return the (possibly restricted) return value.
     */
    public Value assumeReturnValueType(Value v, FunctionTypeSignatures signatures) {
        if (!signatures.isAny() && signatures.getSignatures().size() == 1) {
            Value res = assumeValueHasType(v, signatures.getSignatures().iterator().next().getResolvedReturnType(), newSet());
            if (log.isDebugEnabled() && c.isScanning() && res.isNone()) // TODO: move to monitoring? (note: this can happen at spurious call edges!)
                log.debug("Function return value filtered to bottom at " + c.getNode().getSourceLocation());
            return res;
        } else {
            log.debug("No type filtering implemented for function signatures " + signatures); // TODO: handle multiple signatures, like union
        }
        return v;
    }

    /**
     * Converts a simple type to a restriction.
     * Return null if there is no restriction for the given type.
     */
    private static Restriction simpleTypeToRestriction(SimpleType t) {
        switch (t.getKind()) {
            case String:
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeAnyStr());
            case Number:
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeAnyNum());
            case Boolean:
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeAnyBool());
            case Symbol:
                return new Restriction(Restriction.Kind.TYPEOF_SYMBOL);
            case Undefined:
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeUndef());
            case Object:
                return new Restriction(Restriction.Kind.TYPEOF_OBJECT_OR_FUNCTION);
            case Null:
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeNull());
            case Never:
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeNone());
            case Void:
                // void ~ undefined or null
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeUndef().joinNull());
            case Enum:
                // enum members are numbers or strings
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeAnyNumUInt().joinAnyStr());
            case Any:
                // do nothing
            default:
                return null;
        }
    }

    /**
     * Converts an object type to a restriction.
     *
     * @param t the object type.
     * @param functionSignatures declared call signatures of t.
     * @param constructorSignatures declared construct signatures of t.
     */
    private Restriction objectTypeToRestriction(Type t, List<Signature> functionSignatures, List<Signature> constructorSignatures) {
        ObjectLabel.Kind stdType = TypeScriptDeclLoader.getStdTypes().get(t);
        if (stdType != null)
            // a known standard-library object type (e.g. Array, Date)
            return new Restriction(Restriction.Kind.OBJKIND).set(stdType);
        else if (functionSignatures.isEmpty() && constructorSignatures.isEmpty())
            // the object property value must be an (non-function) object
            return new Restriction(Restriction.Kind.TYPEOF_OBJECT); // FIXME: call/constructor signatures may be inherited?
        else
            // the object property value must be a function, and when called the parameters and return value must match one of the signatures
            return new Restriction(Restriction.Kind.TYPED_FUNCTION).set(FunctionTypeSignatures.make(Lists.concat(functionSignatures, constructorSignatures))); // TODO: keep function signatures and constructor signatures separate
    }

    /**
     * Converts a primitive type (including unions) to a restriction.
     * Return null if there is no restriction for the given type.
     */
    private Restriction primitiveTypeToRestriction(Type t) {
        return t.accept(new DefaultTypeVisitor<Restriction>() {

            @Override
            public Restriction visit(SimpleType t) {
                return simpleTypeToRestriction(t);
            }

            @Override
            public Restriction visit(StringLiteral t) {
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeStr(t.getText()));
            }

            @Override
            public Restriction visit(BooleanLiteral t) {
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeBool(t.getValue()));
            }

            @Override
            public Restriction visit(NumberLiteral t) {
                return new Restriction(Restriction.Kind.STRICT_EQUAL).set(Value.makeNum(t.getValue()));
            }

            @Override
            public Restriction visit(UnionType t) {
                List<Restriction> rs = newList();
                for (Type t2 : t.getElements()) {
                    Restriction r = primitiveTypeToRestriction(t2);
                    if (r == null) // if one of the types can't be converted, abort
                        return null;
                    rs.add(r);
                }
                return new Restriction(Restriction.Kind.UNION).set(rs);
            }
        });
    }

    /**
     * Replaces interface types for boxed primitives by the corresponding primitive types.
     */
    private Type transformInterfaceTypeToSimpleType(Type t) {
        ObjectLabel.Kind stdType = TypeScriptDeclLoader.getStdTypes().get(t);
        if (stdType != null)
            switch (stdType) {
                case BOOLEAN:
                    return new SimpleType(SimpleTypeKind.Boolean);
                case NUMBER:
                    return new SimpleType(SimpleTypeKind.Number);
                case STRING:
                    return new SimpleType(SimpleTypeKind.String);
                case OBJECT:
                    return new SimpleType(SimpleTypeKind.Object);
            }
        return t;
    }

    /**
     * Assumes that the given object property has the given type.
     *
     * @param baseobjs objects holding the property.
     * @param propname property name.
     * @param t assumed type.
     * @param visited memoization set guarding against infinite recursion on cyclic types.
     */
    private void assumeObjectPropertyHasType(Set<ObjectLabel> baseobjs, PKey propname, Type t, Set<Triple<Set<ObjectLabel>, PKey, Type>> visited) {
        Triple<Set<ObjectLabel>, PKey, Type> p = Triple.make(baseobjs, propname, t);
        if (visited.contains(p))
            return;
        visited.add(p);
        t = transformInterfaceTypeToSimpleType(t);
        if (log.isDebugEnabled())
            log.debug("Type filtering object property " + baseobjs + "." + propname + " with " + t);
        Object r = t.accept(new DefaultTypeVisitor<Object>() {

            @Override
            public Object visit(InterfaceType t) {
                filtering.assumeObjectPropertySatisfies(baseobjs, propname, objectTypeToRestriction(t, t.getDeclaredCallSignatures(), t.getDeclaredConstructSignatures()));
                if (!t.getDeclaredProperties().isEmpty() && ObjectLabel.allowStrongUpdate(baseobjs)) {
                    // the object must have the properties described by t.declaredProperties
                    Value v = pv.readPropertyWithAttributes(baseobjs, propname.toValue());
                    v = UnknownValueResolver.getRealValue(v, c.getState());
                    for (Map.Entry<String, Type> propType : t.getDeclaredProperties().entrySet())
                        assumeObjectPropertyHasType(v.getObjectLabels(), PKey.StringPKey.make(propType.getKey()), propType.getValue(), visited);
                }
                for (Type bt : t.getBaseTypes()) // also restrict using the base types
                    assumeObjectPropertyHasType(baseobjs, propname, bt, visited);
                return true;
            }

            @Override
            public Object visit(IntersectionType t) {
                // apply every member of the intersection
                for (Type t2 : t.getElements())
                    assumeObjectPropertyHasType(baseobjs, propname, t2, visited);
                return true;
            }

            @Override
            public Object visitDefault(Type t) {
                return filtering.assumeObjectPropertySatisfies(baseobjs, propname, primitiveTypeToRestriction(t));
            }
        });
        if (r == null && !isAny(t))
            log.debug("No object property type filtering implemented for " + t); // TODO: TupleType, AnonymousType, ClassType, GenericType, ReferenceType, TypeParameterType, ClassInstanceType, ThisType, IndexType, IndexedAccessType? (see also assumeValueHasType)
    }

    // true iff t is the TypeScript 'any' type
    private boolean isAny(Type t) {
        return t instanceof SimpleType && ((SimpleType)t).getKind() == SimpleTypeKind.Any;
    }

    /**
     * Assumes that the given value has the given type.
     *
     * @param v the value to restrict.
     * @param t assumed type.
     * @param visited memoization set guarding against infinite recursion on cyclic types.
     * @return the (possibly restricted) value.
     */
    private Value assumeValueHasType(Value v, Type t, Set<Pair<Value, Type>> visited) {
        Pair<Value, Type> p = Pair.make(v, t);
        if (visited.contains(p))
            return v;
        visited.add(p);
        t = transformInterfaceTypeToSimpleType(t);
        if (log.isDebugEnabled())
            log.debug("Type filtering value " + v + " with " + t);
        Value v2 = t.accept(new DefaultTypeVisitor<Value>() {

            @Override
            public Value visit(InterfaceType t) {
                Value res = filtering.assumeValueSatisfies(v, objectTypeToRestriction(t, t.getDeclaredCallSignatures(), t.getDeclaredConstructSignatures()));
                Set<ObjectLabel> baseobjs = res.getObjectLabels();
                if (!t.getDeclaredProperties().isEmpty() && ObjectLabel.allowStrongUpdate(baseobjs))
                    // the object must have the properties described by t.declaredProperties
                    for (Map.Entry<String, Type> propType : t.getDeclaredProperties().entrySet())
                        assumeObjectPropertyHasType(baseobjs, PKey.StringPKey.make(propType.getKey()), propType.getValue(), newSet());
                for (Type bt : t.getBaseTypes()) // also restrict using the base types
                    res = assumeValueHasType(res, bt, visited);
                return res;
            }

            @Override
            public Value visit(IntersectionType t) {
                // apply every member of the intersection
                Value v2 = v;
                for (Type t2 : t.getElements())
                    v2 = assumeValueHasType(v2, t2, visited);
                return v2;
            }

            @Override
            public Value visitDefault(Type t) {
                return filtering.assumeValueSatisfies(v, primitiveTypeToRestriction(t));
            }
        });
        if (v2 == null) {
            if (!isAny(t))
                log.debug("No value type filtering implemented for " + t); // TODO: TupleType, AnonymousType, ClassType, GenericType, ReferenceType, TypeParameterType, ClassInstanceType, ThisType, IndexType, IndexedAccessType? (see also assumeObjectPropertyHasType)
            return v;
        }
        return v2;
    }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more contributor license
 * agreements. See the NOTICE file distributed with this work for additional information regarding
 * copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance with the License. You may obtain a
 * copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable
 * law or agreed to in writing, software distributed under the License is distributed on an "AS IS"
 * BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License
 * for the specific language governing permissions and limitations under the License.
 */
package org.apache.hadoop.hbase.replication.regionserver;

import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.math.BigInteger;
import java.security.SecureRandom;
import java.util.ArrayList;
import java.util.Deque;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.security.UserProvider;
import org.apache.hadoop.hbase.security.token.FsDelegationToken;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles.LoadQueueItem;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder;

/**
 * Replicates bulk-loaded HFile entries. It first copies all the hfiles in parallel to a local
 * staging directory and then uses {@link LoadIncrementalHFiles} to prepare a collection of
 * {@link LoadQueueItem} which is finally loaded (replicated) into the corresponding table of this
 * cluster.
 */
@InterfaceAudience.Private
public class HFileReplicator {
  /** Maximum number of threads to allow in pool to copy hfiles during replication */
  public static final String REPLICATION_BULKLOAD_COPY_MAXTHREADS_KEY =
      "hbase.replication.bulkload.copy.maxthreads";
  public static final int REPLICATION_BULKLOAD_COPY_MAXTHREADS_DEFAULT = 10;
  /** Number of hfiles to copy per thread during replication */
  public static final String REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_KEY =
      "hbase.replication.bulkload.copy.hfiles.perthread";
  public static final int REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT = 10;

  private static final Logger LOG = LoggerFactory.getLogger(HFileReplicator.class);
  private static final String UNDERSCORE = "_";
  private final static FsPermission PERM_ALL_ACCESS = FsPermission.valueOf("-rwxrwxrwx");

  /** Configuration used to open a FileSystem against the source (peer) cluster. */
  private Configuration sourceClusterConf;
  private String sourceBaseNamespaceDirPath;
  private String sourceHFileArchiveDirPath;
  /** table name -> list of (family, hfile relative paths) pairs to replicate. */
  private Map<String, List<Pair<byte[], List<String>>>> bulkLoadHFileMap;
  private FileSystem sinkFs;
  private FsDelegationToken fsDelegationToken;
  private UserProvider userProvider;
  private Configuration conf;
  private Connection connection;
  private Path hbaseStagingDir;
  private ThreadPoolExecutor exec;
  private int maxCopyThreads;
  private int copiesPerThread;

  /**
   * @param sourceClusterConf configuration of the source cluster whose FS holds the hfiles
   * @param sourceBaseNamespaceDirPath base namespace dir of the source cluster
   * @param sourceHFileArchiveDirPath hfile archive dir of the source cluster (fallback location)
   * @param tableQueueMap table name -> (family, hfile paths) pairs to replicate
   * @param conf sink cluster configuration
   * @param connection sink cluster connection used for the bulk load
   */
  public HFileReplicator(Configuration sourceClusterConf, String sourceBaseNamespaceDirPath,
      String sourceHFileArchiveDirPath, Map<String, List<Pair<byte[], List<String>>>> tableQueueMap,
      Configuration conf, Connection connection) throws IOException {
    this.sourceClusterConf = sourceClusterConf;
    this.sourceBaseNamespaceDirPath = sourceBaseNamespaceDirPath;
    this.sourceHFileArchiveDirPath = sourceHFileArchiveDirPath;
    this.bulkLoadHFileMap = tableQueueMap;
    this.conf = conf;
    this.connection = connection;

    userProvider = UserProvider.instantiate(conf);
    fsDelegationToken = new FsDelegationToken(userProvider, "renewer");
    this.hbaseStagingDir =
        new Path(FSUtils.getRootDir(conf), HConstants.BULKLOAD_STAGING_DIR_NAME);
    this.maxCopyThreads = this.conf.getInt(REPLICATION_BULKLOAD_COPY_MAXTHREADS_KEY,
        REPLICATION_BULKLOAD_COPY_MAXTHREADS_DEFAULT);
    ThreadFactoryBuilder builder = new ThreadFactoryBuilder();
    builder.setNameFormat("HFileReplicationCallable-%1$d");
    this.exec = new ThreadPoolExecutor(maxCopyThreads, maxCopyThreads, 60, TimeUnit.SECONDS,
        new LinkedBlockingQueue<>(), builder.build());
    // Let idle copy threads die off between replication batches.
    this.exec.allowCoreThreadTimeOut(true);
    this.copiesPerThread = conf.getInt(REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_KEY,
        REPLICATION_BULKLOAD_COPY_HFILES_PERTHREAD_DEFAULT);

    sinkFs = FileSystem.get(conf);
  }

  /**
   * Copies the hfiles locally and bulk-loads them into each destination table.
   * @return always {@code null}; failures are reported through {@link IOException}
   */
  public Void replicate() throws IOException {
    // Copy all the hfiles to the local file system
    Map<String, Path> tableStagingDirsMap = copyHFilesToStagingDir();

    int maxRetries = conf.getInt(HConstants.BULKLOAD_MAX_RETRIES_NUMBER, 10);

    for (Entry<String, Path> tableStagingDir : tableStagingDirsMap.entrySet()) {
      String tableNameString = tableStagingDir.getKey();
      Path stagingDir = tableStagingDir.getValue();
      LoadIncrementalHFiles loadHFiles = null;
      try {
        loadHFiles = new LoadIncrementalHFiles(conf);
      } catch (Exception e) {
        LOG.error("Failed to initialize LoadIncrementalHFiles for replicating bulk loaded"
            + " data.", e);
        throw new IOException(e);
      }
      Configuration newConf = HBaseConfiguration.create(conf);
      // Tables must already exist on the sink; never auto-create them here.
      newConf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
      loadHFiles.setConf(newConf);

      TableName tableName = TableName.valueOf(tableNameString);
      Table table = this.connection.getTable(tableName);

      // Prepare collection of queue of hfiles to be loaded(replicated)
      Deque<LoadQueueItem> queue = new LinkedList<>();
      loadHFiles.prepareHFileQueue(stagingDir, table, queue, false);

      if (queue.isEmpty()) {
        LOG.warn("Replication process did not find any files to replicate in directory "
            + stagingDir.toUri());
        // BUGFIX: previously this returned null here, which leaked the open Table, left the
        // staging directory behind, and silently skipped replication of every remaining table
        // in the map. Clean up this table's resources and move on to the next one instead.
        cleanup(stagingDir.toString(), table);
        continue;
      }

      try (RegionLocator locator = connection.getRegionLocator(tableName)) {
        fsDelegationToken.acquireDelegationToken(sinkFs);
        // Set the staging directory which will be used by LoadIncrementalHFiles for loading the
        // data
        loadHFiles.setBulkToken(stagingDir.toString());
        doBulkLoad(loadHFiles, table, queue, locator, maxRetries);
      } finally {
        cleanup(stagingDir.toString(), table);
      }
    }
    return null;
  }

  /**
   * Repeatedly attempts to bulk-load the queued hfiles, reloading the region split keys each
   * iteration (splits may happen while loading), until the queue drains or retries are exhausted.
   * @throws IOException when {@code maxRetries} (if non-zero) attempts did not drain the queue
   */
  private void doBulkLoad(LoadIncrementalHFiles loadHFiles, Table table,
      Deque<LoadQueueItem> queue, RegionLocator locator, int maxRetries) throws IOException {
    int count = 0;
    Pair<byte[][], byte[][]> startEndKeys;
    while (!queue.isEmpty()) {
      // need to reload split keys each iteration.
      startEndKeys = locator.getStartEndKeys();
      if (count != 0) {
        LOG.warn("Error occurred while replicating HFiles, retry attempt " + count + " with "
            + queue.size() + " files still remaining to replicate.");
      }

      if (maxRetries != 0 && count >= maxRetries) {
        throw new IOException(
            "Retry attempted " + count + " times without completing, bailing out.");
      }
      count++;

      // Try bulk load
      loadHFiles.loadHFileQueue(table, connection, queue, startEndKeys);
    }
  }

  /**
   * Best-effort release of per-table resources: delegation token, staging directory and table.
   * Never throws; failures are logged. The sink FileSystem is deliberately NOT closed — it is
   * shared via the FileSystem cache.
   */
  private void cleanup(String stagingDir, Table table) {
    // Release the file system delegation token
    fsDelegationToken.releaseDelegationToken();
    // Delete the staging directory
    if (stagingDir != null) {
      try {
        sinkFs.delete(new Path(stagingDir), true);
      } catch (IOException e) {
        LOG.warn("Failed to delete the staging directory " + stagingDir, e);
      }
    }
    // Close the table
    if (table != null) {
      try {
        table.close();
      } catch (IOException e) {
        LOG.warn("Failed to close the table.", e);
      }
    }
  }

  /**
   * Copies every hfile referenced by {@link #bulkLoadHFileMap} from the source cluster FS into a
   * fresh per-table staging directory on the sink FS, fanning the copies out over the thread pool
   * in batches of {@link #copiesPerThread}.
   * @return table name -> staging directory containing that table's copied hfiles
   */
  private Map<String, Path> copyHFilesToStagingDir() throws IOException {
    Map<String, Path> mapOfCopiedHFiles = new HashMap<>();
    Pair<byte[], List<String>> familyHFilePathsPair;
    List<String> hfilePaths;
    byte[] family;
    Path familyStagingDir;
    int familyHFilePathsPairsListSize;
    int totalNoOfHFiles;
    List<Pair<byte[], List<String>>> familyHFilePathsPairsList;
    FileSystem sourceFs = null;

    try {
      Path sourceClusterPath = new Path(sourceBaseNamespaceDirPath);
      /*
       * Path#getFileSystem will by default get the FS from cache. If both source and sink cluster
       * has same FS name service then it will return peer cluster FS. To avoid this we explicitly
       * disable the loading of FS from cache, so that a new FS is created with source cluster
       * configuration.
       */
      String sourceScheme = sourceClusterPath.toUri().getScheme();
      String disableCacheName =
          String.format("fs.%s.impl.disable.cache", new Object[] { sourceScheme });
      sourceClusterConf.setBoolean(disableCacheName, true);

      sourceFs = sourceClusterPath.getFileSystem(sourceClusterConf);

      User user = userProvider.getCurrent();

      // For each table name in the map
      for (Entry<String, List<Pair<byte[], List<String>>>> tableEntry : bulkLoadHFileMap
          .entrySet()) {
        String tableName = tableEntry.getKey();

        // Create staging directory for each table
        Path stagingDir = createStagingDir(hbaseStagingDir, user, TableName.valueOf(tableName));

        familyHFilePathsPairsList = tableEntry.getValue();
        familyHFilePathsPairsListSize = familyHFilePathsPairsList.size();

        // For each list of family hfile paths pair in the table
        for (int i = 0; i < familyHFilePathsPairsListSize; i++) {
          familyHFilePathsPair = familyHFilePathsPairsList.get(i);

          family = familyHFilePathsPair.getFirst();
          hfilePaths = familyHFilePathsPair.getSecond();

          familyStagingDir = new Path(stagingDir, Bytes.toString(family));
          totalNoOfHFiles = hfilePaths.size();

          // For each list of hfile paths for the family
          List<Future<Void>> futures = new ArrayList<>();
          Callable<Void> c;
          Future<Void> future;
          int currentCopied = 0;
          // Copy the hfiles in parallel, copiesPerThread files per submitted task
          while (totalNoOfHFiles > currentCopied + this.copiesPerThread) {
            c = new Copier(sourceFs, familyStagingDir,
                hfilePaths.subList(currentCopied, currentCopied + this.copiesPerThread));
            future = exec.submit(c);
            futures.add(future);
            currentCopied += this.copiesPerThread;
          }

          int remaining = totalNoOfHFiles - currentCopied;
          if (remaining > 0) {
            c = new Copier(sourceFs, familyStagingDir,
                hfilePaths.subList(currentCopied, currentCopied + remaining));
            future = exec.submit(c);
            futures.add(future);
          }

          for (Future<Void> f : futures) {
            try {
              f.get();
            } catch (InterruptedException e) {
              InterruptedIOException iioe = new InterruptedIOException(
                  "Failed to copy HFiles to local file system. This will be retried again "
                      + "by the source cluster.");
              iioe.initCause(e);
              throw iioe;
            } catch (ExecutionException e) {
              throw new IOException("Failed to copy HFiles to local file system. This will "
                  + "be retried again by the source cluster.", e);
            }
          }
        }
        // Add the staging directory to this table. Staging directory contains all the hfiles
        // belonging to this table
        mapOfCopiedHFiles.put(tableName, stagingDir);
      }
      return mapOfCopiedHFiles;
    } finally {
      if (sourceFs != null) {
        sourceFs.close();
      }
      if (exec != null) {
        exec.shutdown();
      }
    }
  }

  /**
   * Creates a unique, world-accessible staging directory for the given table, named
   * {@code <user>__<table>__<random>} so concurrent replications cannot collide.
   */
  private Path createStagingDir(Path baseDir, User user, TableName tableName) throws IOException {
    String tblName = tableName.getNameAsString().replace(":", UNDERSCORE);
    int RANDOM_WIDTH = 320;
    int RANDOM_RADIX = 32;
    String doubleUnderScore = UNDERSCORE + UNDERSCORE;
    String randomDir = user.getShortName() + doubleUnderScore + tblName + doubleUnderScore
        + (new BigInteger(RANDOM_WIDTH, new SecureRandom()).toString(RANDOM_RADIX));
    return createStagingDir(baseDir, user, randomDir);
  }

  /** Creates {@code baseDir/randomDir} on the sink FS with full (rwxrwxrwx) permissions. */
  private Path createStagingDir(Path baseDir, User user, String randomDir) throws IOException {
    Path p = new Path(baseDir, randomDir);
    sinkFs.mkdirs(p, PERM_ALL_ACCESS);
    sinkFs.setPermission(p, PERM_ALL_ACCESS);
    return p;
  }

  /**
   * Copies the given hfiles from the source cluster file system to the sink cluster staging
   * directory, falling back to the source's hfile archive directory when a file has already been
   * archived. A file missing from both locations is logged and skipped.
   */
  private class Copier implements Callable<Void> {
    private FileSystem sourceFs;
    private Path stagingDir;
    private List<String> hfiles;

    public Copier(FileSystem sourceFs, final Path stagingDir, final List<String> hfiles)
        throws IOException {
      this.sourceFs = sourceFs;
      this.stagingDir = stagingDir;
      this.hfiles = hfiles;
    }

    @Override
    public Void call() throws IOException {
      Path sourceHFilePath;
      Path localHFilePath;
      int totalHFiles = hfiles.size();
      for (int i = 0; i < totalHFiles; i++) {
        sourceHFilePath = new Path(sourceBaseNamespaceDirPath, hfiles.get(i));
        localHFilePath = new Path(stagingDir, sourceHFilePath.getName());
        try {
          FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
          // If any other exception other than FNFE then we will fail the replication requests and
          // source will retry to replicate these data.
        } catch (FileNotFoundException e) {
          LOG.info("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
              + ". Trying to copy from hfile archive directory.", e);
          sourceHFilePath = new Path(sourceHFileArchiveDirPath, hfiles.get(i));

          try {
            FileUtil.copy(sourceFs, sourceHFilePath, sinkFs, localHFilePath, false, conf);
          } catch (FileNotFoundException e1) {
            // This will mean that the hfile does not exists any where in source cluster FS. So we
            // cannot do anything here just log and continue.
            LOG.debug("Failed to copy hfile from " + sourceHFilePath + " to " + localHFilePath
                + ". Hence ignoring this hfile from replication..", e1);
            continue;
          }
        }
        sinkFs.setPermission(localHFilePath, PERM_ALL_ACCESS);
      }
      return null;
    }
  }
}
/*
 * Copyright 2016 Google Inc. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.schemaorg.core;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.schemaorg.SchemaOrgType;
import com.google.schemaorg.SchemaOrgTypeImpl;
import com.google.schemaorg.ValueType;
import com.google.schemaorg.core.datatype.Number;
import com.google.schemaorg.core.datatype.Text;
import com.google.schemaorg.core.datatype.URL;
import com.google.schemaorg.goog.GoogConstants;
import com.google.schemaorg.goog.PopularityScoreSpecification;

/**
 * Implementation of {@link PropertyValueSpecification}. Generated-style boilerplate: the builder
 * stores every property as a (property-name, value) entry in an underlying multimap inherited from
 * {@link SchemaOrgTypeImpl.BuilderImpl}; {@code String} overloads wrap the raw value in
 * {@link Text}, and {@code *.Builder} overloads call {@code build()} before storing.
 */
public class PropertyValueSpecificationImpl extends IntangibleImpl
    implements PropertyValueSpecification {

  // Full set of property names (fully-qualified with their namespace) this type accepts.
  private static final ImmutableSet<String> PROPERTY_SET = initializePropertySet();

  // Builds PROPERTY_SET once at class-load time; queried by includesProperty(String).
  private static ImmutableSet<String> initializePropertySet() {
    ImmutableSet.Builder<String> builder = ImmutableSet.builder();
    builder.add(CoreConstants.PROPERTY_ADDITIONAL_TYPE);
    builder.add(CoreConstants.PROPERTY_ALTERNATE_NAME);
    builder.add(CoreConstants.PROPERTY_DEFAULT_VALUE);
    builder.add(CoreConstants.PROPERTY_DESCRIPTION);
    builder.add(CoreConstants.PROPERTY_IMAGE);
    builder.add(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE);
    builder.add(CoreConstants.PROPERTY_MAX_VALUE);
    builder.add(CoreConstants.PROPERTY_MIN_VALUE);
    builder.add(CoreConstants.PROPERTY_MULTIPLE_VALUES);
    builder.add(CoreConstants.PROPERTY_NAME);
    builder.add(CoreConstants.PROPERTY_POTENTIAL_ACTION);
    builder.add(CoreConstants.PROPERTY_READONLY_VALUE);
    builder.add(CoreConstants.PROPERTY_SAME_AS);
    builder.add(CoreConstants.PROPERTY_STEP_VALUE);
    builder.add(CoreConstants.PROPERTY_URL);
    builder.add(CoreConstants.PROPERTY_VALUE_MAX_LENGTH);
    builder.add(CoreConstants.PROPERTY_VALUE_MIN_LENGTH);
    builder.add(CoreConstants.PROPERTY_VALUE_NAME);
    builder.add(CoreConstants.PROPERTY_VALUE_PATTERN);
    builder.add(CoreConstants.PROPERTY_VALUE_REQUIRED);
    builder.add(GoogConstants.PROPERTY_DETAILED_DESCRIPTION);
    builder.add(GoogConstants.PROPERTY_POPULARITY_SCORE);
    return builder.build();
  }

  /** Builder: every method delegates to addProperty(propertyName, value) and returns this. */
  static final class BuilderImpl extends SchemaOrgTypeImpl.BuilderImpl<PropertyValueSpecification.Builder>
      implements PropertyValueSpecification.Builder {

    @Override public PropertyValueSpecification.Builder addAdditionalType(URL value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, value); }
    @Override public PropertyValueSpecification.Builder addAdditionalType(String value) { return addProperty(CoreConstants.PROPERTY_ADDITIONAL_TYPE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addAlternateName(Text value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, value); }
    @Override public PropertyValueSpecification.Builder addAlternateName(String value) { return addProperty(CoreConstants.PROPERTY_ALTERNATE_NAME, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addDefaultValue(Text value) { return addProperty(CoreConstants.PROPERTY_DEFAULT_VALUE, value); }
    @Override public PropertyValueSpecification.Builder addDefaultValue(Thing value) { return addProperty(CoreConstants.PROPERTY_DEFAULT_VALUE, value); }
    @Override public PropertyValueSpecification.Builder addDefaultValue(Thing.Builder value) { return addProperty(CoreConstants.PROPERTY_DEFAULT_VALUE, value.build()); }
    @Override public PropertyValueSpecification.Builder addDefaultValue(String value) { return addProperty(CoreConstants.PROPERTY_DEFAULT_VALUE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addDescription(Text value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, value); }
    @Override public PropertyValueSpecification.Builder addDescription(String value) { return addProperty(CoreConstants.PROPERTY_DESCRIPTION, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addImage(ImageObject value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); }
    @Override public PropertyValueSpecification.Builder addImage(ImageObject.Builder value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value.build()); }
    @Override public PropertyValueSpecification.Builder addImage(URL value) { return addProperty(CoreConstants.PROPERTY_IMAGE, value); }
    @Override public PropertyValueSpecification.Builder addImage(String value) { return addProperty(CoreConstants.PROPERTY_IMAGE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addMainEntityOfPage(CreativeWork value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); }
    @Override public PropertyValueSpecification.Builder addMainEntityOfPage(CreativeWork.Builder value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value.build()); }
    @Override public PropertyValueSpecification.Builder addMainEntityOfPage(URL value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, value); }
    @Override public PropertyValueSpecification.Builder addMainEntityOfPage(String value) { return addProperty(CoreConstants.PROPERTY_MAIN_ENTITY_OF_PAGE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addMaxValue(Number value) { return addProperty(CoreConstants.PROPERTY_MAX_VALUE, value); }
    @Override public PropertyValueSpecification.Builder addMaxValue(String value) { return addProperty(CoreConstants.PROPERTY_MAX_VALUE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addMinValue(Number value) { return addProperty(CoreConstants.PROPERTY_MIN_VALUE, value); }
    @Override public PropertyValueSpecification.Builder addMinValue(String value) { return addProperty(CoreConstants.PROPERTY_MIN_VALUE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addMultipleValues(Boolean value) { return addProperty(CoreConstants.PROPERTY_MULTIPLE_VALUES, value); }
    @Override public PropertyValueSpecification.Builder addMultipleValues(String value) { return addProperty(CoreConstants.PROPERTY_MULTIPLE_VALUES, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addName(Text value) { return addProperty(CoreConstants.PROPERTY_NAME, value); }
    @Override public PropertyValueSpecification.Builder addName(String value) { return addProperty(CoreConstants.PROPERTY_NAME, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addPotentialAction(Action value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value); }
    @Override public PropertyValueSpecification.Builder addPotentialAction(Action.Builder value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, value.build()); }
    @Override public PropertyValueSpecification.Builder addPotentialAction(String value) { return addProperty(CoreConstants.PROPERTY_POTENTIAL_ACTION, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addReadonlyValue(Boolean value) { return addProperty(CoreConstants.PROPERTY_READONLY_VALUE, value); }
    @Override public PropertyValueSpecification.Builder addReadonlyValue(String value) { return addProperty(CoreConstants.PROPERTY_READONLY_VALUE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addSameAs(URL value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, value); }
    @Override public PropertyValueSpecification.Builder addSameAs(String value) { return addProperty(CoreConstants.PROPERTY_SAME_AS, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addStepValue(Number value) { return addProperty(CoreConstants.PROPERTY_STEP_VALUE, value); }
    @Override public PropertyValueSpecification.Builder addStepValue(String value) { return addProperty(CoreConstants.PROPERTY_STEP_VALUE, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addUrl(URL value) { return addProperty(CoreConstants.PROPERTY_URL, value); }
    @Override public PropertyValueSpecification.Builder addUrl(String value) { return addProperty(CoreConstants.PROPERTY_URL, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addValueMaxLength(Number value) { return addProperty(CoreConstants.PROPERTY_VALUE_MAX_LENGTH, value); }
    @Override public PropertyValueSpecification.Builder addValueMaxLength(String value) { return addProperty(CoreConstants.PROPERTY_VALUE_MAX_LENGTH, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addValueMinLength(Number value) { return addProperty(CoreConstants.PROPERTY_VALUE_MIN_LENGTH, value); }
    @Override public PropertyValueSpecification.Builder addValueMinLength(String value) { return addProperty(CoreConstants.PROPERTY_VALUE_MIN_LENGTH, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addValueName(Text value) { return addProperty(CoreConstants.PROPERTY_VALUE_NAME, value); }
    @Override public PropertyValueSpecification.Builder addValueName(String value) { return addProperty(CoreConstants.PROPERTY_VALUE_NAME, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addValuePattern(Text value) { return addProperty(CoreConstants.PROPERTY_VALUE_PATTERN, value); }
    @Override public PropertyValueSpecification.Builder addValuePattern(String value) { return addProperty(CoreConstants.PROPERTY_VALUE_PATTERN, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addValueRequired(Boolean value) { return addProperty(CoreConstants.PROPERTY_VALUE_REQUIRED, value); }
    @Override public PropertyValueSpecification.Builder addValueRequired(String value) { return addProperty(CoreConstants.PROPERTY_VALUE_REQUIRED, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addDetailedDescription(Article value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value); }
    @Override public PropertyValueSpecification.Builder addDetailedDescription(Article.Builder value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, value.build()); }
    @Override public PropertyValueSpecification.Builder addDetailedDescription(String value) { return addProperty(GoogConstants.PROPERTY_DETAILED_DESCRIPTION, Text.of(value)); }
    @Override public PropertyValueSpecification.Builder addPopularityScore(PopularityScoreSpecification value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value); }
    @Override public PropertyValueSpecification.Builder addPopularityScore(PopularityScoreSpecification.Builder value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, value.build()); }
    @Override public PropertyValueSpecification.Builder addPopularityScore(String value) { return addProperty(GoogConstants.PROPERTY_POPULARITY_SCORE, Text.of(value)); }

    // Materializes the immutable instance from the accumulated property/reverse-property maps.
    @Override public PropertyValueSpecification build() { return new PropertyValueSpecificationImpl(properties, reverseMap); }
  }

  public PropertyValueSpecificationImpl(
      Multimap<String, ValueType> properties, Multimap<String, Thing> reverseMap) {
    super(properties, reverseMap);
  }

  @Override public String getFullTypeName() { return CoreConstants.TYPE_PROPERTY_VALUE_SPECIFICATION; }

  // Accepts the property either fully-qualified or as a short name under either namespace.
  @Override
  public boolean includesProperty(String property) {
    return PROPERTY_SET.contains(CoreConstants.NAMESPACE + property)
        || PROPERTY_SET.contains(GoogConstants.NAMESPACE + property)
        || PROPERTY_SET.contains(property);
  }

  // Typed accessors: each returns all values stored under the corresponding property name.
  @Override public ImmutableList<SchemaOrgType> getDefaultValueList() { return getProperty(CoreConstants.PROPERTY_DEFAULT_VALUE); }
  @Override public ImmutableList<SchemaOrgType> getMaxValueList() { return getProperty(CoreConstants.PROPERTY_MAX_VALUE); }
  @Override public ImmutableList<SchemaOrgType> getMinValueList() { return getProperty(CoreConstants.PROPERTY_MIN_VALUE); }
  @Override public ImmutableList<SchemaOrgType> getMultipleValuesList() { return getProperty(CoreConstants.PROPERTY_MULTIPLE_VALUES); }
  @Override public ImmutableList<SchemaOrgType> getReadonlyValueList() { return getProperty(CoreConstants.PROPERTY_READONLY_VALUE); }
  @Override public ImmutableList<SchemaOrgType> getStepValueList() { return getProperty(CoreConstants.PROPERTY_STEP_VALUE); }
  @Override public ImmutableList<SchemaOrgType> getValueMaxLengthList() { return getProperty(CoreConstants.PROPERTY_VALUE_MAX_LENGTH); }
  @Override public ImmutableList<SchemaOrgType> getValueMinLengthList() { return getProperty(CoreConstants.PROPERTY_VALUE_MIN_LENGTH); }
  @Override public ImmutableList<SchemaOrgType> getValueNameList() { return getProperty(CoreConstants.PROPERTY_VALUE_NAME); }
  @Override public ImmutableList<SchemaOrgType> getValuePatternList() { return getProperty(CoreConstants.PROPERTY_VALUE_PATTERN); }
  @Override public ImmutableList<SchemaOrgType> getValueRequiredList() { return getProperty(CoreConstants.PROPERTY_VALUE_REQUIRED); }
}
/**
 * Copyright 2010 The Apache Software Foundation
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Scan;

import java.io.IOException;
import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.NavigableSet;

/**
 * Scanner scans both the memstore and the HStore. Coalesce KeyValue stream
 * into List&lt;KeyValue&gt; for a single row.
 *
 * Internal state machine: while {@code heap != null} the scanner is live; when the Store's
 * readers change ({@link #updateReaders()}), {@code heap} is nulled and the last top key is
 * remembered in {@code lastTop} so the next call can rebuild the scanner stack and reseek
 * ({@link #checkReseek()} / {@link #resetScannerStack(KeyValue)}). {@code heap == null} with
 * {@code lastTop == null} means the scanner is closed.
 */
class StoreScanner implements KeyValueScanner, InternalScanner, ChangedReadersObserver {
  static final Log LOG = LogFactory.getLog(StoreScanner.class);
  // Store being scanned; null only in the testing constructor.
  private Store store;
  // Decides per-KeyValue whether to INCLUDE/SKIP/SEEK/DONE for the current scan spec.
  private ScanQueryMatcher matcher;
  // Merge-heap over memstore + storefile scanners; null while a reseek is pending or when closed.
  private KeyValueHeap heap;
  private boolean cacheBlocks;

  // Used to indicate that the scanner has closed (see HBASE-1107)
  // Doesnt need to be volatile because it's always accessed via synchronized methods
  private boolean closing = false;
  private final boolean isGet;

  // if heap == null and lastTop != null, you need to reseek given the key below
  private KeyValue lastTop = null;

  /**
   * Opens a scanner across memstore, snapshot, and all StoreFiles.
   *
   * @param store who we scan
   * @param scan the spec
   * @param columns which columns we are scanning
   * @throws IOException
   */
  StoreScanner(Store store, Scan scan, final NavigableSet<byte[]> columns)
              throws IOException {
    this.store = store;
    this.cacheBlocks = scan.getCacheBlocks();
    matcher = new ScanQueryMatcher(scan, store.getFamily().getName(),
        columns, store.ttl, store.comparator.getRawComparator(),
        store.versionsToReturn(scan.getMaxVersions()), false);

    this.isGet = scan.isGetScan();
    // pass columns = try to filter out unnecessary ScanFiles
    List<KeyValueScanner> scanners = getScanners(scan, columns);

    // Seek all scanners to the start of the Row (or if the exact maching row key does not
    // exist, then to the start of the next matching Row).
    for(KeyValueScanner scanner : scanners) {
      scanner.seek(matcher.getStartKey());
    }

    // Combine all seeked scanners with a heap
    heap = new KeyValueHeap(scanners, store.comparator);

    // Register so updateReaders() is invoked on compaction/flush reader changes.
    this.store.addChangedReaderObserver(this);
  }

  /**
   * Used for major compactions.<p>
   *
   * Opens a scanner across specified StoreFiles.
   * @param store who we scan
   * @param scan the spec
   * @param scanners ancilliary scanners
   */
  StoreScanner(Store store, Scan scan, List<? extends KeyValueScanner> scanners,
      boolean retainDeletesInOutput)
  throws IOException {
    this.store = store;
    this.cacheBlocks = false;
    this.isGet = false;
    matcher = new ScanQueryMatcher(scan, store.getFamily().getName(),
        null, store.ttl, store.comparator.getRawComparator(),
        store.versionsToReturn(scan.getMaxVersions()), retainDeletesInOutput);

    // Seek all scanners to the initial key
    for(KeyValueScanner scanner : scanners) {
      scanner.seek(matcher.getStartKey());
    }

    // Combine all seeked scanners with a heap
    // NOTE: no ChangedReaderObserver registration here — compaction scans a fixed file set.
    heap = new KeyValueHeap(scanners, store.comparator);
  }

  // Constructor for testing.
  StoreScanner(final Scan scan, final byte [] colFamily, final long ttl,
      final KeyValue.KVComparator comparator,
      final NavigableSet<byte[]> columns,
      final List<KeyValueScanner> scanners)
        throws IOException {
    this.store = null;
    this.isGet = false;
    this.cacheBlocks = scan.getCacheBlocks();
    this.matcher = new ScanQueryMatcher(scan, colFamily, columns, ttl,
        comparator.getRawComparator(), scan.getMaxVersions(), false);

    // Seek all scanners to the initial key
    for(KeyValueScanner scanner : scanners) {
      scanner.seek(matcher.getStartKey());
    }
    heap = new KeyValueHeap(scanners, comparator);
  }

  /*
   * @return List of scanners ordered properly.
   */
  private List<KeyValueScanner> getScanners() throws IOException {
    // First the store file scanners

    // TODO this used to get the store files in descending order,
    // but now we get them in ascending order, which I think is
    // actually more correct, since memstore get put at the end.
    List<StoreFileScanner> sfScanners = StoreFileScanner
      .getScannersForStoreFiles(store.getStorefiles(), cacheBlocks, isGet);
    List<KeyValueScanner> scanners =
      new ArrayList<KeyValueScanner>(sfScanners.size()+1);
    scanners.addAll(sfScanners);
    // Then the memstore scanners
    scanners.addAll(this.store.memstore.getScanners());
    return scanners;
  }

  /*
   * @return List of scanners to seek, possibly filtered by StoreFile.
   * Honors InternalScan's memstore-only / files-only restrictions, and drops store files
   * whose metadata (bloom filter / timerange) proves they cannot match the scan.
   */
  private List<KeyValueScanner> getScanners(Scan scan,
      final NavigableSet<byte[]> columns) throws IOException {
    boolean memOnly;
    boolean filesOnly;
    if (scan instanceof InternalScan) {
      InternalScan iscan = (InternalScan)scan;
      memOnly = iscan.isCheckOnlyMemStore();
      filesOnly = iscan.isCheckOnlyStoreFiles();
    } else {
      memOnly = false;
      filesOnly = false;
    }
    List<KeyValueScanner> scanners = new LinkedList<KeyValueScanner>();
    // First the store file scanners
    if (memOnly == false) {
      List<StoreFileScanner> sfScanners = StoreFileScanner
        .getScannersForStoreFiles(store.getStorefiles(), cacheBlocks, isGet);

      // include only those scan files which pass all filters
      for (StoreFileScanner sfs : sfScanners) {
        if (sfs.shouldSeek(scan, columns)) {
          scanners.add(sfs);
        }
      }
    }

    // Then the memstore scanners
    if ((filesOnly == false) && (this.store.memstore.shouldSeek(scan))) {
        scanners.addAll(this.store.memstore.getScanners());
    }
    return scanners;
  }

  // Returns the current top KeyValue without advancing; after updateReaders() the remembered
  // lastTop is returned so callers see a consistent position while the heap is rebuilt lazily.
  public synchronized KeyValue peek() {
    if (this.heap == null) {
      return this.lastTop;
    }
    return this.heap.peek();
  }

  // KeyValueScanner.next() is unsupported here; use next(List) / next(List, int).
  public KeyValue next() {
    // throw runtime exception perhaps?
    throw new RuntimeException("Never call StoreScanner.next()");
  }

  // Idempotent close: unregister from the store, close the heap, and mark closed
  // (heap == null && lastTop == null).
  public synchronized void close() {
    if (this.closing) return;
    this.closing = true;
    // under test, we dont have a this.store
    if (this.store != null)
      this.store.deleteChangedReaderObserver(this);
    if (this.heap != null)
      this.heap.close();
    this.heap = null; // CLOSED!
    this.lastTop = null; // If both are null, we are closed.
  }

  // Seeks to (or past) the given key, lazily rebuilding the heap if it was torn down.
  public synchronized boolean seek(KeyValue key) throws IOException {
    if (this.heap == null) {
      List<KeyValueScanner> scanners = getScanners();

      heap = new KeyValueHeap(scanners, store.comparator);
    }

    return this.heap.seek(key);
  }

  /**
   * Get the next row of values from this Store.
   * @param outResult
   * @param limit max KeyValues to return in this call; &lt;= 0 means no limit
   * @return true if there are more rows, false if scanner is done
   */
  public synchronized boolean next(List<KeyValue> outResult, int limit) throws IOException {
    //DebugPrint.println("SS.next");
    // Rebuild + reseek the scanner stack first if updateReaders() tore it down.
    checkReseek();

    // if the heap was left null, then the scanners had previously run out anyways, close and
    // return.
    if (this.heap == null) {
      close();
      return false;
    }

    KeyValue peeked = this.heap.peek();
    if (peeked == null) {
      close();
      return false;
    }

    // only call setRow if the row changes; avoids confusing the query matcher
    // if scanning intra-row
    if ((matcher.row == null) || !peeked.matchingRow(matcher.row)) {
      matcher.setRow(peeked.getRow());
    }

    KeyValue kv;
    List<KeyValue> results = new ArrayList<KeyValue>();

    // Drive the heap through the matcher until the row (or scan) is done.
    LOOP: while((kv = this.heap.peek()) != null) {
      // kv is no longer immutable due to KeyOnlyFilter! use copy for safety
      KeyValue copyKv = kv.shallowCopy();
      ScanQueryMatcher.MatchCode qcode = matcher.match(copyKv);
      //DebugPrint.println("SS peek kv = " + kv + " with qcode = " + qcode);
      switch(qcode) {
        case INCLUDE:
          results.add(copyKv);
          this.heap.next();
          if (limit > 0 && (results.size() == limit)) {
            break LOOP;
          }
          continue;

        case DONE:
          // copy jazz
          outResult.addAll(results);
          return true;

        case DONE_SCAN:
          close();

          // copy jazz
          outResult.addAll(results);

          return false;

        case SEEK_NEXT_ROW:
          // This is just a relatively simple end of scan fix, to short-cut end us if there is a
          // endKey in the scan.
          if (!matcher.moreRowsMayExistAfter(kv)) {
            outResult.addAll(results);
            return false;
          }

          reseek(matcher.getKeyForNextRow(kv));
          break;

        case SEEK_NEXT_COL:
          reseek(matcher.getKeyForNextColumn(kv));
          break;

        case SKIP:
          this.heap.next();
          break;

        case SEEK_NEXT_USING_HINT:
          KeyValue nextKV = matcher.getNextKeyHint(kv);
          if (nextKV != null) {
            reseek(nextKV);
          } else {
            heap.next();
          }
          break;

        default:
          throw new RuntimeException("UNEXPECTED");
      }
    }

    if (!results.isEmpty()) {
      // copy jazz
      outResult.addAll(results);
      return true;
    }

    // No more keys
    close();
    return false;
  }

  // Convenience overload: next row with no per-call KeyValue limit.
  public synchronized boolean next(List<KeyValue> outResult) throws IOException {
    return next(outResult, -1);
  }

  // Implementation of ChangedReadersObserver
  // Called when the Store's file set changes (flush/compaction): remember the current position
  // in lastTop, drop the heap, and let the next caller rebuild + reseek lazily.
  public synchronized void updateReaders() throws IOException {
    if (this.closing) return;

    // All public synchronized API calls will call 'checkReseek' which will cause
    // the scanner stack to reseek if this.heap==null && this.lastTop != null.
    // But if two calls to updateReaders() happen without a 'next' or 'peek' then we
    // will end up calling this.peek() which would cause a reseek in the middle of a updateReaders
    // which is NOT what we want, not to mention could cause an NPE. So we early out here.

    // this could be null.
    if (this.heap == null) return;

    this.lastTop = this.peek();

    //DebugPrint.println("SS updateReaders, topKey = " + lastTop);

    // close scanners to old obsolete Store files
    this.heap.close(); // bubble thru and close all scanners.
    this.heap = null; // the re-seeks could be slow (access HDFS) free up memory ASAP

    // Let the next() call handle re-creating and seeking
  }

  // Rebuilds the stack iff updateReaders() left a pending reseek (heap==null, lastTop!=null).
  private void checkReseek() throws IOException {
    if (this.heap == null && this.lastTop != null) {
      resetScannerStack(this.lastTop);
      this.lastTop = null; // gone!
    }
    // else dont need to reseek
  }

  // Recreates all scanners against the current store files, seeks them to where the scan left
  // off, and re-syncs the matcher to the row at that position.
  private void resetScannerStack(KeyValue lastTopKey) throws IOException {
    if (heap != null) {
      throw new RuntimeException("StoreScanner.reseek run on an existing heap!");
    }

    /* When we have the scan object, should we not pass it to getScanners()
     * to get a limited set of scanners? We did so in the constructor and we
     * could have done it now by storing the scan object from the constructor */
    List<KeyValueScanner> scanners = getScanners();

    for(KeyValueScanner scanner : scanners) {
      scanner.seek(lastTopKey);
    }

    // Combine all seeked scanners with a heap
    heap = new KeyValueHeap(scanners, store.comparator);

    // Reset the state of the Query Matcher and set to top row.
    // Only reset and call setRow if the row changes; avoids confusing the
    // query matcher if scanning intra-row.
    KeyValue kv = heap.peek();
    if (kv == null) {
      kv = lastTopKey;
    }
    if ((matcher.row == null) || !kv.matchingRow(matcher.row)) {
      matcher.reset();
      matcher.setRow(kv.getRow());
    }
  }

  @Override
  public synchronized boolean reseek(KeyValue kv) throws IOException {
    //Heap cannot be null, because this is only called from next() which
    //guarantees that heap will never be null before this call.
    return this.heap.reseek(kv);
  }

  @Override
  public long getSequenceID() {
    return 0;
  }
}
package org.apache.solr.search.facet;

/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.util.SimpleOrderedMap;
import org.apache.solr.handler.component.ResponseBuilder;
import org.apache.solr.request.SolrRequestInfo;
import org.apache.solr.schema.SchemaField;
import org.apache.solr.search.BitDocSet;
import org.apache.solr.search.DocIterator;
import org.apache.solr.search.DocSet;
import org.apache.solr.search.QParser;
import org.apache.solr.search.SolrIndexSearcher;
import org.apache.solr.search.SyntaxError;

/**
 * Executes one facet request ({@code freq}) against a {@link FacetContext}
 * (searcher + base document domain). This base class implements the parts
 * shared by all facet types: domain changes (tagged-filter exclusion and
 * block-join domain transforms), stat-accumulator lifecycle, per-segment
 * document collection, and bucket/sub-facet filling.
 */
public class FacetProcessor<FacetRequestT extends FacetRequest>  {
  // Response fragment produced by this processor (populated by subclasses).
  protected SimpleOrderedMap<Object> response;
  // Context this facet runs against: searcher, base domain, parent chain.
  protected FacetContext fcontext;
  // The parsed facet request being processed.
  protected FacetRequestT freq;

  // Stat accumulators keyed by output name, in request order.
  LinkedHashMap<String,SlotAcc> accMap;
  // Same accumulators as accMap.values(), as an array for fast iteration.
  protected SlotAcc[] accs;
  // Accumulator for bucket counts; may be pre-set to a custom impl (see createAccs).
  protected CountSlotAcc countAcc;

  FacetProcessor(FacetContext fcontext, FacetRequestT freq) {
    this.fcontext = fcontext;
    this.freq = freq;
  }

  /** Runs the facet. The base implementation only applies domain changes. */
  public void process() throws IOException {
    handleDomainChanges();
  }

  /**
   * Applies any requested domain changes (tagged-filter exclusions, then
   * block-join transforms) to {@code fcontext.base} before faceting.
   */
  protected void handleDomainChanges() throws IOException {
    if (freq.domain == null) return;
    handleFilterExclusions();
    handleBlockJoin();
  }

  /**
   * Maps the current domain to children or to parents via a block join,
   * using {@code freq.domain.parents} as the parent-filter specification.
   */
  private void handleBlockJoin() throws IOException {
    if (!(freq.domain.toChildren || freq.domain.toParent)) return;

    // TODO: avoid query parsing per-bucket somehow...
    String parentStr = freq.domain.parents;
    Query parentQuery;
    try {
      QParser parser = QParser.getParser(parentStr, null, fcontext.req);
      parentQuery = parser.getQuery();
    } catch (SyntaxError err) {
      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing block join parent specification: " + parentStr);
    }

    BitDocSet parents = fcontext.searcher.getDocSetBits(parentQuery);
    DocSet input = fcontext.base;
    DocSet result;

    if (freq.domain.toChildren) {
      // child filter restricting which children qualify; currently matches all docs
      DocSet filt = fcontext.searcher.getDocSetBits( new MatchAllDocsQuery() );
      result = BlockJoin.toChildren(input, parents, filt, fcontext.qcontext);
    } else {
      result = BlockJoin.toParents(input, parents, fcontext.qcontext);
    }

    // replace the base domain with the joined domain
    fcontext.base = result;
  }

  /**
   * Recomputes {@code fcontext.base} with the filters tagged by
   * {@code freq.domain.excludeTags} removed (multi-select faceting).
   * Rebuilds the domain from the base query plus all non-excluded filters,
   * plus the filters accumulated up the facet-context chain.
   */
  private void handleFilterExclusions() throws IOException {
    List<String> excludeTags = freq.domain.excludeTags;

    if (excludeTags == null || excludeTags.size() == 0) {
      return;
    }

    // TODO: somehow remove responsebuilder dependency
    ResponseBuilder rb = SolrRequestInfo.getRequestInfo().getResponseBuilder();
    Map tagMap = (Map) rb.req.getContext().get("tags");
    if (tagMap == null) {
      // no filters were tagged
      return;
    }

    // Identity map: we exclude the exact Query instances registered under the tags.
    IdentityHashMap<Query,Boolean> excludeSet = new IdentityHashMap<>();
    for (String excludeTag : excludeTags) {
      Object olst = tagMap.get(excludeTag);
      // tagMap has entries of List<String,List<QParser>>, but subject to change in the future
      if (!(olst instanceof Collection)) continue;
      for (Object o : (Collection<?>)olst) {
        if (!(o instanceof QParser)) continue;
        QParser qp = (QParser)o;
        try {
          excludeSet.put(qp.getQuery(), Boolean.TRUE);
        } catch (SyntaxError syntaxError) {
          // This should not happen since we should only be retrieving a previously parsed query
          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, syntaxError);
        }
      }
    }
    if (excludeSet.size() == 0) return;

    List<Query> qlist = new ArrayList<>();

    // add the base query
    if (!excludeSet.containsKey(rb.getQuery())) {
      qlist.add(rb.getQuery());
    }

    // add the filters
    if (rb.getFilters() != null) {
      for (Query q : rb.getFilters()) {
        if (!excludeSet.containsKey(q)) {
          qlist.add(q);
        }
      }
    }

    // now walk back up the context tree
    // TODO: we lose parent exclusions...
    for (FacetContext curr = fcontext; curr != null; curr = curr.parent) {
      if (curr.filter != null) {
        qlist.add( curr.filter );
      }
    }

    // recompute the base domain
    fcontext.base = fcontext.searcher.getDocSet(qlist);
  }

  /** Response fragment for this facet; base implementation has none. */
  public Object getResponse() {
    return null;
  }

  /**
   * Instantiates the count accumulator (unless a subclass already provided a
   * custom one) and one SlotAcc per requested stat, then caches them in
   * {@code accMap}/{@code accs}.
   *
   * @param docCount  number of documents in the domain (sizing hint)
   * @param slotCount number of slots (buckets) the accumulators must track
   */
  protected void createAccs(int docCount, int slotCount) throws IOException {
    accMap = new LinkedHashMap<>();

    // allow a custom count acc to be used
    if (countAcc == null) {
      countAcc = new CountSlotArrAcc(fcontext, slotCount);
      countAcc.key = "count";
    }

    for (Map.Entry<String,AggValueSource> entry : freq.getFacetStats().entrySet()) {
      SlotAcc acc = entry.getValue().createSlotAcc(fcontext, docCount, slotCount);
      acc.key = entry.getKey();
      accMap.put(acc.key, acc);
    }

    accs = new SlotAcc[accMap.size()];
    int i=0;
    for (SlotAcc acc : accMap.values()) {
      accs[i++] = acc;
    }
  }

  /** Resets the count accumulator and every stat accumulator for reuse. */
  protected void resetStats() {
    countAcc.reset();
    for (SlotAcc acc : accs) {
      acc.reset();
    }
  }

  /**
   * Computes the requested stats over {@code docs} into slot 0 and adds them
   * (plus "count") to {@code bucket}. Short-circuits to just the count when
   * the bucket is empty (and empty buckets are not requested) or when no
   * stats were requested.
   */
  protected void processStats(SimpleOrderedMap<Object> bucket, DocSet docs, int docCount) throws IOException {
    if (docCount == 0 && !freq.processEmpty || freq.getFacetStats().size() == 0) {
      bucket.add("count", docCount);
      return;
    }
    createAccs(docCount, 1);
    int collected = collect(docs, 0);
    countAcc.incrementCount(0, collected);
    assert collected == docCount;
    addStats(bucket, 0);
  }

  /**
   * Processes each sub-facet of this request against {@code domain} and adds
   * its response under the sub-facet's key. Skipped when the domain is null
   * or empty (unless empty buckets were requested).
   *
   * @param response bucket the sub-facet responses are added to
   * @param filter   the filter that produced this bucket's domain
   * @param domain   document domain the sub-facets operate on
   */
  protected void processSubs(SimpleOrderedMap<Object> response, Query filter, DocSet domain) throws IOException {
    // TODO: what if a zero bucket has a sub-facet with an exclusion that would yield results?
    // should we check for domain-altering exclusions, or even ask the sub-facet for
    // it's domain and then only skip it if it's 0?
    if (domain == null || domain.size() == 0 && !freq.processEmpty) {
      return;
    }

    for (Map.Entry<String,FacetRequest> sub : freq.getSubFacets().entrySet()) {
      // make a new context for each sub-facet since they can change the domain
      FacetContext subContext = fcontext.sub(filter, domain);
      FacetProcessor subProcessor = sub.getValue().createFacetProcessor(subContext);
      subProcessor.process();
      response.add( sub.getKey(), subProcessor.getResponse() );
    }
  }

  /**
   * Collects every document in {@code docs} into {@code slot}, advancing the
   * per-segment reader context as the (sorted, global) doc ids cross segment
   * boundaries.
   *
   * @return the number of documents collected
   */
  int collect(DocSet docs, int slot) throws IOException {
    int count = 0;
    SolrIndexSearcher searcher = fcontext.searcher;

    final List<LeafReaderContext> leaves = searcher.getIndexReader().leaves();
    final Iterator<LeafReaderContext> ctxIt = leaves.iterator();
    LeafReaderContext ctx = null;
    int segBase = 0;       // docBase of the current segment
    int segMax;            // maxDoc of the current segment
    int adjustedMax = 0;   // first global doc id past the current segment
    for (DocIterator docsIt = docs.iterator(); docsIt.hasNext(); ) {
      final int doc = docsIt.nextDoc();
      if (doc >= adjustedMax) {
        // advance to the segment containing this doc
        do {
          ctx = ctxIt.next();
          if (ctx == null) {
            // should be impossible
            throw new RuntimeException("INTERNAL FACET ERROR");
          }
          segBase = ctx.docBase;
          segMax = ctx.reader().maxDoc();
          adjustedMax = segBase + segMax;
        } while (doc >= adjustedMax);
        assert doc >= ctx.docBase;
        setNextReader(ctx);
      }
      count++;
      collect(doc - segDocOf(doc, segBase), slot);  // per-seg collectors
    }
    return count;
  }

  /** Feeds one segment-local doc id into every stat accumulator. */
  void collect(int segDoc, int slot) throws IOException {
    if (accs != null) {
      for (SlotAcc acc : accs) {
        acc.collect(segDoc, slot);
      }
    }
  }

  /** Notifies every stat accumulator of a segment change. */
  void setNextReader(LeafReaderContext ctx) throws IOException {
    // countAcc.setNextReader is a no-op
    for (SlotAcc acc : accs) {
      acc.setNextReader(ctx);
    }
  }

  /**
   * Adds "count" plus (when the slot is non-empty, or empty buckets were
   * requested) every accumulator's value for {@code slotNum} to the target.
   */
  void addStats(SimpleOrderedMap<Object> target, int slotNum) throws IOException {
    int count = countAcc.getCount(slotNum);
    target.add("count", count);
    if (count > 0 || freq.processEmpty) {
      for (SlotAcc acc : accs) {
        acc.setValues(target, slotNum);
      }
    }
  }

  /**
   * Fills one facet bucket: resolves the bucket's DocSet (only materializing
   * it when stats or sub-facets actually need one), then computes stats and
   * processes sub-facets into {@code bucket}.
   *
   * @param bucket response map for this bucket
   * @param q      query defining the bucket, or null for the whole base domain
   * @param result pre-computed DocSet for the bucket, or null to compute here
   */
  public void fillBucket(SimpleOrderedMap<Object> bucket, Query q, DocSet result) throws IOException {
    boolean needDocSet = freq.getFacetStats().size() > 0 || freq.getSubFacets().size() > 0;

    // TODO: always collect counts or not???

    int count;

    if (result != null) {
      count = result.size();
    } else if (needDocSet) {
      if (q == null) {
        result = fcontext.base;
        // result.incref(); // OFF-HEAP
      } else {
        result = fcontext.searcher.getDocSet(q, fcontext.base);
      }
      count = result.size();
    } else {
      // counts only; avoid materializing a DocSet
      if (q == null) {
        count = fcontext.base.size();
      } else {
        count = fcontext.searcher.numDocs(q, fcontext.base);
      }
    }

    try {
      processStats(bucket, result, (int) count);
      processSubs(bucket, q, result);
    } finally {
      if (result != null) {
        // result.decref(); // OFF-HEAP
        result = null;
      }
    }
  }

  /**
   * @return the subset of {@code docs} whose {@code fieldName} has no value
   *         (docs minus those matching an open-ended range query on the field)
   */
  public static DocSet getFieldMissing(SolrIndexSearcher searcher, DocSet docs, String fieldName) throws IOException {
    SchemaField sf = searcher.getSchema().getField(fieldName);
    DocSet hasVal = searcher.getDocSet(sf.getType().getRangeQuery(null, sf, null, null, false, false));
    DocSet answer = docs.andNot(hasVal);
    // hasVal.decref(); // OFF-HEAP
    return answer;
  }

  /**
   * @return a query matching documents that have NO value in {@code fieldName}
   *         (MUST_NOT over an open-ended range query on the field)
   */
  public static Query getFieldMissingQuery(SolrIndexSearcher searcher, String fieldName) throws IOException {
    SchemaField sf = searcher.getSchema().getField(fieldName);
    Query hasVal = sf.getType().getRangeQuery(null, sf, null, null, false, false);
    BooleanQuery.Builder noVal = new BooleanQuery.Builder();
    noVal.add(hasVal, BooleanClause.Occur.MUST_NOT);
    return noVal.build();
  }
}
/*
 * This file is part of choco-solver, http://choco-solver.org/
 *
 * Copyright (c) 2019, IMT Atlantique. All rights reserved.
 *
 * Licensed under the BSD 4-clause license.
 *
 * See LICENSE file in the project root for full license information.
 */
package org.chocosolver.solver.variables;

import org.chocosolver.solver.Cause;
import org.chocosolver.solver.Model;
import org.chocosolver.solver.constraints.checker.DomainBuilder;
import org.chocosolver.solver.exception.ContradictionException;
import org.chocosolver.util.iterators.DisposableRangeIterator;
import org.chocosolver.util.iterators.DisposableValueIterator;
import org.testng.Assert;
import org.testng.annotations.Test;

import java.util.Random;

import static org.chocosolver.solver.search.strategy.Search.randomSearch;
import static org.testng.Assert.assertEquals;

/**
 * Tests for the "minus" view (Y = -X) created by {@code Model#intMinusView}.
 * <br/>
 *
 * @author Charles Prud'homme
 * @since 23/08/11
 */
public class ViewMinusTest {

    /**
     * Checks that domain queries and domain-modification operations on the
     * minus view Y mirror correctly onto the underlying variable X.
     * Note: each modification is followed by an explicit propagate() when
     * views are disabled (the view is then simulated by a constraint).
     */
    @Test(groups="1s", timeOut=60000)
    public void test1() {
        Model model = new Model();

        IntVar X = model.intVar("X", 1, 10, false);
        IntVar Y = model.intMinusView(X);

        try {
            if(!model.getSettings().enableViews())
                model.getSolver().propagate();
            // initial domain of Y is the mirrored domain of X: [-10, -1]
            Assert.assertFalse(Y.isInstantiated());
            Assert.assertEquals(Y.getLB(), -10);
            Assert.assertEquals(Y.getUB(), -1);
            Assert.assertTrue(Y.contains(-5));
            Assert.assertEquals(Y.nextValue(-11), -10);
            Assert.assertEquals(Y.nextValue(-5), -4);
            Assert.assertEquals(Y.nextValue(-1), Integer.MAX_VALUE);
            Assert.assertEquals(Y.previousValue(0), -1);
            Assert.assertEquals(Y.previousValue(-4), -5);
            Assert.assertEquals(Y.previousValue(-10), Integer.MIN_VALUE);

            // lower bound on Y tightens the upper bound of X
            Y.updateLowerBound(-9, Cause.Null);
            if(!model.getSettings().enableViews())
                model.getSolver().propagate();
            Assert.assertEquals(Y.getLB(), -9);
            Assert.assertEquals(X.getUB(), 9);

            // upper bound on Y tightens the lower bound of X
            Y.updateUpperBound(-2, Cause.Null);
            if(!model.getSettings().enableViews())
                model.getSolver().propagate();
            Assert.assertEquals(Y.getUB(), -2);
            Assert.assertEquals(X.getLB(), 2);

            // value removal on Y removes the mirrored value from X
            Y.removeValue(-4, Cause.Null);
            if(!model.getSettings().enableViews())
                model.getSolver().propagate();
            Assert.assertFalse(Y.contains(-4));
            Assert.assertFalse(X.contains(4));

            // interval removal on Y removes the mirrored interval from X
            Y.removeInterval(-8, -6, Cause.Null);
            if(!model.getSettings().enableViews())
                model.getSolver().propagate();
            Assert.assertFalse(Y.contains(-8));
            Assert.assertFalse(Y.contains(-7));
            Assert.assertFalse(Y.contains(-6));
            Assert.assertFalse(X.contains(6));
            Assert.assertFalse(X.contains(7));
            Assert.assertFalse(X.contains(8));

            Assert.assertEquals(X.getDomainSize(), 4);
            Assert.assertEquals(Y.getDomainSize(), 4);

            // instantiating Y instantiates X to the negated value
            Y.instantiateTo(-5, Cause.Null);
            if(!model.getSettings().enableViews())
                model.getSolver().propagate();
            Assert.assertTrue(X.isInstantiated());
            Assert.assertTrue(Y.isInstantiated());
            Assert.assertEquals(X.getValue(), 5);
            Assert.assertEquals(Y.getValue(), -5);

        } catch (ContradictionException ignored) {
            // none of the operations above should fail; an unexpected
            // contradiction simply ends the scenario without failing asserts
        }
    }

    /**
     * Solution-count equivalence on bounded (interval) domains: x + y = 0
     * with an explicit y must enumerate as many solutions as with y built
     * as a minus view of x, across many random search orders.
     */
    @Test(groups="10s", timeOut=60000)
    public void test2() {
        Random random = new Random();
        for (int seed = 0; seed < 2000; seed++) {
            random.setSeed(seed);
            Model ref = new Model();
            {
                IntVar[] xs = new IntVar[2];
                xs[0] = ref.intVar("x", 1, 15, true);
                xs[1] = ref.intVar("y", -15, -1, true);
                ref.sum(xs, "=", 0).post();
                ref.getSolver().setSearch(randomSearch(xs, seed));
            }
            Model model = new Model();
            {
                IntVar[] xs = new IntVar[2];
                xs[0] = model.intVar("x", 1, 15, true);
                xs[1] = model.intMinusView(xs[0]);
                model.sum(xs, "=", 0).post();
                model.getSolver().setSearch(randomSearch(xs, seed));
            }
            while (ref.getSolver().solve()) ;
            while (model.getSolver().solve()) ;
            assertEquals(model.getSolver().getSolutionCount(), ref.getSolver().getSolutionCount());
        }
    }

    /**
     * Same solution-count equivalence as test2, but on enumerated domains
     * (the boolean flag to intVar is false).
     */
    @Test(groups="10s", timeOut=60000)
    public void test3() {
        Random random = new Random();
        for (int seed = 0; seed < 2000; seed++) {
            random.setSeed(seed);
            Model ref = new Model();
            {
                IntVar[] xs = new IntVar[2];
                xs[0] = ref.intVar("x", 1, 15, false);
                xs[1] = ref.intVar("y", -15, -1, false);
                ref.sum(xs, "=", 0).post();
                ref.getSolver().setSearch(randomSearch(xs, seed));
            }
            Model model = new Model();
            {
                IntVar[] xs = new IntVar[2];
                xs[0] = model.intVar("x", 1, 15, false);
                xs[1] = model.intMinusView(xs[0]);
                model.sum(xs, "=", 0).post();
                model.getSolver().setSearch(randomSearch(xs, seed));
            }
            while (ref.getSolver().solve()) ;
            while (model.getSolver().solve()) ;
            assertEquals(model.getSolver().getSolutionCount(), ref.getSolver().getSolutionCount());
        }
    }

    /**
     * Value/range iterators of the view over a bounded variable: every value
     * (and range endpoint) seen through the view, in both directions, must
     * exist negated in the original variable's domain.
     */
    @Test(groups="1s", timeOut=60000)
    public void testIt1() {
        Random random = new Random();
        for (int seed = 0; seed < 200; seed++) {
            random.setSeed(seed);
            Model model = new Model();
            int[][] domains = DomainBuilder.buildFullDomains(1, -5, 5, random, random.nextDouble(), random.nextBoolean());
            IntVar o = model.intVar("o", domains[0][0], domains[0][domains[0].length - 1], true);
            IntVar v = model.intMinusView(o);
            DisposableValueIterator vit = v.getValueIterator(true);
            while (vit.hasNext()) {
                Assert.assertTrue(o.contains(-vit.next()));
            }
            vit.dispose();
            vit = v.getValueIterator(false);
            while (vit.hasPrevious()) {
                Assert.assertTrue(o.contains(-vit.previous()));
            }
            vit.dispose();
            DisposableRangeIterator rit = v.getRangeIterator(true);
            while (rit.hasNext()) {
                Assert.assertTrue(o.contains(-rit.min()));
                Assert.assertTrue(o.contains(-rit.max()));
                rit.next();
            }
            rit = v.getRangeIterator(false);
            while (rit.hasPrevious()) {
                Assert.assertTrue(o.contains(-rit.min()));
                Assert.assertTrue(o.contains(-rit.max()));
                rit.previous();
            }
        }
    }

    /**
     * Same iterator checks as testIt1, but over an enumerated (possibly
     * holey) domain built from an explicit value array.
     */
    @Test(groups="1s", timeOut=60000)
    public void testIt2() {
        Random random = new Random();
        for (int seed = 0; seed < 200; seed++) {
            random.setSeed(seed);
            Model model = new Model();
            int[][] domains = DomainBuilder.buildFullDomains(1, -5, 5, random, random.nextDouble(), random.nextBoolean());
            IntVar o = model.intVar("o", domains[0]);
            IntVar v = model.intMinusView(o);
            if(!model.getSettings().enableViews()){
                try {
                    // force the view-simulating constraint to propagate first
                    model.getSolver().propagate();
                }catch (Exception e){
                    e.printStackTrace();
                    throw new UnsupportedOperationException();
                }
            }
            DisposableValueIterator vit = v.getValueIterator(true);
            while (vit.hasNext()) {
                Assert.assertTrue(o.contains(-vit.next()));
            }
            vit.dispose();
            vit = v.getValueIterator(false);
            while (vit.hasPrevious()) {
                Assert.assertTrue(o.contains(-vit.previous()));
            }
            vit.dispose();
            DisposableRangeIterator rit = v.getRangeIterator(true);
            while (rit.hasNext()) {
                Assert.assertTrue(o.contains(-rit.min()));
                Assert.assertTrue(o.contains(-rit.max()));
                rit.next();
            }
            rit = v.getRangeIterator(false);
            while (rit.hasPrevious()) {
                Assert.assertTrue(o.contains(-rit.min()));
                Assert.assertTrue(o.contains(-rit.max()));
                rit.previous();
            }
        }
    }

    /**
     * Regression test: v_0 * v_1 = v_2 with v_1 fixed to -4 must yield
     * exactly 2 solutions after propagation and search.
     */
    @Test(groups="1s", timeOut=60000)
    public void testB1() throws ContradictionException {
        Model model = new Model();
        IntVar v_0 = model.intVar("v_0",new int[]{-3,-2,-1,0,3,4});
        IntVar v_1 = model.intVar("v_1",-4);
        IntVar v_2 = model.intVar("v_2",new int[]{-3,-2,-1,0,4});
        model.times(v_0, v_1, v_2).post();
        System.out.println(model.toString());
        model.getSolver().propagate();
        System.out.println(model.toString());
        while(model.getSolver().solve());
        Assert.assertEquals(model.getSolver().getSolutionCount(), 2);
    }
}
package com.handstudio.android.hzgrapherlib.graphview;

import java.util.WeakHashMap;

import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.DashPathEffect;
import android.graphics.LinearGradient;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Paint.Style;
import android.graphics.Rect;
import android.graphics.Shader;
import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;

import com.handstudio.android.hzgrapherlib.animation.GraphAnimation;
import com.handstudio.android.hzgrapherlib.canvas.GraphCanvasWrapper;
import com.handstudio.android.hzgrapherlib.error.ErrorCode;
import com.handstudio.android.hzgrapherlib.error.ErrorDetector;
import com.handstudio.android.hzgrapherlib.path.GraphPath;
import com.handstudio.android.hzgrapherlib.util.Spline;
import com.handstudio.android.hzgrapherlib.vo.GraphNameBox;
import com.handstudio.android.hzgrapherlib.vo.curvegraph.CurveGraphVO;

/**
 * SurfaceView that renders a curve (spline) graph described by a
 * {@link CurveGraphVO}. Drawing happens on a dedicated background thread
 * (DrawThread) that is started/stopped with the surface lifecycle.
 */
public class CurveGraphView extends SurfaceView implements Callback{
	public static final String TAG = "CurveGraphView";
	private SurfaceHolder mHolder;
	// Background render thread; non-null only while the surface exists.
	private DrawThread mDrawThread;

	// Graph description (data series, paddings, animation, etc.).
	private CurveGraphVO mCurveGraphVO = null;

	// Spline currently used for interpolation; reassigned per series while drawing.
	private Spline spline = null;

	//Constructor
	public CurveGraphView(Context context, CurveGraphVO vo) {
		super(context);
		mCurveGraphVO = vo;
		initView(context, vo);
	}

	/** Validates the graph VO and registers this view as surface callback. */
	private void initView(Context context, CurveGraphVO vo) {
		ErrorCode ec = ErrorDetector.checkGraphObject(vo);
		ec.printError();

		mHolder = getHolder();
		mHolder.addCallback(this);
	}

	@Override
	public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
		// no-op: sizes are captured when DrawThread is constructed
	}

	@Override
	public void surfaceCreated(SurfaceHolder holder) {
		// start the render thread once the surface is ready
		if(mDrawThread == null){
			mDrawThread = new DrawThread(mHolder, getContext());
			mDrawThread.start();
		}
	}

	@Override
	public void surfaceDestroyed(SurfaceHolder holder) {
		// signal the render loop to exit; the thread drains on its own
		if(mDrawThread != null){
			mDrawThread.setRunFlag(false);
			mDrawThread = null;
		}
	}

	private static final Object touchLock = new Object(); // touch synchronize

	@Override
	public boolean onTouchEvent(MotionEvent event) {
		int action = event.getAction();

		if(mDrawThread == null ){
			return false;
		}

		// any touch marks the frame dirty so the render loop redraws
		if(action == MotionEvent.ACTION_DOWN){
			synchronized (touchLock) {
				mDrawThread.isDirty = true;
			}
			return true;
		}else if(action == MotionEvent.ACTION_MOVE){
			synchronized (touchLock) {
				mDrawThread.isDirty = true;
			}
			return true;
		}else if(action == MotionEvent.ACTION_UP){
			synchronized (touchLock) {
				mDrawThread.isDirty = true;
			}
			return true;
		}

		return super.onTouchEvent(event);
	}

	/**
	 * Background render loop. Redraws the whole graph whenever isDirty is
	 * set, and drives the intro animation when one is configured.
	 */
	class DrawThread extends Thread{
		SurfaceHolder mHolder;
		Context mCtx;
		boolean isRun = true;    // cleared by setRunFlag(false) to stop the loop
		boolean isDirty = true;  // redraw requested (guarded by touchLock)
		Matrix matrix = new Matrix();
		int height = getHeight();
		int width = getWidth();

		//graph length
		int xLength = width - (mCurveGraphVO.getPaddingLeft() + mCurveGraphVO.getPaddingRight() + mCurveGraphVO.getMarginRight());
		int yLength = height - (mCurveGraphVO.getPaddingBottom() + mCurveGraphVO.getPaddingTop() + mCurveGraphVO.getMarginTop());

		//chart length
		int chartXLength = width - (mCurveGraphVO.getPaddingLeft() + mCurveGraphVO.getPaddingRight());
		int chartYLength = height - (mCurveGraphVO.getPaddingBottom() + mCurveGraphVO.getPaddingTop());

		Paint p = new Paint();
		Paint pCircle = new Paint();
		Paint pCurve = new Paint();
		Paint pBaseLine = new Paint();
		Paint pBaseLineX = new Paint();
		Paint pMarkText = new Paint();

		//animation
		float anim = 0.0f;            // animation progress in [0,1]
		boolean isAnimation = false;
		boolean isDrawRegion = false;
		long animStartTime = -1;
		int animationType = 0;

		// per-series point icons, keyed by series index
		WeakHashMap<Integer, Bitmap> arrIcon = new WeakHashMap<Integer, Bitmap>();
		Bitmap bg = null;

		/**
		 * Decodes per-series point icons and the optional background bitmap
		 * (scaled to the view size). A resource id of -1 means "none".
		 */
		public DrawThread(SurfaceHolder holder, Context context) {
			mHolder = holder;
			mCtx = context;
			int size = mCurveGraphVO.getArrGraph().size();
			for (int i = 0; i < size; i++) {
				int bitmapResource = mCurveGraphVO.getArrGraph().get(i).getBitmapResource();
				if(bitmapResource != -1){
					arrIcon.put(i, BitmapFactory.decodeResource(getResources(), bitmapResource));
				}else{
					if(arrIcon.get(i) != null){
						arrIcon.remove(i);
					}
				}
			}

			int bgResource = mCurveGraphVO.getGraphBG();
			if(bgResource != -1){
				Bitmap tempBg = BitmapFactory.decodeResource(getResources(), bgResource);
				bg = Bitmap.createScaledBitmap(tempBg, width, height, true);
				tempBg.recycle();
			}
		}

		public void setRunFlag(boolean bool){
			isRun = bool;
		}

		@Override
		public void run() {
			Canvas canvas = null;
			GraphCanvasWrapper graphCanvasWrapper = null;
			Log.e(TAG,"height = " + height);
			Log.e(TAG,"width = " + width);

			setPaint();
			isAnimation();
			isDrawRegion();

			animStartTime = System.currentTimeMillis();

			while(isRun){

				//draw only on dirty mode
				if(!isDirty){
					try {
						Thread.sleep(100);
					} catch (InterruptedException e1) {
						e1.printStackTrace();
					}
					continue;
				}

				// NOTE(review): lockCanvas() may return null if the surface is
				// not valid; a null canvas here is only caught by the broad
				// catch(Exception) below — TODO confirm intended.
				canvas = mHolder.lockCanvas();
				graphCanvasWrapper = new GraphCanvasWrapper(canvas, width, height, mCurveGraphVO.getPaddingLeft(), mCurveGraphVO.getPaddingBottom());

				try {
					Thread.sleep(0000);
				} catch (InterruptedException e) {
					// TODO Auto-generated catch block
					e.printStackTrace();
				}

				calcTimePass();

				synchronized(mHolder){
					synchronized (touchLock) {
						try {
							//bg color
							canvas.drawColor(Color.WHITE);
							if(bg != null){
								canvas.drawBitmap(bg, 0, 0, null);
							}

							// x coord dot Line
							drawBaseLine(graphCanvasWrapper);

							// y coord
							graphCanvasWrapper.drawLine(0, 0, 0, chartYLength, pBaseLine);

							// x coord
							graphCanvasWrapper.drawLine(0, 0, chartXLength, 0, pBaseLine);

							// x, y coord mark
							drawXMark(graphCanvasWrapper);
							drawYMark(graphCanvasWrapper);

							// x, y coord text
							drawXText(graphCanvasWrapper);
							drawYText(graphCanvasWrapper);

							// Draw Graph
							drawGraphRegion(graphCanvasWrapper);
							drawGraph(graphCanvasWrapper);

							// Draw group name
							drawGraphName(canvas);
						} catch (Exception e) {
							e.printStackTrace();
						} finally {
							if(graphCanvasWrapper.getCanvas() != null){
								mHolder.unlockCanvasAndPost(graphCanvasWrapper.getCanvas());
							}
						}
					}
				}
			}
		}

		/**
		 * time calculate
		 * Advances the animation progress 'anim' toward 1 based on elapsed
		 * time; when no animation is configured, clears the dirty flag so
		 * the loop idles after one draw.
		 */
		private void calcTimePass(){
			if(isAnimation){
				long curTime = System.currentTimeMillis();
				long gapTime = curTime - animStartTime;
				long animDuration = mCurveGraphVO.getAnimation().getDuration();
				if(gapTime >= animDuration) gapTime = animDuration;
				anim = (float) gapTime / (float) animDuration;
			}else{
				isDirty = false;
			}
		}

		/**
		 * draw graph name
		 * Renders the legend box (series color swatches + names) in the
		 * top-right corner, if a GraphNameBox is configured.
		 */
		private void drawGraphName(Canvas canvas) {
			GraphNameBox gnb = mCurveGraphVO.getGraphNameBox();
			if(gnb != null){
				int nameboxWidth = 0;
				int nameboxHeight = 0;

				int nameboxIconWidth = gnb.getNameboxIconWidth();
				int nameboxIconHeight = gnb.getNameboxIconHeight();

				int nameboxMarginTop = gnb.getNameboxMarginTop();
				int nameboxMarginRight = gnb.getNameboxMarginRight();
				int nameboxPadding = gnb.getNameboxPadding();

				int nameboxTextIconMargin = gnb.getNameboxIconMargin();
				int nameboxIconMargin = gnb.getNameboxIconMargin();
				int nameboxTextSize = gnb.getNameboxTextSize();

				int maxTextWidth = 0;
				int maxTextHeight = 0;

				Paint nameRextPaint = new Paint();
				nameRextPaint.setFlags(Paint.ANTI_ALIAS_FLAG);
				nameRextPaint.setAntiAlias(true); //text anti alias
				nameRextPaint.setFilterBitmap(true); // bitmap anti alias
				nameRextPaint.setColor(Color.BLUE);
				nameRextPaint.setStrokeWidth(3);
				nameRextPaint.setStyle(Style.STROKE);

				Paint pIcon = new Paint();
				pIcon.setFlags(Paint.ANTI_ALIAS_FLAG);
				pIcon.setAntiAlias(true); //text anti alias
				pIcon.setFilterBitmap(true); // bitmap anti alias
				pIcon.setColor(Color.BLUE);
				pIcon.setStrokeWidth(3);
				pIcon.setStyle(Style.FILL_AND_STROKE);

				Paint pNameText = new Paint();
				pNameText.setFlags(Paint.ANTI_ALIAS_FLAG);
				pNameText.setAntiAlias(true); //text anti alias
				pNameText.setTextSize(nameboxTextSize);
				pNameText.setColor(Color.BLACK);

				int graphSize = mCurveGraphVO.getArrGraph().size();
				// measure the widest/tallest series name to size the box
				for (int i = 0; i < graphSize; i++) {
					String text = mCurveGraphVO.getArrGraph().get(i).getName();
					Rect rect = new Rect();
pNameText.getTextBounds(text, 0, text.length(), rect); if(rect.width() > maxTextWidth){ maxTextWidth = rect.width(); maxTextHeight = rect.height(); } mCurveGraphVO.getArrGraph().get(i).getName(); } mCurveGraphVO.getArrGraph().get(0).getName(); nameboxWidth = 1 * maxTextWidth + nameboxTextIconMargin + nameboxIconWidth; int maxCellHight = maxTextHeight; if(nameboxIconHeight > maxTextHeight){ maxCellHight = nameboxIconHeight; } nameboxHeight = graphSize * maxCellHight + (graphSize-1) * nameboxIconMargin; canvas.drawRect(width - (nameboxMarginRight + nameboxWidth) - nameboxPadding*2, nameboxMarginTop, width - nameboxMarginRight, nameboxMarginTop + nameboxHeight + nameboxPadding*2, nameRextPaint); for (int i = 0; i < graphSize; i++) { pIcon.setColor(mCurveGraphVO.getArrGraph().get(i).getColor()); canvas.drawRect(width - (nameboxMarginRight + nameboxWidth) - nameboxPadding, nameboxMarginTop + (maxCellHight * i) + nameboxPadding + (nameboxIconMargin * i), width - (nameboxMarginRight + maxTextWidth) - nameboxPadding - nameboxTextIconMargin, nameboxMarginTop + maxCellHight * (i+1) + nameboxPadding + nameboxIconMargin * i, pIcon); String text = mCurveGraphVO.getArrGraph().get(i).getName(); canvas.drawText(text, width - (nameboxMarginRight + maxTextWidth) - nameboxPadding, nameboxMarginTop + maxTextHeight/2 + maxCellHight * i + maxCellHight/2 + nameboxPadding + nameboxIconMargin * i, pNameText); } } } /** * check graph Curve animation */ private void isAnimation() { if(mCurveGraphVO.getAnimation() != null){ isAnimation = true; }else{ isAnimation = false; } animationType = mCurveGraphVO.getAnimation().getAnimation(); } /** * check graph Curve region animation */ private void isDrawRegion() { if(mCurveGraphVO.isDrawRegion()){ isDrawRegion = true; }else{ isDrawRegion = false; } } /** * draw Base Line */ private void drawBaseLine(GraphCanvasWrapper graphCanvas) { for (int i = 1; mCurveGraphVO.getIncrement() * i <= mCurveGraphVO.getMaxValue(); i++) { float y = yLength * 
mCurveGraphVO.getIncrement() * i/mCurveGraphVO.getMaxValue();
				graphCanvas.drawLine(0, y, chartXLength, y, pBaseLineX);
			}
		}

		/**
		 * set graph Curve color
		 * Initializes all shared Paint objects (line, point, curve gradient,
		 * axis lines, dashed guides, mark text).
		 */
		private void setPaint() {
			p = new Paint();
			p.setFlags(Paint.ANTI_ALIAS_FLAG);
			p.setAntiAlias(true); //text anti alias
			p.setFilterBitmap(true); // bitmap anti alias
			p.setColor(Color.BLUE);
			p.setStrokeWidth(3);
			p.setStyle(Style.STROKE);

			pCircle = new Paint();
			pCircle.setFlags(Paint.ANTI_ALIAS_FLAG);
			pCircle.setAntiAlias(true); //text anti alias
			pCircle.setFilterBitmap(true); // bitmap anti alias
			pCircle.setColor(Color.BLUE);
			pCircle.setStrokeWidth(3);
			pCircle.setStyle(Style.FILL_AND_STROKE);

			pCurve = new Paint();
			pCurve.setFlags(Paint.ANTI_ALIAS_FLAG);
			pCurve.setAntiAlias(true); //text anti alias
			pCurve.setFilterBitmap(true); // bitmap anti alias
			pCurve.setShader(new LinearGradient(0, 300f, 0, 0f, Color.BLACK, Color.WHITE, Shader.TileMode.MIRROR));

			pBaseLine = new Paint();
			pBaseLine.setFlags(Paint.ANTI_ALIAS_FLAG);
			pBaseLine.setAntiAlias(true); //text anti alias
			pBaseLine.setFilterBitmap(true); // bitmap anti alias
			pBaseLine.setColor(Color.GRAY);
			pBaseLine.setStrokeWidth(3);

			pBaseLineX = new Paint();
			pBaseLineX.setFlags(Paint.ANTI_ALIAS_FLAG);
			pBaseLineX.setAntiAlias(true); //text anti alias
			pBaseLineX.setFilterBitmap(true); // bitmap anti alias
			pBaseLineX.setColor(0xffcccccc);
			pBaseLineX.setStrokeWidth(3);
			pBaseLineX.setStyle(Style.STROKE);
			pBaseLineX.setPathEffect(new DashPathEffect(new float[] {10,5}, 0));

			pMarkText = new Paint();
			pMarkText.setFlags(Paint.ANTI_ALIAS_FLAG);
			pMarkText.setAntiAlias(true); //text anti alias
			pMarkText.setColor(Color.BLACK);
		}

		/**
		 * draw Graph Region
		 * Dispatches to the animated/static region (area fill) renderer.
		 */
		private void drawGraphRegion(GraphCanvasWrapper graphCanvas) {
			if(isDrawRegion){
				if (isAnimation){
					drawGraphRegionWithAnimation(graphCanvas);
				}else{
					drawGraphRegionWithoutAnimation(graphCanvas);
				}
			}
		}

		/**
		 * draw Graph
		 * Dispatches to the animated/static curve renderer.
		 */
		private void drawGraph(GraphCanvasWrapper graphCanvas) {
			if (isAnimation){
				drawGraphWithAnimation(graphCanvas);
			}else{
				drawGraphWithoutAnimation(graphCanvas);
			}
		}

		/**
		 * draw graph without animation
		 * Fills the area under each series' spline (static variant).
		 */
		private void drawGraphRegionWithoutAnimation(GraphCanvasWrapper graphCanvas) {
			boolean isDrawRegion = mCurveGraphVO.isDrawRegion();

			for (int i = 0; i < mCurveGraphVO.getArrGraph().size(); i++) {
				GraphPath regionPath = new GraphPath(width, height, mCurveGraphVO.getPaddingLeft(), mCurveGraphVO.getPaddingBottom());
				boolean firstSet = false;
				float xGap = xLength/(mCurveGraphVO.getArrGraph().get(i).getCoordinateArr().length-1);
				float[] x = setAxisX(xGap, i);
				float[] y = setAxisY(i);
				// Creates a monotone cubic spline from a given set of control points.
				spline = Spline.createMonotoneCubicSpline(x, y);
				p.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				pCircle.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				// NOTE(review): the spline is created twice with identical
				// arguments here — the second call appears redundant.
				spline = Spline.createMonotoneCubicSpline(x, y);
				// draw Region
				for (float j = x[0]; j < x[x.length-1]; j++) {
					if (!firstSet) {
						regionPath.moveTo(j, spline.interpolate(j));
						firstSet = true;
					} else
						regionPath.lineTo((j+1), spline.interpolate((j+1)));
				}

				if(isDrawRegion){
					// close the path along the x axis and fill it
					regionPath.lineTo(x[x.length-1], 0);
					regionPath.lineTo(0, 0);
					Paint pBg = new Paint();
					pBg.setFlags(Paint.ANTI_ALIAS_FLAG);
					pBg.setAntiAlias(true); //text anti alias
					pBg.setFilterBitmap(true); // bitmap anti alias
					pBg.setStyle(Style.FILL);
					pBg.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
					graphCanvas.getCanvas().drawPath(regionPath, pBg);
				}
			}
		}

		/**
		 * draw graph with animation
		 * Fills the area under each series' spline, advanced by the current
		 * animation progress 'anim'; clears isDirty once anim reaches 1.
		 */
		private void drawGraphRegionWithAnimation(GraphCanvasWrapper graphCanvas) {
			//for draw animation
			boolean isDrawRegion = mCurveGraphVO.isDrawRegion();

			for (int i = 0; i < mCurveGraphVO.getArrGraph().size(); i++) {
				GraphPath regionPath = new GraphPath(width, height, mCurveGraphVO.getPaddingLeft(), mCurveGraphVO.getPaddingBottom());
				boolean firstSet = false;
				float xGap = xLength/(mCurveGraphVO.getArrGraph().get(i).getCoordinateArr().length-1);
				float moveX = 0;
				float[] x = setAxisX(xGap, i);
				float[] y = setAxisY(i);
				// Creates a monotone cubic spline from a given set of control points.
				spline = Spline.createMonotoneCubicSpline(x, y);
				p.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				pCircle.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				// draw line
				for (float j = x[0]; j <= x[x.length-1]; j++) {
					if (!firstSet) {
						regionPath.moveTo(j, spline.interpolate(j));
						firstSet = true;
					} else {
						// scale x by animation progress so the region grows left-to-right
						moveX = j * anim;
						regionPath.lineTo(moveX, spline.interpolate(moveX));
					}
				}

				if(isDrawRegion){
					if (animationType == GraphAnimation.CURVE_REGION_ANIMATION_1) {
						moveX += xGap * anim;
						if(moveX >= xLength){
							moveX = xLength;
						}
					}
					// close the path along the x axis and fill it
					regionPath.lineTo(moveX, 0);
					regionPath.lineTo(0, 0);
					Paint pBg = new Paint();
					pBg.setFlags(Paint.ANTI_ALIAS_FLAG);
					pBg.setAntiAlias(true); //text anti alias
					pBg.setFilterBitmap(true); // bitmap anti alias
					pBg.setStyle(Style.FILL);
					pBg.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
					graphCanvas.getCanvas().drawPath(regionPath, pBg);
				}
				// animation finished: stop redrawing
				if (anim==1) isDirty = false;
			}
		}

		/**
		 * draw graph without animation
		 * Draws each series' spline curve plus its data points (circle or
		 * per-series icon bitmap).
		 */
		private void drawGraphWithoutAnimation(GraphCanvasWrapper graphCanvas) {
			for (int i = 0; i < mCurveGraphVO.getArrGraph().size(); i++) {
				GraphPath curvePath = new GraphPath(width, height, mCurveGraphVO.getPaddingLeft(), mCurveGraphVO.getPaddingBottom());
				boolean firstSet = false;
				float xGap = xLength/(mCurveGraphVO.getArrGraph().get(i).getCoordinateArr().length-1);
				float[] x = setAxisX(xGap, i);
				float[] y = setAxisY(i);
				// Creates a monotone cubic spline from a given set of control points.
				spline = Spline.createMonotoneCubicSpline(x, y);
				p.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				pCircle.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				Bitmap icon = arrIcon.get(i);
				// draw line
				for (float j = x[0]; j < x[x.length-1]; j++) {
					if (!firstSet) {
						curvePath.moveTo(j, spline.interpolate(j));
						firstSet = true;
					} else
						curvePath.lineTo((j+1), spline.interpolate((j+1)));
				}
				// draw point
				for (int j = 0; j < mCurveGraphVO.getArrGraph().get(i).getCoordinateArr().length; j++) {
					float pointX = xGap * j;
					float pointY = yLength * mCurveGraphVO.getArrGraph().get(i).getCoordinateArr()[j]/mCurveGraphVO.getMaxValue();
					if(icon == null) {
						graphCanvas.drawCircle(pointX, pointY, 4, pCircle);
					}else{
						graphCanvas.drawBitmapIcon(icon, pointX, pointY, null);
					}
				}
				graphCanvas.getCanvas().drawPath(curvePath, p);
			}
		}

		/**
		 * draw graph with animation
		 * Draws each series' spline curve and the points revealed so far,
		 * advanced by the animation progress; clears isDirty at anim == 1.
		 */
		private void drawGraphWithAnimation(GraphCanvasWrapper graphCanvas) {
			//for draw animation
			for (int i = 0; i < mCurveGraphVO.getArrGraph().size(); i++) {
				GraphPath curvePath = new GraphPath(width, height, mCurveGraphVO.getPaddingLeft(), mCurveGraphVO.getPaddingBottom());
				boolean firstSet = false;
				float xGap = xLength/(mCurveGraphVO.getArrGraph().get(i).getCoordinateArr().length-1);
				// number of points revealed at the current animation progress
				float pointNum = (mCurveGraphVO.getArrGraph().get(0).getCoordinateArr().length * anim) / 1;
				float[] x = setAxisX(xGap, i);
				float[] y = setAxisY(i);
				// Creates a monotone cubic spline from a given set of control points.
				spline = Spline.createMonotoneCubicSpline(x, y);
				p.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				pCircle.setColor(mCurveGraphVO.getArrGraph().get(i).getColor());
				Bitmap icon = arrIcon.get(i);
				// draw line
				for (float j = x[0]; j <= x[x.length-1]; j++) {
					if (!firstSet) {
						curvePath.moveTo(j, spline.interpolate(j));
						firstSet = true;
					} else {
						curvePath.lineTo(((j) * anim), spline.interpolate(((j) * anim)));
					}
				}
				graphCanvas.getCanvas().drawPath(curvePath, p);
				// draw point
				for (int j = 0; j < pointNum+1; j++) {
					if(j < mCurveGraphVO.getArrGraph().get(i).getCoordinateArr().length){
						if(icon == null){
							graphCanvas.drawCircle(x[j], y[j], 4, pCircle);
						}else{
							graphCanvas.drawBitmapIcon(icon, x[j], y[j], null);
						}
					}
				}
				// animation finished: stop redrawing
				if (anim==1) isDirty = false;
			}
		}

		/**
		 * draw X Mark
		 * Draws a short tick below the x axis at every data-point column.
		 */
		private void drawXMark(GraphCanvasWrapper graphCanvas) {
			float x = 0;
			float xGap = xLength/(mCurveGraphVO.getArrGraph().get(0).getCoordinateArr().length-1);
			for (int i = 0; i < mCurveGraphVO.getArrGraph().get(0).getCoordinateArr().length; i++) {
				x = xGap * i;
				graphCanvas.drawLine(x, 0, x, -10, pBaseLine);
			}
		}

		/**
		 * draw Y Mark
		 * Draws a short tick left of the y axis at every increment step.
		 */
		private void drawYMark(GraphCanvasWrapper canvas) {
			for (int i = 0; mCurveGraphVO.getIncrement() * i <= mCurveGraphVO.getMaxValue(); i++) {
				float y = yLength * mCurveGraphVO.getIncrement() * i/mCurveGraphVO.getMaxValue();
				canvas.drawLine(0, y, -10, y, pBaseLine);
			}
		}

		/**
		 * draw X Text
		 * Draws the legend labels centered under their x-axis ticks.
		 */
		private void drawXText(GraphCanvasWrapper graphCanvas) {
			float x = 0;
			float xGap = xLength/(mCurveGraphVO.getArrGraph().get(0).getCoordinateArr().length-1);
			for (int i = 0; i < mCurveGraphVO.getLegendArr().length; i++) {
				x = xGap * i;
				String text = mCurveGraphVO.getLegendArr()[i];
				pMarkText.measureText(text);
				pMarkText.setTextSize(20);
				Rect rect = new Rect();
				pMarkText.getTextBounds(text, 0, text.length(), rect);
				graphCanvas.drawText(text, x -(rect.width()/2), -(20 + rect.height()), pMarkText);
			}
		}

		/**
		 * draw Y Text
		 * (Definition continues past this chunk.)
		 */
		private void drawYText(GraphCanvasWrapper graphCanvas) {
			for (int i = 0;
mCurveGraphVO.getIncrement() * i <= mCurveGraphVO.getMaxValue(); i++) { String mark = Float.toString(mCurveGraphVO.getIncrement() * i); float y = yLength * mCurveGraphVO.getIncrement() * i/mCurveGraphVO.getMaxValue(); pMarkText.measureText(mark); pMarkText.setTextSize(20); Rect rect = new Rect(); pMarkText.getTextBounds(mark, 0, mark.length(), rect); // Log.e(TAG, "rect = height()" + rect.height()); // Log.e(TAG, "rect = width()" + rect.width()); graphCanvas.drawText(mark, -(rect.width() + 20), y-rect.height()/2, pMarkText); } } /** * set point X Coordinate */ private float[] setAxisX(float xGap, int graphNum){ float[] axisX = new float[mCurveGraphVO.getArrGraph().get(graphNum).getCoordinateArr().length]; for (int i = 0; i < mCurveGraphVO.getArrGraph().get(graphNum).getCoordinateArr().length; i++) { axisX[i] = xGap*i; } return axisX; } /** * set point Y Coordinate */ private float[] setAxisY(int graphNum){ float[] axisY = new float[mCurveGraphVO.getArrGraph().get(graphNum).getCoordinateArr().length]; for (int i = 0; i < mCurveGraphVO.getArrGraph().get(graphNum).getCoordinateArr().length; i++) { axisY[i] = yLength*mCurveGraphVO.getArrGraph().get(graphNum).getCoordinateArr()[i]/mCurveGraphVO.getMaxValue();; } return axisY; } } }
package net.hexogendev.hexogen.api.configuration;

import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * Represents a section of a {@link Configuration}.
 */
public interface ConfigurationSection {
    /**
     * Gets a set containing all keys in this section.
     * <p>
     * If {@code deep} is true, this also contains the keys within any child
     * {@link ConfigurationSection}s (and their children, etc.), in valid path
     * notation. If false, only the keys of direct children are included.
     *
     * @param deep Whether or not to get a deep list, as opposed to a shallow list.
     * @return Set of keys contained within this ConfigurationSection.
     */
    public Set<String> getKeys(boolean deep);

    /**
     * Gets a Map containing all keys and their values for this section.
     * <p>
     * If {@code deep} is true, this also contains the keys and values within
     * any child {@link ConfigurationSection}s (and their children, etc.), with
     * keys in valid path notation. If false, only direct children are included.
     *
     * @param deep Whether or not to get a deep list, as opposed to a shallow list.
     * @return Map of keys and values of this section.
     */
    public Map<String, Object> getValues(boolean deep);

    /**
     * Checks if this {@link ConfigurationSection} contains the given path.
     * <p>
     * If the value for the requested path does not exist but a default value
     * has been specified, this will return true.
     *
     * @param path Path to check for existence.
     * @return True if this section contains the requested path, either via default or being set.
     * @throws IllegalArgumentException Thrown when path is null.
     */
    public boolean contains(String path);

    /**
     * Checks if this {@link ConfigurationSection} has a value set for the given path.
     * <p>
     * If the value for the requested path does not exist but a default value
     * has been specified, this will still return false.
     *
     * @param path Path to check for existence.
     * @return True if this section contains the requested path, regardless of having a default.
     * @throws IllegalArgumentException Thrown when path is null.
     */
    public boolean isSet(String path);

    /**
     * Gets the path of this {@link ConfigurationSection} from its root
     * {@link Configuration}.
     * <p>
     * For any {@link Configuration} itself, this will return an empty string.
     * If the section is no longer contained within its root for any reason,
     * such as being replaced with a different value, this may return null.
     * <p>
     * To retrieve the single name of this section — the final part of the path
     * returned by this method — use {@link #getName()}.
     *
     * @return Path of this section relative to its root.
     */
    public String getCurrentPath();

    /**
     * Gets the name of this individual {@link ConfigurationSection} in the path.
     * <p>
     * This will always be the final part of {@link #getCurrentPath()}, unless
     * the section is orphaned.
     *
     * @return Name of this section.
     */
    public String getName();

    /**
     * Gets the root {@link Configuration} that contains this
     * {@link ConfigurationSection}.
     * <p>
     * For any {@link Configuration} itself, this will return its own object.
     * If the section is no longer contained within its root for any reason,
     * such as being replaced with a different value, this may return null.
     *
     * @return Root configuration containing this section.
     */
    public Configuration getRoot();

    /**
     * Gets the parent {@link ConfigurationSection} that directly contains this
     * {@link ConfigurationSection}.
     * <p>
     * For any {@link Configuration} itself, this will return null. If the
     * section is no longer contained within its parent for any reason, such as
     * being replaced with a different value, this may return null.
     *
     * @return Parent section containing this section.
     */
    public ConfigurationSection getParent();

    /**
     * Gets the requested Object by path.
     * <p>
     * If the Object does not exist but a default value has been specified,
     * this will return the default value. If the Object does not exist and no
     * default value was specified, this will return null.
     *
     * @param path Path of the Object to get.
     * @return Requested Object.
     */
    public Object get(String path);

    /**
     * Gets the requested Object by path, returning a default value if not found.
     * <p>
     * If the Object does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the Object to get.
     * @param def The default value to return if the path is not found.
     * @return Requested Object.
     */
    public Object get(String path, Object def);

    /**
     * Sets the specified path to the given value.
     * <p>
     * If value is null, the entry will be removed. Any existing entry will be
     * replaced, regardless of what the new value is.
     * <p>
     * Some implementations may have limitations on what you may store. See
     * their individual javadocs for details. No implementation should allow
     * storing {@link Configuration}s or {@link ConfigurationSection}s; use
     * {@link #createSection(java.lang.String)} for that.
     *
     * @param path Path of the object to set.
     * @param value New value to set the path to.
     */
    public void set(String path, Object value);

    /**
     * Creates an empty {@link ConfigurationSection} at the specified path.
     * <p>
     * Any value previously set at this path will be overwritten. If the
     * previous value was itself a {@link ConfigurationSection}, it will be
     * orphaned.
     *
     * @param path Path to create the section at.
     * @return Newly created section.
     */
    public ConfigurationSection createSection(String path);

    /**
     * Creates a {@link ConfigurationSection} at the specified path, with
     * specified values.
     * <p>
     * Any value previously set at this path will be overwritten. If the
     * previous value was itself a {@link ConfigurationSection}, it will be
     * orphaned.
     *
     * @param path Path to create the section at.
     * @param map The values to use.
     * @return Newly created section.
     */
    public ConfigurationSection createSection(String path, Map<?, ?> map);

    // Primitives
    /**
     * Gets the requested String by path.
     * <p>
     * If the String does not exist but a default value has been specified,
     * this will return the default value. If the String does not exist and no
     * default value was specified, this will return null.
     *
     * @param path Path of the String to get.
     * @return Requested String.
     */
    public String getString(String path);

    /**
     * Gets the requested String by path, returning a default value if not found.
     * <p>
     * If the String does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the String to get.
     * @param def The default value to return if the path is not found or is not a String.
     * @return Requested String.
     */
    public String getString(String path, String def);

    /**
     * Checks if the specified path is a String.
     * <p>
     * If the path exists but is not a String, this will return false. If the
     * path does not exist, this will return false. If the path does not exist
     * but a default value has been specified, this will check whether that
     * default value is a String and return appropriately.
     *
     * @param path Path of the String to check.
     * @return Whether or not the specified path is a String.
     */
    public boolean isString(String path);

    /**
     * Gets the requested int by path.
     * <p>
     * If the int does not exist but a default value has been specified, this
     * will return the default value. If the int does not exist and no default
     * value was specified, this will return 0.
     *
     * @param path Path of the int to get.
     * @return Requested int.
     */
    public int getInt(String path);

    /**
     * Gets the requested int by path, returning a default value if not found.
     * <p>
     * If the int does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the int to get.
     * @param def The default value to return if the path is not found or is not an int.
     * @return Requested int.
     */
    public int getInt(String path, int def);

    /**
     * Checks if the specified path is an int.
     * <p>
     * If the path exists but is not an int, this will return false. If the
     * path does not exist, this will return false. If the path does not exist
     * but a default value has been specified, this will check whether that
     * default value is an int and return appropriately.
     *
     * @param path Path of the int to check.
     * @return Whether or not the specified path is an int.
     */
    public boolean isInt(String path);

    /**
     * Gets the requested boolean by path.
     * <p>
     * If the boolean does not exist but a default value has been specified,
     * this will return the default value. If the boolean does not exist and no
     * default value was specified, this will return false.
     *
     * @param path Path of the boolean to get.
     * @return Requested boolean.
     */
    public boolean getBoolean(String path);

    /**
     * Gets the requested boolean by path, returning a default value if not found.
     * <p>
     * If the boolean does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the boolean to get.
     * @param def The default value to return if the path is not found or is not a boolean.
     * @return Requested boolean.
     */
    public boolean getBoolean(String path, boolean def);

    /**
     * Checks if the specified path is a boolean.
     * <p>
     * If the path exists but is not a boolean, this will return false. If the
     * path does not exist, this will return false. If the path does not exist
     * but a default value has been specified, this will check whether that
     * default value is a boolean and return appropriately.
     *
     * @param path Path of the boolean to check.
     * @return Whether or not the specified path is a boolean.
     */
    public boolean isBoolean(String path);

    /**
     * Gets the requested double by path.
     * <p>
     * If the double does not exist but a default value has been specified,
     * this will return the default value. If the double does not exist and no
     * default value was specified, this will return 0.
     *
     * @param path Path of the double to get.
     * @return Requested double.
     */
    public double getDouble(String path);

    /**
     * Gets the requested double by path, returning a default value if not found.
     * <p>
     * If the double does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the double to get.
     * @param def The default value to return if the path is not found or is not a double.
     * @return Requested double.
     */
    public double getDouble(String path, double def);

    /**
     * Checks if the specified path is a double.
     * <p>
     * If the path exists but is not a double, this will return false. If the
     * path does not exist, this will return false. If the path does not exist
     * but a default value has been specified, this will check whether that
     * default value is a double and return appropriately.
     *
     * @param path Path of the double to check.
     * @return Whether or not the specified path is a double.
     */
    public boolean isDouble(String path);

    /**
     * Gets the requested long by path.
     * <p>
     * If the long does not exist but a default value has been specified, this
     * will return the default value. If the long does not exist and no default
     * value was specified, this will return 0.
     *
     * @param path Path of the long to get.
     * @return Requested long.
     */
    public long getLong(String path);

    /**
     * Gets the requested long by path, returning a default value if not found.
     * <p>
     * If the long does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the long to get.
     * @param def The default value to return if the path is not found or is not a long.
     * @return Requested long.
     */
    public long getLong(String path, long def);

    /**
     * Checks if the specified path is a long.
     * <p>
     * If the path exists but is not a long, this will return false. If the
     * path does not exist, this will return false. If the path does not exist
     * but a default value has been specified, this will check whether that
     * default value is a long and return appropriately.
     *
     * @param path Path of the long to check.
     * @return Whether or not the specified path is a long.
     */
    public boolean isLong(String path);

    // Java
    /**
     * Gets the requested List by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return null.
     *
     * @param path Path of the List to get.
     * @return Requested List.
     */
    public List<?> getList(String path);

    /**
     * Gets the requested List by path, returning a default value if not found.
     * <p>
     * If the List does not exist then the specified default value will be
     * returned, regardless of whether a default has been identified in the
     * root {@link Configuration}.
     *
     * @param path Path of the List to get.
     * @param def The default value to return if the path is not found or is not a List.
     * @return Requested List.
     */
    public List<?> getList(String path, List<?> def);

    /**
     * Checks if the specified path is a List.
     * <p>
     * If the path exists but is not a List, this will return false. If the
     * path does not exist, this will return false. If the path does not exist
     * but a default value has been specified, this will check whether that
     * default value is a List and return appropriately.
     *
     * @param path Path of the List to check.
     * @return Whether or not the specified path is a List.
     */
    public boolean isList(String path);

    /**
     * Gets the requested List of String by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a String if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of String.
     */
    public List<String> getStringList(String path);

    /**
     * Gets the requested List of Integer by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into an Integer if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Integer.
     */
    public List<Integer> getIntegerList(String path);

    /**
     * Gets the requested List of Boolean by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Boolean if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Boolean.
     */
    public List<Boolean> getBooleanList(String path);

    /**
     * Gets the requested List of Double by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Double if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Double.
     */
    public List<Double> getDoubleList(String path);

    /**
     * Gets the requested List of Float by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Float if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Float.
     */
    public List<Float> getFloatList(String path);

    /**
     * Gets the requested List of Long by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Long if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Long.
     */
    public List<Long> getLongList(String path);

    /**
     * Gets the requested List of Byte by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Byte if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Byte.
     */
    public List<Byte> getByteList(String path);

    /**
     * Gets the requested List of Character by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Character if
     * possible, but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Character.
     */
    public List<Character> getCharacterList(String path);

    /**
     * Gets the requested List of Short by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Short if possible,
     * but may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Short.
     */
    public List<Short> getShortList(String path);

    /**
     * Gets the requested List of Maps by path.
     * <p>
     * If the List does not exist but a default value has been specified, this
     * will return the default value. If the List does not exist and no default
     * value was specified, this will return an empty List.
     * <p>
     * This method will attempt to cast any values into a Map if possible, but
     * may miss out values that are not compatible.
     *
     * @param path Path of the List to get.
     * @return Requested List of Maps.
     */
    public List<Map<?, ?>> getMapList(String path);

    /**
     * Gets the requested ConfigurationSection by path.
     * <p>
     * If the ConfigurationSection does not exist but a default value has been
     * specified, this will return the default value. If the
     * ConfigurationSection does not exist and no default value was specified,
     * this will return null.
     *
     * @param path Path of the ConfigurationSection to get.
     * @return Requested ConfigurationSection.
     */
    public ConfigurationSection getConfigurationSection(String path);

    /**
     * Checks if the specified path is a ConfigurationSection.
     * <p>
     * If the path exists but is not a ConfigurationSection, this will return
     * false. If the path does not exist, this will return false. If the path
     * does not exist but a default value has been specified, this will check
     * whether that default value is a ConfigurationSection and return
     * appropriately.
     *
     * @param path Path of the ConfigurationSection to check.
     * @return Whether or not the specified path is a ConfigurationSection.
     */
    public boolean isConfigurationSection(String path);

    /**
     * Gets the equivalent {@link ConfigurationSection} from the default
     * {@link Configuration} defined in {@link #getRoot()}.
     * <p>
     * If the root contains no defaults, or the defaults do not contain a value
     * for this path, or the value at this path is not a
     * {@link ConfigurationSection}, this will return null.
     *
     * @return Equivalent section in root configuration.
     */
    public ConfigurationSection getDefaultSection();

    /**
     * Sets the default value in the root at the given path as provided.
     * <p>
     * If no source {@link Configuration} was provided as a default collection,
     * a new {@link MemoryConfiguration} will be created to hold the new
     * default value.
     * <p>
     * If value is null, the value will be removed from the default
     * Configuration source.
     * <p>
     * If the value as returned by {@link #getDefaultSection()} is null, this
     * will create a new section at the path, replacing anything that may have
     * existed there previously.
     *
     * @param path Path of the value to set.
     * @param value Value to set the default to.
     * @throws IllegalArgumentException Thrown if path is null.
     */
    public void addDefault(String path, Object value);

    /**
     * Returns a root-level comment.
     *
     * @param key the property key
     * @return the comment, or <code>null</code> if none is set
     */
    public String getComment(String key);

    /**
     * Sets a root-level comment.
     *
     * @param key the property key
     * @param comment the comment lines. May be <code>null</code>, in which case the comment is removed.
     */
    public void setComment(String key, String... comment);

    /**
     * Returns root-level comments.
     *
     * @param deep whether to include comments from child sections
     *             (presumably mirrors {@link #getKeys(boolean)} — TODO confirm against implementations)
     * @return map of root-level comments
     */
    public Map<String, String> getComments(boolean deep);

    /**
     * Sets root-level comments from a map.
     *
     * @param comments comment map
     */
    public void setComments(Map<String, String> comments);
}